diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/EthereumSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/EthereumSuite.java index c40b68bb6dfe..af147bd7ba7c 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/EthereumSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/EthereumSuite.java @@ -22,7 +22,6 @@ import static com.hedera.services.bdd.spec.HapiPropertySource.asContractIdWithEvmAddress; import static com.hedera.services.bdd.spec.HapiPropertySource.asHexedSolidityAddress; import static com.hedera.services.bdd.spec.HapiPropertySource.asSolidityAddress; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.HapiSpec.namedHapiTest; import static com.hedera.services.bdd.spec.assertions.AccountInfoAsserts.accountWith; @@ -427,15 +426,14 @@ final Stream matrixedPayerRelayerTest( @HapiTest final Stream invalidTxData() { - return defaultHapiSpec("InvalidTxData") - .given( - newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), - cryptoCreate(RELAYER).balance(6 * ONE_MILLION_HBARS), - cryptoTransfer(tinyBarsFromAccountToAlias(GENESIS, SECP_256K1_SOURCE_KEY, ONE_HUNDRED_HBARS)) - .via(AUTO_ACCOUNT_TRANSACTION_NAME), - getTxnRecord(AUTO_ACCOUNT_TRANSACTION_NAME).andAllChildRecords(), - uploadInitCode(PAY_RECEIVABLE_CONTRACT)) - .when(ethereumContractCreate(PAY_RECEIVABLE_CONTRACT) + return hapiTest( + newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), + cryptoCreate(RELAYER).balance(6 * ONE_MILLION_HBARS), + cryptoTransfer(tinyBarsFromAccountToAlias(GENESIS, SECP_256K1_SOURCE_KEY, ONE_HUNDRED_HBARS)) + .via(AUTO_ACCOUNT_TRANSACTION_NAME), + getTxnRecord(AUTO_ACCOUNT_TRANSACTION_NAME).andAllChildRecords(), + uploadInitCode(PAY_RECEIVABLE_CONTRACT), + ethereumContractCreate(PAY_RECEIVABLE_CONTRACT) .type(EthTxData.EthTransactionType.EIP1559) .signingWith(SECP_256K1_SOURCE_KEY) .payingWith(RELAYER) @@ -446,8 +444,7 @@ final Stream invalidTxData() { .invalidateEthereumData() .gasLimit(1_000_000L) .hasPrecheck(INVALID_ETHEREUM_TRANSACTION) - .via(PAY_TXN)) - .then(); + .via(PAY_TXN)); } @HapiTest @@ -695,7 +692,7 @@ final Stream etx008ContractCreateExecutesWithExpectedRecord() { final var record = op.getResponseRecord(); final var creationResult = record.getContractCreateResult(); final var createdIds = creationResult.getCreatedContractIDsList().stream() - .sorted(Comparator.comparing(id -> id.getContractNum())) + .sorted(Comparator.comparing(ContractID::getContractNum)) .toList(); assertEquals(4, createdIds.size(), "Expected four creations but got " + createdIds); @@ -974,89 +971,84 @@ final Stream accountDeletionResetsTheAliasNonce() { final var ercUser = "ercUser"; final var HBAR_XFER = "hbarXfer"; - return defaultHapiSpec("accountDeletionResetsTheAliasNonce") - .given( - cryptoCreate(PARTY).maxAutomaticTokenAssociations(2), - cryptoCreate(TOKEN_TREASURY), - newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), - withOpContext((spec, opLog) -> { - final var registry = spec.registry(); - final var ecdsaKey = registry.getKey(SECP_256K1_SOURCE_KEY); - final var tmp = ecdsaKey.getECDSASecp256K1().toByteArray(); - final var addressBytes = recoverAddressFromPubKey(tmp); - final var evmAddressBytes = ByteString.copyFrom(addressBytes); - partyId.set(registry.getAccountID(PARTY)); - 
partyAlias.set(ByteString.copyFrom(asSolidityAddress(partyId.get()))); - counterAlias.set(evmAddressBytes); - }), - tokenCreate("token") - .tokenType(TokenType.FUNGIBLE_COMMON) - .initialSupply(totalSupply) - .treasury(TOKEN_TREASURY) - .adminKey(SECP_256K1_SOURCE_KEY) - .supplyKey(SECP_256K1_SOURCE_KEY) - .exposingCreatedIdTo(tokenNum::set)) - .when( - withOpContext((spec, opLog) -> { - var op1 = cryptoTransfer((s, b) -> b.setTransfers(TransferList.newBuilder() - .addAccountAmounts(aaWith(partyAlias.get(), -2 * ONE_HBAR)) - .addAccountAmounts(aaWith(counterAlias.get(), +2 * ONE_HBAR)))) - .signedBy(DEFAULT_PAYER, PARTY) - .via(HBAR_XFER); - - var op2 = getAliasedAccountInfo(counterAlias.get()) - .logged() - .exposingIdTo(aliasedAccountId::set) - .has(accountWith() - .hasEmptyKey() - .noAlias() - .nonce(0) - .autoRenew(THREE_MONTHS_IN_SECONDS) - .receiverSigReq(false) - .memo(LAZY_MEMO)); - - // send eth transaction signed by the ecdsa key - var op3 = ethereumCallWithFunctionAbi( - true, - "token", - getABIFor(Utils.FunctionType.FUNCTION, "totalSupply", ERC20_ABI)) - .type(EthTxData.EthTransactionType.EIP1559) - .signingWith(SECP_256K1_SOURCE_KEY) - .payingWith(GENESIS) + return hapiTest( + cryptoCreate(PARTY).maxAutomaticTokenAssociations(2), + cryptoCreate(TOKEN_TREASURY), + newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), + withOpContext((spec, opLog) -> { + final var registry = spec.registry(); + final var ecdsaKey = registry.getKey(SECP_256K1_SOURCE_KEY); + final var tmp = ecdsaKey.getECDSASecp256K1().toByteArray(); + final var addressBytes = recoverAddressFromPubKey(tmp); + final var evmAddressBytes = ByteString.copyFrom(addressBytes); + partyId.set(registry.getAccountID(PARTY)); + partyAlias.set(ByteString.copyFrom(asSolidityAddress(partyId.get()))); + counterAlias.set(evmAddressBytes); + }), + tokenCreate("token") + .tokenType(TokenType.FUNGIBLE_COMMON) + .initialSupply(totalSupply) + .treasury(TOKEN_TREASURY) + .adminKey(SECP_256K1_SOURCE_KEY) + .supplyKey(SECP_256K1_SOURCE_KEY) + .exposingCreatedIdTo(tokenNum::set), + withOpContext((spec, opLog) -> { + var op1 = cryptoTransfer((s, b) -> b.setTransfers(TransferList.newBuilder() + .addAccountAmounts(aaWith(partyAlias.get(), -2 * ONE_HBAR)) + .addAccountAmounts(aaWith(counterAlias.get(), +2 * ONE_HBAR)))) + .signedBy(DEFAULT_PAYER, PARTY) + .via(HBAR_XFER); + + var op2 = getAliasedAccountInfo(counterAlias.get()) + .logged() + .exposingIdTo(aliasedAccountId::set) + .has(accountWith() + .hasEmptyKey() + .noAlias() .nonce(0) - .gasPrice(50L) - .maxGasAllowance(FIVE_HBARS) - .maxPriorityGas(2L) - .gasLimit(1_000_000L) - .hasKnownStatus(ResponseCodeEnum.SUCCESS); - - // assert account nonce is increased to 1 - var op4 = getAliasedAccountInfo(counterAlias.get()) - .logged() - .has(accountWith().nonce(1)); - - allRunFor(spec, op1, op2, op3, op4); - - spec.registry().saveAccountId(ercUser, aliasedAccountId.get()); - spec.registry().saveKey(ercUser, spec.registry().getKey(SECP_256K1_SOURCE_KEY)); - }), - // delete the account currently holding the alias - cryptoDelete(ercUser)) - .then( - // try to create a new account with the same alias - withOpContext((spec, opLog) -> { - var op1 = cryptoTransfer((s, b) -> b.setTransfers(TransferList.newBuilder() - .addAccountAmounts(aaWith(partyAlias.get(), -2 * ONE_HBAR)) - .addAccountAmounts(aaWith(counterAlias.get(), +2 * ONE_HBAR)))) - .signedBy(DEFAULT_PAYER, PARTY) - .hasKnownStatus(SUCCESS); - - var op2 = getAliasedAccountInfo(counterAlias.get()) - // TBD: balance should be 4 or 2 hbars 
- .has(accountWith().nonce(0).balance(2 * ONE_HBAR)); - - allRunFor(spec, op1, op2); - })); + .autoRenew(THREE_MONTHS_IN_SECONDS) + .receiverSigReq(false) + .memo(LAZY_MEMO)); + + // send eth transaction signed by the ecdsa key + var op3 = ethereumCallWithFunctionAbi( + true, "token", getABIFor(Utils.FunctionType.FUNCTION, "totalSupply", ERC20_ABI)) + .type(EthTxData.EthTransactionType.EIP1559) + .signingWith(SECP_256K1_SOURCE_KEY) + .payingWith(GENESIS) + .nonce(0) + .gasPrice(50L) + .maxGasAllowance(FIVE_HBARS) + .maxPriorityGas(2L) + .gasLimit(1_000_000L) + .hasKnownStatus(ResponseCodeEnum.SUCCESS); + + // assert account nonce is increased to 1 + var op4 = getAliasedAccountInfo(counterAlias.get()) + .logged() + .has(accountWith().nonce(1)); + + allRunFor(spec, op1, op2, op3, op4); + + spec.registry().saveAccountId(ercUser, aliasedAccountId.get()); + spec.registry().saveKey(ercUser, spec.registry().getKey(SECP_256K1_SOURCE_KEY)); + }), + // delete the account currently holding the alias + cryptoDelete(ercUser), + // try to create a new account with the same alias + withOpContext((spec, opLog) -> { + var op1 = cryptoTransfer((s, b) -> b.setTransfers(TransferList.newBuilder() + .addAccountAmounts(aaWith(partyAlias.get(), -2 * ONE_HBAR)) + .addAccountAmounts(aaWith(counterAlias.get(), +2 * ONE_HBAR)))) + .signedBy(DEFAULT_PAYER, PARTY) + .hasKnownStatus(SUCCESS); + + var op2 = getAliasedAccountInfo(counterAlias.get()) + // TBD: balance should be 4 or 2 hbars + .has(accountWith().nonce(0).balance(2 * ONE_HBAR)); + + allRunFor(spec, op1, op2); + })); } // test unprotected legacy ethereum transactions before EIP155, @@ -1104,20 +1096,19 @@ final Stream etx007FungibleTokenCreateWithFeesHappyPath() { final var EXISTING_TOKEN = "EXISTING_TOKEN"; final var firstTxn = "firstCreateTxn"; final long DEFAULT_AMOUNT_TO_SEND = 20 * ONE_HBAR; - return defaultHapiSpec("etx007FungibleTokenCreateWithFeesHappyPath") - .given( - newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), - cryptoTransfer(tinyBarsFromAccountToAlias(GENESIS, SECP_256K1_SOURCE_KEY, ONE_HUNDRED_HBARS)) - .via(AUTO_ACCOUNT_TRANSACTION_NAME), - cryptoCreate(feeCollectorAndAutoRenew) - .keyShape(SigControl.ED25519_ON) - .balance(ONE_MILLION_HBARS), - uploadInitCode(contract), - contractCreate(contract).gas(GAS_LIMIT), - tokenCreate(EXISTING_TOKEN).decimals(5), - tokenAssociate(feeCollectorAndAutoRenew, EXISTING_TOKEN), - cryptoUpdate(feeCollectorAndAutoRenew).key(SECP_256K1_SOURCE_KEY)) - .when(withOpContext((spec, opLog) -> allRunFor( + return hapiTest( + newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), + cryptoTransfer(tinyBarsFromAccountToAlias(GENESIS, SECP_256K1_SOURCE_KEY, ONE_HUNDRED_HBARS)) + .via(AUTO_ACCOUNT_TRANSACTION_NAME), + cryptoCreate(feeCollectorAndAutoRenew) + .keyShape(SigControl.ED25519_ON) + .balance(ONE_MILLION_HBARS), + uploadInitCode(contract), + contractCreate(contract).gas(GAS_LIMIT), + tokenCreate(EXISTING_TOKEN).decimals(5), + tokenAssociate(feeCollectorAndAutoRenew, EXISTING_TOKEN), + cryptoUpdate(feeCollectorAndAutoRenew).key(SECP_256K1_SOURCE_KEY), + withOpContext((spec, opLog) -> allRunFor( spec, ethereumCall( contract, @@ -1142,26 +1133,23 @@ final Stream etx007FungibleTokenCreateWithFeesHappyPath() { opLog.info("Explicit create result" + " is {}", result[0]); final var res = (Address) result[0]; createdTokenNum.set(res.value().longValueExact()); - })))) - .then( - getTxnRecord(firstTxn).andAllChildRecords().logged(), - childRecordsCheck( - firstTxn, - SUCCESS, - 
TransactionRecordAsserts.recordWith().status(ResponseCodeEnum.SUCCESS)), - withOpContext((spec, ignore) -> { - final var op = getTxnRecord(firstTxn); - allRunFor(spec, op); - - final var callResult = op.getResponseRecord().getContractCallResult(); - final var gasUsed = callResult.getGasUsed(); - final var amount = callResult.getAmount(); - final var gasLimit = callResult.getGas(); - Assertions.assertEquals(DEFAULT_AMOUNT_TO_SEND, amount); - Assertions.assertEquals(GAS_LIMIT, gasLimit); - Assertions.assertTrue(gasUsed > 0L); - Assertions.assertTrue(callResult.hasContractID() && callResult.hasSenderId()); - })); + }))), + getTxnRecord(firstTxn).andAllChildRecords().logged(), + childRecordsCheck( + firstTxn, SUCCESS, TransactionRecordAsserts.recordWith().status(ResponseCodeEnum.SUCCESS)), + withOpContext((spec, ignore) -> { + final var op = getTxnRecord(firstTxn); + allRunFor(spec, op); + + final var callResult = op.getResponseRecord().getContractCallResult(); + final var gasUsed = callResult.getGasUsed(); + final var amount = callResult.getAmount(); + final var gasLimit = callResult.getGas(); + Assertions.assertEquals(DEFAULT_AMOUNT_TO_SEND, amount); + Assertions.assertEquals(GAS_LIMIT, gasLimit); + Assertions.assertTrue(gasUsed > 0L); + Assertions.assertTrue(callResult.hasContractID() && callResult.hasSenderId()); + })); } @HapiTest @@ -1176,21 +1164,19 @@ final Stream fungibleTokenCreateWithAmountLookingNegativeInTwosComp new BigInteger(Bytes.fromHex("FAC7230489E80000").toByteArray()); // ^^^^ 10000000000000000000 wasn't enough to pay the tx fee, so changed the leading `8` to an `F` final var BIG_INTEGER_WEIBAR = new BigInteger("18070450532247928832"); // this is the actual value - return defaultHapiSpec("fungibleTokenCreateWithAmountLookingNegativeInTwosComplement") - .given( - newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), - cryptoTransfer(tinyBarsFromAccountToAlias( - GENESIS, SECP_256K1_SOURCE_KEY, 20 * ONE_HUNDRED_HBARS)) - .via(AUTO_ACCOUNT_TRANSACTION_NAME), - cryptoCreate(feeCollectorAndAutoRenew) - .keyShape(SigControl.ED25519_ON) - .balance(ONE_MILLION_HBARS), - uploadInitCode(contract), - contractCreate(contract).gas(GAS_LIMIT), - tokenCreate(EXISTING_TOKEN).decimals(5), - tokenAssociate(feeCollectorAndAutoRenew, EXISTING_TOKEN), - cryptoUpdate(feeCollectorAndAutoRenew).key(SECP_256K1_SOURCE_KEY)) - .when(withOpContext((spec, opLog) -> allRunFor( + return hapiTest( + newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), + cryptoTransfer(tinyBarsFromAccountToAlias(GENESIS, SECP_256K1_SOURCE_KEY, 20 * ONE_HUNDRED_HBARS)) + .via(AUTO_ACCOUNT_TRANSACTION_NAME), + cryptoCreate(feeCollectorAndAutoRenew) + .keyShape(SigControl.ED25519_ON) + .balance(ONE_MILLION_HBARS), + uploadInitCode(contract), + contractCreate(contract).gas(GAS_LIMIT), + tokenCreate(EXISTING_TOKEN).decimals(5), + tokenAssociate(feeCollectorAndAutoRenew, EXISTING_TOKEN), + cryptoUpdate(feeCollectorAndAutoRenew).key(SECP_256K1_SOURCE_KEY), + withOpContext((spec, opLog) -> allRunFor( spec, ethereumCall( contract, @@ -1215,30 +1201,24 @@ final Stream fungibleTokenCreateWithAmountLookingNegativeInTwosComp opLog.info("Explicit create result is {}", result[0]); final var res = (Address) result[0]; createdTokenNum.set(res.value().longValueExact()); - })))) - .then( - getTxnRecord(firstTxn).andAllChildRecords().logged(), - childRecordsCheck( - firstTxn, - SUCCESS, - TransactionRecordAsserts.recordWith().status(ResponseCodeEnum.SUCCESS)), - withOpContext((spec, ignore) -> { - final var op = 
getTxnRecord(firstTxn); - allRunFor(spec, op); - - final var callResult = op.getResponseRecord().getContractCallResult(); - final var gasUsed = callResult.getGasUsed(); - final var amountTinybar = callResult.getAmount(); - final var gasLimit = callResult.getGas(); - Assertions.assertEquals( - BIG_INTEGER_WEIBAR - .divide(WEIBARS_IN_A_TINYBAR) - .longValueExact(), - amountTinybar); - Assertions.assertEquals(GAS_LIMIT, gasLimit); - Assertions.assertTrue(gasUsed > 0L); - Assertions.assertTrue(callResult.hasContractID() && callResult.hasSenderId()); - })); + }))), + getTxnRecord(firstTxn).andAllChildRecords().logged(), + childRecordsCheck( + firstTxn, SUCCESS, TransactionRecordAsserts.recordWith().status(ResponseCodeEnum.SUCCESS)), + withOpContext((spec, ignore) -> { + final var op = getTxnRecord(firstTxn); + allRunFor(spec, op); + + final var callResult = op.getResponseRecord().getContractCallResult(); + final var gasUsed = callResult.getGasUsed(); + final var amountTinybar = callResult.getAmount(); + final var gasLimit = callResult.getGas(); + Assertions.assertEquals( + BIG_INTEGER_WEIBAR.divide(WEIBARS_IN_A_TINYBAR).longValueExact(), amountTinybar); + Assertions.assertEquals(GAS_LIMIT, gasLimit); + Assertions.assertTrue(gasUsed > 0L); + Assertions.assertTrue(callResult.hasContractID() && callResult.hasSenderId()); + })); } @HapiTest diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/HelloWorldEthereumSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/HelloWorldEthereumSuite.java index 0b54a10d2e5c..e8c2aa57241e 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/HelloWorldEthereumSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/HelloWorldEthereumSuite.java @@ -21,7 +21,6 @@ import static com.hedera.services.bdd.spec.HapiPropertySource.asHexedSolidityAddress; import static com.hedera.services.bdd.spec.HapiPropertySource.asSolidityAddress; import static com.hedera.services.bdd.spec.HapiPropertySource.asToken; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.assertions.AccountInfoAsserts.accountWith; import static com.hedera.services.bdd.spec.assertions.AccountInfoAsserts.changeFromSnapshot; @@ -121,82 +120,78 @@ final Stream canCreateTokenWithCryptoAdminKeyOnlyIfHasTopLevelSig() final AtomicReference adminKey = new AtomicReference<>(); final AtomicReference creationDetails = new AtomicReference<>(); - return defaultHapiSpec("canCreateTokenWithCryptoAdminKeyOnlyIfHasTopLevelSig") - .given( - // Deploy our test contract - uploadInitCode(contract), - contractCreate(contract).gas(5_000_000L), + return hapiTest( + // Deploy our test contract + uploadInitCode(contract), + contractCreate(contract).gas(5_000_000L), - // Create an ECDSA key - newKeyNamed(cryptoKey) - .shape(SECP256K1_ON) - .exposingKeyTo( - k -> adminKey.set(k.getECDSASecp256K1().toByteArray())), - // Create an account with an EVM address derived from this key - cryptoTransfer(tinyBarsFromToWithAlias(DEFAULT_PAYER, cryptoKey, 2 * ONE_HUNDRED_HBARS)) - .via("creation"), - // Get its EVM address for later use in the contract call - getTxnRecord("creation") - .exposingCreationDetailsTo(allDetails -> creationDetails.set(allDetails.getFirst())), - // Update key to a threshold key authorizing our contract use this account as a token treasury - 
newKeyNamed(thresholdKey) - .shape(threshOf(1, PREDEFINED_SHAPE, CONTRACT).signedWith(sigs(cryptoKey, contract))), - sourcing(() -> cryptoUpdate( - asAccountString(creationDetails.get().createdId())) + // Create an ECDSA key + newKeyNamed(cryptoKey) + .shape(SECP256K1_ON) + .exposingKeyTo(k -> adminKey.set(k.getECDSASecp256K1().toByteArray())), + // Create an account with an EVM address derived from this key + cryptoTransfer(tinyBarsFromToWithAlias(DEFAULT_PAYER, cryptoKey, 2 * ONE_HUNDRED_HBARS)) + .via("creation"), + // Get its EVM address for later use in the contract call + getTxnRecord("creation") + .exposingCreationDetailsTo(allDetails -> creationDetails.set(allDetails.getFirst())), + // Update key to a threshold key authorizing our contract use this account as a token treasury + newKeyNamed(thresholdKey) + .shape(threshOf(1, PREDEFINED_SHAPE, CONTRACT).signedWith(sigs(cryptoKey, contract))), + sourcing( + () -> cryptoUpdate(asAccountString(creationDetails.get().createdId())) .key(thresholdKey) - .signedBy(DEFAULT_PAYER, cryptoKey))) - .when( - // First verify we fail to create without the admin key's top-level signature - sourcing(() -> contractCall( - contract, - "createFungibleTokenWithSECP256K1AdminKeyPublic", - // Treasury is the EVM address - creationDetails.get().evmAddress(), - // Admin key is the ECDSA key - adminKey.get()) - .via("creationWithoutTopLevelSig") - .gas(5_000_000L) - .sending(100 * ONE_HBAR) - .hasKnownStatus(CONTRACT_REVERT_EXECUTED)), - // Next verify we succeed when using the top-level SignatureMap to - // sign with the admin key - sourcing(() -> contractCall( - contract, - "createFungibleTokenWithSECP256K1AdminKeyPublic", - creationDetails.get().evmAddress(), - adminKey.get()) - .via("creationActivatingAdminKeyViaSigMap") - .gas(5_000_000L) - .sending(100 * ONE_HBAR) - // This is the important change, include a top-level signature with the admin key - .alsoSigningWithFullPrefix(cryptoKey)), - // Finally confirm we ALSO succeed when providing the admin key's - // signature via an EthereumTransaction signature - cryptoCreate(RELAYER).balance(10 * THOUSAND_HBAR), - sourcing(() -> ethereumCall( - contract, - "createFungibleTokenWithSECP256K1AdminKeyPublic", - creationDetails.get().evmAddress(), - adminKey.get()) - .type(EthTxData.EthTransactionType.EIP1559) - .nonce(0) - .signingWith(cryptoKey) - .payingWith(RELAYER) - .sending(50 * ONE_HBAR) - .maxGasAllowance(ONE_HBAR * 10) - .gasLimit(5_000_000L) - .via("creationActivatingAdminKeyViaEthTxSig"))) - .then( - childRecordsCheck( - "creationWithoutTopLevelSig", - CONTRACT_REVERT_EXECUTED, - recordWith().status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)), - getTxnRecord("creationActivatingAdminKeyViaSigMap") - .exposingTokenCreationsTo(createdIds -> - assertFalse(createdIds.isEmpty(), "Top-level sig map creation failed")), - getTxnRecord("creationActivatingAdminKeyViaEthTxSig") - .exposingTokenCreationsTo( - createdIds -> assertFalse(createdIds.isEmpty(), "EthTx sig creation failed"))); + .signedBy(DEFAULT_PAYER, cryptoKey)), + // First verify we fail to create without the admin key's top-level signature + sourcing(() -> contractCall( + contract, + "createFungibleTokenWithSECP256K1AdminKeyPublic", + // Treasury is the EVM address + creationDetails.get().evmAddress(), + // Admin key is the ECDSA key + adminKey.get()) + .via("creationWithoutTopLevelSig") + .gas(5_000_000L) + .sending(100 * ONE_HBAR) + .hasKnownStatus(CONTRACT_REVERT_EXECUTED)), + // Next verify we succeed when using the top-level SignatureMap to 
+ // sign with the admin key + sourcing(() -> contractCall( + contract, + "createFungibleTokenWithSECP256K1AdminKeyPublic", + creationDetails.get().evmAddress(), + adminKey.get()) + .via("creationActivatingAdminKeyViaSigMap") + .gas(5_000_000L) + .sending(100 * ONE_HBAR) + // This is the important change, include a top-level signature with the admin key + .alsoSigningWithFullPrefix(cryptoKey)), + // Finally confirm we ALSO succeed when providing the admin key's + // signature via an EthereumTransaction signature + cryptoCreate(RELAYER).balance(10 * THOUSAND_HBAR), + sourcing(() -> ethereumCall( + contract, + "createFungibleTokenWithSECP256K1AdminKeyPublic", + creationDetails.get().evmAddress(), + adminKey.get()) + .type(EthTxData.EthTransactionType.EIP1559) + .nonce(0) + .signingWith(cryptoKey) + .payingWith(RELAYER) + .sending(50 * ONE_HBAR) + .maxGasAllowance(ONE_HBAR * 10) + .gasLimit(5_000_000L) + .via("creationActivatingAdminKeyViaEthTxSig")), + childRecordsCheck( + "creationWithoutTopLevelSig", + CONTRACT_REVERT_EXECUTED, + recordWith().status(INVALID_FULL_PREFIX_SIGNATURE_FOR_PRECOMPILE)), + getTxnRecord("creationActivatingAdminKeyViaSigMap") + .exposingTokenCreationsTo( + createdIds -> assertFalse(createdIds.isEmpty(), "Top-level sig map creation failed")), + getTxnRecord("creationActivatingAdminKeyViaEthTxSig") + .exposingTokenCreationsTo( + createdIds -> assertFalse(createdIds.isEmpty(), "EthTx sig creation failed"))); } @HapiTest diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/NonceSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/NonceSuite.java index 98a57f1fc600..7c59880add7d 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/NonceSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/ethereum/NonceSuite.java @@ -333,17 +333,16 @@ final Stream nonceNotUpdatedWhenSenderDoesNotHaveEnoughBalanceHandl @HapiTest final Stream nonceNotUpdatedForNonEthereumTransaction() { - return defaultHapiSpec("nonceNotUpdatedForNonEthereumTransaction") - .given( - cryptoCreate(RELAYER).balance(ONE_HUNDRED_HBARS), - uploadInitCode(INTERNAL_CALLEE_CONTRACT), - contractCreate(INTERNAL_CALLEE_CONTRACT)) - .when(contractCall(INTERNAL_CALLEE_CONTRACT, EXTERNAL_FUNCTION) + return hapiTest( + cryptoCreate(RELAYER).balance(ONE_HUNDRED_HBARS), + uploadInitCode(INTERNAL_CALLEE_CONTRACT), + contractCreate(INTERNAL_CALLEE_CONTRACT), + contractCall(INTERNAL_CALLEE_CONTRACT, EXTERNAL_FUNCTION) .gas(ENOUGH_GAS_LIMIT) .payingWith(RELAYER) .signingWith(RELAYER) - .via(TX)) - .then(getAccountInfo(RELAYER).has(accountWith().nonce(0L))); + .via(TX), + getAccountInfo(RELAYER).has(accountWith().nonce(0L))); } @HapiTest @@ -832,13 +831,12 @@ final Stream nonceUpdatedAfterEvmReversionDueSendingValueToEthereum // depends on https://github.com/hashgraph/hedera-services/pull/11359 @HapiTest final Stream nonceUpdatedAfterEvmReversionDueSendingValueToHederaPrecompileEthContractCreate() { - return defaultHapiSpec("nonceUpdatedAfterEvmReversionDueSendingValueToHederaPrecompileEthContractCreate") - .given( - newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), - cryptoCreate(RELAYER).balance(ONE_MILLION_HBARS), - cryptoTransfer(tinyBarsFromAccountToAlias(GENESIS, SECP_256K1_SOURCE_KEY, ONE_MILLION_HBARS)), - uploadInitCode(REVERTER_CONSTRUCTOR_CALL_WITH_VALUE_TO_HEDERA_PRECOMPILE_CONTRACT)) - 
.when(ethereumContractCreate(REVERTER_CONSTRUCTOR_CALL_WITH_VALUE_TO_HEDERA_PRECOMPILE_CONTRACT) + return hapiTest( + newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), + cryptoCreate(RELAYER).balance(ONE_MILLION_HBARS), + cryptoTransfer(tinyBarsFromAccountToAlias(GENESIS, SECP_256K1_SOURCE_KEY, ONE_MILLION_HBARS)), + uploadInitCode(REVERTER_CONSTRUCTOR_CALL_WITH_VALUE_TO_HEDERA_PRECOMPILE_CONTRACT), + ethereumContractCreate(REVERTER_CONSTRUCTOR_CALL_WITH_VALUE_TO_HEDERA_PRECOMPILE_CONTRACT) .balance(1L) .type(EthTransactionType.EIP1559) .signingWith(SECP_256K1_SOURCE_KEY) @@ -846,14 +844,12 @@ final Stream nonceUpdatedAfterEvmReversionDueSendingValueToHederaPr .nonce(0) .gasLimit(ENOUGH_GAS_LIMIT) .hasKnownStatus(INVALID_CONTRACT_ID) - .via(TX)) - .then( - getAliasedAccountInfo(SECP_256K1_SOURCE_KEY) - .has(accountWith().nonce(1L)), - getTxnRecord(TX) - .hasPriority(recordWith() - .status(INVALID_CONTRACT_ID) - .contractCreateResult(resultWith().signerNonce(1L)))); + .via(TX), + getAliasedAccountInfo(SECP_256K1_SOURCE_KEY).has(accountWith().nonce(1L)), + getTxnRecord(TX) + .hasPriority(recordWith() + .status(INVALID_CONTRACT_ID) + .contractCreateResult(resultWith().signerNonce(1L)))); } @HapiTest @@ -879,14 +875,13 @@ final Stream nonceUpdatedAfterSuccessfulEthereumContractCreation() @HapiTest final Stream revertsWhenSenderDoesNotExist() { AtomicReference receiverId = new AtomicReference<>(); - return defaultHapiSpec("revertsWhenSenderDoesNotExist") - .given( - cryptoCreate(RECEIVER).balance(0L).exposingCreatedIdTo(receiverId::set), - newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), - cryptoCreate(RELAYER).balance(ONE_HUNDRED_HBARS), - uploadInitCode(INTERNAL_CALLER_CONTRACT), - contractCreate(INTERNAL_CALLER_CONTRACT).balance(ONE_HBAR)) - .when(withOpContext((spec, op) -> allRunFor( + return hapiTest( + cryptoCreate(RECEIVER).balance(0L).exposingCreatedIdTo(receiverId::set), + newKeyNamed(SECP_256K1_SOURCE_KEY).shape(SECP_256K1_SHAPE), + cryptoCreate(RELAYER).balance(ONE_HUNDRED_HBARS), + uploadInitCode(INTERNAL_CALLER_CONTRACT), + contractCreate(INTERNAL_CALLER_CONTRACT).balance(ONE_HBAR), + withOpContext((spec, op) -> allRunFor( spec, ethereumCall( INTERNAL_CALLER_CONTRACT, @@ -898,8 +893,8 @@ final Stream revertsWhenSenderDoesNotExist() { .nonce(0) .gasLimit(ENOUGH_GAS_LIMIT) .hasKnownStatus(INVALID_ACCOUNT_ID) - .via(TX)))) - .then(getTxnRecord(TX) + .via(TX))), + getTxnRecord(TX) .hasPriority( recordWith().contractCallResult(resultWith().signerNonce(0L)))); } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/fees/FeeScheduleUpdateWaiverTest.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/fees/FeeScheduleUpdateWaiverTest.java index dc15bc5d3eeb..407d012878e7 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/fees/FeeScheduleUpdateWaiverTest.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/fees/FeeScheduleUpdateWaiverTest.java @@ -16,7 +16,7 @@ package com.hedera.services.bdd.suites.fees; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.assertions.AccountInfoAsserts.changeFromSnapshot; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getFileContents; @@ -43,32 +43,30 @@ public class FeeScheduleUpdateWaiverTest { final Stream 
feeScheduleControlAccountIsntCharged() { ResponseCodeEnum[] acceptable = {SUCCESS, FEE_SCHEDULE_FILE_PART_UPLOADED}; - return defaultHapiSpec("FeeScheduleControlAccountIsntCharged") - .given( - cryptoTransfer(tinyBarsFromTo(GENESIS, FEE_SCHEDULE_CONTROL, 1_000_000_000_000L)), - balanceSnapshot("pre", FEE_SCHEDULE_CONTROL), - getFileContents(FEE_SCHEDULE).in4kChunks(true).saveTo("feeSchedule.bin")) - .when( - fileUpdate(FEE_SCHEDULE) - .hasKnownStatusFrom(acceptable) - .payingWith(FEE_SCHEDULE_CONTROL) - .path(Path.of("./", "part0-feeSchedule.bin").toString()), - fileAppend(FEE_SCHEDULE) - .hasKnownStatusFrom(acceptable) - .payingWith(FEE_SCHEDULE_CONTROL) - .path(Path.of("./", "part1-feeSchedule.bin").toString()), - fileAppend(FEE_SCHEDULE) - .hasKnownStatusFrom(acceptable) - .payingWith(FEE_SCHEDULE_CONTROL) - .path(Path.of("./", "part2-feeSchedule.bin").toString()), - fileAppend(FEE_SCHEDULE) - .hasKnownStatusFrom(acceptable) - .payingWith(FEE_SCHEDULE_CONTROL) - .path(Path.of("./", "part3-feeSchedule.bin").toString()), - fileAppend(FEE_SCHEDULE) - .hasKnownStatusFrom(acceptable) - .payingWith(FEE_SCHEDULE_CONTROL) - .path(Path.of("./", "part4-feeSchedule.bin").toString())) - .then(getAccountBalance(FEE_SCHEDULE_CONTROL).hasTinyBars(changeFromSnapshot("pre", 0))); + return hapiTest( + cryptoTransfer(tinyBarsFromTo(GENESIS, FEE_SCHEDULE_CONTROL, 1_000_000_000_000L)), + balanceSnapshot("pre", FEE_SCHEDULE_CONTROL), + getFileContents(FEE_SCHEDULE).in4kChunks(true).saveTo("feeSchedule.bin"), + fileUpdate(FEE_SCHEDULE) + .hasKnownStatusFrom(acceptable) + .payingWith(FEE_SCHEDULE_CONTROL) + .path(Path.of("./", "part0-feeSchedule.bin").toString()), + fileAppend(FEE_SCHEDULE) + .hasKnownStatusFrom(acceptable) + .payingWith(FEE_SCHEDULE_CONTROL) + .path(Path.of("./", "part1-feeSchedule.bin").toString()), + fileAppend(FEE_SCHEDULE) + .hasKnownStatusFrom(acceptable) + .payingWith(FEE_SCHEDULE_CONTROL) + .path(Path.of("./", "part2-feeSchedule.bin").toString()), + fileAppend(FEE_SCHEDULE) + .hasKnownStatusFrom(acceptable) + .payingWith(FEE_SCHEDULE_CONTROL) + .path(Path.of("./", "part3-feeSchedule.bin").toString()), + fileAppend(FEE_SCHEDULE) + .hasKnownStatusFrom(acceptable) + .payingWith(FEE_SCHEDULE_CONTROL) + .path(Path.of("./", "part4-feeSchedule.bin").toString()), + getAccountBalance(FEE_SCHEDULE_CONTROL).hasTinyBars(changeFromSnapshot("pre", 0))); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/DiverseStateCreation.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/DiverseStateCreation.java index 04e1145e5eb9..86c7c784398b 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/DiverseStateCreation.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/DiverseStateCreation.java @@ -16,7 +16,7 @@ package com.hedera.services.bdd.suites.file; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.keys.KeyShape.SIMPLE; import static com.hedera.services.bdd.spec.keys.KeyShape.listOf; import static com.hedera.services.bdd.spec.keys.KeyShape.threshOf; @@ -112,7 +112,7 @@ public List<Stream<DynamicTest>> getSpecsInSuite() { return List.of(createDiverseState()); } - final Stream<DynamicTest> createDiverseState() { + Stream<DynamicTest> createDiverseState() { final KeyShape SMALL_SHAPE = listOf(threshOf(1, 3)); final KeyShape MEDIUM_SHAPE = listOf(SIMPLE, threshOf(2, 3)); final KeyShape 
LARGE_SHAPE = listOf( @@ -126,65 +126,54 @@ final Stream createDiverseState() { final var fuseContract = "Fuse"; final var multiContract = "Multipurpose"; - return defaultHapiSpec("CreateDiverseState") - .given( - newKeyNamed(smallKey).shape(SMALL_SHAPE), - newKeyNamed(mediumKey).shape(MEDIUM_SHAPE), - newKeyNamed(largeKey).shape(LARGE_SHAPE)) - .when( - /* Create some well-known files */ - fileCreate(SMALL_FILE) - .contents(SMALL_CONTENTS) - .key(smallKey) - .expiry(SMALL_EXPIRY_TIME) - .exposingNumTo(num -> entityNums.put(SMALL_FILE, num)), - fileCreate(MEDIUM_FILE) - .contents("") - .key(mediumKey) - .expiry(MEDIUM_EXPIRY_TIME) - .exposingNumTo(num -> entityNums.put(MEDIUM_FILE, num)), - updateLargeFile( - GENESIS, - MEDIUM_FILE, - ByteString.copyFrom(MEDIUM_CONTENTS), - false, - OptionalLong.of(ONE_HBAR)), - fileDelete(MEDIUM_FILE), - fileCreate(LARGE_FILE) - .contents("") - .key(largeKey) - .expiry(LARGE_EXPIRY_TIME) - .exposingNumTo(num -> entityNums.put(LARGE_FILE, num)), - updateLargeFile( - GENESIS, - LARGE_FILE, - ByteString.copyFrom(LARGE_CONTENTS), - false, - OptionalLong.of(ONE_HBAR)), - /* Create some bytecode files */ - uploadSingleInitCode( - fuseContract, FUSE_EXPIRY_TIME, GENESIS, num -> entityNums.put(FUSE_INITCODE, num)), - uploadSingleInitCode( - multiContract, MULTI_EXPIRY_TIME, GENESIS, num -> entityNums.put(MULTI_INITCODE, num)), - contractCreate(fuseContract).exposingNumTo(num -> entityNums.put(FUSE_CONTRACT, num)), - contractCreate(multiContract).exposingNumTo(num -> entityNums.put(MULTI_CONTRACT, num)), - contractCall(multiContract, "believeIn", EXPECTED_LUCKY_NO)) - .then( - systemFileDelete(fuseContract).payingWith(GENESIS), - systemFileDelete(multiContract).payingWith(GENESIS), - getFileInfo(SMALL_FILE).exposingKeyReprTo(repr -> keyReprs.put(SMALL_FILE, repr)), - getFileInfo(MEDIUM_FILE).exposingKeyReprTo(repr -> keyReprs.put(MEDIUM_FILE, repr)), - getFileInfo(LARGE_FILE).exposingKeyReprTo(repr -> keyReprs.put(LARGE_FILE, repr)), - getContractBytecode(FUSE_CONTRACT) - .exposingBytecodeTo(code -> hexedBytecode.put(FUSE_BYTECODE, CommonUtils.hex(code))), - withOpContext((spec, opLog) -> { - final var toSerialize = Map.of( - ENTITY_NUM_KEY, entityNums, - KEY_REPRS_KEY, keyReprs, - HEXED_BYTECODE_KEY, hexedBytecode); - final var om = new ObjectMapper(); - om.writeValue(Files.newOutputStream(Paths.get(STATE_META_JSON_LOC)), toSerialize); - })); + return hapiTest( + newKeyNamed(smallKey).shape(SMALL_SHAPE), + newKeyNamed(mediumKey).shape(MEDIUM_SHAPE), + newKeyNamed(largeKey).shape(LARGE_SHAPE), + /* Create some well-known files */ + fileCreate(SMALL_FILE) + .contents(SMALL_CONTENTS) + .key(smallKey) + .expiry(SMALL_EXPIRY_TIME) + .exposingNumTo(num -> entityNums.put(SMALL_FILE, num)), + fileCreate(MEDIUM_FILE) + .contents("") + .key(mediumKey) + .expiry(MEDIUM_EXPIRY_TIME) + .exposingNumTo(num -> entityNums.put(MEDIUM_FILE, num)), + updateLargeFile( + GENESIS, MEDIUM_FILE, ByteString.copyFrom(MEDIUM_CONTENTS), false, OptionalLong.of(ONE_HBAR)), + fileDelete(MEDIUM_FILE), + fileCreate(LARGE_FILE) + .contents("") + .key(largeKey) + .expiry(LARGE_EXPIRY_TIME) + .exposingNumTo(num -> entityNums.put(LARGE_FILE, num)), + updateLargeFile( + GENESIS, LARGE_FILE, ByteString.copyFrom(LARGE_CONTENTS), false, OptionalLong.of(ONE_HBAR)), + /* Create some bytecode files */ + uploadSingleInitCode( + fuseContract, FUSE_EXPIRY_TIME, GENESIS, num -> entityNums.put(FUSE_INITCODE, num)), + uploadSingleInitCode( + multiContract, MULTI_EXPIRY_TIME, GENESIS, num -> 
entityNums.put(MULTI_INITCODE, num)), + contractCreate(fuseContract).exposingNumTo(num -> entityNums.put(FUSE_CONTRACT, num)), + contractCreate(multiContract).exposingNumTo(num -> entityNums.put(MULTI_CONTRACT, num)), + contractCall(multiContract, "believeIn", EXPECTED_LUCKY_NO), + systemFileDelete(fuseContract).payingWith(GENESIS), + systemFileDelete(multiContract).payingWith(GENESIS), + getFileInfo(SMALL_FILE).exposingKeyReprTo(repr -> keyReprs.put(SMALL_FILE, repr)), + getFileInfo(MEDIUM_FILE).exposingKeyReprTo(repr -> keyReprs.put(MEDIUM_FILE, repr)), + getFileInfo(LARGE_FILE).exposingKeyReprTo(repr -> keyReprs.put(LARGE_FILE, repr)), + getContractBytecode(FUSE_CONTRACT) + .exposingBytecodeTo(code -> hexedBytecode.put(FUSE_BYTECODE, CommonUtils.hex(code))), + withOpContext((spec, opLog) -> { + final var toSerialize = Map.of( + ENTITY_NUM_KEY, entityNums, + KEY_REPRS_KEY, keyReprs, + HEXED_BYTECODE_KEY, hexedBytecode); + final var om = new ObjectMapper(); + om.writeValue(Files.newOutputStream(Paths.get(STATE_META_JSON_LOC)), toSerialize); + })); } @Override diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/DiverseStateValidation.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/DiverseStateValidation.java index 837cf805462b..3e9883da8dd5 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/DiverseStateValidation.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/DiverseStateValidation.java @@ -16,7 +16,7 @@ package com.hedera.services.bdd.suites.file; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.isLiteralResult; import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; import static com.hedera.services.bdd.spec.queries.QueryVerbs.contractCallLocal; @@ -92,67 +92,53 @@ public List> getSpecsInSuite() { } @SuppressWarnings("unchecked") - final Stream validateDiverseState() { - return defaultHapiSpec("ValidateDiverseState") - .given(withOpContext((spec, opLog) -> { + Stream validateDiverseState() { + return hapiTest( + withOpContext((spec, opLog) -> { final var om = new ObjectMapper(); final var meta = (Map) om.readValue(Files.newInputStream(Paths.get(STATE_META_JSON_LOC)), Map.class); entityNums.set((Map) meta.get(ENTITY_NUM_KEY)); keyReprs.set((Map) meta.get(KEY_REPRS_KEY)); hexedBytecode.set((Map) meta.get(HEXED_BYTECODE_KEY)); - })) - .when( - sourcing(() -> systemFileUndelete( - idLiteralWith(entityNums.get().get(FUSE_INITCODE))) - .payingWith(GENESIS)), - sourcing(() -> systemFileUndelete( - idLiteralWith(entityNums.get().get(MULTI_INITCODE))) - .payingWith(GENESIS)), - /* Confirm un-deletion recovered expiry times */ - sourcing( - () -> getFileInfo(idLiteralWith(entityNums.get().get(FUSE_INITCODE))) - .hasExpiry(() -> DiverseStateCreation.FUSE_EXPIRY_TIME)), - sourcing( - () -> getFileInfo(idLiteralWith(entityNums.get().get(MULTI_INITCODE))) - .hasExpiry(() -> DiverseStateCreation.MULTI_EXPIRY_TIME))) - .then( - /* Confirm misc file meta and contents */ - sourcing( - () -> getFileInfo(idLiteralWith(entityNums.get().get(SMALL_FILE))) - .hasKeyReprTo(keyReprs.get().get(SMALL_FILE)) - .hasExpiry(() -> DiverseStateCreation.SMALL_EXPIRY_TIME) - .hasDeleted(false)), - sourcing(() -> getFileContents( - idLiteralWith(entityNums.get().get(SMALL_FILE))) - 
.hasContents(ignore -> SMALL_CONTENTS)), - sourcing( - () -> getFileInfo(idLiteralWith(entityNums.get().get(MEDIUM_FILE))) - .hasKeyReprTo(keyReprs.get().get(MEDIUM_FILE)) - .hasExpiry(() -> DiverseStateCreation.MEDIUM_EXPIRY_TIME) - .hasDeleted(true)), - logIt("--- Now validating large file ---"), - sourcing( - () -> getFileInfo(idLiteralWith(entityNums.get().get(LARGE_FILE))) - .hasKeyReprTo(keyReprs.get().get(LARGE_FILE)) - .hasExpiry(() -> DiverseStateCreation.LARGE_EXPIRY_TIME) - .hasDeleted(false)), - sourcing(() -> getFileContents( - idLiteralWith(entityNums.get().get(LARGE_FILE))) - .hasContents(ignore -> LARGE_CONTENTS)), - /* Confirm contract code and behavior */ - logIt("--- Now validating contract stuff ---"), - sourcing(() -> getContractBytecode( - idLiteralWith(entityNums.get().get(FUSE_CONTRACT))) - .hasBytecode( - CommonUtils.unhex(hexedBytecode.get().get(FUSE_BYTECODE)))), - sourcing(() -> contractCallLocal( - idLiteralWith(entityNums.get().get(MULTI_CONTRACT)), "pick") - .has(resultWith() - .resultThruAbi( - getABIFor(FUNCTION, "pick", MULTI_CONTRACT), - isLiteralResult( - new Object[] {BigInteger.valueOf(EXPECTED_LUCKY_NO)}))))); + }), + sourcing(() -> systemFileUndelete(idLiteralWith(entityNums.get().get(FUSE_INITCODE))) + .payingWith(GENESIS)), + sourcing(() -> systemFileUndelete(idLiteralWith(entityNums.get().get(MULTI_INITCODE))) + .payingWith(GENESIS)), + /* Confirm un-deletion recovered expiry times */ + sourcing(() -> getFileInfo(idLiteralWith(entityNums.get().get(FUSE_INITCODE))) + .hasExpiry(() -> DiverseStateCreation.FUSE_EXPIRY_TIME)), + sourcing(() -> getFileInfo(idLiteralWith(entityNums.get().get(MULTI_INITCODE))) + .hasExpiry(() -> DiverseStateCreation.MULTI_EXPIRY_TIME)), + /* Confirm misc file meta and contents */ + sourcing(() -> getFileInfo(idLiteralWith(entityNums.get().get(SMALL_FILE))) + .hasKeyReprTo(keyReprs.get().get(SMALL_FILE)) + .hasExpiry(() -> DiverseStateCreation.SMALL_EXPIRY_TIME) + .hasDeleted(false)), + sourcing(() -> getFileContents(idLiteralWith(entityNums.get().get(SMALL_FILE))) + .hasContents(ignore -> SMALL_CONTENTS)), + sourcing(() -> getFileInfo(idLiteralWith(entityNums.get().get(MEDIUM_FILE))) + .hasKeyReprTo(keyReprs.get().get(MEDIUM_FILE)) + .hasExpiry(() -> DiverseStateCreation.MEDIUM_EXPIRY_TIME) + .hasDeleted(true)), + logIt("--- Now validating large file ---"), + sourcing(() -> getFileInfo(idLiteralWith(entityNums.get().get(LARGE_FILE))) + .hasKeyReprTo(keyReprs.get().get(LARGE_FILE)) + .hasExpiry(() -> DiverseStateCreation.LARGE_EXPIRY_TIME) + .hasDeleted(false)), + sourcing(() -> getFileContents(idLiteralWith(entityNums.get().get(LARGE_FILE))) + .hasContents(ignore -> LARGE_CONTENTS)), + /* Confirm contract code and behavior */ + logIt("--- Now validating contract stuff ---"), + sourcing(() -> getContractBytecode( + idLiteralWith(entityNums.get().get(FUSE_CONTRACT))) + .hasBytecode(CommonUtils.unhex(hexedBytecode.get().get(FUSE_BYTECODE)))), + sourcing(() -> contractCallLocal(idLiteralWith(entityNums.get().get(MULTI_CONTRACT)), "pick") + .has(resultWith() + .resultThruAbi( + getABIFor(FUNCTION, "pick", MULTI_CONTRACT), + isLiteralResult(new Object[] {BigInteger.valueOf(EXPECTED_LUCKY_NO)}))))); } private String idLiteralWith(long num) { diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/ExchangeRateControlSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/ExchangeRateControlSuite.java index c017a17a88dc..da5275a6031e 100644 --- 
a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/ExchangeRateControlSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/ExchangeRateControlSuite.java @@ -16,7 +16,7 @@ package com.hedera.services.bdd.suites.file; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getFileContents; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; @@ -48,46 +48,42 @@ public class ExchangeRateControlSuite { @HapiTest final Stream acct57CanMakeSmallChanges() { - return defaultHapiSpec("Acct57CanMakeSmallChanges") - .given( - resetRatesOp, - cryptoTransfer(tinyBarsFromTo(GENESIS, EXCHANGE_RATE_CONTROL, ADEQUATE_FUNDS)) - .fee(ONE_HUNDRED_HBARS)) - .when(fileUpdate(EXCHANGE_RATES) + return hapiTest( + resetRatesOp, + cryptoTransfer(tinyBarsFromTo(GENESIS, EXCHANGE_RATE_CONTROL, ADEQUATE_FUNDS)) + .fee(ONE_HUNDRED_HBARS), + fileUpdate(EXCHANGE_RATES) .contents(spec -> { ByteString newRates = spec.ratesProvider().rateSetWith(10, 121).toByteString(); spec.registry().saveBytes("newRates", newRates); return newRates; }) - .payingWith(EXCHANGE_RATE_CONTROL)) - .then( - getFileContents(EXCHANGE_RATES) - .hasContents(spec -> spec.registry().getBytes("newRates")), - resetRatesOp); + .payingWith(EXCHANGE_RATE_CONTROL), + getFileContents(EXCHANGE_RATES) + .hasContents(spec -> spec.registry().getBytes("newRates")), + resetRatesOp); } @HapiTest final Stream midnightRateChangesWhenAcct50UpdatesFile112() { - return defaultHapiSpec("MidnightRateChangesWhenAcct50UpdatesFile112") - .given( - resetRatesOp, - cryptoTransfer(tinyBarsFromTo(GENESIS, EXCHANGE_RATE_CONTROL, ADEQUATE_FUNDS)) - .fee(ONE_HUNDRED_HBARS), - cryptoTransfer(tinyBarsFromTo(GENESIS, SYSTEM_ADMIN, ADEQUATE_FUNDS)) - .fee(ONE_HUNDRED_HBARS), - fileUpdate(EXCHANGE_RATES) - .contents(spec -> { - ByteString newRates = spec.ratesProvider() - .rateSetWith(10, 254) - .toByteString(); - spec.registry().saveBytes("newRates", newRates); - return newRates; - }) - .payingWith(EXCHANGE_RATE_CONTROL) - .fee(1_000_000_000) - .hasKnownStatus(EXCHANGE_RATE_CHANGE_LIMIT_EXCEEDED)) - .when(fileUpdate(EXCHANGE_RATES) + return hapiTest( + resetRatesOp, + cryptoTransfer(tinyBarsFromTo(GENESIS, EXCHANGE_RATE_CONTROL, ADEQUATE_FUNDS)) + .fee(ONE_HUNDRED_HBARS), + cryptoTransfer(tinyBarsFromTo(GENESIS, SYSTEM_ADMIN, ADEQUATE_FUNDS)) + .fee(ONE_HUNDRED_HBARS), + fileUpdate(EXCHANGE_RATES) + .contents(spec -> { + ByteString newRates = + spec.ratesProvider().rateSetWith(10, 254).toByteString(); + spec.registry().saveBytes("newRates", newRates); + return newRates; + }) + .payingWith(EXCHANGE_RATE_CONTROL) + .fee(1_000_000_000) + .hasKnownStatus(EXCHANGE_RATE_CHANGE_LIMIT_EXCEEDED), + fileUpdate(EXCHANGE_RATES) .contents(spec -> { ByteString newRates = spec.ratesProvider().rateSetWith(1, 25).toByteString(); @@ -95,38 +91,36 @@ final Stream midnightRateChangesWhenAcct50UpdatesFile112() { return newRates; }) .payingWith(SYSTEM_ADMIN) - .fee(1_000_000_000)) - .then( - fileUpdate(EXCHANGE_RATES) - .contents(spec -> { - ByteString newRates = spec.ratesProvider() - .rateSetWith(10, 254) - .toByteString(); - spec.registry().saveBytes("newRates", newRates); - return newRates; - }) - .payingWith(EXCHANGE_RATE_CONTROL) - .fee(1_000_000_000) - .hasKnownStatus(SUCCESS), - 
fileUpdate(EXCHANGE_RATES) - .contents(spec -> { - ByteString newRates = spec.ratesProvider() - .rateSetWith(1, 12, 1, 15) - .toByteString(); - spec.registry().saveBytes("newRates", newRates); - return newRates; - }) - .payingWith(SYSTEM_ADMIN) - .fee(1_000_000_000) - .hasKnownStatus(SUCCESS)); + .fee(1_000_000_000), + fileUpdate(EXCHANGE_RATES) + .contents(spec -> { + ByteString newRates = + spec.ratesProvider().rateSetWith(10, 254).toByteString(); + spec.registry().saveBytes("newRates", newRates); + return newRates; + }) + .payingWith(EXCHANGE_RATE_CONTROL) + .fee(1_000_000_000) + .hasKnownStatus(SUCCESS), + fileUpdate(EXCHANGE_RATES) + .contents(spec -> { + ByteString newRates = spec.ratesProvider() + .rateSetWith(1, 12, 1, 15) + .toByteString(); + spec.registry().saveBytes("newRates", newRates); + return newRates; + }) + .payingWith(SYSTEM_ADMIN) + .fee(1_000_000_000) + .hasKnownStatus(SUCCESS)); } @HapiTest final Stream anonCantUpdateRates() { - return defaultHapiSpec("AnonCantUpdateRates") - .given(resetRatesOp, cryptoCreate("randomAccount")) - .when() - .then(fileUpdate(EXCHANGE_RATES) + return hapiTest( + resetRatesOp, + cryptoCreate("randomAccount"), + fileUpdate(EXCHANGE_RATES) .contents("Should be impossible!") .payingWith("randomAccount") .hasPrecheck(AUTHORIZATION_FAILED)); @@ -134,13 +128,11 @@ final Stream anonCantUpdateRates() { @HapiTest final Stream acct57CantMakeLargeChanges() { - return defaultHapiSpec("Acct57CantMakeLargeChanges") - .given( - resetRatesOp, - cryptoTransfer(tinyBarsFromTo(GENESIS, EXCHANGE_RATE_CONTROL, ADEQUATE_FUNDS)) - .fee(ONE_HUNDRED_HBARS)) - .when() - .then(fileUpdate(EXCHANGE_RATES) + return hapiTest( + resetRatesOp, + cryptoTransfer(tinyBarsFromTo(GENESIS, EXCHANGE_RATE_CONTROL, ADEQUATE_FUNDS)) + .fee(ONE_HUNDRED_HBARS), + fileUpdate(EXCHANGE_RATES) .contents( spec -> spec.ratesProvider().rateSetWith(1, 25).toByteString()) .payingWith(EXCHANGE_RATE_CONTROL) diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/FileCreateSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/FileCreateSuite.java index dcef52325dd8..230767540a98 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/FileCreateSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/FileCreateSuite.java @@ -17,7 +17,7 @@ package com.hedera.services.bdd.suites.file; import static com.hedera.services.bdd.spec.HapiSpec.customHapiSpec; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.assertions.AccountInfoAsserts.changeFromSnapshot; import static com.hedera.services.bdd.spec.keys.ControlForKey.forKey; import static com.hedera.services.bdd.spec.keys.KeyShape.SIMPLE; @@ -81,46 +81,37 @@ public class FileCreateSuite { @HapiTest final Stream exchangeRateControlAccountIsntCharged() { - return defaultHapiSpec("ExchangeRateControlAccountIsntCharged") - .given( - cryptoTransfer(tinyBarsFromTo(GENESIS, EXCHANGE_RATE_CONTROL, 1_000_000_000_000L)), - balanceSnapshot("pre", EXCHANGE_RATE_CONTROL), - getFileContents(EXCHANGE_RATES).saveTo("exchangeRates.bin")) - .when(fileUpdate(EXCHANGE_RATES) + return hapiTest( + cryptoTransfer(tinyBarsFromTo(GENESIS, EXCHANGE_RATE_CONTROL, 1_000_000_000_000L)), + balanceSnapshot("pre", EXCHANGE_RATE_CONTROL), + getFileContents(EXCHANGE_RATES).saveTo("exchangeRates.bin"), + fileUpdate(EXCHANGE_RATES) 
.payingWith(EXCHANGE_RATE_CONTROL) - .path(Path.of("./", "exchangeRates.bin").toString())) - .then(getAccountBalance(EXCHANGE_RATE_CONTROL).hasTinyBars(changeFromSnapshot("pre", 0))); + .path(Path.of("./", "exchangeRates.bin").toString()), + getAccountBalance(EXCHANGE_RATE_CONTROL).hasTinyBars(changeFromSnapshot("pre", 0))); } @HapiTest final Stream createFailsWithExcessiveLifetime() { - return defaultHapiSpec("CreateFailsWithExcessiveLifetime") - .given() - .when() - .then(doWithStartupConfig("entities.maxLifetime", value -> fileCreate("test") - .lifetime(Long.parseLong(value) + 12_345L) - .hasPrecheck(AUTORENEW_DURATION_NOT_IN_RANGE))); + return hapiTest(doWithStartupConfig("entities.maxLifetime", value -> fileCreate("test") + .lifetime(Long.parseLong(value) + 12_345L) + .hasPrecheck(AUTORENEW_DURATION_NOT_IN_RANGE))); } @HapiTest final Stream idVariantsTreatedAsExpected() { - return defaultHapiSpec("idVariantsTreatedAsExpected") - .given() - .when() - .then(submitModified(withSuccessivelyVariedBodyIds(), () -> fileCreate("file") - .contents("ABC"))); + return hapiTest(submitModified( + withSuccessivelyVariedBodyIds(), () -> fileCreate("file").contents("ABC"))); } @HapiTest final Stream createWithMemoWorks() { String memo = "Really quite something!"; - return defaultHapiSpec("createWithMemoWorks") - .given( - fileCreate("ntb").entityMemo(ZERO_BYTE_MEMO).hasPrecheck(INVALID_ZERO_BYTE_IN_STRING), - fileCreate("memorable").entityMemo(memo)) - .when() - .then(withTargetLedgerId(ledgerId -> + return hapiTest( + fileCreate("ntb").entityMemo(ZERO_BYTE_MEMO).hasPrecheck(INVALID_ZERO_BYTE_IN_STRING), + fileCreate("memorable").entityMemo(memo), + withTargetLedgerId(ledgerId -> getFileInfo("memorable").hasEncodedLedgerId(ledgerId).hasMemo(memo))); } @@ -130,15 +121,12 @@ final Stream createFailsWithMissingSigs() { SigControl validSig = shape.signedWith(sigs(ON, sigs(ON, ON, OFF), sigs(OFF, OFF, ON))); SigControl invalidSig = shape.signedWith(sigs(OFF, sigs(ON, ON, OFF), sigs(OFF, OFF, ON))); - return defaultHapiSpec("CreateFailsWithMissingSigs") - .given() - .when() - .then( - fileCreate("test") - .waclShape(shape) - .sigControl(forKey("test", invalidSig)) - .hasKnownStatus(INVALID_SIGNATURE), - fileCreate("test").waclShape(shape).sigControl(forKey("test", validSig))); + return hapiTest( + fileCreate("test") + .waclShape(shape) + .sigControl(forKey("test", invalidSig)) + .hasKnownStatus(INVALID_SIGNATURE), + fileCreate("test").waclShape(shape).sigControl(forKey("test", validSig))); } private static Transaction replaceTxnNodeAccount(Transaction txn) { @@ -155,15 +143,12 @@ final Stream createFailsWithPayerAccountNotFound() { KeyShape shape = listOf(SIMPLE, threshOf(2, 3), threshOf(1, 3)); SigControl validSig = shape.signedWith(sigs(ON, sigs(ON, ON, OFF), sigs(OFF, OFF, ON))); - return defaultHapiSpec("CreateFailsWithPayerAccountNotFound") - .given() - .when() - .then(fileCreate("test") - .withProtoStructure(HapiSpecSetup.TxnProtoStructure.OLD) - .waclShape(shape) - .sigControl(forKey("test", validSig)) - .withTxnTransform(FileCreateSuite::replaceTxnNodeAccount) - .hasPrecheckFrom(INVALID_NODE_ACCOUNT)); + return hapiTest(fileCreate("test") + .withProtoStructure(HapiSpecSetup.TxnProtoStructure.OLD) + .waclShape(shape) + .sigControl(forKey("test", validSig)) + .withTxnTransform(FileCreateSuite::replaceTxnNodeAccount) + .hasPrecheckFrom(INVALID_NODE_ACCOUNT)); } @HapiTest @@ -171,12 +156,8 @@ final Stream precheckRejectsBadEffectiveAutoRenewPeriod() { var now = Instant.now(); 
System.out.println(now.getEpochSecond()); - return defaultHapiSpec("precheckRejectsBadEffectiveAutoRenewPeriod") - .given() - .when() - .then(fileCreate("notHere") - .lifetime(-60L) - .hasPrecheck(ResponseCodeEnum.AUTORENEW_DURATION_NOT_IN_RANGE)); + return hapiTest( + fileCreate("notHere").lifetime(-60L).hasPrecheck(ResponseCodeEnum.AUTORENEW_DURATION_NOT_IN_RANGE)); } @HapiTest @@ -187,41 +168,38 @@ final Stream targetsAppear() { var newWacl = listOf(SIMPLE, listOf(3), threshOf(1, 3)); var newWaclSigs = newWacl.signedWith(sigs(ON, sigs(ON, ON, ON), sigs(OFF, OFF, ON))); - return defaultHapiSpec("targetsAppear") - .given(UtilVerbs.newKeyNamed("newWacl").shape(newWacl)) - .when(fileCreate("file") + return hapiTest( + UtilVerbs.newKeyNamed("newWacl").shape(newWacl), + fileCreate("file") .via("createTxn") .contents(contents) .key("newWacl") .expiry(requestedExpiry) .signedBy(GENESIS, "newWacl") - .sigControl(ControlForKey.forKey("newWacl", newWaclSigs))) - .then( - QueryVerbs.getFileInfo("file") - .hasDeleted(false) - .hasWacl("newWacl") - .hasExpiryPassing(expiry -> expiry == requestedExpiry), - QueryVerbs.getFileContents("file") - .hasByteStringContents(ignore -> ByteString.copyFrom(contents))); + .sigControl(ControlForKey.forKey("newWacl", newWaclSigs)), + QueryVerbs.getFileInfo("file") + .hasDeleted(false) + .hasWacl("newWacl") + .hasExpiryPassing(expiry -> expiry == requestedExpiry), + QueryVerbs.getFileContents("file").hasByteStringContents(ignore -> ByteString.copyFrom(contents))); } @HapiTest final Stream getsExpectedRejections() { - return defaultHapiSpec("getsExpectedRejections") - .given(fileCreate("tbd"), fileDelete("tbd")) - .when() - .then( - getFileInfo("1.2.3").nodePayment(1_234L).hasAnswerOnlyPrecheck(INVALID_FILE_ID), - getFileContents("1.2.3").nodePayment(1_234L).hasAnswerOnlyPrecheck(INVALID_FILE_ID), - getFileContents("tbd") - .nodePayment(1_234L) - .hasAnswerOnlyPrecheck(OK) - .logged(), - getFileInfo("tbd") - .nodePayment(1_234L) - .hasAnswerOnlyPrecheck(OK) - .hasDeleted(true) - .logged()); + return hapiTest( + fileCreate("tbd"), + fileDelete("tbd"), + getFileInfo("1.2.3").nodePayment(1_234L).hasAnswerOnlyPrecheck(INVALID_FILE_ID), + getFileContents("1.2.3").nodePayment(1_234L).hasAnswerOnlyPrecheck(INVALID_FILE_ID), + getFileContents("tbd") + .nodePayment(1_234L) + .hasAnswerOnlyPrecheck(OK) + .logged(), + getFileInfo("tbd") + .nodePayment(1_234L) + .hasAnswerOnlyPrecheck(OK) + .hasDeleted(true) + .logged()); } /** diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/FileDeleteSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/FileDeleteSuite.java index e528793fbcaf..7ac2d9469c49 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/FileDeleteSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/FileDeleteSuite.java @@ -16,7 +16,7 @@ package com.hedera.services.bdd.suites.file; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.keys.ControlForKey.forKey; import static com.hedera.services.bdd.spec.keys.KeyShape.SIMPLE; import static com.hedera.services.bdd.spec.keys.KeyShape.listOf; @@ -42,10 +42,9 @@ public class FileDeleteSuite { @HapiTest final Stream idVariantsTreatedAsExpected() { - return defaultHapiSpec("idVariantsTreatedAsExpected") - .given(fileCreate("file").contents("ABC")) - .when() - 
.then(submitModified(withSuccessivelyVariedBodyIds(), () -> fileDelete("file"))); + return hapiTest( + fileCreate("file").contents("ABC"), + submitModified(withSuccessivelyVariedBodyIds(), () -> fileDelete("file"))); } @HapiTest @@ -53,33 +52,25 @@ final Stream<DynamicTest> canDeleteWithAnyOneOfTopLevelKeyList() { KeyShape shape = listOf(SIMPLE, threshOf(1, 2), listOf(2)); SigControl deleteSigs = shape.signedWith(sigs(ON, sigs(OFF, OFF), sigs(ON, OFF))); - return defaultHapiSpec("CanDeleteWithAnyOneOfTopLevelKeyList") - .given(fileCreate("test").waclShape(shape)) - .when() - .then(fileDelete("test").sigControl(forKey("test", deleteSigs))); + return hapiTest(fileCreate("test").waclShape(shape), fileDelete("test").sigControl(forKey("test", deleteSigs))); } @HapiTest final Stream<DynamicTest> getDeletedFileInfo() { - return defaultHapiSpec("getDeletedFileInfo") - .given(fileCreate("deletedFile").logged()) - .when(fileDelete("deletedFile").logged()) - .then(getFileInfo("deletedFile").hasAnswerOnlyPrecheck(OK).hasDeleted(true)); + return hapiTest( + fileCreate("deletedFile").logged(), + fileDelete("deletedFile").logged(), + getFileInfo("deletedFile").hasAnswerOnlyPrecheck(OK).hasDeleted(true)); } @HapiTest final Stream<DynamicTest> handleRejectsMissingFile() { - return defaultHapiSpec("handleRejectsMissingFile") - .given() - .when() - .then(fileDelete("1.2.3").signedBy(GENESIS).hasKnownStatus(ResponseCodeEnum.INVALID_FILE_ID)); + return hapiTest(fileDelete("1.2.3").signedBy(GENESIS).hasKnownStatus(ResponseCodeEnum.INVALID_FILE_ID)); } @HapiTest final Stream<DynamicTest> handleRejectsDeletedFile() { - return defaultHapiSpec("handleRejectsDeletedFile") - .given(fileCreate("tbd")) - .when(fileDelete("tbd")) - .then(fileDelete("tbd").hasKnownStatus(ResponseCodeEnum.FILE_DELETED)); + return hapiTest( + fileCreate("tbd"), fileDelete("tbd"), fileDelete("tbd").hasKnownStatus(ResponseCodeEnum.FILE_DELETED)); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/FileUpdateSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/FileUpdateSuite.java index d3b8ec6af9be..607eb3c068f3 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/FileUpdateSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/FileUpdateSuite.java @@ -19,7 +19,6 @@ import static com.hedera.services.bdd.junit.ContextRequirement.PERMISSION_OVERRIDES; import static com.hedera.services.bdd.junit.ContextRequirement.UPGRADE_FILE_CONTENT; import static com.hedera.services.bdd.junit.TestTags.ADHOC; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.assertions.ContractFnResultAsserts.resultWith; import static com.hedera.services.bdd.spec.assertions.ContractInfoAsserts.contractWith; @@ -135,31 +134,28 @@ public class FileUpdateSuite { @HapiTest final Stream<DynamicTest> idVariantsTreatedAsExpected() { - return defaultHapiSpec("idVariantsTreatedAsExpected") - .given(fileCreate("file").contents("ABC")) - .when() - .then(submitModified(withSuccessivelyVariedBodyIds(), () -> fileUpdate("file") + return hapiTest( + fileCreate("file").contents("ABC"), + submitModified(withSuccessivelyVariedBodyIds(), () -> fileUpdate("file") .contents("DEF"))); } @HapiTest final Stream<DynamicTest> associateHasExpectedSemantics() { - return defaultHapiSpec("AssociateHasExpectedSemantics") - .given(flattened((Object[]) TokenAssociationSpecs.basicKeysAndTokens())) - .when( - 
cryptoCreate("misc").balance(0L), - tokenAssociate("misc", TokenAssociationSpecs.FREEZABLE_TOKEN_ON_BY_DEFAULT), - tokenAssociate("misc", TokenAssociationSpecs.FREEZABLE_TOKEN_ON_BY_DEFAULT) - .hasKnownStatus(TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT), - tokenAssociate("misc", INVALID_ENTITY_ID).hasKnownStatus(INVALID_TOKEN_ID), - tokenAssociate("misc", INVALID_ENTITY_ID, INVALID_ENTITY_ID) - .hasPrecheck(TOKEN_ID_REPEATED_IN_TOKEN_LIST), - tokenDissociate("misc", INVALID_ENTITY_ID, INVALID_ENTITY_ID) - .hasPrecheck(TOKEN_ID_REPEATED_IN_TOKEN_LIST), - tokenAssociate("misc", TokenAssociationSpecs.FREEZABLE_TOKEN_OFF_BY_DEFAULT), - tokenAssociate( - "misc", TokenAssociationSpecs.KNOWABLE_TOKEN, TokenAssociationSpecs.VANILLA_TOKEN)) - .then(getAccountInfo("misc") + return hapiTest(flattened( + TokenAssociationSpecs.basicKeysAndTokens(), + cryptoCreate("misc").balance(0L), + tokenAssociate("misc", TokenAssociationSpecs.FREEZABLE_TOKEN_ON_BY_DEFAULT), + tokenAssociate("misc", TokenAssociationSpecs.FREEZABLE_TOKEN_ON_BY_DEFAULT) + .hasKnownStatus(TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT), + tokenAssociate("misc", INVALID_ENTITY_ID).hasKnownStatus(INVALID_TOKEN_ID), + tokenAssociate("misc", INVALID_ENTITY_ID, INVALID_ENTITY_ID) + .hasPrecheck(TOKEN_ID_REPEATED_IN_TOKEN_LIST), + tokenDissociate("misc", INVALID_ENTITY_ID, INVALID_ENTITY_ID) + .hasPrecheck(TOKEN_ID_REPEATED_IN_TOKEN_LIST), + tokenAssociate("misc", TokenAssociationSpecs.FREEZABLE_TOKEN_OFF_BY_DEFAULT), + tokenAssociate("misc", TokenAssociationSpecs.KNOWABLE_TOKEN, TokenAssociationSpecs.VANILLA_TOKEN), + getAccountInfo("misc") .hasToken(relationshipWith(TokenAssociationSpecs.FREEZABLE_TOKEN_ON_BY_DEFAULT) .kyc(KycNotApplicable) .freeze(Frozen)) @@ -172,7 +168,7 @@ final Stream associateHasExpectedSemantics() { .hasToken(relationshipWith(TokenAssociationSpecs.VANILLA_TOKEN) .kyc(KycNotApplicable) .freeze(FreezeNotApplicable)) - .logged()); + .logged())); } @LeakyHapiTest(overrides = {"tokens.maxCustomFeesAllowed"}) @@ -201,35 +197,31 @@ final Stream optimisticSpecialFileUpdate() { } catch (NoSuchAlgorithmException e) { throw new IllegalStateException(e); } - return defaultHapiSpec("OptimisticSpecialFileUpdate") - .given() - .when(updateSpecialFile(GENESIS, specialFile, specialFileContents, BYTES_4K, appendsPerBurst)) - .then(getFileInfo(specialFile).hasMemo(CommonUtils.hex(expectedHash))); + return hapiTest( + updateSpecialFile(GENESIS, specialFile, specialFileContents, BYTES_4K, appendsPerBurst), + getFileInfo(specialFile).hasMemo(CommonUtils.hex(expectedHash))); } @LeakyHapiTest(requirement = PERMISSION_OVERRIDES) final Stream apiPermissionsChangeDynamically() { final var civilian = CIVILIAN; - return defaultHapiSpec("ApiPermissionsChangeDynamically") - .given( - cryptoTransfer(tinyBarsFromTo(GENESIS, ADDRESS_BOOK_CONTROL, 1_000_000_000L)), - cryptoCreate(civilian).balance(ONE_HUNDRED_HBARS), - getFileContents(API_PERMISSIONS).logged(), - tokenCreate("poc").payingWith(civilian)) - .when( - fileUpdate(API_PERMISSIONS) - .payingWith(ADDRESS_BOOK_CONTROL) - .overridingProps(Map.of("tokenCreate", "0-1")), - getFileContents(API_PERMISSIONS).logged()) - .then( - tokenCreate("poc") - .payingWith(civilian) - .hasPrecheckFrom(NOT_SUPPORTED, OK) - .hasKnownStatus(UNAUTHORIZED), - fileUpdate(API_PERMISSIONS) - .payingWith(ADDRESS_BOOK_CONTROL) - .overridingProps(Map.of("tokenCreate", "0-*")), - tokenCreate("secondPoc").payingWith(civilian)); + return hapiTest( + cryptoTransfer(tinyBarsFromTo(GENESIS, ADDRESS_BOOK_CONTROL, 1_000_000_000L)), + 
cryptoCreate(civilian).balance(ONE_HUNDRED_HBARS), + getFileContents(API_PERMISSIONS).logged(), + tokenCreate("poc").payingWith(civilian), + fileUpdate(API_PERMISSIONS) + .payingWith(ADDRESS_BOOK_CONTROL) + .overridingProps(Map.of("tokenCreate", "0-1")), + getFileContents(API_PERMISSIONS).logged(), + tokenCreate("poc") + .payingWith(civilian) + .hasPrecheckFrom(NOT_SUPPORTED, OK) + .hasKnownStatus(UNAUTHORIZED), + fileUpdate(API_PERMISSIONS) + .payingWith(ADDRESS_BOOK_CONTROL) + .overridingProps(Map.of("tokenCreate", "0-*")), + tokenCreate("secondPoc").payingWith(civilian)); } @HapiTest @@ -275,45 +267,35 @@ final Stream vanillaUpdateSucceeds() { final String firstMemo = "Originally"; final String secondMemo = "Subsequently"; - return defaultHapiSpec("VanillaUpdateSucceeds") - .given(fileCreate("test").entityMemo(firstMemo).contents(old4K)) - .when( - fileUpdate("test") - .entityMemo(ZERO_BYTE_MEMO) - .contents(new4k) - .hasPrecheck(INVALID_ZERO_BYTE_IN_STRING), - fileUpdate("test").entityMemo(secondMemo).contents(new4k)) - .then( - getFileContents("test").hasContents(ignore -> new4k), - getFileInfo("test").hasMemo(secondMemo)); + return hapiTest( + fileCreate("test").entityMemo(firstMemo).contents(old4K), + fileUpdate("test").entityMemo(ZERO_BYTE_MEMO).contents(new4k).hasPrecheck(INVALID_ZERO_BYTE_IN_STRING), + fileUpdate("test").entityMemo(secondMemo).contents(new4k), + getFileContents("test").hasContents(ignore -> new4k), + getFileInfo("test").hasMemo(secondMemo)); } @HapiTest final Stream cannotUpdateImmutableFile() { final String file1 = "FILE_1"; final String file2 = "FILE_2"; - return defaultHapiSpec("CannotUpdateImmutableFile") - .given( - fileCreate(file1).contents("Hello World").unmodifiable(), - fileCreate(file2).contents("Hello World").waclShape(SigControl.emptyList())) - .when() - .then( - fileUpdate(file1) - .contents("Goodbye World") - .signedBy(DEFAULT_PAYER) - .hasKnownStatus(UNAUTHORIZED), - fileUpdate(file2) - .contents("Goodbye World") - .signedBy(DEFAULT_PAYER) - .hasKnownStatus(UNAUTHORIZED)); + return hapiTest( + fileCreate(file1).contents("Hello World").unmodifiable(), + fileCreate(file2).contents("Hello World").waclShape(SigControl.emptyList()), + fileUpdate(file1) + .contents("Goodbye World") + .signedBy(DEFAULT_PAYER) + .hasKnownStatus(UNAUTHORIZED), + fileUpdate(file2) + .contents("Goodbye World") + .signedBy(DEFAULT_PAYER) + .hasKnownStatus(UNAUTHORIZED)); } @HapiTest final Stream cannotUpdateExpirationPastMaxLifetime() { - return defaultHapiSpec("CannotUpdateExpirationPastMaxLifetime") - .given(fileCreate("test")) - .when() - .then(doWithStartupConfig("entities.maxLifetime", maxLifetime -> fileUpdate("test") + return hapiTest( + fileCreate("test"), doWithStartupConfig("entities.maxLifetime", maxLifetime -> fileUpdate("test") .lifetime(parseLong(maxLifetime) + 12_345L) .hasPrecheck(AUTORENEW_DURATION_NOT_IN_RANGE))); } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/MidnightUpdateRateSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/MidnightUpdateRateSuite.java index a1b6d9083791..067ad19097da 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/MidnightUpdateRateSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/MidnightUpdateRateSuite.java @@ -16,7 +16,7 @@ package com.hedera.services.bdd.suites.file; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static 
com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getFileContents; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.fileUpdate; @@ -45,34 +45,31 @@ public class MidnightUpdateRateSuite { .contents(spec -> spec.ratesProvider().rateSetWith(1, 12).toByteString()); final Stream acct57UpdatesMidnightRateAtMidNight() throws ParseException { - return defaultHapiSpec("Acct57UpdatesMidnightRateAtMidNight") - .given(resetRatesOp, cryptoTransfer(tinyBarsFromTo(GENESIS, EXCHANGE_RATE_CONTROL, ADEQUATE_FUNDS))) - .when( - // should be done just before midnight - UtilVerbs.waitUntil("23:58"), - fileUpdate(EXCHANGE_RATES) - .contents(spec -> { - ByteString newRates = spec.ratesProvider() - .rateSetWith(10, 147) - .toByteString(); - spec.registry().saveBytes("midnightRate", newRates); - return newRates; - }) - .payingWith(EXCHANGE_RATE_CONTROL)) - .then( - // should be the first transaction after midnight - UtilVerbs.sleepFor(300_000), - fileUpdate(EXCHANGE_RATES) - .contents(spec -> { - ByteString newRates = spec.ratesProvider() - .rateSetWith(10, 183) - .toByteString(); - spec.registry().saveBytes("newRates", newRates); - return newRates; - }) - .payingWith(EXCHANGE_RATE_CONTROL), - getFileContents(EXCHANGE_RATES) - .hasContents(spec -> spec.registry().getBytes("newRates")), - resetRatesOp); + return hapiTest( + resetRatesOp, + cryptoTransfer(tinyBarsFromTo(GENESIS, EXCHANGE_RATE_CONTROL, ADEQUATE_FUNDS)), + // should be done just before midnight + UtilVerbs.waitUntil("23:58"), + fileUpdate(EXCHANGE_RATES) + .contents(spec -> { + ByteString newRates = + spec.ratesProvider().rateSetWith(10, 147).toByteString(); + spec.registry().saveBytes("midnightRate", newRates); + return newRates; + }) + .payingWith(EXCHANGE_RATE_CONTROL), + // should be the first transaction after midnight + UtilVerbs.sleepFor(300_000), + fileUpdate(EXCHANGE_RATES) + .contents(spec -> { + ByteString newRates = + spec.ratesProvider().rateSetWith(10, 183).toByteString(); + spec.registry().saveBytes("newRates", newRates); + return newRates; + }) + .payingWith(EXCHANGE_RATE_CONTROL), + getFileContents(EXCHANGE_RATES) + .hasContents(spec -> spec.registry().getBytes("newRates")), + resetRatesOp); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/PermissionSemanticsSpec.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/PermissionSemanticsSpec.java index b7520c7969ea..cc9327d3028b 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/PermissionSemanticsSpec.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/PermissionSemanticsSpec.java @@ -16,7 +16,7 @@ package com.hedera.services.bdd.suites.file; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; import static com.hedera.services.bdd.spec.keys.KeyShape.sigs; import static com.hedera.services.bdd.spec.keys.SigControl.OFF; @@ -60,24 +60,21 @@ public class PermissionSemanticsSpec { final Stream addressBookAdminExemptFromFeesGivenAuthorizedOps() { long amount = 100 * 100_000_000L; AtomicReference origContents = new AtomicReference<>(); - return defaultHapiSpec("AddressBookAdminExemptFromFeesGivenAuthorizedOps") - .given( - 
cryptoTransfer(tinyBarsFromTo(GENESIS, ADDRESS_BOOK_CONTROL, amount)) - .fee(ONE_HUNDRED_HBARS), - fileCreate("tbu"), - getFileContents(NODE_DETAILS).consumedBy(origContents::set)) - .when( - fileUpdate(NODE_DETAILS) - .payingWith(ADDRESS_BOOK_CONTROL) - .contents(ignore -> ByteString.copyFrom(origContents.get())) - .via("authorizedTxn"), - fileUpdate("tbu") - .payingWith(ADDRESS_BOOK_CONTROL) - .contents("This is something new.") - .via("unauthorizedTxn")) - .then( - getTxnRecord("unauthorizedTxn").hasPriority(recordWith().feeDifferentThan(0L)), - getTxnRecord("authorizedTxn").hasPriority(recordWith().fee(0L))); + return hapiTest( + cryptoTransfer(tinyBarsFromTo(GENESIS, ADDRESS_BOOK_CONTROL, amount)) + .fee(ONE_HUNDRED_HBARS), + fileCreate("tbu"), + getFileContents(NODE_DETAILS).consumedBy(origContents::set), + fileUpdate(NODE_DETAILS) + .payingWith(ADDRESS_BOOK_CONTROL) + .contents(ignore -> ByteString.copyFrom(origContents.get())) + .via("authorizedTxn"), + fileUpdate("tbu") + .payingWith(ADDRESS_BOOK_CONTROL) + .contents("This is something new.") + .via("unauthorizedTxn"), + getTxnRecord("unauthorizedTxn").hasPriority(recordWith().feeDifferentThan(0L)), + getTxnRecord("authorizedTxn").hasPriority(recordWith().fee(0L))); } @HapiTest @@ -85,41 +82,32 @@ final Stream supportsImmutableFiles() { long extensionSecs = 666L; AtomicLong approxExpiry = new AtomicLong(); - return defaultHapiSpec("SupportsImmutableFiles") - .given( - newKeyNamed(NEVER_TO_BE_USED).type(KeyFactory.KeyType.LIST), - cryptoCreate(CIVILIAN), - fileCreate(ETERNAL).payingWith(CIVILIAN).unmodifiable()) - .when( - fileDelete(ETERNAL) - .payingWith(CIVILIAN) - .signedBy(CIVILIAN) - .hasKnownStatus(UNAUTHORIZED), - fileAppend(ETERNAL) - .payingWith(CIVILIAN) - .signedBy(CIVILIAN) - .content("Ignored.") - .hasKnownStatus(UNAUTHORIZED), - fileUpdate(ETERNAL) - .payingWith(CIVILIAN) - .signedBy(CIVILIAN) - .contents("Ignored.") - .hasKnownStatus(UNAUTHORIZED), - fileUpdate(ETERNAL) - .payingWith(CIVILIAN) - .signedBy(CIVILIAN, NEVER_TO_BE_USED) - .wacl(NEVER_TO_BE_USED) - .hasKnownStatus(UNAUTHORIZED)) - .then( - withOpContext((spec, opLog) -> approxExpiry.set( - spec.registry().getTimestamp(ETERNAL).getSeconds())), - fileUpdate(ETERNAL) - .payingWith(CIVILIAN) - .signedBy(CIVILIAN) - .extendingExpiryBy(extensionSecs), - getFileInfo(ETERNAL) - .isUnmodifiable() - .hasExpiryPassing(l -> Math.abs(l - approxExpiry.get() - extensionSecs) < 5)); + return hapiTest( + newKeyNamed(NEVER_TO_BE_USED).type(KeyFactory.KeyType.LIST), + cryptoCreate(CIVILIAN), + fileCreate(ETERNAL).payingWith(CIVILIAN).unmodifiable(), + fileDelete(ETERNAL).payingWith(CIVILIAN).signedBy(CIVILIAN).hasKnownStatus(UNAUTHORIZED), + fileAppend(ETERNAL) + .payingWith(CIVILIAN) + .signedBy(CIVILIAN) + .content("Ignored.") + .hasKnownStatus(UNAUTHORIZED), + fileUpdate(ETERNAL) + .payingWith(CIVILIAN) + .signedBy(CIVILIAN) + .contents("Ignored.") + .hasKnownStatus(UNAUTHORIZED), + fileUpdate(ETERNAL) + .payingWith(CIVILIAN) + .signedBy(CIVILIAN, NEVER_TO_BE_USED) + .wacl(NEVER_TO_BE_USED) + .hasKnownStatus(UNAUTHORIZED), + withOpContext((spec, opLog) -> + approxExpiry.set(spec.registry().getTimestamp(ETERNAL).getSeconds())), + fileUpdate(ETERNAL).payingWith(CIVILIAN).signedBy(CIVILIAN).extendingExpiryBy(extensionSecs), + getFileInfo(ETERNAL) + .isUnmodifiable() + .hasExpiryPassing(l -> Math.abs(l - approxExpiry.get() - extensionSecs) < 5)); } @HapiTest @@ -132,23 +120,22 @@ final Stream allowsDeleteWithOneTopLevelSig() { var updateSig = wacl.signedWith(sigs(ON, 
sigs(ON, ON))); var failedUpdateSig = wacl.signedWith(sigs(ON, sigs(OFF, ON))); - return defaultHapiSpec("AllowsDeleteWithOneTopLevelSig") - .given(newKeyNamed(WACL).shape(wacl)) - .when(fileCreate("tbd").key(WACL)) - .then( - fileUpdate("tbd") - .contents("Some more contents!") - .signedBy(GENESIS, WACL) - .sigControl(ControlForKey.forKey(WACL, failedUpdateSig)) - .hasKnownStatus(INVALID_SIGNATURE), - fileUpdate("tbd") - .contents("Some new contents!") - .signedBy(GENESIS, WACL) - .sigControl(ControlForKey.forKey(WACL, updateSig)), - fileDelete("tbd") - .signedBy(GENESIS, WACL) - .sigControl(ControlForKey.forKey(WACL, failedDeleteSig)) - .hasKnownStatus(INVALID_SIGNATURE), - fileDelete("tbd").signedBy(GENESIS, WACL).sigControl(ControlForKey.forKey(WACL, deleteSig))); + return hapiTest( + newKeyNamed(WACL).shape(wacl), + fileCreate("tbd").key(WACL), + fileUpdate("tbd") + .contents("Some more contents!") + .signedBy(GENESIS, WACL) + .sigControl(ControlForKey.forKey(WACL, failedUpdateSig)) + .hasKnownStatus(INVALID_SIGNATURE), + fileUpdate("tbd") + .contents("Some new contents!") + .signedBy(GENESIS, WACL) + .sigControl(ControlForKey.forKey(WACL, updateSig)), + fileDelete("tbd") + .signedBy(GENESIS, WACL) + .sigControl(ControlForKey.forKey(WACL, failedDeleteSig)) + .hasKnownStatus(INVALID_SIGNATURE), + fileDelete("tbd").signedBy(GENESIS, WACL).sigControl(ControlForKey.forKey(WACL, deleteSig))); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/negative/UpdateFailuresSpec.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/negative/UpdateFailuresSpec.java index 25f3e97297c9..ab70d4e2bb8a 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/negative/UpdateFailuresSpec.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/negative/UpdateFailuresSpec.java @@ -16,7 +16,7 @@ package com.hedera.services.bdd.suites.file.negative; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.fileCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.fileDelete; @@ -58,75 +58,65 @@ final Stream confusedUpdateCantExtendExpiry() { // this test verify that the exchange rate file parsed correctly on update, it doesn't check expiry var initialExpiry = new AtomicLong(); var extension = 1_000L; - return defaultHapiSpec("ConfusedUpdateCantExtendExpiry") - .given(withOpContext((spec, opLog) -> { + return hapiTest( + withOpContext((spec, opLog) -> { var infoOp = QueryVerbs.getFileInfo(EXCHANGE_RATES); CustomSpecAssert.allRunFor(spec, infoOp); var info = infoOp.getResponse().getFileGetInfo().getFileInfo(); initialExpiry.set(info.getExpirationTime().getSeconds()); - })) - .when(fileUpdate(EXCHANGE_RATES) + }), + fileUpdate(EXCHANGE_RATES) .payingWith(EXCHANGE_RATE_CONTROL) .contents("NONSENSE".getBytes()) .extendingExpiryBy(extension) - .hasKnownStatus(ResponseCodeEnum.INVALID_EXCHANGE_RATE_FILE)) - .then(QueryVerbs.getFileInfo(EXCHANGE_RATES).hasExpiry(initialExpiry::get)); + .hasKnownStatus(ResponseCodeEnum.INVALID_EXCHANGE_RATE_FILE), + QueryVerbs.getFileInfo(EXCHANGE_RATES).hasExpiry(initialExpiry::get)); } @HapiTest final Stream precheckRejectsUnauthorized() { // this test is to verify that the system files cannot be updated without privileged 
account - return defaultHapiSpec("precheckRejectsUnauthorized") - .given(cryptoCreate(CIVILIAN)) - .when() - .then( - fileUpdate(ADDRESS_BOOK).payingWith(CIVILIAN).hasPrecheck(AUTHORIZATION_FAILED), - fileUpdate(NODE_DETAILS).payingWith(CIVILIAN).hasPrecheck(AUTHORIZATION_FAILED), - fileUpdate(API_PERMISSIONS).payingWith(CIVILIAN).hasPrecheck(AUTHORIZATION_FAILED), - fileUpdate(APP_PROPERTIES).payingWith(CIVILIAN).hasPrecheck(AUTHORIZATION_FAILED), - fileUpdate(FEE_SCHEDULE).payingWith(CIVILIAN).hasPrecheck(AUTHORIZATION_FAILED), - fileUpdate(EXCHANGE_RATES).payingWith(CIVILIAN).hasPrecheck(AUTHORIZATION_FAILED)); + return hapiTest( + cryptoCreate(CIVILIAN), + fileUpdate(ADDRESS_BOOK).payingWith(CIVILIAN).hasPrecheck(AUTHORIZATION_FAILED), + fileUpdate(NODE_DETAILS).payingWith(CIVILIAN).hasPrecheck(AUTHORIZATION_FAILED), + fileUpdate(API_PERMISSIONS).payingWith(CIVILIAN).hasPrecheck(AUTHORIZATION_FAILED), + fileUpdate(APP_PROPERTIES).payingWith(CIVILIAN).hasPrecheck(AUTHORIZATION_FAILED), + fileUpdate(FEE_SCHEDULE).payingWith(CIVILIAN).hasPrecheck(AUTHORIZATION_FAILED), + fileUpdate(EXCHANGE_RATES).payingWith(CIVILIAN).hasPrecheck(AUTHORIZATION_FAILED)); } @HapiTest final Stream precheckAllowsMissing() { - return defaultHapiSpec("PrecheckAllowsMissing") - .given() - .when() - .then(fileUpdate("1.2.3") - .payingWith(GENESIS) - .signedBy(GENESIS) - .fee(1_234_567L) - .hasPrecheck(OK) - .hasKnownStatus(INVALID_FILE_ID)); + return hapiTest(fileUpdate("1.2.3") + .payingWith(GENESIS) + .signedBy(GENESIS) + .fee(1_234_567L) + .hasPrecheck(OK) + .hasKnownStatus(INVALID_FILE_ID)); } @HapiTest final Stream precheckAllowsDeleted() { - return defaultHapiSpec("PrecheckAllowsDeleted") - .given(fileCreate("tbd")) - .when(fileDelete("tbd")) - .then(fileUpdate("tbd").hasPrecheck(OK).hasKnownStatus(FILE_DELETED)); + return hapiTest( + fileCreate("tbd"), + fileDelete("tbd"), + fileUpdate("tbd").hasPrecheck(OK).hasKnownStatus(FILE_DELETED)); } @HapiTest final Stream precheckRejectsPrematureExpiry() { long now = Instant.now().getEpochSecond(); - return defaultHapiSpec("PrecheckRejectsPrematureExpiry") - .given(fileCreate("file")) - .when() - .then(fileUpdate("file") - .fee(A_LOT) - .extendingExpiryBy(-now) - .hasPrecheck(AUTORENEW_DURATION_NOT_IN_RANGE)); + return hapiTest( + fileCreate("file"), + fileUpdate("file").fee(A_LOT).extendingExpiryBy(-now).hasPrecheck(AUTORENEW_DURATION_NOT_IN_RANGE)); } @HapiTest final Stream precheckAllowsBadEncoding() { - return defaultHapiSpec("PrecheckAllowsBadEncoding") - .given(fileCreate("file")) - .when() - .then(fileUpdate("file") + return hapiTest( + fileCreate("file"), + fileUpdate("file") .fee(A_LOT) .signedBy(GENESIS) .useBadWacl() @@ -139,13 +129,12 @@ final Stream precheckAllowsBadEncoding() { final Stream handleIgnoresEarlierExpiry() { var initialExpiry = new AtomicLong(); - return defaultHapiSpec("HandleIgnoresEarlierExpiry") - .given( - fileCreate("file"), - withOpContext((spec, opLog) -> initialExpiry.set( - spec.registry().getTimestamp("file").getSeconds()))) - .when(fileUpdate("file").extendingExpiryBy(-1_000)) - .then(UtilVerbs.assertionsHold((spec, opLog) -> { + return hapiTest( + fileCreate("file"), + withOpContext((spec, opLog) -> + initialExpiry.set(spec.registry().getTimestamp("file").getSeconds())), + fileUpdate("file").extendingExpiryBy(-1_000), + UtilVerbs.assertionsHold((spec, opLog) -> { var infoOp = QueryVerbs.getFileInfo("file"); CustomSpecAssert.allRunFor(spec, infoOp); var currExpiry = infoOp.getResponse() diff --git 
a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/positive/SysDelSysUndelSpec.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/positive/SysDelSysUndelSpec.java index 5011a6315239..44c5c3a1e876 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/positive/SysDelSysUndelSpec.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/file/positive/SysDelSysUndelSpec.java @@ -16,7 +16,7 @@ package com.hedera.services.bdd.suites.file.positive; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getFileContents; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getFileInfo; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.fileCreate; @@ -47,19 +47,18 @@ public class SysDelSysUndelSpec { @HapiTest final Stream sysDelIdVariantsTreatedAsExpected() { - return defaultHapiSpec("sysDelIdVariantsTreatedAsExpected") - .given(fileCreate("misc").contents(ORIG_FILE)) - .when() - .then(submitModifiedWithFixedPayer(withSuccessivelyVariedBodyIds(), () -> systemFileDelete("misc") + return hapiTest( + fileCreate("misc").contents(ORIG_FILE), + submitModifiedWithFixedPayer(withSuccessivelyVariedBodyIds(), () -> systemFileDelete("misc") .payingWith(SYSTEM_DELETE_ADMIN))); } @HapiTest final Stream sysUndelIdVariantsTreatedAsExpected() { - return defaultHapiSpec("sysUndelIdVariantsTreatedAsExpected") - .given(fileCreate("misc").contents(ORIG_FILE)) - .when(systemFileDelete("misc").payingWith(SYSTEM_DELETE_ADMIN)) - .then(submitModifiedWithFixedPayer(withSuccessivelyVariedBodyIds(), () -> systemFileUndelete("misc") + return hapiTest( + fileCreate("misc").contents(ORIG_FILE), + systemFileDelete("misc").payingWith(SYSTEM_DELETE_ADMIN), + submitModifiedWithFixedPayer(withSuccessivelyVariedBodyIds(), () -> systemFileUndelete("misc") .payingWith(SYSTEM_UNDELETE_ADMIN))); } @@ -67,31 +66,22 @@ final Stream sysUndelIdVariantsTreatedAsExpected() { final Stream distinguishesAdminPrivileges() { final var lifetime = THREE_MONTHS_IN_SECONDS; - return defaultHapiSpec("DistinguishesAdminPrivileges") - .given(fileCreate("misc").lifetime(lifetime).contents(ORIG_FILE)) - .when() - .then( - systemFileDelete("misc") - .payingWith(SYSTEM_UNDELETE_ADMIN) - .hasPrecheck(NOT_SUPPORTED), - systemFileUndelete("misc") - .payingWith(SYSTEM_DELETE_ADMIN) - .hasPrecheck(AUTHORIZATION_FAILED), - systemFileDelete(ADDRESS_BOOK).payingWith(GENESIS).hasPrecheck(ENTITY_NOT_ALLOWED_TO_DELETE)); + return hapiTest( + fileCreate("misc").lifetime(lifetime).contents(ORIG_FILE), + systemFileDelete("misc").payingWith(SYSTEM_UNDELETE_ADMIN).hasPrecheck(NOT_SUPPORTED), + systemFileUndelete("misc").payingWith(SYSTEM_DELETE_ADMIN).hasPrecheck(AUTHORIZATION_FAILED), + systemFileDelete(ADDRESS_BOOK).payingWith(GENESIS).hasPrecheck(ENTITY_NOT_ALLOWED_TO_DELETE)); } @HapiTest final Stream systemDeleteWithPastExpiryDestroysFile() { final var lifetime = THREE_MONTHS_IN_SECONDS; - return defaultHapiSpec("systemDeleteWithPastExpiryDestroysFile") - .given(fileCreate("misc").lifetime(lifetime).contents(ORIG_FILE)) - .when( - systemFileDelete("misc").payingWith(SYSTEM_DELETE_ADMIN).updatingExpiry(1L), - getFileInfo("misc").nodePayment(1_234L).hasAnswerOnlyPrecheck(INVALID_FILE_ID)) - .then(systemFileUndelete("misc") - .payingWith(SYSTEM_UNDELETE_ADMIN) - .hasKnownStatus(INVALID_FILE_ID)); + 
return hapiTest( + fileCreate("misc").lifetime(lifetime).contents(ORIG_FILE), + systemFileDelete("misc").payingWith(SYSTEM_DELETE_ADMIN).updatingExpiry(1L), + getFileInfo("misc").nodePayment(1_234L).hasAnswerOnlyPrecheck(INVALID_FILE_ID), + systemFileUndelete("misc").payingWith(SYSTEM_UNDELETE_ADMIN).hasKnownStatus(INVALID_FILE_ID)); } @HapiTest @@ -100,25 +90,17 @@ final Stream systemDeleteThenUndeleteRestoresContentsAndExpiry() { var lifetime = THREE_MONTHS_IN_SECONDS; AtomicLong initExpiry = new AtomicLong(); - return defaultHapiSpec("systemDeleteThenUndeleteRestoresContentsAndExpiry") - .given( - fileCreate("misc").lifetime(lifetime).contents(ORIG_FILE), - UtilVerbs.withOpContext((spec, opLog) -> initExpiry.set( - spec.registry().getTimestamp("misc").getSeconds()))) - .when( - systemFileDelete("misc") - .payingWith(SYSTEM_DELETE_ADMIN) - .fee(0L) - .updatingExpiry(now + lifetime - 1_000), - getFileInfo("misc") - .nodePayment(1_234L) - .hasAnswerOnlyPrecheck(OK) - .hasDeleted(true), - systemFileUndelete("misc") - .payingWith(SYSTEM_UNDELETE_ADMIN) - .fee(0L)) - .then( - getFileContents("misc").hasContents(ignore -> ORIG_FILE), - getFileInfo("misc").hasExpiry(initExpiry::get)); + return hapiTest( + fileCreate("misc").lifetime(lifetime).contents(ORIG_FILE), + UtilVerbs.withOpContext((spec, opLog) -> + initExpiry.set(spec.registry().getTimestamp("misc").getSeconds())), + systemFileDelete("misc").payingWith(SYSTEM_DELETE_ADMIN).fee(0L).updatingExpiry(now + lifetime - 1_000), + getFileInfo("misc") + .nodePayment(1_234L) + .hasAnswerOnlyPrecheck(OK) + .hasDeleted(true), + systemFileUndelete("misc").payingWith(SYSTEM_UNDELETE_ADMIN).fee(0L), + getFileContents("misc").hasContents(ignore -> ORIG_FILE), + getFileInfo("misc").hasExpiry(initExpiry::get)); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/freeze/UpdateServerFiles.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/freeze/UpdateServerFiles.java index c86c3924e592..dbfc2c7fd819 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/freeze/UpdateServerFiles.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/freeze/UpdateServerFiles.java @@ -16,7 +16,7 @@ package com.hedera.services.bdd.suites.freeze; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.fileUpdate; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.freezeUpgrade; import static com.hedera.services.bdd.suites.utils.ZipUtil.createZip; @@ -75,13 +75,12 @@ public List> getSpecsInSuite() { } private List> postiveTests() { - return Arrays.asList(uploadGivenDirectory()); + return Arrays.asList(performsFreezeUpgrade()); } // Zip all files under target directory and add an unzip and launch script to it // then send to server to update server - final Stream uploadGivenDirectory() { - + final Stream performsFreezeUpgrade() { log.info("Creating zip file from {}", uploadPath); // create directory if uploadPath doesn't exist if (!new File(uploadPath).exists()) { @@ -95,9 +94,7 @@ final Stream uploadGivenDirectory() { final File directory = new File(temp_dir); if (directory.exists()) { // delete everything in it recursively - FileUtils.cleanDirectory(directory); - } else { directory.mkdir(); } @@ -115,18 +112,16 @@ final Stream uploadGivenDirectory() { Assertions.fail("Directory creation failed"); } final byte[] 
hash = CommonUtils.noThrowSha384HashOf(data); - return defaultHapiSpec("uploadFileAndUpdate") - .given( - fileUpdate(APP_PROPERTIES) - .payingWith(ADDRESS_BOOK_CONTROL) - .overridingProps(Map.of("maxFileSize", "2048000")), - UtilVerbs.updateLargeFile(GENESIS, fileIDString, ByteString.copyFrom(data))) - .when(freezeUpgrade() + return hapiTest( + fileUpdate(APP_PROPERTIES) + .payingWith(ADDRESS_BOOK_CONTROL) + .overridingProps(Map.of("maxFileSize", "2048000")), + UtilVerbs.updateLargeFile(GENESIS, fileIDString, ByteString.copyFrom(data)), + freezeUpgrade() .withUpdateFile(fileIDString) .havingHash(hash) .payingWith(GENESIS) .startingIn(60) - .seconds()) - .then(); + .seconds()); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/freeze/UpgradeSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/freeze/UpgradeSuite.java index aade86c42786..cad958bf360a 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/freeze/UpgradeSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/freeze/UpgradeSuite.java @@ -16,7 +16,7 @@ package com.hedera.services.bdd.suites.freeze; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getFileContents; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.fileAppend; @@ -107,146 +107,130 @@ public List> getSpecsInSuite() { } final Stream precheckRejectsUnknownFreezeType() { - return defaultHapiSpec("PrejeckRejectsUnknownFreezeType") - .given() - .when() - .then(freeze().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY)); + return hapiTest(freeze().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY)); } final Stream freezeOnlyPrecheckRejectsInvalid() { - return defaultHapiSpec("freezeOnlyPrecheckRejectsInvalid") - .given() - .when() - .then( - freezeOnly().withRejectedStartHr().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), - freezeOnly().withRejectedStartMin().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), - freezeOnly().withRejectedEndHr().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), - freezeOnly().withRejectedEndMin().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), - freezeOnly().startingIn(-60).minutes().hasPrecheck(FREEZE_START_TIME_MUST_BE_FUTURE)); + return hapiTest( + freezeOnly().withRejectedStartHr().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), + freezeOnly().withRejectedStartMin().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), + freezeOnly().withRejectedEndHr().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), + freezeOnly().withRejectedEndMin().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), + freezeOnly().startingIn(-60).minutes().hasPrecheck(FREEZE_START_TIME_MUST_BE_FUTURE)); } final Stream freezeUpgradeValidationRejectsInvalid() { - return defaultHapiSpec("freezeUpgradeValidationRejectsInvalid") - .given() - .when() - .then( - freezeUpgrade().withRejectedStartHr().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), - freezeUpgrade().withRejectedStartMin().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), - freezeUpgrade().withRejectedEndHr().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), - freezeUpgrade().withRejectedEndMin().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), - freezeUpgrade().startingIn(-60).minutes().hasPrecheck(FREEZE_START_TIME_MUST_BE_FUTURE), - freezeUpgrade() - .startingIn(2) - .minutes() - 
.withUpdateFile(standardUpdateFile) - .havingHash(poeticUpgradeHash) - .hasKnownStatus(NO_UPGRADE_HAS_BEEN_PREPARED), - freezeUpgrade() - .startingIn(2) - .minutes() - .havingHash(poeticUpgradeHash) - .hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), - freezeUpgrade() - .startingIn(2) - .minutes() - .withUpdateFile(standardUpdateFile) - .hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY)); + return hapiTest( + freezeUpgrade().withRejectedStartHr().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), + freezeUpgrade().withRejectedStartMin().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), + freezeUpgrade().withRejectedEndHr().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), + freezeUpgrade().withRejectedEndMin().hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), + freezeUpgrade().startingIn(-60).minutes().hasPrecheck(FREEZE_START_TIME_MUST_BE_FUTURE), + freezeUpgrade() + .startingIn(2) + .minutes() + .withUpdateFile(standardUpdateFile) + .havingHash(poeticUpgradeHash) + .hasKnownStatus(NO_UPGRADE_HAS_BEEN_PREPARED), + freezeUpgrade() + .startingIn(2) + .minutes() + .havingHash(poeticUpgradeHash) + .hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY), + freezeUpgrade() + .startingIn(2) + .minutes() + .withUpdateFile(standardUpdateFile) + .hasPrecheck(INVALID_FREEZE_TRANSACTION_BODY)); } final Stream freezeAbortIsIdempotent() { - return defaultHapiSpec("FreezeAbortIsIdempotent") - .given() - .when() - .then(freezeAbort().hasKnownStatus(SUCCESS), freezeAbort().hasKnownStatus(SUCCESS)); + return hapiTest(freezeAbort().hasKnownStatus(SUCCESS), freezeAbort().hasKnownStatus(SUCCESS)); } final Stream prepareUpgradeValidationRejectsInvalid() { - return defaultHapiSpec("PrepareUpgradeValidationRejectsInvalid") - .given( - fileUpdate(standardUpdateFile) - .signedBy(FREEZE_ADMIN) - .contents(pragmatism) - .payingWith(FREEZE_ADMIN), - cryptoTransfer(tinyBarsFromTo(GENESIS, FREEZE_ADMIN, ONE_HUNDRED_HBARS)), - prepareUpgrade() - .withUpdateFile("0.0.149") - .havingHash(poeticUpgradeHash) - .hasPrecheck(FREEZE_UPDATE_FILE_DOES_NOT_EXIST), - prepareUpgrade() - .withUpdateFile(standardUpdateFile) - .havingHash(notEvenASha384Hash) - .hasPrecheck(FREEZE_UPDATE_FILE_HASH_DOES_NOT_MATCH), - prepareUpgrade() - .withUpdateFile(standardUpdateFile) - .havingHash(poeticUpgradeHash) - .hasKnownStatus(FREEZE_UPDATE_FILE_HASH_DOES_NOT_MATCH)) - .when(fileUpdate(standardUpdateFile) + return hapiTest( + fileUpdate(standardUpdateFile) .signedBy(FREEZE_ADMIN) .contents(pragmatism) - .payingWith(FREEZE_ADMIN)) - .then( - prepareUpgrade() - .withUpdateFile(standardUpdateFile) - .havingHash(poeticUpgradeHash) - .hasKnownStatus(FREEZE_UPDATE_FILE_HASH_DOES_NOT_MATCH), - fileUpdate(standardUpdateFile) - .signedBy(FREEZE_ADMIN) - .path(poeticUpgradeLoc) - .payingWith(FREEZE_ADMIN), - getFileContents(standardUpdateFile) - .hasByteStringContents(ignore -> ByteString.copyFrom(poeticUpgrade)), - prepareUpgrade().withUpdateFile(standardUpdateFile).havingHash(poeticUpgradeHash), - prepareUpgrade() - .withUpdateFile(standardUpdateFile) - .havingHash(poeticUpgradeHash) - .hasKnownStatus(FREEZE_UPGRADE_IN_PROGRESS), - freezeOnly().startingIn(60).minutes().hasKnownStatus(FREEZE_UPGRADE_IN_PROGRESS), - telemetryUpgrade() - .withUpdateFile(standardUpdateFile) - .havingHash(poeticUpgradeHash) - .startingIn(60) - .minutes() - .hasKnownStatus(FREEZE_UPGRADE_IN_PROGRESS), - fileUpdate(standardUpdateFile) - .signedBy(FREEZE_ADMIN) - .path(poeticUpgradeLoc) - .payingWith(FREEZE_ADMIN) - .hasKnownStatus(PREPARED_UPDATE_FILE_IS_IMMUTABLE), - fileAppend(standardUpdateFile) - 
.signedBy(FREEZE_ADMIN) - .path(poeticUpgradeLoc) - .payingWith(FREEZE_ADMIN) - .hasKnownStatus(PREPARED_UPDATE_FILE_IS_IMMUTABLE), - freezeAbort()); + .payingWith(FREEZE_ADMIN), + cryptoTransfer(tinyBarsFromTo(GENESIS, FREEZE_ADMIN, ONE_HUNDRED_HBARS)), + prepareUpgrade() + .withUpdateFile("0.0.149") + .havingHash(poeticUpgradeHash) + .hasPrecheck(FREEZE_UPDATE_FILE_DOES_NOT_EXIST), + prepareUpgrade() + .withUpdateFile(standardUpdateFile) + .havingHash(notEvenASha384Hash) + .hasPrecheck(FREEZE_UPDATE_FILE_HASH_DOES_NOT_MATCH), + prepareUpgrade() + .withUpdateFile(standardUpdateFile) + .havingHash(poeticUpgradeHash) + .hasKnownStatus(FREEZE_UPDATE_FILE_HASH_DOES_NOT_MATCH), + fileUpdate(standardUpdateFile) + .signedBy(FREEZE_ADMIN) + .contents(pragmatism) + .payingWith(FREEZE_ADMIN), + prepareUpgrade() + .withUpdateFile(standardUpdateFile) + .havingHash(poeticUpgradeHash) + .hasKnownStatus(FREEZE_UPDATE_FILE_HASH_DOES_NOT_MATCH), + fileUpdate(standardUpdateFile) + .signedBy(FREEZE_ADMIN) + .path(poeticUpgradeLoc) + .payingWith(FREEZE_ADMIN), + getFileContents(standardUpdateFile).hasByteStringContents(ignore -> ByteString.copyFrom(poeticUpgrade)), + prepareUpgrade().withUpdateFile(standardUpdateFile).havingHash(poeticUpgradeHash), + prepareUpgrade() + .withUpdateFile(standardUpdateFile) + .havingHash(poeticUpgradeHash) + .hasKnownStatus(FREEZE_UPGRADE_IN_PROGRESS), + freezeOnly().startingIn(60).minutes().hasKnownStatus(FREEZE_UPGRADE_IN_PROGRESS), + telemetryUpgrade() + .withUpdateFile(standardUpdateFile) + .havingHash(poeticUpgradeHash) + .startingIn(60) + .minutes() + .hasKnownStatus(FREEZE_UPGRADE_IN_PROGRESS), + fileUpdate(standardUpdateFile) + .signedBy(FREEZE_ADMIN) + .path(poeticUpgradeLoc) + .payingWith(FREEZE_ADMIN) + .hasKnownStatus(PREPARED_UPDATE_FILE_IS_IMMUTABLE), + fileAppend(standardUpdateFile) + .signedBy(FREEZE_ADMIN) + .path(poeticUpgradeLoc) + .payingWith(FREEZE_ADMIN) + .hasKnownStatus(PREPARED_UPDATE_FILE_IS_IMMUTABLE), + freezeAbort()); } final Stream telemetryUpgradeValidationRejectsInvalid() { - return defaultHapiSpec("TelemetryUpgradeValidationRejectsInvalid") - .given( - cryptoTransfer(tinyBarsFromTo(GENESIS, FREEZE_ADMIN, ONE_HUNDRED_HBARS)), - telemetryUpgrade() - .withUpdateFile(standardTelemetryFile) - .havingHash(poeticUpgradeHash) - .startingIn(-60) - .minutes() - .hasPrecheck(FREEZE_START_TIME_MUST_BE_FUTURE), - telemetryUpgrade() - .withUpdateFile("0.0.149") - .havingHash(poeticUpgradeHash) - .startingIn(3) - .minutes() - .hasPrecheck(FREEZE_UPDATE_FILE_DOES_NOT_EXIST), - telemetryUpgrade() - .startingIn(3) - .minutes() - .withUpdateFile(standardTelemetryFile) - .havingHash(notEvenASha384Hash) - .hasPrecheck(FREEZE_UPDATE_FILE_HASH_DOES_NOT_MATCH)) - .when(fileUpdate(standardTelemetryFile) + return hapiTest( + cryptoTransfer(tinyBarsFromTo(GENESIS, FREEZE_ADMIN, ONE_HUNDRED_HBARS)), + telemetryUpgrade() + .withUpdateFile(standardTelemetryFile) + .havingHash(poeticUpgradeHash) + .startingIn(-60) + .minutes() + .hasPrecheck(FREEZE_START_TIME_MUST_BE_FUTURE), + telemetryUpgrade() + .withUpdateFile("0.0.149") + .havingHash(poeticUpgradeHash) + .startingIn(3) + .minutes() + .hasPrecheck(FREEZE_UPDATE_FILE_DOES_NOT_EXIST), + telemetryUpgrade() + .startingIn(3) + .minutes() + .withUpdateFile(standardTelemetryFile) + .havingHash(notEvenASha384Hash) + .hasPrecheck(FREEZE_UPDATE_FILE_HASH_DOES_NOT_MATCH), + fileUpdate(standardTelemetryFile) .signedBy(FREEZE_ADMIN) .contents(pragmatism) - .payingWith(FREEZE_ADMIN)) - .then(telemetryUpgrade() + 
.payingWith(FREEZE_ADMIN), + telemetryUpgrade() .startingIn(3) .minutes() .withUpdateFile(standardTelemetryFile) @@ -255,46 +239,43 @@ final Stream telemetryUpgradeValidationRejectsInvalid() { } final Stream canFreezeUpgradeWithPreparedUpgrade() { - return defaultHapiSpec("CanFreezeUpgradeWithPreparedUpgrade") - .given( - cryptoTransfer(tinyBarsFromTo(GENESIS, FREEZE_ADMIN, ONE_HUNDRED_HBARS)), - fileUpdate(standardUpdateFile) - .signedBy(FREEZE_ADMIN) - .path(poeticUpgradeLoc) - .payingWith(FREEZE_ADMIN)) - .when(prepareUpgrade().withUpdateFile(standardUpdateFile).havingHash(poeticUpgradeHash)) - .then( - freezeUpgrade() - .startingIn(60) - .minutes() - .withUpdateFile(standardTelemetryFile) - .havingHash(poeticUpgradeHash) - .hasKnownStatus(UPDATE_FILE_ID_DOES_NOT_MATCH_PREPARED), - freezeUpgrade() - .startingIn(60) - .minutes() - .withUpdateFile(standardUpdateFile) - .havingHash(heavyPoeticUpgradeHash) - .hasKnownStatus(UPDATE_FILE_HASH_DOES_NOT_MATCH_PREPARED), - freezeUpgrade() - .startingIn(60) - .minutes() - .withUpdateFile(standardUpdateFile) - .havingHash(poeticUpgradeHash), - freezeAbort()); + return hapiTest( + cryptoTransfer(tinyBarsFromTo(GENESIS, FREEZE_ADMIN, ONE_HUNDRED_HBARS)), + fileUpdate(standardUpdateFile) + .signedBy(FREEZE_ADMIN) + .path(poeticUpgradeLoc) + .payingWith(FREEZE_ADMIN), + prepareUpgrade().withUpdateFile(standardUpdateFile).havingHash(poeticUpgradeHash), + freezeUpgrade() + .startingIn(60) + .minutes() + .withUpdateFile(standardTelemetryFile) + .havingHash(poeticUpgradeHash) + .hasKnownStatus(UPDATE_FILE_ID_DOES_NOT_MATCH_PREPARED), + freezeUpgrade() + .startingIn(60) + .minutes() + .withUpdateFile(standardUpdateFile) + .havingHash(heavyPoeticUpgradeHash) + .hasKnownStatus(UPDATE_FILE_HASH_DOES_NOT_MATCH_PREPARED), + freezeUpgrade() + .startingIn(60) + .minutes() + .withUpdateFile(standardUpdateFile) + .havingHash(poeticUpgradeHash), + freezeAbort()); } final Stream canTelemetryUpgradeWithValid() { - return defaultHapiSpec("CanTelemetryUpgradeWithValid") - .given(cryptoTransfer(tinyBarsFromTo(GENESIS, FREEZE_ADMIN, ONE_HUNDRED_HBARS))) - .when( - fileUpdate(standardUpdateFile) - .signedBy(FREEZE_ADMIN) - .path(heavyPoeticUpgradeLoc) - .payingWith(FREEZE_ADMIN), - getFileContents(standardUpdateFile) - .hasByteStringContents(ignore -> ByteString.copyFrom(heavyPoeticUpgrade))) - .then(telemetryUpgrade() + return hapiTest( + cryptoTransfer(tinyBarsFromTo(GENESIS, FREEZE_ADMIN, ONE_HUNDRED_HBARS)), + fileUpdate(standardUpdateFile) + .signedBy(FREEZE_ADMIN) + .path(heavyPoeticUpgradeLoc) + .payingWith(FREEZE_ADMIN), + getFileContents(standardUpdateFile) + .hasByteStringContents(ignore -> ByteString.copyFrom(heavyPoeticUpgrade)), + telemetryUpgrade() .startingIn(60) .minutes() .withUpdateFile(standardUpdateFile) diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip423/DisabledLongTermExecutionScheduleTest.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip423/DisabledLongTermExecutionScheduleTest.java index c25c46e7c87c..2556ceab76a2 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip423/DisabledLongTermExecutionScheduleTest.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip423/DisabledLongTermExecutionScheduleTest.java @@ -17,7 +17,6 @@ package com.hedera.services.bdd.suites.hip423; import static com.hedera.services.bdd.junit.RepeatableReason.NEEDS_VIRTUAL_TIME_FOR_FAST_EXECUTION; -import static 
com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getScheduleInfo; @@ -72,117 +71,107 @@ static void beforeAll(@NonNull final TestLifecycle lifecycle) { @Order(1) public Stream waitForExpiryIgnoredWhenLongTermDisabled() { - return defaultHapiSpec("WaitForExpiryIgnoredWhenLongTermDisabled") - .given( - cryptoCreate(PAYER).balance(ONE_HBAR), - cryptoCreate(SENDER).balance(1L).via(SENDER_TXN), - cryptoCreate(RECEIVER).balance(0L).receiverSigRequired(true)) - .when( - scheduleCreate( - THREE_SIG_XFER, - cryptoTransfer(tinyBarsFromTo(SENDER, RECEIVER, 1)) - .fee(ONE_HBAR)) - .withRelativeExpiry(SENDER_TXN, 50) - .waitForExpiry(true) - .designatingPayer(PAYER) - .alsoSigningWith(SENDER, RECEIVER), - getAccountBalance(RECEIVER).hasTinyBars(0L), - scheduleSign(THREE_SIG_XFER).alsoSigningWith(PAYER)) - .then( - getAccountBalance(RECEIVER).hasTinyBars(1L), - getScheduleInfo(THREE_SIG_XFER) - .hasScheduleId(THREE_SIG_XFER) - .hasWaitForExpiry(false) - .isExecuted()); + return hapiTest( + cryptoCreate(PAYER).balance(ONE_HBAR), + cryptoCreate(SENDER).balance(1L).via(SENDER_TXN), + cryptoCreate(RECEIVER).balance(0L).receiverSigRequired(true), + scheduleCreate( + THREE_SIG_XFER, + cryptoTransfer(tinyBarsFromTo(SENDER, RECEIVER, 1)) + .fee(ONE_HBAR)) + .withRelativeExpiry(SENDER_TXN, 50) + .waitForExpiry(true) + .designatingPayer(PAYER) + .alsoSigningWith(SENDER, RECEIVER), + getAccountBalance(RECEIVER).hasTinyBars(0L), + scheduleSign(THREE_SIG_XFER).alsoSigningWith(PAYER), + getAccountBalance(RECEIVER).hasTinyBars(1L), + getScheduleInfo(THREE_SIG_XFER) + .hasScheduleId(THREE_SIG_XFER) + .hasWaitForExpiry(false) + .isExecuted()); } @HapiTest @Order(2) public Stream expiryIgnoredWhenLongTermDisabled() { - return defaultHapiSpec("ExpiryIgnoredWhenLongTermDisabled") - .given( - cryptoCreate(SENDER).balance(ONE_HBAR).via(SENDER_TXN), - cryptoCreate(RECEIVER).balance(0L).receiverSigRequired(true)) - .when(scheduleCreate( + return hapiTest( + cryptoCreate(SENDER).balance(ONE_HBAR).via(SENDER_TXN), + cryptoCreate(RECEIVER).balance(0L).receiverSigRequired(true), + scheduleCreate( THREE_SIG_XFER, cryptoTransfer(tinyBarsFromTo(SENDER, RECEIVER, 1)) .fee(ONE_HBAR)) .withRelativeExpiry(SENDER_TXN, 20) .waitForExpiry(true) - .designatingPayer(SENDER)) - .then( - scheduleSign(THREE_SIG_XFER).alsoSigningWith(SENDER, RECEIVER), - getScheduleInfo(THREE_SIG_XFER) - .hasScheduleId(THREE_SIG_XFER) - .isExecuted() - .isNotDeleted(), - getAccountBalance(RECEIVER).hasTinyBars(1L)); + .designatingPayer(SENDER), + scheduleSign(THREE_SIG_XFER).alsoSigningWith(SENDER, RECEIVER), + getScheduleInfo(THREE_SIG_XFER) + .hasScheduleId(THREE_SIG_XFER) + .isExecuted() + .isNotDeleted(), + getAccountBalance(RECEIVER).hasTinyBars(1L)); } @HapiTest @Order(3) public Stream waitForExpiryIgnoredWhenLongTermDisabledThenEnabled() { - return defaultHapiSpec("WaitForExpiryIgnoredWhenLongTermDisabledThenEnabled") - .given( - cryptoCreate(PAYER).balance(ONE_HBAR), - cryptoCreate(SENDER).balance(1L).via(SENDER_TXN), - cryptoCreate(RECEIVER).balance(0L).receiverSigRequired(true)) - .when( - scheduleCreate( - THREE_SIG_XFER, - cryptoTransfer(tinyBarsFromTo(SENDER, RECEIVER, 1)) - .fee(ONE_HBAR)) - .withRelativeExpiry(SENDER_TXN, 4) - .waitForExpiry(true) - .designatingPayer(PAYER) - .alsoSigningWith(SENDER, RECEIVER), - 
getAccountBalance(RECEIVER).hasTinyBars(0L), - overriding(SCHEDULING_LONG_TERM_ENABLED, "true"), - scheduleSign(THREE_SIG_XFER).alsoSigningWith(PAYER)) - .then( - cryptoCreate("triggerTxn"), - getAccountBalance(RECEIVER).hasTinyBars(1L), - getScheduleInfo(THREE_SIG_XFER) - .hasScheduleId(THREE_SIG_XFER) - .hasWaitForExpiry(false) - .isExecuted(), - overriding(SCHEDULING_LONG_TERM_ENABLED, "false")); + return hapiTest( + cryptoCreate(PAYER).balance(ONE_HBAR), + cryptoCreate(SENDER).balance(1L).via(SENDER_TXN), + cryptoCreate(RECEIVER).balance(0L).receiverSigRequired(true), + scheduleCreate( + THREE_SIG_XFER, + cryptoTransfer(tinyBarsFromTo(SENDER, RECEIVER, 1)) + .fee(ONE_HBAR)) + .withRelativeExpiry(SENDER_TXN, 4) + .waitForExpiry(true) + .designatingPayer(PAYER) + .alsoSigningWith(SENDER, RECEIVER), + getAccountBalance(RECEIVER).hasTinyBars(0L), + overriding(SCHEDULING_LONG_TERM_ENABLED, "true"), + scheduleSign(THREE_SIG_XFER).alsoSigningWith(PAYER), + cryptoCreate("triggerTxn"), + getAccountBalance(RECEIVER).hasTinyBars(1L), + getScheduleInfo(THREE_SIG_XFER) + .hasScheduleId(THREE_SIG_XFER) + .hasWaitForExpiry(false) + .isExecuted(), + overriding(SCHEDULING_LONG_TERM_ENABLED, "false")); } @HapiTest @Order(4) public Stream expiryIgnoredWhenLongTermDisabledThenEnabled() { - return defaultHapiSpec("ExpiryIgnoredWhenLongTermDisabledThenEnabled") - .given( - cryptoCreate(PAYER).balance(ONE_HBAR), - cryptoCreate(SENDER).balance(1L).via(SENDER_TXN), - cryptoCreate(RECEIVER).balance(0L).receiverSigRequired(true)) - .when(scheduleCreate( + return hapiTest( + cryptoCreate(PAYER).balance(ONE_HBAR), + cryptoCreate(SENDER).balance(1L).via(SENDER_TXN), + cryptoCreate(RECEIVER).balance(0L).receiverSigRequired(true), + scheduleCreate( THREE_SIG_XFER, cryptoTransfer(tinyBarsFromTo(SENDER, RECEIVER, 1)) .fee(ONE_HBAR)) .withRelativeExpiry(SENDER_TXN, 4) .waitForExpiry(true) .designatingPayer(PAYER) - .via(CREATE_TXN)) - .then( - getScheduleInfo(THREE_SIG_XFER) - .hasScheduleId(THREE_SIG_XFER) - .hasWaitForExpiry(false) - .hasRelativeExpiry(CREATE_TXN, TimeUnit.MINUTES.toSeconds(30)) - .isNotExecuted() - .isNotDeleted(), - scheduleSign(THREE_SIG_XFER) - .alsoSigningWith(PAYER, SENDER, RECEIVER) - .payingWith(PAYER), - getScheduleInfo(THREE_SIG_XFER) - .hasScheduleId(THREE_SIG_XFER) - .hasWaitForExpiry(false) - .hasRelativeExpiry(CREATE_TXN, TimeUnit.MINUTES.toSeconds(30)) - .isExecuted() - .isNotDeleted(), - getAccountBalance(RECEIVER).hasTinyBars(1L)); + .via(CREATE_TXN), + getScheduleInfo(THREE_SIG_XFER) + .hasScheduleId(THREE_SIG_XFER) + .hasWaitForExpiry(false) + .hasRelativeExpiry(CREATE_TXN, TimeUnit.MINUTES.toSeconds(30)) + .isNotExecuted() + .isNotDeleted(), + scheduleSign(THREE_SIG_XFER) + .alsoSigningWith(PAYER, SENDER, RECEIVER) + .payingWith(PAYER), + getScheduleInfo(THREE_SIG_XFER) + .hasScheduleId(THREE_SIG_XFER) + .hasWaitForExpiry(false) + .hasRelativeExpiry(CREATE_TXN, TimeUnit.MINUTES.toSeconds(30)) + .isExecuted() + .isNotDeleted(), + getAccountBalance(RECEIVER).hasTinyBars(1L)); } @RepeatableHapiTest(NEEDS_VIRTUAL_TIME_FOR_FAST_EXECUTION) diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip869/NodeCreateTest.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip869/NodeCreateTest.java index 42a7276baf35..cde7afd24d59 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip869/NodeCreateTest.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip869/NodeCreateTest.java 
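// Illustrative sketch only (not part of this diff): the hapiTest(...) form used throughout these
// suites composes every operation into one ordered varargs list, so the former given()/when()/then()
// phases become consecutive arguments that execute in declaration order. The method name, the
// "sender"/"receiver" account names, and the Stream<DynamicTest> return type are assumptions for the
// example; the operations themselves (cryptoCreate, cryptoTransfer, tinyBarsFromTo, getAccountBalance,
// ONE_HBAR) are the same static imports already used elsewhere in this diff.
@HapiTest
final Stream<DynamicTest> transferRunsInDeclarationOrder() {
    return hapiTest(
            cryptoCreate("sender").balance(ONE_HBAR),
            cryptoCreate("receiver").balance(0L),
            cryptoTransfer(tinyBarsFromTo("sender", "receiver", 1L)),
            getAccountBalance("receiver").hasTinyBars(1L));
}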
@@ -20,7 +20,6 @@ import static com.hedera.services.bdd.junit.EmbeddedReason.MUST_SKIP_INGEST; import static com.hedera.services.bdd.junit.EmbeddedReason.NEEDS_STATE_ACCESS; import static com.hedera.services.bdd.junit.hedera.utils.AddressBookUtils.endpointFor; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.PropertySource.asAccount; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; @@ -396,49 +395,43 @@ final Stream minimumFieldsSetHappyCase() throws CertificateEncoding */ @EmbeddedHapiTest(MUST_SKIP_INGEST) final Stream validateFees() throws CertificateEncodingException { - return defaultHapiSpec("validateFees") - .given( - newKeyNamed(ED_25519_KEY).shape(KeyShape.ED25519), - newKeyNamed("testKey"), - newKeyNamed("randomAccount"), - cryptoCreate("payer").balance(10_000_000_000L), - // Submit to a different node so ingest check is skipped - nodeCreate("ntb") - .adminKey(ED_25519_KEY) - .payingWith("payer") - .signedBy("payer") - .setNode("0.0.4") - .gossipCaCertificate( - gossipCertificates.getFirst().getEncoded()) - .hasKnownStatus(UNAUTHORIZED) - .via("nodeCreationFailed")) - .when() - .then( - getTxnRecord("nodeCreationFailed").logged(), - // Validate that the failed transaction charges the correct fees. - validateChargedUsdWithin("nodeCreationFailed", 0.001, 3), - nodeCreate("ntb") - .adminKey(ED_25519_KEY) - .fee(ONE_HBAR) - .gossipCaCertificate( - gossipCertificates.getFirst().getEncoded()) - .via("nodeCreation"), - getTxnRecord("nodeCreation").logged(), - // But, note that the fee will not be charged for privileged payer - // The fee is charged here because the payer is not privileged - validateChargedUsdWithin("nodeCreation", 0.0, 0.0), - - // Submit with several signatures and the price should increase - nodeCreate("ntb") - .adminKey(ED_25519_KEY) - .payingWith("payer") - .signedBy("payer", "randomAccount", "testKey") - .setNode("0.0.4") - .gossipCaCertificate( - gossipCertificates.getLast().getEncoded()) - .hasKnownStatus(UNAUTHORIZED) - .via("multipleSigsCreation"), - validateChargedUsdWithin("multipleSigsCreation", 0.0011276316, 3.0)); + return hapiTest( + newKeyNamed(ED_25519_KEY).shape(KeyShape.ED25519), + newKeyNamed("testKey"), + newKeyNamed("randomAccount"), + cryptoCreate("payer").balance(10_000_000_000L), + // Submit to a different node so ingest check is skipped + nodeCreate("ntb") + .adminKey(ED_25519_KEY) + .payingWith("payer") + .signedBy("payer") + .setNode("0.0.4") + .gossipCaCertificate(gossipCertificates.getFirst().getEncoded()) + .hasKnownStatus(UNAUTHORIZED) + .via("nodeCreationFailed"), + getTxnRecord("nodeCreationFailed").logged(), + // Validate that the failed transaction charges the correct fees. 
+ validateChargedUsdWithin("nodeCreationFailed", 0.001, 3), + nodeCreate("ntb") + .adminKey(ED_25519_KEY) + .fee(ONE_HBAR) + .gossipCaCertificate(gossipCertificates.getFirst().getEncoded()) + .via("nodeCreation"), + getTxnRecord("nodeCreation").logged(), + // But, note that the fee will not be charged for privileged payer + // The fee is charged here because the payer is not privileged + validateChargedUsdWithin("nodeCreation", 0.0, 0.0), + + // Submit with several signatures and the price should increase + nodeCreate("ntb") + .adminKey(ED_25519_KEY) + .payingWith("payer") + .signedBy("payer", "randomAccount", "testKey") + .setNode("0.0.4") + .gossipCaCertificate(gossipCertificates.getLast().getEncoded()) + .hasKnownStatus(UNAUTHORIZED) + .via("multipleSigsCreation"), + validateChargedUsdWithin("multipleSigsCreation", 0.0011276316, 3.0)); } /** @@ -448,50 +441,44 @@ final Stream validateFees() throws CertificateEncodingException { @EmbeddedHapiTest(MUST_SKIP_INGEST) final Stream validateFeesInsufficientAmount() throws CertificateEncodingException { final String description = "His vorpal blade went snicker-snack!"; - return defaultHapiSpec("validateFees") - .given( - newKeyNamed(ED_25519_KEY).shape(KeyShape.ED25519), - newKeyNamed("testKey"), - newKeyNamed("randomAccount"), - cryptoCreate("payer").balance(10_000_000_000L), - // Submit to a different node so ingest check is skipped - nodeCreate("ntb") - .adminKey(ED_25519_KEY) - .payingWith("payer") - .signedBy("payer") - .description(description) - .setNode("0.0.4") - .gossipCaCertificate( - gossipCertificates.getFirst().getEncoded()) - .fee(1) - .hasKnownStatus(INSUFFICIENT_TX_FEE) - .via("nodeCreationFailed")) - .when() - .then( - getTxnRecord("nodeCreationFailed").logged(), - nodeCreate("ntb") - .adminKey(ED_25519_KEY) - .description(description) - .gossipCaCertificate( - gossipCertificates.getFirst().getEncoded()) - .via("nodeCreation"), - getTxnRecord("nodeCreation").logged(), - // But, note that the fee will not be charged for privileged payer - // The fee is charged here because the payer is not privileged - validateChargedUsdWithin("nodeCreation", 0.0, 0.0), - - // Submit with several signatures and the price should increase - nodeCreate("ntb") - .adminKey(ED_25519_KEY) - .payingWith("payer") - .signedBy("payer", "randomAccount", "testKey") - .description(description) - .setNode("0.0.4") - .gossipCaCertificate( - gossipCertificates.getLast().getEncoded()) - .fee(1) - .hasKnownStatus(INSUFFICIENT_TX_FEE) - .via("multipleSigsCreation")); + return hapiTest( + newKeyNamed(ED_25519_KEY).shape(KeyShape.ED25519), + newKeyNamed("testKey"), + newKeyNamed("randomAccount"), + cryptoCreate("payer").balance(10_000_000_000L), + // Submit to a different node so ingest check is skipped + nodeCreate("ntb") + .adminKey(ED_25519_KEY) + .payingWith("payer") + .signedBy("payer") + .description(description) + .setNode("0.0.4") + .gossipCaCertificate(gossipCertificates.getFirst().getEncoded()) + .fee(1) + .hasKnownStatus(INSUFFICIENT_TX_FEE) + .via("nodeCreationFailed"), + getTxnRecord("nodeCreationFailed").logged(), + nodeCreate("ntb") + .adminKey(ED_25519_KEY) + .description(description) + .gossipCaCertificate(gossipCertificates.getFirst().getEncoded()) + .via("nodeCreation"), + getTxnRecord("nodeCreation").logged(), + // But, note that the fee will not be charged for privileged payer + // The fee is charged here because the payer is not privileged + validateChargedUsdWithin("nodeCreation", 0.0, 0.0), + + // Submit with several signatures and the price 
should increase + nodeCreate("ntb") + .adminKey(ED_25519_KEY) + .payingWith("payer") + .signedBy("payer", "randomAccount", "testKey") + .description(description) + .setNode("0.0.4") + .gossipCaCertificate(gossipCertificates.getLast().getEncoded()) + .fee(1) + .hasKnownStatus(INSUFFICIENT_TX_FEE) + .via("multipleSigsCreation")); } @HapiTest diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip869/NodeDeleteTest.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip869/NodeDeleteTest.java index 1836149d2ccc..e3edc0b57743 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip869/NodeDeleteTest.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip869/NodeDeleteTest.java @@ -18,7 +18,6 @@ import static com.hedera.services.bdd.junit.EmbeddedReason.MUST_SKIP_INGEST; import static com.hedera.services.bdd.junit.EmbeddedReason.NEEDS_STATE_ACCESS; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; @@ -49,8 +48,6 @@ import com.hedera.services.bdd.junit.HapiTest; import com.hedera.services.bdd.junit.HapiTestLifecycle; import com.hedera.services.bdd.junit.LeakyHapiTest; -import com.hedera.services.bdd.junit.support.TestLifecycle; -import edu.umd.cs.findbugs.annotations.NonNull; import java.security.cert.CertificateEncodingException; import java.security.cert.X509Certificate; import java.util.List; @@ -64,7 +61,7 @@ public class NodeDeleteTest { private static List gossipCertificates; @BeforeAll - static void beforeAll(@NonNull final TestLifecycle testLifecycle) { + static void beforeAll() { gossipCertificates = generateX509Certificates(1); } @@ -83,40 +80,36 @@ final Stream deleteNodeWorks() throws CertificateEncodingException @EmbeddedHapiTest(MUST_SKIP_INGEST) final Stream validateFees() throws CertificateEncodingException { final String description = "His vorpal blade went snicker-snack!"; - return defaultHapiSpec("validateFees") - .given( - newKeyNamed("testKey"), - newKeyNamed("randomAccount"), - cryptoCreate("payer").balance(10_000_000_000L), - nodeCreate("node100") - .description(description) - .fee(ONE_HBAR) - .gossipCaCertificate( - gossipCertificates.getFirst().getEncoded()), - // Submit to a different node so ingest check is skipped - nodeDelete("node100") - .setNode("0.0.5") - .payingWith("payer") - .hasKnownStatus(INVALID_SIGNATURE) - .via("failedDeletion")) - .when() - .then( - getTxnRecord("failedDeletion").logged(), - // The fee is charged here because the payer is not privileged - validateChargedUsdWithin("failedDeletion", 0.001, 3.0), + return hapiTest( + newKeyNamed("testKey"), + newKeyNamed("randomAccount"), + cryptoCreate("payer").balance(10_000_000_000L), + nodeCreate("node100") + .description(description) + .fee(ONE_HBAR) + .gossipCaCertificate(gossipCertificates.getFirst().getEncoded()), + // Submit to a different node so ingest check is skipped + nodeDelete("node100") + .setNode("0.0.5") + .payingWith("payer") + .hasKnownStatus(INVALID_SIGNATURE) + .via("failedDeletion"), + getTxnRecord("failedDeletion").logged(), + // The fee is charged here because the payer is not privileged + validateChargedUsdWithin("failedDeletion", 0.001, 3.0), - // Submit with several signatures and the price should increase - nodeDelete("node100") - 
.setNode("0.0.5") - .payingWith("payer") - .signedBy("payer", "randomAccount", "testKey") - .hasKnownStatus(INVALID_SIGNATURE) - .via("multipleSigsDeletion"), - validateChargedUsdWithin("multipleSigsDeletion", 0.0011276316, 3.0), - nodeDelete("node100").via("deleteNode"), - getTxnRecord("deleteNode").logged(), - // The fee is not charged here because the payer is privileged - validateChargedUsdWithin("deleteNode", 0.0, 3.0)); + // Submit with several signatures and the price should increase + nodeDelete("node100") + .setNode("0.0.5") + .payingWith("payer") + .signedBy("payer", "randomAccount", "testKey") + .hasKnownStatus(INVALID_SIGNATURE) + .via("multipleSigsDeletion"), + validateChargedUsdWithin("multipleSigsDeletion", 0.0011276316, 3.0), + nodeDelete("node100").via("deleteNode"), + getTxnRecord("deleteNode").logged(), + // The fee is not charged here because the payer is privileged + validateChargedUsdWithin("deleteNode", 0.0, 3.0)); } @EmbeddedHapiTest(MUST_SKIP_INGEST) @@ -153,21 +146,17 @@ final Stream validateFeesInsufficientAmount() throws CertificateEnc @HapiTest final Stream failsAtIngestForUnAuthorizedTxns() throws CertificateEncodingException { final String description = "His vorpal blade went snicker-snack!"; - return defaultHapiSpec("failsAtIngestForUnAuthorizedTxns") - .given( - cryptoCreate("payer").balance(10_000_000_000L), - nodeCreate("ntb") - .description(description) - .fee(ONE_HBAR) - .gossipCaCertificate( - gossipCertificates.getFirst().getEncoded()), - nodeDelete("ntb") - .payingWith("payer") - .fee(ONE_HBAR) - .hasKnownStatus(INVALID_SIGNATURE) - .via("failedDeletion")) - .when() - .then(); + return hapiTest( + cryptoCreate("payer").balance(10_000_000_000L), + nodeCreate("ntb") + .description(description) + .fee(ONE_HBAR) + .gossipCaCertificate(gossipCertificates.getFirst().getEncoded()), + nodeDelete("ntb") + .payingWith("payer") + .fee(ONE_HBAR) + .hasKnownStatus(INVALID_SIGNATURE) + .via("failedDeletion")); } @HapiTest diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip869/NodeUpdateTest.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip869/NodeUpdateTest.java index c786e3ede8dd..4dd235746026 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip869/NodeUpdateTest.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip869/NodeUpdateTest.java @@ -22,7 +22,6 @@ import static com.hedera.services.bdd.spec.HapiPropertySource.asDnsServiceEndpoint; import static com.hedera.services.bdd.spec.HapiPropertySource.asServiceEndpoint; import static com.hedera.services.bdd.spec.HapiPropertySource.invalidServiceEndpoint; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; import static com.hedera.services.bdd.spec.transactions.TxnUtils.WRONG_LENGTH_EDDSA_KEY; @@ -278,25 +277,21 @@ final Stream updateAccountIdWork() throws CertificateEncodingExcept @HapiTest final Stream failsAtIngestForUnAuthorizedTxns() throws CertificateEncodingException { final String description = "His vorpal blade went snicker-snack!"; - return defaultHapiSpec("failsAtIngestForUnAuthorizedTxns") - .given( - newKeyNamed("adminKey"), - cryptoCreate("payer").balance(10_000_000_000L), - nodeCreate("ntb") - .adminKey("adminKey") - .description(description) - .fee(ONE_HBAR) - .gossipCaCertificate( - 
gossipCertificates.getFirst().getEncoded()) - .via("nodeCreation"), - nodeUpdate("ntb") - .payingWith("payer") - .accountId("0.0.1000") - .hasPrecheck(UPDATE_NODE_ACCOUNT_NOT_ALLOWED) - .fee(ONE_HBAR) - .via("updateNode")) - .when() - .then(); + return hapiTest( + newKeyNamed("adminKey"), + cryptoCreate("payer").balance(10_000_000_000L), + nodeCreate("ntb") + .adminKey("adminKey") + .description(description) + .fee(ONE_HBAR) + .gossipCaCertificate(gossipCertificates.getFirst().getEncoded()) + .via("nodeCreation"), + nodeUpdate("ntb") + .payingWith("payer") + .accountId("0.0.1000") + .hasPrecheck(UPDATE_NODE_ACCOUNT_NOT_ALLOWED) + .fee(ONE_HBAR) + .via("updateNode")); } @LeakyHapiTest(overrides = {"nodes.maxServiceEndpoint"}) diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip904/AirdropsDisabledTest.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip904/AirdropsDisabledTest.java index 919d272b9af6..2fd048e44275 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip904/AirdropsDisabledTest.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip904/AirdropsDisabledTest.java @@ -19,7 +19,6 @@ import static com.google.protobuf.ByteString.copyFromUtf8; import static com.hedera.services.bdd.spec.HapiPropertySource.asHexedSolidityAddress; import static com.hedera.services.bdd.spec.HapiPropertySource.contractIdFromHexedMirrorAddress; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.assertions.AccountInfoAsserts.accountWith; import static com.hedera.services.bdd.spec.assertions.AutoAssocAsserts.accountTokenPairsInAnyOrder; @@ -55,6 +54,7 @@ import static com.hedera.services.bdd.suites.HapiSuite.ONE_HBAR; import static com.hedera.services.bdd.suites.HapiSuite.ONE_HUNDRED_HBARS; import static com.hedera.services.bdd.suites.HapiSuite.TOKEN_TREASURY; +import static com.hedera.services.bdd.suites.HapiSuite.flattened; import static com.hedera.services.bdd.suites.contract.Utils.aaWith; import static com.hedera.services.bdd.suites.contract.Utils.captureChildCreate2MetaFor; import static com.hedera.services.bdd.suites.contract.Utils.captureOneChildCreate2MetaFor; @@ -120,9 +120,9 @@ public class AirdropsDisabledTest { private static final Logger LOG = LogManager.getLogger(AirdropsDisabledTest.class); - private static String owner = "owner"; - private static String receiver = "receiver"; - private static String fungibleToken = "fungibleToken"; + private static final String owner = "owner"; + private static final String receiver = "receiver"; + private static final String fungibleToken = "fungibleToken"; @BeforeAll static void beforeAll(@NonNull final TestLifecycle testLifecycle) { @@ -315,67 +315,59 @@ final Stream canMergeCreate2ChildWithHollowAccountFungibleTransfers j1++; } - return defaultHapiSpec("canMergeCreate2ChildWithHollowAccountFungibleTransfersUnlimitedAssociations") - .given(givenOps) - .when( - // GET BYTECODE OF THE CREATE2 CONTRACT - sourcing(() -> contractCallLocal( - contract, GET_BYTECODE, asHeadlongAddress(factoryEvmAddress.get()), salt) - .exposingTypedResultsTo(results -> { - final var tcInitcode = (byte[]) results[0]; - testContractInitcode.set(tcInitcode); - LOG.info(CONTRACT_REPORTED_LOG_MESSAGE, tcInitcode.length); - }) - .payingWith(GENESIS) - .nodePayment(ONE_HBAR)), - // GET THE ADDRESS WHERE THE CONTRACT WILL BE DEPLOYED - 
sourcing(() -> setExpectedCreate2Address( - contract, salt, expectedCreate2Address, testContractInitcode)), + return hapiTest(flattened( + givenOps, + // GET BYTECODE OF THE CREATE2 CONTRACT + sourcing(() -> contractCallLocal( + contract, GET_BYTECODE, asHeadlongAddress(factoryEvmAddress.get()), salt) + .exposingTypedResultsTo(results -> { + final var tcInitcode = (byte[]) results[0]; + testContractInitcode.set(tcInitcode); + LOG.info(CONTRACT_REPORTED_LOG_MESSAGE, tcInitcode.length); + }) + .payingWith(GENESIS) + .nodePayment(ONE_HBAR)), + // GET THE ADDRESS WHERE THE CONTRACT WILL BE DEPLOYED + sourcing(() -> setExpectedCreate2Address(contract, salt, expectedCreate2Address, testContractInitcode)), - // Now create a hollow account at the desired address - lazyCreateAccountWithFungibleTransfers(creation, expectedCreate2Address, ftIds, partyAlias), - getTxnRecord(creation) - .andAllChildRecords() - .logged() - .exposingCreationsTo(l -> hollowCreationAddress.set(l.get(0))), - sourcing(() -> getAccountInfo(hollowCreationAddress.get()) - .hasAlreadyUsedAutomaticAssociations(fungibleTransfersSize) - .logged())) - .then( - // deploy create2 - sourcing(() -> contractCall(contract, DEPLOY, testContractInitcode.get(), salt) - .payingWith(GENESIS) - .gas(4_000_000L) - .sending(tcValue) - .via("TEST2")), - getTxnRecord("TEST2").andAllChildRecords().logged(), - captureOneChildCreate2MetaFor( - "Merged deployed contract with hollow account", - "TEST2", - mergedMirrorAddr, - mergedAliasAddr), + // Now create a hollow account at the desired address + lazyCreateAccountWithFungibleTransfers(creation, expectedCreate2Address, ftIds, partyAlias), + getTxnRecord(creation) + .andAllChildRecords() + .logged() + .exposingCreationsTo(l -> hollowCreationAddress.set(l.get(0))), + sourcing(() -> getAccountInfo(hollowCreationAddress.get()) + .hasAlreadyUsedAutomaticAssociations(fungibleTransfersSize) + .logged()), + // deploy create2 + sourcing(() -> contractCall(contract, DEPLOY, testContractInitcode.get(), salt) + .payingWith(GENESIS) + .gas(4_000_000L) + .sending(tcValue) + .via("TEST2")), + getTxnRecord("TEST2").andAllChildRecords().logged(), + captureOneChildCreate2MetaFor( + "Merged deployed contract with hollow account", "TEST2", mergedMirrorAddr, mergedAliasAddr), - // check failure when trying to deploy again - sourcing(() -> contractCall(contract, DEPLOY, testContractInitcode.get(), salt) - .payingWith(GENESIS) - .gas(4_000_000L) - /* Cannot repeat CREATE2 - with same args without destroying the existing contract */ - .hasKnownStatusFrom(INVALID_SOLIDITY_ADDRESS, CONTRACT_REVERT_EXECUTED)), + // check failure when trying to deploy again + sourcing(() -> contractCall(contract, DEPLOY, testContractInitcode.get(), salt) + .payingWith(GENESIS) + .gas(4_000_000L) + /* Cannot repeat CREATE2 + with same args without destroying the existing contract */ + .hasKnownStatusFrom(INVALID_SOLIDITY_ADDRESS, CONTRACT_REVERT_EXECUTED)), - // check created contract - sourcing(() -> getContractInfo(mergedAliasAddr.get()) - .has(contractWith() - .defaultAdminKey() - .maxAutoAssociations(fungibleTransfersSize) - .hasAlreadyUsedAutomaticAssociations(fungibleTransfersSize) - .memo(LAZY_MEMO) - .balance(tcValue)) - .logged()), - sourcing( - () -> getContractBytecode(mergedAliasAddr.get()).isNonEmpty()), - sourcing(() -> - assertCreate2Address(contract, salt, expectedCreate2Address, testContractInitcode))); + // check created contract + sourcing(() -> getContractInfo(mergedAliasAddr.get()) + .has(contractWith() + .defaultAdminKey() + 
.maxAutoAssociations(fungibleTransfersSize) + .hasAlreadyUsedAutomaticAssociations(fungibleTransfersSize) + .memo(LAZY_MEMO) + .balance(tcValue)) + .logged()), + sourcing(() -> getContractBytecode(mergedAliasAddr.get()).isNonEmpty()), + sourcing(() -> assertCreate2Address(contract, salt, expectedCreate2Address, testContractInitcode)))); } @HapiTest diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip904/TokenAirdropTest.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip904/TokenAirdropTest.java index a400c4d08693..cfcb830ad04c 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip904/TokenAirdropTest.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/hip904/TokenAirdropTest.java @@ -173,101 +173,93 @@ class AirdropToExistingAccountsWhitFreeAutoAssociations { @HapiTest final Stream tokenAirdropToExistingAccountsTransfers() { - return defaultHapiSpec("should transfer fungible tokens") - .given() - .when( // associated receiver and receivers with free auto association slots - tokenAirdrop( - moveFungibleTokensTo(ASSOCIATED_RECEIVER), - moveFungibleTokensTo(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS), - moveFungibleTokensTo(RECEIVER_WITH_FREE_AUTO_ASSOCIATIONS)) - .payingWith(OWNER) - .via("fungible airdrop")) - .then( // assert txn record - getTxnRecord("fungible airdrop") - .hasPriority(recordWith() - .tokenTransfers(includingFungibleMovement(moving(30, FUNGIBLE_TOKEN) - .distributing( - OWNER, - RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS, - RECEIVER_WITH_FREE_AUTO_ASSOCIATIONS, - ASSOCIATED_RECEIVER)))), - // assert balance - getAccountBalance(ASSOCIATED_RECEIVER).hasTokenBalance(FUNGIBLE_TOKEN, 10), - getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) - .hasTokenBalance(FUNGIBLE_TOKEN, 10), - getAccountBalance(RECEIVER_WITH_FREE_AUTO_ASSOCIATIONS) - .hasTokenBalance(FUNGIBLE_TOKEN, 10), - // associate receiver - will be simple transfer - // unlimited associations receiver - 0.1 (because not associated yet) - // free auto associations receiver - 0.1 (because not associated yet) - validateChargedUsd("fungible airdrop", 0.2, 1)); + return hapiTest( + // associated receiver and receivers with free auto association slots + tokenAirdrop( + moveFungibleTokensTo(ASSOCIATED_RECEIVER), + moveFungibleTokensTo(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS), + moveFungibleTokensTo(RECEIVER_WITH_FREE_AUTO_ASSOCIATIONS)) + .payingWith(OWNER) + .via("fungible airdrop"), + // assert txn record + getTxnRecord("fungible airdrop") + .hasPriority(recordWith() + .tokenTransfers(includingFungibleMovement(moving(30, FUNGIBLE_TOKEN) + .distributing( + OWNER, + RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS, + RECEIVER_WITH_FREE_AUTO_ASSOCIATIONS, + ASSOCIATED_RECEIVER)))), + // assert balance + getAccountBalance(ASSOCIATED_RECEIVER).hasTokenBalance(FUNGIBLE_TOKEN, 10), + getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) + .hasTokenBalance(FUNGIBLE_TOKEN, 10), + getAccountBalance(RECEIVER_WITH_FREE_AUTO_ASSOCIATIONS).hasTokenBalance(FUNGIBLE_TOKEN, 10), + // associate receiver - will be simple transfer + // unlimited associations receiver - 0.1 (because not associated yet) + // free auto associations receiver - 0.1 (because not associated yet) + validateChargedUsd("fungible airdrop", 0.2, 1)); } @HapiTest final Stream tokenMultipleAirdropsToSameAccount() { String receiver = "OneReceiver"; - return defaultHapiSpec("to multiple accounts should transfer fungible tokens") - .given() - 
.when( - cryptoCreate("Sender1"), - cryptoCreate("Sender2"), - cryptoCreate("Sender3"), - cryptoCreate(receiver).maxAutomaticTokenAssociations(1), - tokenAssociate("Sender1", FUNGIBLE_TOKEN), - tokenAssociate("Sender2", FUNGIBLE_TOKEN), - tokenAssociate("Sender3", FUNGIBLE_TOKEN), - cryptoTransfer( - moving(10, FUNGIBLE_TOKEN).between(OWNER, "Sender1"), - moving(10, FUNGIBLE_TOKEN).between(OWNER, "Sender2"), - moving(10, FUNGIBLE_TOKEN).between(OWNER, "Sender3")), - tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between("Sender1", receiver)) - .payingWith("Sender1"), - tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between("Sender2", receiver)) - .payingWith("Sender2"), - tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between("Sender3", receiver)) - .payingWith("Sender3") - .via("multiple fungible airdrop")) - .then( - // assert balance - getAccountBalance(receiver).hasTokenBalance(FUNGIBLE_TOKEN, 30), - getAccountBalance("Sender1").hasTokenBalance(FUNGIBLE_TOKEN, 0), - getAccountBalance("Sender2").hasTokenBalance(FUNGIBLE_TOKEN, 0), - getAccountBalance("Sender3").hasTokenBalance(FUNGIBLE_TOKEN, 0)); + return hapiTest( + cryptoCreate("Sender1"), + cryptoCreate("Sender2"), + cryptoCreate("Sender3"), + cryptoCreate(receiver).maxAutomaticTokenAssociations(1), + tokenAssociate("Sender1", FUNGIBLE_TOKEN), + tokenAssociate("Sender2", FUNGIBLE_TOKEN), + tokenAssociate("Sender3", FUNGIBLE_TOKEN), + cryptoTransfer( + moving(10, FUNGIBLE_TOKEN).between(OWNER, "Sender1"), + moving(10, FUNGIBLE_TOKEN).between(OWNER, "Sender2"), + moving(10, FUNGIBLE_TOKEN).between(OWNER, "Sender3")), + tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between("Sender1", receiver)) + .payingWith("Sender1"), + tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between("Sender2", receiver)) + .payingWith("Sender2"), + tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between("Sender3", receiver)) + .payingWith("Sender3") + .via("multiple fungible airdrop"), + // assert balance + getAccountBalance(receiver).hasTokenBalance(FUNGIBLE_TOKEN, 30), + getAccountBalance("Sender1").hasTokenBalance(FUNGIBLE_TOKEN, 0), + getAccountBalance("Sender2").hasTokenBalance(FUNGIBLE_TOKEN, 0), + getAccountBalance("Sender3").hasTokenBalance(FUNGIBLE_TOKEN, 0)); } @HapiTest final Stream nftAirdropToExistingAccountsTransfers() { - return defaultHapiSpec("should transfer NFTs") - .given() - .when( // receivers with free auto association slots - tokenAirdrop( - movingUnique(NON_FUNGIBLE_TOKEN, 3L) - .between(OWNER, RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS), - movingUnique(NON_FUNGIBLE_TOKEN, 4L) - .between(OWNER, RECEIVER_WITH_FREE_AUTO_ASSOCIATIONS), - movingUnique(NON_FUNGIBLE_TOKEN, 5L) - .between(OWNER, ASSOCIATED_RECEIVER)) - .payingWith(OWNER) - .via("non fungible airdrop")) - .then( // assert txn record - getTxnRecord("non fungible airdrop") - .hasPriority(recordWith() - .tokenTransfers(includingNonfungibleMovement( - movingUnique(NON_FUNGIBLE_TOKEN, 3L, 4L, 5L) - .distributing( - RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS, - RECEIVER_WITH_FREE_AUTO_ASSOCIATIONS, - ASSOCIATED_RECEIVER)))), - // assert account balances - getAccountBalance(ASSOCIATED_RECEIVER).hasTokenBalance(NON_FUNGIBLE_TOKEN, 1), - getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) - .hasTokenBalance(NON_FUNGIBLE_TOKEN, 1), - getAccountBalance(RECEIVER_WITH_FREE_AUTO_ASSOCIATIONS) - .hasTokenBalance(NON_FUNGIBLE_TOKEN, 1), - // associate receiver - will be simple transfer - // unlimited associations receiver - 0.1 (because not associated yet) - // free auto associations receiver - 0.1 (because not associated 
yet) - validateChargedUsd("non fungible airdrop", 0.2, 1)); + return hapiTest( + // receivers with free auto association slots + tokenAirdrop( + movingUnique(NON_FUNGIBLE_TOKEN, 3L) + .between(OWNER, RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS), + movingUnique(NON_FUNGIBLE_TOKEN, 4L) + .between(OWNER, RECEIVER_WITH_FREE_AUTO_ASSOCIATIONS), + movingUnique(NON_FUNGIBLE_TOKEN, 5L).between(OWNER, ASSOCIATED_RECEIVER)) + .payingWith(OWNER) + .via("non fungible airdrop"), + // assert txn record + getTxnRecord("non fungible airdrop") + .hasPriority(recordWith() + .tokenTransfers(includingNonfungibleMovement( + movingUnique(NON_FUNGIBLE_TOKEN, 3L, 4L, 5L) + .distributing( + RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS, + RECEIVER_WITH_FREE_AUTO_ASSOCIATIONS, + ASSOCIATED_RECEIVER)))), + // assert account balances + getAccountBalance(ASSOCIATED_RECEIVER).hasTokenBalance(NON_FUNGIBLE_TOKEN, 1), + getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) + .hasTokenBalance(NON_FUNGIBLE_TOKEN, 1), + getAccountBalance(RECEIVER_WITH_FREE_AUTO_ASSOCIATIONS).hasTokenBalance(NON_FUNGIBLE_TOKEN, 1), + // associate receiver - will be simple transfer + // unlimited associations receiver - 0.1 (because not associated yet) + // free auto associations receiver - 0.1 (because not associated yet) + validateChargedUsd("non fungible airdrop", 0.2, 1)); } } @@ -276,60 +268,54 @@ final Stream nftAirdropToExistingAccountsTransfers() { class AirdropToExistingAccountsWithoutFreeAutoAssociations { @HapiTest final Stream tokenAirdropToExistingAccountsPending() { - return defaultHapiSpec("fungible tokens should be in pending state") - .given() - .when(tokenAirdrop( + return hapiTest( + tokenAirdrop( moveFungibleTokensTo(RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS), moveFungibleTokensTo(RECEIVER_WITH_0_AUTO_ASSOCIATIONS)) .payingWith(OWNER) - .via("fungible airdrop")) - .then( // assert txn record - getTxnRecord("fungible airdrop") - .hasPriority(recordWith() - .pendingAirdrops(includingFungiblePendingAirdrop( - moveFungibleTokensTo(RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS), - moveFungibleTokensTo(RECEIVER_WITH_0_AUTO_ASSOCIATIONS)))), - // assert balances - getAccountBalance(RECEIVER_WITH_0_AUTO_ASSOCIATIONS) - .hasTokenBalance(FUNGIBLE_TOKEN, 0), - getAccountBalance(RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS) - .hasTokenBalance(FUNGIBLE_TOKEN, 0), - // zero auto associations receiver - 0.1 (creates pending airdrop) - // without free auto associations receiver - 0.1 (creates pending airdrop) - validateChargedUsd("fungible airdrop", 0.2, 1)); + .via("fungible airdrop"), + // assert txn record + getTxnRecord("fungible airdrop") + .hasPriority(recordWith() + .pendingAirdrops(includingFungiblePendingAirdrop( + moveFungibleTokensTo(RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS), + moveFungibleTokensTo(RECEIVER_WITH_0_AUTO_ASSOCIATIONS)))), + // assert balances + getAccountBalance(RECEIVER_WITH_0_AUTO_ASSOCIATIONS).hasTokenBalance(FUNGIBLE_TOKEN, 0), + getAccountBalance(RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS) + .hasTokenBalance(FUNGIBLE_TOKEN, 0), + // zero auto associations receiver - 0.1 (creates pending airdrop) + // without free auto associations receiver - 0.1 (creates pending airdrop) + validateChargedUsd("fungible airdrop", 0.2, 1)); } @HapiTest final Stream nftAirdropToExistingAccountsPending() { - return defaultHapiSpec("NFTs should be in pending state") - .given() - .when( // without free auto association slots - tokenAirdrop( + return hapiTest( + // without free auto association slots + tokenAirdrop( + 
movingUnique(NON_FUNGIBLE_TOKEN, 1L) + .between(OWNER, RECEIVER_WITH_0_AUTO_ASSOCIATIONS), + movingUnique(NON_FUNGIBLE_TOKEN, 2L) + .between(OWNER, RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS)) + .payingWith(OWNER) + .via("non fungible airdrop"), + // assert the pending list + getTxnRecord("non fungible airdrop") + .hasPriority(recordWith() + .pendingAirdrops(includingNftPendingAirdrop( movingUnique(NON_FUNGIBLE_TOKEN, 1L) .between(OWNER, RECEIVER_WITH_0_AUTO_ASSOCIATIONS), movingUnique(NON_FUNGIBLE_TOKEN, 2L) - .between(OWNER, RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS)) - .payingWith(OWNER) - .via("non fungible airdrop")) - .then( // assert the pending list - getTxnRecord("non fungible airdrop") - .hasPriority(recordWith() - .pendingAirdrops(includingNftPendingAirdrop( - movingUnique(NON_FUNGIBLE_TOKEN, 1L) - .between(OWNER, RECEIVER_WITH_0_AUTO_ASSOCIATIONS), - movingUnique(NON_FUNGIBLE_TOKEN, 2L) - .between( - OWNER, - RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS)))), + .between(OWNER, RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS)))), - // assert account balances - getAccountBalance(RECEIVER_WITH_0_AUTO_ASSOCIATIONS) - .hasTokenBalance(NON_FUNGIBLE_TOKEN, 0), - getAccountBalance(RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS) - .hasTokenBalance(NON_FUNGIBLE_TOKEN, 0), - // zero auto associations receiver - 0.1 (creates pending airdrop) - // without free auto associations receiver - 0.1 (creates pending airdrop) - validateChargedUsd("non fungible airdrop", 0.2, 1)); + // assert account balances + getAccountBalance(RECEIVER_WITH_0_AUTO_ASSOCIATIONS).hasTokenBalance(NON_FUNGIBLE_TOKEN, 0), + getAccountBalance(RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS) + .hasTokenBalance(NON_FUNGIBLE_TOKEN, 0), + // zero auto associations receiver - 0.1 (creates pending airdrop) + // without free auto associations receiver - 0.1 (creates pending airdrop) + validateChargedUsd("non fungible airdrop", 0.2, 1)); } @HapiTest @@ -370,67 +356,64 @@ final Stream transferMultipleFtAndNftToEOAWithNoFreeAutoAssociation final String NFT_FOR_MULTIPLE_PENDING_TRANSFER = "nftForMultiplePendingTransfer"; final String FT_FOR_MULTIPLE_PENDING_TRANSFER = "ftForMultiplePendingTransfer"; var nftSupplyKeyForMultipleTransfers = "nftSupplyKeyForMultipleTransfer"; - return defaultHapiSpec("Send multiple FT and NFT from EOA to Account without free Auto-Associations") - .given( - tokenCreate(FT_FOR_MULTIPLE_PENDING_TRANSFER) - .treasury(OWNER) - .tokenType(FUNGIBLE_COMMON) - .initialSupply(1000L), - newKeyNamed(nftSupplyKeyForMultipleTransfers), - tokenCreate(NFT_FOR_MULTIPLE_PENDING_TRANSFER) - .treasury(OWNER) - .tokenType(NON_FUNGIBLE_UNIQUE) - .initialSupply(0L) - .name(NFT_FOR_MULTIPLE_PENDING_TRANSFER) - .supplyKey(nftSupplyKeyForMultipleTransfers), - mintToken( - NFT_FOR_MULTIPLE_PENDING_TRANSFER, - IntStream.range(0, 10) - .mapToObj(a -> ByteString.copyFromUtf8(String.valueOf(a))) - .toList())) - .when( - tokenAirdrop( - moving(10, FT_FOR_MULTIPLE_PENDING_TRANSFER) - .between(OWNER, RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS), - movingUnique(NFT_FOR_MULTIPLE_PENDING_TRANSFER, 1L) - .between(OWNER, RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS)) - .payingWith(OWNER) - .signedBy(OWNER) - .via("first airdrop"), - tokenAirdrop( + return hapiTest( + tokenCreate(FT_FOR_MULTIPLE_PENDING_TRANSFER) + .treasury(OWNER) + .tokenType(FUNGIBLE_COMMON) + .initialSupply(1000L), + newKeyNamed(nftSupplyKeyForMultipleTransfers), + tokenCreate(NFT_FOR_MULTIPLE_PENDING_TRANSFER) + .treasury(OWNER) + .tokenType(NON_FUNGIBLE_UNIQUE) + .initialSupply(0L) + 
.name(NFT_FOR_MULTIPLE_PENDING_TRANSFER) + .supplyKey(nftSupplyKeyForMultipleTransfers), + mintToken( + NFT_FOR_MULTIPLE_PENDING_TRANSFER, + IntStream.range(0, 10) + .mapToObj(a -> ByteString.copyFromUtf8(String.valueOf(a))) + .toList()), + tokenAirdrop( + moving(10, FT_FOR_MULTIPLE_PENDING_TRANSFER) + .between(OWNER, RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS), + movingUnique(NFT_FOR_MULTIPLE_PENDING_TRANSFER, 1L) + .between(OWNER, RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS)) + .payingWith(OWNER) + .signedBy(OWNER) + .via("first airdrop"), + tokenAirdrop( + moving(10, FT_FOR_MULTIPLE_PENDING_TRANSFER) + .between(OWNER, RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS), + movingUnique(NFT_FOR_MULTIPLE_PENDING_TRANSFER, 2L) + .between(OWNER, RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS)) + .payingWith(OWNER) + .signedBy(OWNER) + .via("second airdrop"), + getTxnRecord("first airdrop") + .hasPriority(recordWith() + .pendingAirdrops(includingFungiblePendingAirdrop( moving(10, FT_FOR_MULTIPLE_PENDING_TRANSFER) - .between(OWNER, RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS), - movingUnique(NFT_FOR_MULTIPLE_PENDING_TRANSFER, 2L) - .between(OWNER, RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS)) - .payingWith(OWNER) - .signedBy(OWNER) - .via("second airdrop")) - .then( - getTxnRecord("first airdrop") - .hasPriority(recordWith() - .pendingAirdrops(includingFungiblePendingAirdrop(moving( - 10, FT_FOR_MULTIPLE_PENDING_TRANSFER) .between(OWNER, RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS))) - .pendingAirdrops(includingNftPendingAirdrop(movingUnique( - NFT_FOR_MULTIPLE_PENDING_TRANSFER, 1L) + .pendingAirdrops(includingNftPendingAirdrop( + movingUnique(NFT_FOR_MULTIPLE_PENDING_TRANSFER, 1L) .between(OWNER, RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS)))), - getTxnRecord("second airdrop") - .hasPriority(recordWith() - .pendingAirdrops(includingFungiblePendingAirdrop(moving( - 20, FT_FOR_MULTIPLE_PENDING_TRANSFER) + getTxnRecord("second airdrop") + .hasPriority(recordWith() + .pendingAirdrops(includingFungiblePendingAirdrop( + moving(20, FT_FOR_MULTIPLE_PENDING_TRANSFER) .between(OWNER, RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS))) - .pendingAirdrops(includingNftPendingAirdrop(movingUnique( - NFT_FOR_MULTIPLE_PENDING_TRANSFER, 2L) + .pendingAirdrops(includingNftPendingAirdrop( + movingUnique(NFT_FOR_MULTIPLE_PENDING_TRANSFER, 2L) .between(OWNER, RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS)))), - // assert account balances - getAccountBalance(RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS) - .hasTokenBalance(FT_FOR_MULTIPLE_PENDING_TRANSFER, 0) - .hasTokenBalance(NFT_FOR_MULTIPLE_PENDING_TRANSFER, 0), - getAccountBalance(OWNER) - .hasTokenBalance(FT_FOR_MULTIPLE_PENDING_TRANSFER, 1000) - .hasTokenBalance(NFT_FOR_MULTIPLE_PENDING_TRANSFER, 10L), - validateChargedUsd("first airdrop", 0.2, 10), - validateChargedUsd("second airdrop", 0.15, 10)); + // assert account balances + getAccountBalance(RECEIVER_WITHOUT_FREE_AUTO_ASSOCIATIONS) + .hasTokenBalance(FT_FOR_MULTIPLE_PENDING_TRANSFER, 0) + .hasTokenBalance(NFT_FOR_MULTIPLE_PENDING_TRANSFER, 0), + getAccountBalance(OWNER) + .hasTokenBalance(FT_FOR_MULTIPLE_PENDING_TRANSFER, 1000) + .hasTokenBalance(NFT_FOR_MULTIPLE_PENDING_TRANSFER, 10L), + validateChargedUsd("first airdrop", 0.2, 10), + validateChargedUsd("second airdrop", 0.15, 10)); } // AIRDROP_21 @@ -477,33 +460,31 @@ final Stream transferMultipleFtAndNftToEOAWithNoFreeAutoAssociation @HapiTest @DisplayName("with multiple tokens") final Stream tokenAirdropMultipleTokens() { - return defaultHapiSpec("airdrop multiple tokens should pass") - .given( - 
createTokenWithName("FT1"), - createTokenWithName("FT2"), - createTokenWithName("FT3"), - createTokenWithName("FT4"), - createTokenWithName("FT5")) - .when(tokenAirdrop( + return hapiTest( + createTokenWithName("FT1"), + createTokenWithName("FT2"), + createTokenWithName("FT3"), + createTokenWithName("FT4"), + createTokenWithName("FT5"), + tokenAirdrop( defaultMovementOfToken("FT1"), defaultMovementOfToken("FT2"), defaultMovementOfToken("FT3"), defaultMovementOfToken("FT4"), defaultMovementOfToken("FT5")) .payingWith(OWNER) - .via("fungible airdrop")) - .then( - // assert balances - getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) - .hasTokenBalance("FT1", 10), - getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) - .hasTokenBalance("FT2", 10), - getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) - .hasTokenBalance("FT3", 10), - getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) - .hasTokenBalance("FT4", 10), - getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) - .hasTokenBalance("FT5", 10)); + .via("fungible airdrop"), + // assert balances + getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) + .hasTokenBalance("FT1", 10), + getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) + .hasTokenBalance("FT2", 10), + getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) + .hasTokenBalance("FT3", 10), + getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) + .hasTokenBalance("FT4", 10), + getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) + .hasTokenBalance("FT5", 10)); } } @@ -515,62 +496,58 @@ final Stream consequentAirdrops() { // the second airdrop is directly transferred to the recipient and the first airdrop remains in pending // state var receiver = "receiver"; - return defaultHapiSpec("should be not affected by following airdrops") - .given( - cryptoCreate(receiver).maxAutomaticTokenAssociations(0), - // send first airdrop - tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between(OWNER, receiver)) - .payingWith(OWNER) - .via("first"), - getTxnRecord("first") - // assert pending airdrops - .hasPriority(recordWith() - .pendingAirdrops(includingFungiblePendingAirdrop( - moving(10, FUNGIBLE_TOKEN).between(OWNER, receiver)))), - // creates pending airdrop - validateChargedUsd("first", 0.1, 10)) - .when(tokenAssociate(receiver, FUNGIBLE_TOKEN)) - .then( // this time tokens should be transferred - tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between(OWNER, receiver)) - .payingWith(OWNER) - .via("second"), - // assert OWNER and receiver accounts to ensure first airdrop is still in pending state - getTxnRecord("second") - // assert transfers - .hasPriority(recordWith() - .tokenTransfers(includingFungibleMovement( - moving(10, FUNGIBLE_TOKEN).between(OWNER, receiver)))), - // just a crypto transfer - validateChargedUsd("second", 0.001, 10), - // assert the account balance - getAccountBalance(receiver).hasTokenBalance(FUNGIBLE_TOKEN, 10)); + return hapiTest( + cryptoCreate(receiver).maxAutomaticTokenAssociations(0), + // send first airdrop + tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between(OWNER, receiver)) + .payingWith(OWNER) + .via("first"), + getTxnRecord("first") + // assert pending airdrops + .hasPriority(recordWith() + .pendingAirdrops(includingFungiblePendingAirdrop( + moving(10, FUNGIBLE_TOKEN).between(OWNER, receiver)))), + // creates pending airdrop + validateChargedUsd("first", 0.1, 10), + tokenAssociate(receiver, FUNGIBLE_TOKEN), + // this time tokens should be transferred + tokenAirdrop(moving(10, 
FUNGIBLE_TOKEN).between(OWNER, receiver)) + .payingWith(OWNER) + .via("second"), + // assert OWNER and receiver accounts to ensure first airdrop is still in pending state + getTxnRecord("second") + // assert transfers + .hasPriority(recordWith() + .tokenTransfers(includingFungibleMovement( + moving(10, FUNGIBLE_TOKEN).between(OWNER, receiver)))), + // just a crypto transfer + validateChargedUsd("second", 0.001, 10), + // assert the account balance + getAccountBalance(receiver).hasTokenBalance(FUNGIBLE_TOKEN, 10)); } @HapiTest @DisplayName("that is alias with 0 free maxAutoAssociations") final Stream airdropToAliasWithNoFreeSlots() { final var validAliasWithNoFreeSlots = "validAliasWithNoFreeSlots"; - return defaultHapiSpec("should go in pending state") - .given(newKeyNamed(validAliasWithNoFreeSlots)) - .when( - cryptoTransfer(movingUnique(NON_FUNGIBLE_TOKEN, 10L) - .between(OWNER, validAliasWithNoFreeSlots)) - .payingWith(OWNER) - .signedBy(OWNER, validAliasWithNoFreeSlots), - withOpContext((spec, opLog) -> updateSpecFor(spec, validAliasWithNoFreeSlots)), - cryptoUpdateAliased(validAliasWithNoFreeSlots) - .maxAutomaticAssociations(1) - .signedBy(validAliasWithNoFreeSlots, DEFAULT_PAYER)) - .then( - tokenAirdrop(moveFungibleTokensTo(validAliasWithNoFreeSlots)) - .payingWith(OWNER) - .via("aliasAirdrop"), - getTxnRecord("aliasAirdrop") - .hasPriority(recordWith() - .pendingAirdrops(includingFungiblePendingAirdrop( - moveFungibleTokensTo(validAliasWithNoFreeSlots)))), - getAccountBalance(validAliasWithNoFreeSlots).hasTokenBalance(NON_FUNGIBLE_TOKEN, 1), - getAccountBalance(validAliasWithNoFreeSlots).hasTokenBalance(FUNGIBLE_TOKEN, 0)); + return hapiTest( + newKeyNamed(validAliasWithNoFreeSlots), + cryptoTransfer(movingUnique(NON_FUNGIBLE_TOKEN, 10L).between(OWNER, validAliasWithNoFreeSlots)) + .payingWith(OWNER) + .signedBy(OWNER, validAliasWithNoFreeSlots), + withOpContext((spec, opLog) -> updateSpecFor(spec, validAliasWithNoFreeSlots)), + cryptoUpdateAliased(validAliasWithNoFreeSlots) + .maxAutomaticAssociations(1) + .signedBy(validAliasWithNoFreeSlots, DEFAULT_PAYER), + tokenAirdrop(moveFungibleTokensTo(validAliasWithNoFreeSlots)) + .payingWith(OWNER) + .via("aliasAirdrop"), + getTxnRecord("aliasAirdrop") + .hasPriority(recordWith() + .pendingAirdrops(includingFungiblePendingAirdrop( + moveFungibleTokensTo(validAliasWithNoFreeSlots)))), + getAccountBalance(validAliasWithNoFreeSlots).hasTokenBalance(NON_FUNGIBLE_TOKEN, 1), + getAccountBalance(validAliasWithNoFreeSlots).hasTokenBalance(FUNGIBLE_TOKEN, 0)); } @HapiTest @@ -650,116 +627,108 @@ class ReceiverSigRequiredTests { @DisplayName("signed and no free slots") final Stream receiverSigInPending() { - return defaultHapiSpec("should go to pending state") - .given(cryptoCreate(RECEIVER_WITH_SIG_REQUIRED) + return hapiTest( + cryptoCreate(RECEIVER_WITH_SIG_REQUIRED) .receiverSigRequired(true) - .maxAutomaticTokenAssociations(0)) - .when(tokenAirdrop(moveFungibleTokensTo(RECEIVER_WITH_SIG_REQUIRED)) + .maxAutomaticTokenAssociations(0), + tokenAirdrop(moveFungibleTokensTo(RECEIVER_WITH_SIG_REQUIRED)) .payingWith(OWNER) .signedBy(RECEIVER_WITH_SIG_REQUIRED, OWNER) - .via("sigTxn")) - .then( - getTxnRecord("sigTxn") - // assert transfers - .hasPriority(recordWith() - .pendingAirdrops( - includingFungiblePendingAirdrop(moving(10, FUNGIBLE_TOKEN) - .between(OWNER, RECEIVER_WITH_SIG_REQUIRED)))), - // assert balances - getAccountBalance(RECEIVER_WITH_SIG_REQUIRED).hasTokenBalance(FUNGIBLE_TOKEN, 0)); + .via("sigTxn"), + getTxnRecord("sigTxn") + // 
assert transfers + .hasPriority(recordWith() + .pendingAirdrops(includingFungiblePendingAirdrop(moving(10, FUNGIBLE_TOKEN) + .between(OWNER, RECEIVER_WITH_SIG_REQUIRED)))), + // assert balances + getAccountBalance(RECEIVER_WITH_SIG_REQUIRED).hasTokenBalance(FUNGIBLE_TOKEN, 0)); } @HapiTest @DisplayName("signed and with free slots") final Stream receiverSigInPendingFreeSlots() { - return defaultHapiSpec("should result in successful transfer") - .given(cryptoCreate(RECEIVER_WITH_SIG_REQUIRED) + return hapiTest( + cryptoCreate(RECEIVER_WITH_SIG_REQUIRED) .receiverSigRequired(true) - .maxAutomaticTokenAssociations(5)) - .when(tokenAirdrop(moveFungibleTokensTo(RECEIVER_WITH_SIG_REQUIRED)) + .maxAutomaticTokenAssociations(5), + tokenAirdrop(moveFungibleTokensTo(RECEIVER_WITH_SIG_REQUIRED)) .payingWith(OWNER) .signedBy(RECEIVER_WITH_SIG_REQUIRED, OWNER) - .via("sigTxn")) - .then( - getTxnRecord("sigTxn") - // assert transfers - .hasPriority(recordWith() - .tokenTransfers(includingFungibleMovement(moving(10, FUNGIBLE_TOKEN) - .between(OWNER, RECEIVER_WITH_SIG_REQUIRED)))), - // assert balances - getAccountBalance(RECEIVER_WITH_SIG_REQUIRED).hasTokenBalance(FUNGIBLE_TOKEN, 10)); + .via("sigTxn"), + getTxnRecord("sigTxn") + // assert transfers + .hasPriority(recordWith() + .tokenTransfers(includingFungibleMovement(moving(10, FUNGIBLE_TOKEN) + .between(OWNER, RECEIVER_WITH_SIG_REQUIRED)))), + // assert balances + getAccountBalance(RECEIVER_WITH_SIG_REQUIRED).hasTokenBalance(FUNGIBLE_TOKEN, 10)); } @HapiTest @DisplayName("and is associated and signed by receiver") final Stream receiverSigIsAssociated() { - return defaultHapiSpec("should result in successful transfer") - .given(cryptoCreate(RECEIVER_WITH_SIG_REQUIRED) + return hapiTest( + cryptoCreate(RECEIVER_WITH_SIG_REQUIRED) .receiverSigRequired(true) - .maxAutomaticTokenAssociations(5)) - .when(tokenAssociate(RECEIVER_WITH_SIG_REQUIRED, FUNGIBLE_TOKEN)) - .then( - tokenAirdrop(moveFungibleTokensTo(RECEIVER_WITH_SIG_REQUIRED)) - .payingWith(OWNER) - .signedBy(RECEIVER_WITH_SIG_REQUIRED, OWNER) - .via("sigTxn"), - getTxnRecord("sigTxn") - .hasPriority(recordWith() - .tokenTransfers(includingFungibleMovement( - moveFungibleTokensTo(RECEIVER_WITH_SIG_REQUIRED)))), - getAccountBalance(RECEIVER_WITH_SIG_REQUIRED).hasTokenBalance(FUNGIBLE_TOKEN, 10)); + .maxAutomaticTokenAssociations(5), + tokenAssociate(RECEIVER_WITH_SIG_REQUIRED, FUNGIBLE_TOKEN), + tokenAirdrop(moveFungibleTokensTo(RECEIVER_WITH_SIG_REQUIRED)) + .payingWith(OWNER) + .signedBy(RECEIVER_WITH_SIG_REQUIRED, OWNER) + .via("sigTxn"), + getTxnRecord("sigTxn") + .hasPriority(recordWith() + .tokenTransfers(includingFungibleMovement( + moveFungibleTokensTo(RECEIVER_WITH_SIG_REQUIRED)))), + getAccountBalance(RECEIVER_WITH_SIG_REQUIRED).hasTokenBalance(FUNGIBLE_TOKEN, 10)); } @HapiTest @DisplayName("and is associated but not signed by receiver") final Stream receiverSigIsAssociatedButNotSigned() { - return defaultHapiSpec("should result in pending transfer") - .given(cryptoCreate(RECEIVER_WITH_SIG_REQUIRED) + return hapiTest( + cryptoCreate(RECEIVER_WITH_SIG_REQUIRED) .receiverSigRequired(true) - .maxAutomaticTokenAssociations(5)) - .when(tokenAssociate(RECEIVER_WITH_SIG_REQUIRED, FUNGIBLE_TOKEN)) - .then( - tokenAirdrop(moveFungibleTokensTo(RECEIVER_WITH_SIG_REQUIRED)) - .payingWith(OWNER) - .signedBy(OWNER) - .via("sigTxn"), - getTxnRecord("sigTxn") - .hasPriority(recordWith() - .pendingAirdrops(includingFungiblePendingAirdrop( - moveFungibleTokensTo(RECEIVER_WITH_SIG_REQUIRED)))), - 
getAccountBalance(RECEIVER_WITH_SIG_REQUIRED).hasTokenBalance(FUNGIBLE_TOKEN, 0)); + .maxAutomaticTokenAssociations(5), + tokenAssociate(RECEIVER_WITH_SIG_REQUIRED, FUNGIBLE_TOKEN), + tokenAirdrop(moveFungibleTokensTo(RECEIVER_WITH_SIG_REQUIRED)) + .payingWith(OWNER) + .signedBy(OWNER) + .via("sigTxn"), + getTxnRecord("sigTxn") + .hasPriority(recordWith() + .pendingAirdrops(includingFungiblePendingAirdrop( + moveFungibleTokensTo(RECEIVER_WITH_SIG_REQUIRED)))), + getAccountBalance(RECEIVER_WITH_SIG_REQUIRED).hasTokenBalance(FUNGIBLE_TOKEN, 0)); } @HapiTest @DisplayName("multiple tokens with one associated") final Stream multipleTokensOneAssociated() { - return defaultHapiSpec("should transfer one token and keep the other in pending state") - .given( - tokenCreate("FT_B").treasury(OWNER).initialSupply(500), - cryptoCreate(RECEIVER_WITH_SIG_REQUIRED) - .receiverSigRequired(true) - .maxAutomaticTokenAssociations(0)) - .when(tokenAssociate(RECEIVER_WITH_SIG_REQUIRED, "FT_B")) - .then( - tokenAirdrop( - moving(10, FUNGIBLE_TOKEN).between(OWNER, RECEIVER_WITH_SIG_REQUIRED), - moving(10, "FT_B").between(OWNER, RECEIVER_WITH_SIG_REQUIRED)) - .payingWith(OWNER) - .signedBy(OWNER, RECEIVER_WITH_SIG_REQUIRED) - .via("sigTxn"), - getTxnRecord("sigTxn") - .hasPriority(recordWith() - .pendingAirdrops( - includingFungiblePendingAirdrop(moving(10, FUNGIBLE_TOKEN) - .between(OWNER, RECEIVER_WITH_SIG_REQUIRED))) - .tokenTransfers(includingFungibleMovement(moving(10, "FT_B") - .between(OWNER, RECEIVER_WITH_SIG_REQUIRED)))), - getAccountBalance(RECEIVER_WITH_SIG_REQUIRED).hasTokenBalance(FUNGIBLE_TOKEN, 0), - getAccountBalance(RECEIVER_WITH_SIG_REQUIRED).hasTokenBalance("FT_B", 10)); + return hapiTest( + tokenCreate("FT_B").treasury(OWNER).initialSupply(500), + cryptoCreate(RECEIVER_WITH_SIG_REQUIRED) + .receiverSigRequired(true) + .maxAutomaticTokenAssociations(0), + tokenAssociate(RECEIVER_WITH_SIG_REQUIRED, "FT_B"), + tokenAirdrop( + moving(10, FUNGIBLE_TOKEN).between(OWNER, RECEIVER_WITH_SIG_REQUIRED), + moving(10, "FT_B").between(OWNER, RECEIVER_WITH_SIG_REQUIRED)) + .payingWith(OWNER) + .signedBy(OWNER, RECEIVER_WITH_SIG_REQUIRED) + .via("sigTxn"), + getTxnRecord("sigTxn") + .hasPriority(recordWith() + .pendingAirdrops(includingFungiblePendingAirdrop( + moving(10, FUNGIBLE_TOKEN).between(OWNER, RECEIVER_WITH_SIG_REQUIRED))) + .tokenTransfers(includingFungibleMovement( + moving(10, "FT_B").between(OWNER, RECEIVER_WITH_SIG_REQUIRED)))), + getAccountBalance(RECEIVER_WITH_SIG_REQUIRED).hasTokenBalance(FUNGIBLE_TOKEN, 0), + getAccountBalance(RECEIVER_WITH_SIG_REQUIRED).hasTokenBalance("FT_B", 10)); } } @@ -795,132 +764,115 @@ static void beforeAll(@NonNull final TestLifecycle lifecycle) { @Order(1) final Stream airdropFungibleWithFixedHbarCustomFee() { final var initialBalance = 100 * ONE_HUNDRED_HBARS; - return defaultHapiSpec(" sender should prepay hbar custom fee") - .given( - cryptoCreate(OWNER_OF_TOKENS_WITH_CUSTOM_FEES).balance(initialBalance), - tokenAssociate(OWNER_OF_TOKENS_WITH_CUSTOM_FEES, FT_WITH_HBAR_FIXED_FEE), - cryptoTransfer(moving(1000, FT_WITH_HBAR_FIXED_FEE) - .between(TREASURY_FOR_CUSTOM_FEE_TOKENS, OWNER_OF_TOKENS_WITH_CUSTOM_FEES))) - .when(tokenAirdrop(moving(1, FT_WITH_HBAR_FIXED_FEE) + return hapiTest( + cryptoCreate(OWNER_OF_TOKENS_WITH_CUSTOM_FEES).balance(initialBalance), + tokenAssociate(OWNER_OF_TOKENS_WITH_CUSTOM_FEES, FT_WITH_HBAR_FIXED_FEE), + cryptoTransfer(moving(1000, FT_WITH_HBAR_FIXED_FEE) + .between(TREASURY_FOR_CUSTOM_FEE_TOKENS, OWNER_OF_TOKENS_WITH_CUSTOM_FEES)), + 
tokenAirdrop(moving(1, FT_WITH_HBAR_FIXED_FEE) .between(OWNER_OF_TOKENS_WITH_CUSTOM_FEES, RECEIVER_WITH_0_AUTO_ASSOCIATIONS)) .fee(ONE_HUNDRED_HBARS) .payingWith(OWNER_OF_TOKENS_WITH_CUSTOM_FEES) - .via("transferTx")) - .then( - // assert balances - getAccountBalance(RECEIVER_WITH_0_AUTO_ASSOCIATIONS) - .hasTokenBalance(FT_WITH_HBAR_FIXED_FEE, 0), - getAccountBalance(HBAR_COLLECTOR).hasTinyBars(HBAR_FEE), - withOpContext((spec, log) -> { - final var record = getTxnRecord("transferTx"); - allRunFor(spec, record); - final var txFee = record.getResponseRecord().getTransactionFee(); - // the token should not be transferred but the custom fee should be charged - final var ownerBalance = getAccountBalance(OWNER_OF_TOKENS_WITH_CUSTOM_FEES) - .hasTinyBars(initialBalance - (txFee + HBAR_FEE)) - .hasTokenBalance(FT_WITH_HBAR_FIXED_FEE, 1000); - allRunFor(spec, ownerBalance); - }), - // pending airdrop should be created - validateChargedUsd("transferTx", 0.1, 10)); + .via("transferTx"), + // assert balances + getAccountBalance(RECEIVER_WITH_0_AUTO_ASSOCIATIONS).hasTokenBalance(FT_WITH_HBAR_FIXED_FEE, 0), + getAccountBalance(HBAR_COLLECTOR).hasTinyBars(HBAR_FEE), + withOpContext((spec, log) -> { + final var record = getTxnRecord("transferTx"); + allRunFor(spec, record); + final var txFee = record.getResponseRecord().getTransactionFee(); + // the token should not be transferred but the custom fee should be charged + final var ownerBalance = getAccountBalance(OWNER_OF_TOKENS_WITH_CUSTOM_FEES) + .hasTinyBars(initialBalance - (txFee + HBAR_FEE)) + .hasTokenBalance(FT_WITH_HBAR_FIXED_FEE, 1000); + allRunFor(spec, ownerBalance); + }), + // pending airdrop should be created + validateChargedUsd("transferTx", 0.1, 10)); } @HapiTest @DisplayName("fungible token with fixed Hbar fee payed by treasury") final Stream airdropFungibleWithFixedHbarCustomFeePayedByTreasury() { - return defaultHapiSpec(" sender should prepay hbar custom fee") - .given() - .when(tokenAirdrop(moving(1, TREASURY_AS_SENDER_TOKEN) + return hapiTest( + tokenAirdrop(moving(1, TREASURY_AS_SENDER_TOKEN) .between(TREASURY_AS_SENDER, RECEIVER_WITH_0_AUTO_ASSOCIATIONS)) .payingWith(TREASURY_AS_SENDER) .signedBy(TREASURY_AS_SENDER) - .via("transferTx")) - .then( - // custom fee should not be charged - getAccountBalance(TREASURY_AS_SENDER).hasTokenBalance(DENOM_TOKEN, 0), - validateChargedUsd("transferTx", 0.1, 10)); + .via("transferTx"), + // custom fee should not be charged + getAccountBalance(TREASURY_AS_SENDER).hasTokenBalance(DENOM_TOKEN, 0), + validateChargedUsd("transferTx", 0.1, 10)); } @HapiTest @DisplayName("NFT with 2 layers fixed Hts fee") @Order(2) final Stream transferNonFungibleWithFixedHtsCustomFees2Layers() { - return defaultHapiSpec("sender should prepay hts custom fee") - .given( - cryptoCreate(OWNER_OF_TOKENS_WITH_CUSTOM_FEES).balance(100 * ONE_HUNDRED_HBARS), - tokenAssociate(OWNER_OF_TOKENS_WITH_CUSTOM_FEES, DENOM_TOKEN), - tokenAssociate(OWNER_OF_TOKENS_WITH_CUSTOM_FEES, FT_WITH_HTS_FIXED_FEE), - tokenAssociate(OWNER_OF_TOKENS_WITH_CUSTOM_FEES, NFT_WITH_HTS_FIXED_FEE), - tokenAssociate(RECEIVER_WITH_0_AUTO_ASSOCIATIONS, FT_WITH_HTS_FIXED_FEE), - cryptoTransfer( - movingUnique(NFT_WITH_HTS_FIXED_FEE, 1L) - .between(TREASURY_FOR_CUSTOM_FEE_TOKENS, OWNER_OF_TOKENS_WITH_CUSTOM_FEES), - moving(HTS_FEE, FT_WITH_HTS_FIXED_FEE) - .between(TREASURY_FOR_CUSTOM_FEE_TOKENS, OWNER_OF_TOKENS_WITH_CUSTOM_FEES), - moving(HTS_FEE, DENOM_TOKEN) - .between(TREASURY_FOR_CUSTOM_FEE_TOKENS, OWNER_OF_TOKENS_WITH_CUSTOM_FEES))) - .when( - 
tokenAirdrop(movingUnique(NFT_WITH_HTS_FIXED_FEE, 1L) - .between( - OWNER_OF_TOKENS_WITH_CUSTOM_FEES, - RECEIVER_WITH_0_AUTO_ASSOCIATIONS)) - .payingWith(OWNER_OF_TOKENS_WITH_CUSTOM_FEES) - .via("transferTx"), - // pending airdrop should be created - validateChargedUsd("transferTx", 0.1, 10)) - .then( - getAccountBalance(OWNER_OF_TOKENS_WITH_CUSTOM_FEES) - .hasTokenBalance(NFT_WITH_HTS_FIXED_FEE, 1) // token was transferred - .hasTokenBalance(FT_WITH_HTS_FIXED_FEE, 0) // hts was charged - .hasTokenBalance(DENOM_TOKEN, 0), // hts was charged - getAccountBalance(RECEIVER_WITH_0_AUTO_ASSOCIATIONS) - .hasTokenBalance(NFT_WITH_HTS_FIXED_FEE, 0), - getAccountBalance(HTS_COLLECTOR).hasTokenBalance(DENOM_TOKEN, htsFee), - getAccountBalance(HTS_COLLECTOR).hasTokenBalance(FT_WITH_HTS_FIXED_FEE, htsFee)); + return hapiTest( + cryptoCreate(OWNER_OF_TOKENS_WITH_CUSTOM_FEES).balance(100 * ONE_HUNDRED_HBARS), + tokenAssociate(OWNER_OF_TOKENS_WITH_CUSTOM_FEES, DENOM_TOKEN), + tokenAssociate(OWNER_OF_TOKENS_WITH_CUSTOM_FEES, FT_WITH_HTS_FIXED_FEE), + tokenAssociate(OWNER_OF_TOKENS_WITH_CUSTOM_FEES, NFT_WITH_HTS_FIXED_FEE), + tokenAssociate(RECEIVER_WITH_0_AUTO_ASSOCIATIONS, FT_WITH_HTS_FIXED_FEE), + cryptoTransfer( + movingUnique(NFT_WITH_HTS_FIXED_FEE, 1L) + .between(TREASURY_FOR_CUSTOM_FEE_TOKENS, OWNER_OF_TOKENS_WITH_CUSTOM_FEES), + moving(HTS_FEE, FT_WITH_HTS_FIXED_FEE) + .between(TREASURY_FOR_CUSTOM_FEE_TOKENS, OWNER_OF_TOKENS_WITH_CUSTOM_FEES), + moving(HTS_FEE, DENOM_TOKEN) + .between(TREASURY_FOR_CUSTOM_FEE_TOKENS, OWNER_OF_TOKENS_WITH_CUSTOM_FEES)), + tokenAirdrop(movingUnique(NFT_WITH_HTS_FIXED_FEE, 1L) + .between(OWNER_OF_TOKENS_WITH_CUSTOM_FEES, RECEIVER_WITH_0_AUTO_ASSOCIATIONS)) + .payingWith(OWNER_OF_TOKENS_WITH_CUSTOM_FEES) + .via("transferTx"), + // pending airdrop should be created + validateChargedUsd("transferTx", 0.1, 10), + getAccountBalance(OWNER_OF_TOKENS_WITH_CUSTOM_FEES) + .hasTokenBalance(NFT_WITH_HTS_FIXED_FEE, 1) // token was transferred + .hasTokenBalance(FT_WITH_HTS_FIXED_FEE, 0) // hts was charged + .hasTokenBalance(DENOM_TOKEN, 0), // hts was charged + getAccountBalance(RECEIVER_WITH_0_AUTO_ASSOCIATIONS).hasTokenBalance(NFT_WITH_HTS_FIXED_FEE, 0), + getAccountBalance(HTS_COLLECTOR).hasTokenBalance(DENOM_TOKEN, htsFee), + getAccountBalance(HTS_COLLECTOR).hasTokenBalance(FT_WITH_HTS_FIXED_FEE, htsFee)); } @HapiTest @DisplayName("FT with fractional fee and net of transfers true") @Order(3) final Stream ftWithFractionalFeeNetOfTransfersTre() { - return defaultHapiSpec("should be successful transfer") - .given( - tokenAssociate(OWNER, FT_WITH_FRACTIONAL_FEE_NET_OF_TRANSFERS), - cryptoTransfer(moving(100, FT_WITH_FRACTIONAL_FEE_NET_OF_TRANSFERS) - .between(TREASURY_FOR_CUSTOM_FEE_TOKENS, OWNER))) - .when(tokenAirdrop(moving(10, FT_WITH_FRACTIONAL_FEE_NET_OF_TRANSFERS) + return hapiTest( + tokenAssociate(OWNER, FT_WITH_FRACTIONAL_FEE_NET_OF_TRANSFERS), + cryptoTransfer(moving(100, FT_WITH_FRACTIONAL_FEE_NET_OF_TRANSFERS) + .between(TREASURY_FOR_CUSTOM_FEE_TOKENS, OWNER)), + tokenAirdrop(moving(10, FT_WITH_FRACTIONAL_FEE_NET_OF_TRANSFERS) .between(OWNER, RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS)) .payingWith(OWNER) - .via("fractionalTxn")) - .then( - validateChargedUsd("fractionalTxn", 0.1, 10), - // sender should pay 1 token for fractional fee - getAccountBalance(OWNER).hasTokenBalance(FT_WITH_FRACTIONAL_FEE_NET_OF_TRANSFERS, 89), - getAccountBalance(HTS_COLLECTOR) - .hasTokenBalance(FT_WITH_FRACTIONAL_FEE_NET_OF_TRANSFERS, 1), - 
getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) - .hasTokenBalance(FT_WITH_FRACTIONAL_FEE_NET_OF_TRANSFERS, 10)); + .via("fractionalTxn"), + validateChargedUsd("fractionalTxn", 0.1, 10), + // sender should pay 1 token for fractional fee + getAccountBalance(OWNER).hasTokenBalance(FT_WITH_FRACTIONAL_FEE_NET_OF_TRANSFERS, 89), + getAccountBalance(HTS_COLLECTOR).hasTokenBalance(FT_WITH_FRACTIONAL_FEE_NET_OF_TRANSFERS, 1), + getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) + .hasTokenBalance(FT_WITH_FRACTIONAL_FEE_NET_OF_TRANSFERS, 10)); } @HapiTest @DisplayName("FT with fractional fee with netOfTransfers=false") @Order(4) final Stream ftWithFractionalFeeNetOfTransfersFalse() { - return defaultHapiSpec("should be successful transfer") - .given( - tokenAssociate(OWNER, FT_WITH_FRACTIONAL_FEE), - cryptoTransfer( - moving(100, FT_WITH_FRACTIONAL_FEE).between(TREASURY_FOR_CUSTOM_FEE_TOKENS, OWNER))) - .when(tokenAirdrop(moving(10, FT_WITH_FRACTIONAL_FEE) + return hapiTest( + tokenAssociate(OWNER, FT_WITH_FRACTIONAL_FEE), + cryptoTransfer(moving(100, FT_WITH_FRACTIONAL_FEE).between(TREASURY_FOR_CUSTOM_FEE_TOKENS, OWNER)), + tokenAirdrop(moving(10, FT_WITH_FRACTIONAL_FEE) .between(OWNER, RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS)) .payingWith(OWNER) - .via("fractionalTxn")) - .then( - validateChargedUsd("fractionalTxn", 0.1, 10), - getAccountBalance(OWNER).hasTokenBalance(FT_WITH_FRACTIONAL_FEE, 90), - // the fee is charged from the transfer value - getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) - .hasTokenBalance(FT_WITH_FRACTIONAL_FEE, 9)); + .via("fractionalTxn"), + validateChargedUsd("fractionalTxn", 0.1, 10), + getAccountBalance(OWNER).hasTokenBalance(FT_WITH_FRACTIONAL_FEE, 90), + // the fee is charged from the transfer value + getAccountBalance(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS) + .hasTokenBalance(FT_WITH_FRACTIONAL_FEE, 9)); } @HapiTest @@ -928,25 +880,20 @@ final Stream ftWithFractionalFeeNetOfTransfersFalse() { @Order(5) final Stream ftWithFractionalFeeNetOfTransfersFalseInPendingState() { var sender = "sender"; - return defaultHapiSpec("the value should be reduced") - .given( - cryptoCreate(sender).balance(ONE_HUNDRED_HBARS).maxAutomaticTokenAssociations(-1), - tokenAssociate(sender, FT_WITH_FRACTIONAL_FEE), - cryptoTransfer(moving(100, FT_WITH_FRACTIONAL_FEE) - .between(TREASURY_FOR_CUSTOM_FEE_TOKENS, sender))) - .when(tokenAirdrop(moving(100, FT_WITH_FRACTIONAL_FEE) - .between(sender, RECEIVER_WITH_0_AUTO_ASSOCIATIONS)) + return hapiTest( + cryptoCreate(sender).balance(ONE_HUNDRED_HBARS).maxAutomaticTokenAssociations(-1), + tokenAssociate(sender, FT_WITH_FRACTIONAL_FEE), + cryptoTransfer(moving(100, FT_WITH_FRACTIONAL_FEE).between(TREASURY_FOR_CUSTOM_FEE_TOKENS, sender)), + tokenAirdrop(moving(100, FT_WITH_FRACTIONAL_FEE).between(sender, RECEIVER_WITH_0_AUTO_ASSOCIATIONS)) .payingWith(sender) - .via("fractionalTxn")) - .then( - validateChargedUsd("fractionalTxn", 0.1, 10), - // the fee is charged from the transfer value, - // so we expect 90% of the value to be in the pending state - getTxnRecord("fractionalTxn") - .hasPriority(recordWith() - .pendingAirdrops( - includingFungiblePendingAirdrop(moving(90, FT_WITH_FRACTIONAL_FEE) - .between(sender, RECEIVER_WITH_0_AUTO_ASSOCIATIONS))))); + .via("fractionalTxn"), + validateChargedUsd("fractionalTxn", 0.1, 10), + // the fee is charged from the transfer value, + // so we expect 90% of the value to be in the pending state + getTxnRecord("fractionalTxn") + .hasPriority(recordWith() + 
.pendingAirdrops(includingFungiblePendingAirdrop(moving(90, FT_WITH_FRACTIONAL_FEE) + .between(sender, RECEIVER_WITH_0_AUTO_ASSOCIATIONS))))); } @HapiTest @@ -954,40 +901,34 @@ final Stream ftWithFractionalFeeNetOfTransfersFalseInPendingState() @Order(6) final Stream ftWithFractionalFeeNetOfTransfersFalseNotAssociatedCollector() { var sender = "sender"; - return defaultHapiSpec("should have 2 pending airdrops and the value should be reduced") - .given( - cryptoCreate(sender).balance(ONE_HUNDRED_HBARS), - tokenAssociate(sender, FT_WITH_FRACTIONAL_FEE_2), - cryptoTransfer(moving(100, FT_WITH_FRACTIONAL_FEE_2) - .between(TREASURY_FOR_CUSTOM_FEE_TOKENS, sender))) - .when( - tokenDissociate(HTS_COLLECTOR, FT_WITH_FRACTIONAL_FEE_2), - tokenAirdrop(moving(100, FT_WITH_FRACTIONAL_FEE_2) - .between(sender, RECEIVER_WITH_0_AUTO_ASSOCIATIONS)) - .payingWith(sender) - .via("fractionalTxn")) - .then( - validateChargedUsd("fractionalTxn", 0.2, 10), - getTxnRecord("fractionalTxn") - .hasPriority(recordWith() - .pendingAirdrops(includingFungiblePendingAirdrop( - moving(90, FT_WITH_FRACTIONAL_FEE_2) - .between(sender, RECEIVER_WITH_0_AUTO_ASSOCIATIONS), - moving(10, FT_WITH_FRACTIONAL_FEE_2) - .between(sender, HTS_COLLECTOR))))); + return hapiTest( + cryptoCreate(sender).balance(ONE_HUNDRED_HBARS), + tokenAssociate(sender, FT_WITH_FRACTIONAL_FEE_2), + cryptoTransfer( + moving(100, FT_WITH_FRACTIONAL_FEE_2).between(TREASURY_FOR_CUSTOM_FEE_TOKENS, sender)), + tokenDissociate(HTS_COLLECTOR, FT_WITH_FRACTIONAL_FEE_2), + tokenAirdrop(moving(100, FT_WITH_FRACTIONAL_FEE_2) + .between(sender, RECEIVER_WITH_0_AUTO_ASSOCIATIONS)) + .payingWith(sender) + .via("fractionalTxn"), + validateChargedUsd("fractionalTxn", 0.2, 10), + getTxnRecord("fractionalTxn") + .hasPriority(recordWith() + .pendingAirdrops(includingFungiblePendingAirdrop( + moving(90, FT_WITH_FRACTIONAL_FEE_2) + .between(sender, RECEIVER_WITH_0_AUTO_ASSOCIATIONS), + moving(10, FT_WITH_FRACTIONAL_FEE_2).between(sender, HTS_COLLECTOR))))); } @HapiTest @DisplayName("NFT with royalty fee with fallback") @Order(7) final Stream nftWithRoyaltyFeesPaidByReceiverFails() { - return defaultHapiSpec("should fail - TOKEN_AIRDROP_WITH_FALLBACK_ROYALTY") - .given() - .when( - tokenAssociate(OWNER, NFT_WITH_ROYALTY_FEE), - cryptoTransfer(movingUnique(NFT_WITH_ROYALTY_FEE, 1L) - .between(TREASURY_FOR_CUSTOM_FEE_TOKENS, OWNER))) - .then(tokenAirdrop(movingUnique(NFT_WITH_ROYALTY_FEE, 1L) + return hapiTest( + tokenAssociate(OWNER, NFT_WITH_ROYALTY_FEE), + cryptoTransfer( + movingUnique(NFT_WITH_ROYALTY_FEE, 1L).between(TREASURY_FOR_CUSTOM_FEE_TOKENS, OWNER)), + tokenAirdrop(movingUnique(NFT_WITH_ROYALTY_FEE, 1L) .between(OWNER, RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS)) .signedByPayerAnd(RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS, OWNER) .hasKnownStatus(TOKEN_AIRDROP_WITH_FALLBACK_ROYALTY)); @@ -1300,30 +1241,28 @@ class AirdropToNonExistingAccounts { @DisplayName("ED25519 key") final Stream airdropToNonExistingED25519Account() { var ed25519key = "ed25519key"; - return defaultHapiSpec("should auto-create and transfer the tokens") - .given(newKeyNamed(ed25519key).shape(SigControl.ED25519_ON)) - .when(tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between(OWNER, ed25519key)) + return hapiTest( + newKeyNamed(ed25519key).shape(SigControl.ED25519_ON), + tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between(OWNER, ed25519key)) .payingWith(OWNER) - .via("ed25519Receiver")) - .then( - getAutoCreatedAccountBalance(ed25519key).hasTokenBalance(FUNGIBLE_TOKEN, 10), - // Any new auto-creation needs 
to explicitly associate token. So it will be $0.1 - validateChargedUsd("ed25519Receiver", 0.1, 1)); + .via("ed25519Receiver"), + getAutoCreatedAccountBalance(ed25519key).hasTokenBalance(FUNGIBLE_TOKEN, 10), + // Any new auto-creation needs to explicitly associate token. So it will be $0.1 + validateChargedUsd("ed25519Receiver", 0.1, 1)); } @HapiTest @DisplayName("SECP256K1 key account") final Stream airdropToNonExistingSECP256K1Account() { var secp256K1 = "secp256K1"; - return defaultHapiSpec("should auto-create and transfer the tokens") - .given(newKeyNamed(secp256K1).shape(SigControl.SECP256K1_ON)) - .when(tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between(OWNER, secp256K1)) + return hapiTest( + newKeyNamed(secp256K1).shape(SigControl.SECP256K1_ON), + tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between(OWNER, secp256K1)) .payingWith(OWNER) - .via("secp256k1Receiver")) - .then( - getAutoCreatedAccountBalance(secp256K1).hasTokenBalance(FUNGIBLE_TOKEN, 10), - // Any new auto-creation needs to explicitly associate token. So it will be $0.1 - validateChargedUsd("secp256k1Receiver", 0.1, 1)); + .via("secp256k1Receiver"), + getAutoCreatedAccountBalance(secp256K1).hasTokenBalance(FUNGIBLE_TOKEN, 10), + // Any new auto-creation needs to explicitly associate token. So it will be $0.1 + validateChargedUsd("secp256k1Receiver", 0.1, 1)); } @HapiTest @@ -1334,15 +1273,13 @@ final Stream airdropToNonExistingEvmAddressAccount() { CommonUtils.unhex("02641dc27aa851ddc5a238dc569718f82b4e5eb3b61030942432fe7ac9088459c5"); final ByteString evmAddress = ByteStringUtils.wrapUnsafely(recoverAddressFromPubKey(publicKey)); - return defaultHapiSpec("should lazy-create and transfer the tokens") - .given() - .when(tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between(OWNER, evmAddress)) + return hapiTest( + tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between(OWNER, evmAddress)) .payingWith(OWNER) - .via("evmAddressReceiver")) - .then( - getAliasedAccountBalance(evmAddress).hasTokenBalance(FUNGIBLE_TOKEN, 10), - // Any new auto-creation needs to explicitly associate token. So it will be $0.1 - validateChargedUsd("evmAddressReceiver", 0.1, 1)); + .via("evmAddressReceiver"), + getAliasedAccountBalance(evmAddress).hasTokenBalance(FUNGIBLE_TOKEN, 10), + // Any new auto-creation needs to explicitly associate token. So it will be $0.1 + validateChargedUsd("evmAddressReceiver", 0.1, 1)); } // AIRDROP_19 @@ -1380,16 +1317,14 @@ final Stream airdropNftToNonExistingAccount() { CommonUtils.unhex("02641dc27aa851ddc5a238dc569718f82b4e5eb3b61030942432fe7ac9088459c5"); final ByteString evmAddress = ByteStringUtils.wrapUnsafely(recoverAddressFromPubKey(publicKey)); - return defaultHapiSpec("should lazy-create and transfer with NFT") - .given() - .when(tokenAirdrop(TokenMovement.movingUnique(NON_FUNGIBLE_TOKEN, 15L) + return hapiTest( + tokenAirdrop(TokenMovement.movingUnique(NON_FUNGIBLE_TOKEN, 15L) .between(OWNER, evmAddress)) .payingWith(OWNER) - .via("evmAddressReceiver")) - .then( - getAliasedAccountBalance(evmAddress).hasTokenBalance(NON_FUNGIBLE_TOKEN, 1), - // Any new auto-creation needs to explicitly associate token. So it will be $0.1 - validateChargedUsd("evmAddressReceiver", 0.1, 1)); + .via("evmAddressReceiver"), + getAliasedAccountBalance(evmAddress).hasTokenBalance(NON_FUNGIBLE_TOKEN, 1), + // Any new auto-creation needs to explicitly associate token. 
So it will be $0.1 + validateChargedUsd("evmAddressReceiver", 0.1, 1)); } } @@ -1399,58 +1334,51 @@ class InvalidAirdrops { @HapiTest @DisplayName("containing invalid token id") final Stream airdropInvalidTokenIdFails() { - return defaultHapiSpec("should fail - INVALID_TOKEN_ID") - .given() - .when() - .then(withOpContext((spec, opLog) -> { - final var bogusTokenId = TokenID.newBuilder().setTokenNum(9999L); - spec.registry().saveTokenId("nonexistent", bogusTokenId.build()); - allRunFor( - spec, - tokenAirdrop(movingWithDecimals(1L, "nonexistent", 2) - .betweenWithDecimals(OWNER, RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS)) - .payingWith(OWNER) - .via("transferTx") - .hasKnownStatus(INVALID_TOKEN_ID), - validateChargedUsd("transferTx", 0.001, 10)); - })); + return hapiTest(withOpContext((spec, opLog) -> { + final var bogusTokenId = TokenID.newBuilder().setTokenNum(9999L); + spec.registry().saveTokenId("nonexistent", bogusTokenId.build()); + allRunFor( + spec, + tokenAirdrop(movingWithDecimals(1L, "nonexistent", 2) + .betweenWithDecimals(OWNER, RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS)) + .payingWith(OWNER) + .via("transferTx") + .hasKnownStatus(INVALID_TOKEN_ID), + validateChargedUsd("transferTx", 0.001, 10)); + })); } @HapiTest @DisplayName("containing negative NFT serial number") final Stream airdropNFTNegativeSerial() { - return defaultHapiSpec("should fail - INVALID_TOKEN_NFT_SERIAL_NUMBER") - .given() - .when() - .then(tokenAirdrop(movingUnique(NON_FUNGIBLE_TOKEN, -5) - .between(OWNER, RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS)) - .hasPrecheck(INVALID_TOKEN_NFT_SERIAL_NUMBER)); + return hapiTest(tokenAirdrop(movingUnique(NON_FUNGIBLE_TOKEN, -5) + .between(OWNER, RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS)) + .hasPrecheck(INVALID_TOKEN_NFT_SERIAL_NUMBER)); } @HapiTest @DisplayName("in pending state") final Stream freezeAndAirdrop() { var sender = "Sender"; - return defaultHapiSpec("can't be frozen and airdropped again") - .given( - cryptoCreate(sender), - tokenAssociate(sender, FUNGIBLE_TOKEN), - cryptoTransfer(moving(10, FUNGIBLE_TOKEN).between(OWNER, sender)), - // send first airdrop - tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between(sender, RECEIVER_WITH_0_AUTO_ASSOCIATIONS)) - .payingWith(sender) - .via("first"), - getTxnRecord("first") - // assert pending airdrops - .hasPriority(recordWith() - .pendingAirdrops(includingFungiblePendingAirdrop(moving(10, FUNGIBLE_TOKEN) - .between(sender, RECEIVER_WITH_0_AUTO_ASSOCIATIONS))))) - .when(tokenFreeze(FUNGIBLE_TOKEN, sender)) - .then( // the airdrop should fail - tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between(sender, RECEIVER_WITH_0_AUTO_ASSOCIATIONS)) - .payingWith(sender) - .via("second") - .hasKnownStatus(ACCOUNT_FROZEN_FOR_TOKEN)); + return hapiTest( + cryptoCreate(sender), + tokenAssociate(sender, FUNGIBLE_TOKEN), + cryptoTransfer(moving(10, FUNGIBLE_TOKEN).between(OWNER, sender)), + // send first airdrop + tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between(sender, RECEIVER_WITH_0_AUTO_ASSOCIATIONS)) + .payingWith(sender) + .via("first"), + getTxnRecord("first") + // assert pending airdrops + .hasPriority(recordWith() + .pendingAirdrops(includingFungiblePendingAirdrop(moving(10, FUNGIBLE_TOKEN) + .between(sender, RECEIVER_WITH_0_AUTO_ASSOCIATIONS)))), + tokenFreeze(FUNGIBLE_TOKEN, sender), + // the airdrop should fail + tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between(sender, RECEIVER_WITH_0_AUTO_ASSOCIATIONS)) + .payingWith(sender) + .via("second") + .hasKnownStatus(ACCOUNT_FROZEN_FOR_TOKEN)); } /** @@ -1461,24 +1389,19 @@ final Stream 
freezeAndAirdrop() { @DisplayName("containing negative amount") final Stream airdropNegativeAmountFails3() { var receiver = "receiver"; - return defaultHapiSpec("should fail - INVALID_SIGNATURE") - .given( - cryptoCreate(receiver), - tokenAssociate(receiver, FUNGIBLE_TOKEN), - cryptoTransfer(moving(15, FUNGIBLE_TOKEN).between(OWNER, receiver))) - .when() - .then(tokenAirdrop(moving(-15, FUNGIBLE_TOKEN).between(OWNER, receiver)) + return hapiTest( + cryptoCreate(receiver), + tokenAssociate(receiver, FUNGIBLE_TOKEN), + cryptoTransfer(moving(15, FUNGIBLE_TOKEN).between(OWNER, receiver)), + tokenAirdrop(moving(-15, FUNGIBLE_TOKEN).between(OWNER, receiver)) .hasKnownStatus(INVALID_SIGNATURE)); } @HapiTest @DisplayName("with missing sender's signature") final Stream missingSenderSigFails() { - return defaultHapiSpec("should fail - INVALID_SIGNATURE") - .given() - .when() - .then(tokenAirdrop( - moving(1, FUNGIBLE_TOKEN).between(OWNER, RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS)) + return hapiTest( + tokenAirdrop(moving(1, FUNGIBLE_TOKEN).between(OWNER, RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS)) .hasPrecheck(INVALID_SIGNATURE)); } @@ -1486,12 +1409,10 @@ final Stream missingSenderSigFails() { @DisplayName("fungible token with allowance") final Stream airdropFtWithAllowance() { var spender = "spender"; - return defaultHapiSpec("should fail - NOT_SUPPORTED") - .given(cryptoCreate(spender).balance(ONE_HUNDRED_HBARS)) - .when(cryptoApproveAllowance() - .payingWith(OWNER) - .addTokenAllowance(OWNER, FUNGIBLE_TOKEN, spender, 100)) - .then(tokenAirdrop(movingWithAllowance(50, FUNGIBLE_TOKEN) + return hapiTest( + cryptoCreate(spender).balance(ONE_HUNDRED_HBARS), + cryptoApproveAllowance().payingWith(OWNER).addTokenAllowance(OWNER, FUNGIBLE_TOKEN, spender, 100), + tokenAirdrop(movingWithAllowance(50, FUNGIBLE_TOKEN) .between(spender, RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS)) .signedBy(OWNER, spender) .hasPrecheck(NOT_SUPPORTED)); @@ -1501,12 +1422,12 @@ final Stream airdropFtWithAllowance() { @DisplayName("NFT with allowance") final Stream airdropNftWithAllowance() { var spender = "spender"; - return defaultHapiSpec("should fail - NOT_SUPPORTED") - .given(cryptoCreate(spender).balance(ONE_HUNDRED_HBARS)) - .when(cryptoApproveAllowance() + return hapiTest( + cryptoCreate(spender).balance(ONE_HUNDRED_HBARS), + cryptoApproveAllowance() .payingWith(OWNER) - .addNftAllowance(OWNER, NON_FUNGIBLE_TOKEN, spender, true, List.of())) - .then(tokenAirdrop(movingUniqueWithAllowance(NON_FUNGIBLE_TOKEN, 1L) + .addNftAllowance(OWNER, NON_FUNGIBLE_TOKEN, spender, true, List.of()), + tokenAirdrop(movingUniqueWithAllowance(NON_FUNGIBLE_TOKEN, 1L) .between(OWNER, RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS)) .signedBy(OWNER, spender) .hasPrecheck(NOT_SUPPORTED)); @@ -1516,13 +1437,11 @@ final Stream airdropNftWithAllowance() { @DisplayName("owner does not have enough balance") final Stream ownerNotEnoughBalanceFails() { var lowBalanceOwner = "lowBalanceOwner"; - return defaultHapiSpec("should fail - INVALID_ACCOUNT_AMOUNTS") - .given( - cryptoCreate(lowBalanceOwner), - tokenAssociate(lowBalanceOwner, FUNGIBLE_TOKEN), - cryptoTransfer(moving(1, FUNGIBLE_TOKEN).between(OWNER, lowBalanceOwner))) - .when() - .then(tokenAirdrop(moving(99, FUNGIBLE_TOKEN) + return hapiTest( + cryptoCreate(lowBalanceOwner), + tokenAssociate(lowBalanceOwner, FUNGIBLE_TOKEN), + cryptoTransfer(moving(1, FUNGIBLE_TOKEN).between(OWNER, lowBalanceOwner)), + tokenAirdrop(moving(99, FUNGIBLE_TOKEN) .between(lowBalanceOwner, 
RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS)) .payingWith(lowBalanceOwner) .hasKnownStatus(INSUFFICIENT_TOKEN_BALANCE)); @@ -1531,51 +1450,44 @@ final Stream ownerNotEnoughBalanceFails() { @HapiTest @DisplayName("containing duplicate entries in the transfer list") final Stream duplicateEntryInTokenTransferFails() { - return defaultHapiSpec("should fail - INVALID_ACCOUNT_AMOUNTS") - .given() - .when() - .then(tokenAirdrop( - movingUnique(NON_FUNGIBLE_TOKEN, 1L) - .between(OWNER, RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS), - movingUnique(NON_FUNGIBLE_TOKEN, 1L) - .between(OWNER, RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS)) - .payingWith(OWNER) - .hasPrecheck(INVALID_ACCOUNT_AMOUNTS)); + return hapiTest(tokenAirdrop( + movingUnique(NON_FUNGIBLE_TOKEN, 1L) + .between(OWNER, RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS), + movingUnique(NON_FUNGIBLE_TOKEN, 1L) + .between(OWNER, RECEIVER_WITH_UNLIMITED_AUTO_ASSOCIATIONS)) + .payingWith(OWNER) + .hasPrecheck(INVALID_ACCOUNT_AMOUNTS)); } @HapiTest @DisplayName("already exists in pending airdrop state") final Stream duplicateEntryInPendingStateFails() { var receiver = "receiver"; - return defaultHapiSpec("should fail - PENDING_NFT_AIRDROP_ALREADY_EXISTS") - .given(cryptoCreate(receiver).maxAutomaticTokenAssociations(0)) - .when() - .then( - tokenAirdrop(movingUnique(NON_FUNGIBLE_TOKEN, 1L).between(OWNER, receiver)) - .payingWith(OWNER), - tokenAirdrop(movingUnique(NON_FUNGIBLE_TOKEN, 1L).between(OWNER, receiver)) - .payingWith(OWNER) - .hasKnownStatus(PENDING_NFT_AIRDROP_ALREADY_EXISTS)); + return hapiTest( + cryptoCreate(receiver).maxAutomaticTokenAssociations(0), + tokenAirdrop(movingUnique(NON_FUNGIBLE_TOKEN, 1L).between(OWNER, receiver)) + .payingWith(OWNER), + tokenAirdrop(movingUnique(NON_FUNGIBLE_TOKEN, 1L).between(OWNER, receiver)) + .payingWith(OWNER) + .hasKnownStatus(PENDING_NFT_AIRDROP_ALREADY_EXISTS)); } @HapiTest @DisplayName("has transfer list size above the max to one account") final Stream aboveMaxTransfersFails() { - return defaultHapiSpec("should fail - TOKEN_REFERENCE_LIST_SIZE_LIMIT_EXCEEDED") - .given( - createTokenWithName("FUNGIBLE1"), - createTokenWithName("FUNGIBLE2"), - createTokenWithName("FUNGIBLE3"), - createTokenWithName("FUNGIBLE4"), - createTokenWithName("FUNGIBLE5"), - createTokenWithName("FUNGIBLE6"), - createTokenWithName("FUNGIBLE7"), - createTokenWithName("FUNGIBLE8"), - createTokenWithName("FUNGIBLE9"), - createTokenWithName("FUNGIBLE10"), - createTokenWithName("FUNGIBLE11")) - .when() - .then(tokenAirdrop( + return hapiTest( + createTokenWithName("FUNGIBLE1"), + createTokenWithName("FUNGIBLE2"), + createTokenWithName("FUNGIBLE3"), + createTokenWithName("FUNGIBLE4"), + createTokenWithName("FUNGIBLE5"), + createTokenWithName("FUNGIBLE6"), + createTokenWithName("FUNGIBLE7"), + createTokenWithName("FUNGIBLE8"), + createTokenWithName("FUNGIBLE9"), + createTokenWithName("FUNGIBLE10"), + createTokenWithName("FUNGIBLE11"), + tokenAirdrop( defaultMovementOfToken("FUNGIBLE1"), defaultMovementOfToken("FUNGIBLE2"), defaultMovementOfToken("FUNGIBLE3"), @@ -2270,27 +2182,21 @@ class DeleteAccount { @DisplayName("to fungible token pending airdrop") final Stream canNotDeleteAccountRelatedToAirdrop() { var receiver = "receiverToDelete"; - return defaultHapiSpec("should fail - ACCOUNT_HAS_PENDING_AIRDROPS") - .given() - .when() - .then( - cryptoCreate(receiver).maxAutomaticTokenAssociations(0), - tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between(OWNER, receiver)) - .payingWith(OWNER), - 
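A quick illustrative sketch of the shape change these airdrop hunks all share: the string passed to defaultHapiSpec (which mostly duplicated the @DisplayName) is dropped, the empty .given()/.when() phases disappear, and every operation moves into one ordered varargs call to hapiTest. The toy Java below models only that call-shape change; Op, givenWhenThen and flatTest are invented names, not the real HapiSpec API.

import java.util.ArrayList;
import java.util.List;

public final class FlatteningSketch {
    record Op(String label) {}

    // Old shape: three phases stitched together by a named builder; the name only ever served as a label.
    static List<Op> givenWhenThen(String specName, List<Op> given, List<Op> when, List<Op> then) {
        final var all = new ArrayList<Op>();
        all.addAll(given);
        all.addAll(when);
        all.addAll(then);
        return all;
    }

    // New shape: one flat, ordered varargs list; the human-readable label lives on the JUnit @DisplayName.
    static List<Op> flatTest(Op... ops) {
        return List.of(ops);
    }

    public static void main(String[] args) {
        var before = givenWhenThen(
                "should fail - PENDING_NFT_AIRDROP_ALREADY_EXISTS",
                List.of(new Op("cryptoCreate(receiver)")),
                List.of(),
                List.of(new Op("first airdrop"), new Op("duplicate airdrop, expected to fail")));
        var after = flatTest(
                new Op("cryptoCreate(receiver)"),
                new Op("first airdrop"),
                new Op("duplicate airdrop, expected to fail"));
        System.out.println(before.equals(after)); // true: same operations, same order
    }
}

The real hapiTest factory presumably does more than collect operations, so this models the call shape only, not the spec machinery.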
cryptoDelete(OWNER).hasKnownStatus(ACCOUNT_HAS_PENDING_AIRDROPS)); + return hapiTest( + cryptoCreate(receiver).maxAutomaticTokenAssociations(0), + tokenAirdrop(moving(10, FUNGIBLE_TOKEN).between(OWNER, receiver)) + .payingWith(OWNER), + cryptoDelete(OWNER).hasKnownStatus(ACCOUNT_HAS_PENDING_AIRDROPS)); } @HapiTest @DisplayName("to non-fungible token pending airdrop") final Stream canNotDeleteAccountRelatedToNFTAirdrop() { - return defaultHapiSpec("should fail - ACCOUNT_HAS_PENDING_AIRDROPS") - .given() - .when() - .then( - tokenAirdrop(TokenMovement.movingUnique(NON_FUNGIBLE_TOKEN, 10L) - .between(OWNER, RECEIVER_WITH_0_AUTO_ASSOCIATIONS)) - .payingWith(OWNER), - cryptoDelete(OWNER).hasKnownStatus(ACCOUNT_HAS_PENDING_AIRDROPS)); + return hapiTest( + tokenAirdrop(TokenMovement.movingUnique(NON_FUNGIBLE_TOKEN, 10L) + .between(OWNER, RECEIVER_WITH_0_AUTO_ASSOCIATIONS)) + .payingWith(OWNER), + cryptoDelete(OWNER).hasKnownStatus(ACCOUNT_HAS_PENDING_AIRDROPS)); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/Issue1765Suite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/Issue1765Suite.java index ed076d00a4ee..a0d133cbf017 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/Issue1765Suite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/Issue1765Suite.java @@ -19,7 +19,7 @@ import static com.hedera.services.bdd.junit.ContextRequirement.SYSTEM_ACCOUNT_BALANCES; import static com.hedera.services.bdd.spec.HapiPropertySource.asContract; import static com.hedera.services.bdd.spec.HapiPropertySource.asFile; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractUpdate; @@ -55,21 +55,17 @@ final Stream recordOfInvalidContractUpdateSanityChecks() { final String INVALID_CONTRACT = IMAGINARY; final String THE_MEMO_IS = MEMO_IS; - return defaultHapiSpec("RecordOfInvalidContractUpdateSanityChecks") - .given(flattened( - withOpContext((spec, ctxLog) -> - spec.registry().saveContractId(INVALID_CONTRACT, asContract(ACCOUNT))), - newKeyNamed(INVALID_CONTRACT), - takeBalanceSnapshots(FUNDING, GENESIS, NODE))) - .when(contractUpdate(INVALID_CONTRACT) + return hapiTest(flattened( + withOpContext((spec, ctxLog) -> spec.registry().saveContractId(INVALID_CONTRACT, asContract(ACCOUNT))), + newKeyNamed(INVALID_CONTRACT), + takeBalanceSnapshots(FUNDING, GENESIS, NODE), + contractUpdate(INVALID_CONTRACT) .memo(THE_MEMO_IS) .fee(ADEQUATE_FEE) .via(INVALID_UPDATE_TXN) - .hasKnownStatus(ResponseCodeEnum.INVALID_CONTRACT_ID)) - .then( - validateTransferListForBalances(INVALID_UPDATE_TXN, List.of(FUNDING, GENESIS, NODE)), - getTxnRecord(INVALID_UPDATE_TXN) - .hasPriority(recordWith().memo(THE_MEMO_IS))); + .hasKnownStatus(ResponseCodeEnum.INVALID_CONTRACT_ID), + validateTransferListForBalances(INVALID_UPDATE_TXN, List.of(FUNDING, GENESIS, NODE)), + getTxnRecord(INVALID_UPDATE_TXN).hasPriority(recordWith().memo(THE_MEMO_IS)))); } @LeakyHapiTest(requirement = SYSTEM_ACCOUNT_BALANCES) @@ -78,21 +74,17 @@ final Stream recordOfInvalidFileUpdateSanityChecks() { final String INVALID_FILE = IMAGINARY; final String THE_MEMO_IS = MEMO_IS; - return 
defaultHapiSpec("RecordOfInvalidFileUpdateSanityChecks") - .given(flattened( - withOpContext((spec, ctxLog) -> spec.registry().saveFileId(INVALID_FILE, asFile("0.0.0"))), - newKeyNamed(INVALID_FILE).type(KeyFactory.KeyType.LIST), - takeBalanceSnapshots(FUNDING, GENESIS, STAKING_REWARD, NODE))) - .when(fileUpdate(INVALID_FILE) + return hapiTest(flattened( + withOpContext((spec, ctxLog) -> spec.registry().saveFileId(INVALID_FILE, asFile("0.0.0"))), + newKeyNamed(INVALID_FILE).type(KeyFactory.KeyType.LIST), + takeBalanceSnapshots(FUNDING, GENESIS, STAKING_REWARD, NODE), + fileUpdate(INVALID_FILE) .memo(THE_MEMO_IS) .fee(ADEQUATE_FEE) .via(INVALID_UPDATE_TXN) - .hasKnownStatus(ResponseCodeEnum.INVALID_FILE_ID)) - .then( - validateTransferListForBalances( - INVALID_UPDATE_TXN, List.of(FUNDING, GENESIS, STAKING_REWARD, NODE)), - getTxnRecord(INVALID_UPDATE_TXN) - .hasPriority(recordWith().memo(THE_MEMO_IS))); + .hasKnownStatus(ResponseCodeEnum.INVALID_FILE_ID), + validateTransferListForBalances(INVALID_UPDATE_TXN, List.of(FUNDING, GENESIS, STAKING_REWARD, NODE)), + getTxnRecord(INVALID_UPDATE_TXN).hasPriority(recordWith().memo(THE_MEMO_IS)))); } @LeakyHapiTest(requirement = SYSTEM_ACCOUNT_BALANCES) @@ -101,21 +93,17 @@ final Stream recordOfInvalidFileAppendSanityChecks() { final String INVALID_FILE = IMAGINARY; final String THE_MEMO_IS = MEMO_IS; - return defaultHapiSpec("RecordOfInvalidFileAppendSanityChecks") - .given(flattened( - withOpContext((spec, ctxLog) -> spec.registry().saveFileId(INVALID_FILE, asFile("0.0.0"))), - newKeyNamed(INVALID_FILE).type(KeyFactory.KeyType.LIST), - takeBalanceSnapshots(FUNDING, GENESIS, STAKING_REWARD, NODE))) - .when(fileAppend(INVALID_FILE) + return hapiTest(flattened( + withOpContext((spec, ctxLog) -> spec.registry().saveFileId(INVALID_FILE, asFile("0.0.0"))), + newKeyNamed(INVALID_FILE).type(KeyFactory.KeyType.LIST), + takeBalanceSnapshots(FUNDING, GENESIS, STAKING_REWARD, NODE), + fileAppend(INVALID_FILE) .memo(THE_MEMO_IS) .content("Some more content.") .fee(ADEQUATE_FEE) .via(INVALID_APPEND_TXN) - .hasKnownStatus(ResponseCodeEnum.INVALID_FILE_ID)) - .then( - validateTransferListForBalances( - INVALID_APPEND_TXN, List.of(FUNDING, GENESIS, STAKING_REWARD, NODE)), - getTxnRecord(INVALID_APPEND_TXN) - .hasPriority(recordWith().memo(THE_MEMO_IS))); + .hasKnownStatus(ResponseCodeEnum.INVALID_FILE_ID), + validateTransferListForBalances(INVALID_APPEND_TXN, List.of(FUNDING, GENESIS, STAKING_REWARD, NODE)), + getTxnRecord(INVALID_APPEND_TXN).hasPriority(recordWith().memo(THE_MEMO_IS)))); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/Issue2098Spec.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/Issue2098Spec.java index 9fd9bd0be100..5bb50864a903 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/Issue2098Spec.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/Issue2098Spec.java @@ -17,7 +17,7 @@ package com.hedera.services.bdd.suites.issues; import static com.hedera.services.bdd.junit.ContextRequirement.PERMISSION_OVERRIDES; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTopicInfo; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.createTopic; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; @@ -44,67 +44,65 @@ 
public class Issue2098Spec { @LeakyHapiTest(requirement = PERMISSION_OVERRIDES) final Stream txnApiPermissionsChangeImmediately() { - return defaultHapiSpec("TxnApiPermissionsChangeImmediately") - .given(cryptoCreate(CIVILIAN)) - .when(fileUpdate(API_PERMISSIONS) + return hapiTest( + cryptoCreate(CIVILIAN), + fileUpdate(API_PERMISSIONS) .payingWith(ADDRESS_BOOK_CONTROL) - .overridingProps(Map.of(CRYPTO_TRANSFER, "0-1"))) - .then( - cryptoTransfer(tinyBarsFromTo(CIVILIAN, FUNDING, 1L)) - .payingWith(CIVILIAN) - .hasPrecheckFrom(NOT_SUPPORTED, OK) - .hasKnownStatus(UNAUTHORIZED), - fileUpdate(API_PERMISSIONS) - .payingWith(ADDRESS_BOOK_CONTROL) - .overridingProps(Map.of(CRYPTO_TRANSFER, "0-*")), - cryptoTransfer(tinyBarsFromTo(CIVILIAN, FUNDING, 1L)).payingWith(CIVILIAN)); + .overridingProps(Map.of(CRYPTO_TRANSFER, "0-1")), + cryptoTransfer(tinyBarsFromTo(CIVILIAN, FUNDING, 1L)) + .payingWith(CIVILIAN) + .hasPrecheckFrom(NOT_SUPPORTED, OK) + .hasKnownStatus(UNAUTHORIZED), + fileUpdate(API_PERMISSIONS) + .payingWith(ADDRESS_BOOK_CONTROL) + .overridingProps(Map.of(CRYPTO_TRANSFER, "0-*")), + cryptoTransfer(tinyBarsFromTo(CIVILIAN, FUNDING, 1L)).payingWith(CIVILIAN)); } @LeakyHapiTest(requirement = PERMISSION_OVERRIDES) final Stream queryApiPermissionsChangeImmediately() { - return defaultHapiSpec("QueryApiPermissionsChangeImmediately") - .given(cryptoCreate(CIVILIAN), createTopic("misc")) - .when(fileUpdate(API_PERMISSIONS) + return hapiTest( + cryptoCreate(CIVILIAN), + createTopic("misc"), + fileUpdate(API_PERMISSIONS) + .payingWith(ADDRESS_BOOK_CONTROL) + .overridingProps(Map.of(GET_TOPIC_INFO, "0-1")), + getTopicInfo("misc").payingWith(CIVILIAN).hasAnswerOnlyPrecheck(NOT_SUPPORTED), + fileUpdate(API_PERMISSIONS) .payingWith(ADDRESS_BOOK_CONTROL) - .overridingProps(Map.of(GET_TOPIC_INFO, "0-1"))) - .then( - getTopicInfo("misc").payingWith(CIVILIAN).hasAnswerOnlyPrecheck(NOT_SUPPORTED), - fileUpdate(API_PERMISSIONS) - .payingWith(ADDRESS_BOOK_CONTROL) - .overridingProps(Map.of(GET_TOPIC_INFO, "0-*")), - getTopicInfo("misc").payingWith(CIVILIAN)); + .overridingProps(Map.of(GET_TOPIC_INFO, "0-*")), + getTopicInfo("misc").payingWith(CIVILIAN)); } @LeakyHapiTest(requirement = PERMISSION_OVERRIDES) final Stream adminsCanQueryNoMatterPermissions() { - return defaultHapiSpec("AdminsCanQueryNoMatterPermissions") - .given(cryptoCreate(CIVILIAN), createTopic("misc")) - .when(fileUpdate(API_PERMISSIONS) + return hapiTest( + cryptoCreate(CIVILIAN), + createTopic("misc"), + fileUpdate(API_PERMISSIONS) .payingWith(ADDRESS_BOOK_CONTROL) - .overridingProps(Map.of(GET_TOPIC_INFO, "0-1"))) - .then( - getTopicInfo("misc").payingWith(CIVILIAN).hasAnswerOnlyPrecheck(NOT_SUPPORTED), - getTopicInfo("misc"), - fileUpdate(API_PERMISSIONS) - .payingWith(ADDRESS_BOOK_CONTROL) - .overridingProps(Map.of(GET_TOPIC_INFO, "0-*"))); + .overridingProps(Map.of(GET_TOPIC_INFO, "0-1")), + getTopicInfo("misc").payingWith(CIVILIAN).hasAnswerOnlyPrecheck(NOT_SUPPORTED), + getTopicInfo("misc"), + fileUpdate(API_PERMISSIONS) + .payingWith(ADDRESS_BOOK_CONTROL) + .overridingProps(Map.of(GET_TOPIC_INFO, "0-*"))); } @LeakyHapiTest(requirement = PERMISSION_OVERRIDES) final Stream adminsCanTransactNoMatterPermissions() { - return defaultHapiSpec("AdminsCanTransactNoMatterPermissions") - .given(cryptoCreate(CIVILIAN)) - .when(fileUpdate(API_PERMISSIONS) + return hapiTest( + cryptoCreate(CIVILIAN), + fileUpdate(API_PERMISSIONS) + .payingWith(ADDRESS_BOOK_CONTROL) + .overridingProps(Map.of(CRYPTO_TRANSFER, "0-1")), + 
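One behavioral detail worth noting in these permission-override conversions: with no separate .then() phase, the op that restores the permissive "0-*" range is simply the last entry in the same flat list, so it still runs after the negative assertion. A minimal sketch of that pattern, reusing only operations that appear in the hunks above; the method name is hypothetical, the static imports are assumed to be the ones Issue2098Spec already declares, and the Stream element type (DynamicTest) is spelled out here even though the flattened diff text drops generic parameters.

    @LeakyHapiTest(requirement = PERMISSION_OVERRIDES)
    final Stream<DynamicTest> restrictThenRestoreSketch() {
        return hapiTest(
                cryptoCreate(CIVILIAN),
                // tighten the permission so only low-numbered payers may submit cryptoTransfer
                fileUpdate(API_PERMISSIONS)
                        .payingWith(ADDRESS_BOOK_CONTROL)
                        .overridingProps(Map.of(CRYPTO_TRANSFER, "0-1")),
                // the civilian payer is now rejected, either at precheck or at handle time
                cryptoTransfer(tinyBarsFromTo(CIVILIAN, FUNDING, 1L))
                        .payingWith(CIVILIAN)
                        .hasPrecheckFrom(NOT_SUPPORTED, OK)
                        .hasKnownStatus(UNAUTHORIZED),
                // restore the default "0-*" range as the final op, taking over the old .then() cleanup role
                fileUpdate(API_PERMISSIONS)
                        .payingWith(ADDRESS_BOOK_CONTROL)
                        .overridingProps(Map.of(CRYPTO_TRANSFER, "0-*")));
    }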
cryptoTransfer(tinyBarsFromTo(CIVILIAN, FUNDING, 1L)) + .payingWith(CIVILIAN) + .hasPrecheckFrom(NOT_SUPPORTED, OK) + .hasKnownStatus(UNAUTHORIZED), + cryptoTransfer(tinyBarsFromTo(GENESIS, FUNDING, 1L)), + fileUpdate(API_PERMISSIONS) .payingWith(ADDRESS_BOOK_CONTROL) - .overridingProps(Map.of(CRYPTO_TRANSFER, "0-1"))) - .then( - cryptoTransfer(tinyBarsFromTo(CIVILIAN, FUNDING, 1L)) - .payingWith(CIVILIAN) - .hasPrecheckFrom(NOT_SUPPORTED, OK) - .hasKnownStatus(UNAUTHORIZED), - cryptoTransfer(tinyBarsFromTo(GENESIS, FUNDING, 1L)), - fileUpdate(API_PERMISSIONS) - .payingWith(ADDRESS_BOOK_CONTROL) - .overridingProps(Map.of(CRYPTO_TRANSFER, "0-*"))); + .overridingProps(Map.of(CRYPTO_TRANSFER, "0-*"))); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/Issue2143Spec.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/Issue2143Spec.java index 630e2ebbacf0..5be034612684 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/Issue2143Spec.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/Issue2143Spec.java @@ -18,7 +18,7 @@ import static com.hedera.services.bdd.junit.ContextRequirement.PERMISSION_OVERRIDES; import static com.hedera.services.bdd.junit.ContextRequirement.PROPERTY_OVERRIDES; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.fileUpdate; import static com.hedera.services.bdd.spec.transactions.crypto.HapiCryptoTransfer.tinyBarsFromTo; @@ -36,29 +36,27 @@ public class Issue2143Spec { @LeakyHapiTest(requirement = {PERMISSION_OVERRIDES}) final Stream account55ControlCanUpdatePermissions() { - return defaultHapiSpec("Account55ControlCanUpdatePropertiesAndPermissions") - .given(cryptoTransfer(tinyBarsFromTo(GENESIS, ADDRESS_BOOK_CONTROL, 1_000_000_000L))) - .when(fileUpdate(API_PERMISSIONS) + return hapiTest( + cryptoTransfer(tinyBarsFromTo(GENESIS, ADDRESS_BOOK_CONTROL, 1_000_000_000L)), + fileUpdate(API_PERMISSIONS) .overridingProps(Map.of("createFile", "0-100")) - .payingWith(ADDRESS_BOOK_CONTROL)) - .then(fileUpdate(API_PERMISSIONS) + .payingWith(ADDRESS_BOOK_CONTROL), + fileUpdate(API_PERMISSIONS) .overridingProps(Map.of("createFile", "0-*")) .payingWith(ADDRESS_BOOK_CONTROL)); } @LeakyHapiTest(requirement = {PERMISSION_OVERRIDES, PROPERTY_OVERRIDES}) final Stream account57ControlCanUpdatePropertiesAndPermissions() { - return defaultHapiSpec("Account57ControlCanUpdatePropertiesAndPermissions") - .given(cryptoTransfer(tinyBarsFromTo(GENESIS, EXCHANGE_RATE_CONTROL, 1_000_000_000L))) - .when( - fileUpdate(APP_PROPERTIES).overridingProps(Map.of()).payingWith(EXCHANGE_RATE_CONTROL), - fileUpdate(API_PERMISSIONS) - .overridingProps(Map.of("createFile", "0-100")) - .payingWith(EXCHANGE_RATE_CONTROL)) - .then( - fileUpdate(APP_PROPERTIES).overridingProps(Map.of()).payingWith(EXCHANGE_RATE_CONTROL), - fileUpdate(API_PERMISSIONS) - .overridingProps(Map.of("createFile", "0-*")) - .payingWith(EXCHANGE_RATE_CONTROL)); + return hapiTest( + cryptoTransfer(tinyBarsFromTo(GENESIS, EXCHANGE_RATE_CONTROL, 1_000_000_000L)), + fileUpdate(APP_PROPERTIES).overridingProps(Map.of()).payingWith(EXCHANGE_RATE_CONTROL), + fileUpdate(API_PERMISSIONS) + .overridingProps(Map.of("createFile", "0-100")) + .payingWith(EXCHANGE_RATE_CONTROL), + 
fileUpdate(APP_PROPERTIES).overridingProps(Map.of()).payingWith(EXCHANGE_RATE_CONTROL), + fileUpdate(API_PERMISSIONS) + .overridingProps(Map.of("createFile", "0-*")) + .payingWith(EXCHANGE_RATE_CONTROL)); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/Issue2319Spec.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/Issue2319Spec.java index 3cb388806f89..6b20c664b1da 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/Issue2319Spec.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/Issue2319Spec.java @@ -17,7 +17,6 @@ package com.hedera.services.bdd.suites.issues; import static com.hedera.services.bdd.junit.ContextRequirement.SYSTEM_ACCOUNT_KEYS; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getFileContents; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; @@ -82,97 +81,77 @@ final Stream propsPermissionsSigReqsWaivedForAddressBookAdmin() { @LeakyHapiTest(requirement = SYSTEM_ACCOUNT_KEYS) final Stream sysFileImmutabilityWaivedForMasterAndTreasury() { - return defaultHapiSpec("sysFileImmutabilityWaivedForMasterAndTreasury") - .given( - cryptoCreate("civilian"), - cryptoTransfer(tinyBarsFromTo(GENESIS, SYSTEM_ADMIN, 1_000_000_000_000L))) - .when(fileUpdate(EXCHANGE_RATES) + return hapiTest( + cryptoCreate("civilian"), + cryptoTransfer(tinyBarsFromTo(GENESIS, SYSTEM_ADMIN, 1_000_000_000_000L)), + fileUpdate(EXCHANGE_RATES).payingWith(EXCHANGE_RATE_CONTROL).useEmptyWacl(), + fileUpdate(EXCHANGE_RATES) .payingWith(EXCHANGE_RATE_CONTROL) - .useEmptyWacl()) - .then( - fileUpdate(EXCHANGE_RATES) - .payingWith(EXCHANGE_RATE_CONTROL) - .wacl(GENESIS) - .payingWith(SYSTEM_ADMIN) - .signedBy(GENESIS), - fileUpdate(EXCHANGE_RATES) - .payingWith(EXCHANGE_RATE_CONTROL) - .useEmptyWacl(), - fileUpdate(EXCHANGE_RATES) - .wacl(GENESIS) - .payingWith(GENESIS) - .signedBy(GENESIS)); + .wacl(GENESIS) + .payingWith(SYSTEM_ADMIN) + .signedBy(GENESIS), + fileUpdate(EXCHANGE_RATES).payingWith(EXCHANGE_RATE_CONTROL).useEmptyWacl(), + fileUpdate(EXCHANGE_RATES).wacl(GENESIS).payingWith(GENESIS).signedBy(GENESIS)); } @LeakyHapiTest(requirement = SYSTEM_ACCOUNT_KEYS) final Stream sysAccountSigReqsWaivedForMasterAndTreasury() { - return defaultHapiSpec("SysAccountSigReqsWaivedForMasterAndTreasury") - .given( - newKeyNamed(NON_TREASURY_KEY), - newKeyListNamed(NON_TREASURY_ADMIN_KEY, List.of(NON_TREASURY_KEY)), - newKeyListNamed(DEFAULT_ADMIN_KEY, List.of(GENESIS)), - cryptoCreate("civilian"), - cryptoTransfer(tinyBarsFromTo(GENESIS, SYSTEM_ADMIN, 1_000_000_000_000L))) - .when(cryptoUpdate(EXCHANGE_RATE_CONTROL) - .key(NON_TREASURY_ADMIN_KEY) - .receiverSigRequired(true)) - .then( - cryptoUpdate(EXCHANGE_RATE_CONTROL) - .payingWith(SYSTEM_ADMIN) - .signedBy(GENESIS) - .receiverSigRequired(true), - cryptoUpdate(EXCHANGE_RATE_CONTROL) - .payingWith(GENESIS) - .signedBy(GENESIS) - .receiverSigRequired(true), - cryptoUpdate(EXCHANGE_RATE_CONTROL) - .payingWith("civilian") - .signedBy("civilian", GENESIS, NON_TREASURY_ADMIN_KEY) - .receiverSigRequired(true), + return hapiTest( + newKeyNamed(NON_TREASURY_KEY), + newKeyListNamed(NON_TREASURY_ADMIN_KEY, List.of(NON_TREASURY_KEY)), + newKeyListNamed(DEFAULT_ADMIN_KEY, List.of(GENESIS)), + cryptoCreate("civilian"), + 
cryptoTransfer(tinyBarsFromTo(GENESIS, SYSTEM_ADMIN, 1_000_000_000_000L)), + cryptoUpdate(EXCHANGE_RATE_CONTROL).key(NON_TREASURY_ADMIN_KEY).receiverSigRequired(true), + cryptoUpdate(EXCHANGE_RATE_CONTROL) + .payingWith(SYSTEM_ADMIN) + .signedBy(GENESIS) + .receiverSigRequired(true), + cryptoUpdate(EXCHANGE_RATE_CONTROL) + .payingWith(GENESIS) + .signedBy(GENESIS) + .receiverSigRequired(true), + cryptoUpdate(EXCHANGE_RATE_CONTROL) + .payingWith("civilian") + .signedBy("civilian", GENESIS, NON_TREASURY_ADMIN_KEY) + .receiverSigRequired(true), - // reset EXCHANGE_RATE_CONTROL to default state - cryptoUpdate(EXCHANGE_RATE_CONTROL) - .key(DEFAULT_ADMIN_KEY) - .receiverSigRequired(false) - .payingWith(GENESIS) - .signedBy(GENESIS)); + // reset EXCHANGE_RATE_CONTROL to default state + cryptoUpdate(EXCHANGE_RATE_CONTROL) + .key(DEFAULT_ADMIN_KEY) + .receiverSigRequired(false) + .payingWith(GENESIS) + .signedBy(GENESIS)); } @LeakyHapiTest(requirement = SYSTEM_ACCOUNT_KEYS) final Stream sysFileSigReqsWaivedForMasterAndTreasury() { var validRates = new AtomicReference(); - return defaultHapiSpec("SysFileSigReqsWaivedForMasterAndTreasury") - .given( - cryptoCreate("civilian"), - newKeyNamed(NON_TREASURY_KEY), - newKeyListNamed(NON_TREASURY_ADMIN_KEY, List.of(NON_TREASURY_KEY)), - withOpContext((spec, opLog) -> { - var fetch = getFileContents(EXCHANGE_RATES); - CustomSpecAssert.allRunFor(spec, fetch); - validRates.set(fetch.getResponse() - .getFileGetContents() - .getFileContents() - .getContents()); - }), - cryptoTransfer(tinyBarsFromTo(GENESIS, SYSTEM_ADMIN, 1_000_000_000_000L))) - .when(fileUpdate(EXCHANGE_RATES) - .payingWith(EXCHANGE_RATE_CONTROL) - .wacl(NON_TREASURY_ADMIN_KEY)) - .then( - fileUpdate(EXCHANGE_RATES) - .payingWith(SYSTEM_ADMIN) - .signedBy(GENESIS) - .contents(ignore -> validRates.get()), - fileUpdate(EXCHANGE_RATES) - .payingWith(GENESIS) - .signedBy(GENESIS) - .contents(ignore -> validRates.get()), - fileUpdate(EXCHANGE_RATES) - .payingWith("civilian") - .signedBy("civilian", GENESIS, NON_TREASURY_ADMIN_KEY) - .contents(ignore -> validRates.get()) - .hasPrecheck(AUTHORIZATION_FAILED), - fileUpdate(EXCHANGE_RATES).payingWith(GENESIS).wacl(GENESIS)); + return hapiTest( + cryptoCreate("civilian"), + newKeyNamed(NON_TREASURY_KEY), + newKeyListNamed(NON_TREASURY_ADMIN_KEY, List.of(NON_TREASURY_KEY)), + withOpContext((spec, opLog) -> { + var fetch = getFileContents(EXCHANGE_RATES); + CustomSpecAssert.allRunFor(spec, fetch); + validRates.set(fetch.getResponse() + .getFileGetContents() + .getFileContents() + .getContents()); + }), + cryptoTransfer(tinyBarsFromTo(GENESIS, SYSTEM_ADMIN, 1_000_000_000_000L)), + fileUpdate(EXCHANGE_RATES).payingWith(EXCHANGE_RATE_CONTROL).wacl(NON_TREASURY_ADMIN_KEY), + fileUpdate(EXCHANGE_RATES) + .payingWith(SYSTEM_ADMIN) + .signedBy(GENESIS) + .contents(ignore -> validRates.get()), + fileUpdate(EXCHANGE_RATES).payingWith(GENESIS).signedBy(GENESIS).contents(ignore -> validRates.get()), + fileUpdate(EXCHANGE_RATES) + .payingWith("civilian") + .signedBy("civilian", GENESIS, NON_TREASURY_ADMIN_KEY) + .contents(ignore -> validRates.get()) + .hasPrecheck(AUTHORIZATION_FAILED), + fileUpdate(EXCHANGE_RATES).payingWith(GENESIS).wacl(GENESIS)); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/IssueRegressionTests.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/IssueRegressionTests.java index e4ba6cf46d29..d8d5793fa990 100644 --- 
a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/IssueRegressionTests.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/issues/IssueRegressionTests.java @@ -18,7 +18,6 @@ import static com.hedera.services.bdd.junit.ContextRequirement.NO_CONCURRENT_CREATIONS; import static com.hedera.services.bdd.junit.TestTags.TOKEN; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.assertions.AccountInfoAsserts.approxChangeFromSnapshot; import static com.hedera.services.bdd.spec.assertions.AssertUtils.inOrder; @@ -111,11 +110,10 @@ final Stream allowsCryptoCreatePayerToHaveLessThanTwiceFee() { final Stream createDeleteInSameRoundWorks() { final var key = "tbdKey"; AtomicReference nextFileId = new AtomicReference<>(); - return defaultHapiSpec("CreateDeleteInSameRoundWorks") - .given( - newKeyNamed(key).type(KeyFactory.KeyType.LIST), - fileCreate("marker").via("markerTxn")) - .when(withOpContext((spec, opLog) -> { + return hapiTest( + newKeyNamed(key).type(KeyFactory.KeyType.LIST), + fileCreate("marker").via("markerTxn"), + withOpContext((spec, opLog) -> { var lookup = getTxnRecord("markerTxn"); allRunFor(spec, lookup); var markerFid = lookup.getResponseRecord().getReceipt().getFileID(); @@ -124,30 +122,29 @@ final Stream createDeleteInSameRoundWorks() { .build(); nextFileId.set(HapiPropertySource.asFileString(nextFid)); opLog.info("Next file will be {}", nextFileId.get()); - })) - .then( - fileCreate("tbd").key(key).deferStatusResolution(), - fileDelete(nextFileId::get).signedBy(GENESIS, key), - getFileInfo(nextFileId::get).hasDeleted(true)); + }), + fileCreate("tbd").key(key).deferStatusResolution(), + fileDelete(nextFileId::get).signedBy(GENESIS, key), + getFileInfo(nextFileId::get).hasDeleted(true)); } @HapiTest final Stream recordStorageFeeIncreasesWithNumTransfers() { - return defaultHapiSpec("RecordStorageFeeIncreasesWithNumTransfers") - .given( - cryptoCreate("civilian").balance(10 * ONE_HUNDRED_HBARS), - cryptoCreate("A"), - cryptoCreate("B"), - cryptoCreate("C"), - cryptoCreate("D"), - cryptoTransfer(tinyBarsFromTo("A", "B", 1L)) - .payingWith("civilian") - .via("txn1"), - cryptoTransfer(tinyBarsFromTo("A", "B", 1L), tinyBarsFromTo("C", "D", 1L)) - .payingWith("civilian") - .via("txn2")) - .when(UtilVerbs.recordFeeAmount("txn1", "feeForOne"), UtilVerbs.recordFeeAmount("txn2", "feeForTwo")) - .then(UtilVerbs.assertionsHold((spec, assertLog) -> { + return hapiTest( + cryptoCreate("civilian").balance(10 * ONE_HUNDRED_HBARS), + cryptoCreate("A"), + cryptoCreate("B"), + cryptoCreate("C"), + cryptoCreate("D"), + cryptoTransfer(tinyBarsFromTo("A", "B", 1L)) + .payingWith("civilian") + .via("txn1"), + cryptoTransfer(tinyBarsFromTo("A", "B", 1L), tinyBarsFromTo("C", "D", 1L)) + .payingWith("civilian") + .via("txn2"), + UtilVerbs.recordFeeAmount("txn1", "feeForOne"), + UtilVerbs.recordFeeAmount("txn2", "feeForTwo"), + UtilVerbs.assertionsHold((spec, assertLog) -> { long feeForOne = spec.registry().getAmount("feeForOne"); long feeForTwo = spec.registry().getAmount("feeForTwo"); assertLog.info("[Record storage] fee for one transfer : {}", feeForOne); @@ -160,10 +157,9 @@ final Stream recordStorageFeeIncreasesWithNumTransfers() { final Stream queryPaymentTxnMustHavePayerBalanceForBothTransferFeeAndNodePayment() { final long BALANCE = 1_000_000L; - return 
HapiSpec.defaultHapiSpec("QueryPaymentTxnMustHavePayerBalanceForBothTransferFeeAndNodePayment") - .given(cryptoCreate("payer").balance(BALANCE)) - .when() - .then(getAccountInfo("payer") + return HapiSpec.hapiTest( + cryptoCreate("payer").balance(BALANCE), + getAccountInfo("payer") .nodePayment(BALANCE) .payingWith("payer") .hasAnswerOnlyPrecheck(INSUFFICIENT_PAYER_BALANCE)); @@ -173,10 +169,9 @@ final Stream queryPaymentTxnMustHavePayerBalanceForBothTransferFeeA final Stream cryptoTransferListShowsOnlyFeesAfterIAB() { final long PAYER_BALANCE = 1_000_000L; - return defaultHapiSpec("CryptoTransferListShowsOnlyFeesAfterIAB") - .given(cryptoCreate("payer").balance(PAYER_BALANCE)) - .when() - .then(cryptoTransfer(tinyBarsFromTo("payer", GENESIS, PAYER_BALANCE)) + return hapiTest( + cryptoCreate("payer").balance(PAYER_BALANCE), + cryptoTransfer(tinyBarsFromTo("payer", GENESIS, PAYER_BALANCE)) .payingWith("payer") .via("txn") .hasPrecheck(INSUFFICIENT_PAYER_BALANCE)); @@ -186,32 +181,29 @@ final Stream cryptoTransferListShowsOnlyFeesAfterIAB() { final Stream duplicatedTxnsSameTypeDetected() { long initialBalance = 10_000L; - return defaultHapiSpec("duplicatedTxnsSameTypeDetected") - .given( - cryptoCreate("acct1").balance(initialBalance).logged().via("txnId1"), - sleepFor(2000), - cryptoCreate("acctWithDuplicateTxnId") - .balance(initialBalance) - .logged() - .txnId("txnId1") - .hasPrecheck(DUPLICATE_TRANSACTION)) - .when() - .then(getTxnRecord("txnId1").logged()); + return hapiTest( + cryptoCreate("acct1").balance(initialBalance).logged().via("txnId1"), + sleepFor(2000), + cryptoCreate("acctWithDuplicateTxnId") + .balance(initialBalance) + .logged() + .txnId("txnId1") + .hasPrecheck(DUPLICATE_TRANSACTION), + getTxnRecord("txnId1").logged()); } @HapiTest final Stream duplicatedTxnsDifferentTypesDetected() { - return defaultHapiSpec("duplicatedTxnsDifferentTypesDetected") - .given( - cryptoCreate("acct2").via("txnId2"), - newKeyNamed("key1"), - createTopic("topic2").submitKeyName("key1")) - .when(submitMessageTo("topic2") + return hapiTest( + cryptoCreate("acct2").via("txnId2"), + newKeyNamed("key1"), + createTopic("topic2").submitKeyName("key1"), + submitMessageTo("topic2") .message("Hello world") .payingWith("acct2") .txnId("txnId2") - .hasPrecheck(DUPLICATE_TRANSACTION)) - .then(getTxnRecord("txnId2").logged()); + .hasPrecheck(DUPLICATE_TRANSACTION), + getTxnRecord("txnId2").logged()); } @HapiTest @@ -237,102 +229,95 @@ final Stream duplicatedTxnsSameTypeDifferentNodesDetected() { @HapiTest final Stream duplicatedTxnsDifferentTypesDifferentNodesDetected() { - return defaultHapiSpec("duplicatedTxnsDifferentTypesDifferentNodesDetected") - .given( - cryptoCreate("acct4").via("txnId4").setNode("0.0.3"), - newKeyNamed("key2"), - createTopic("topic2").setNode("0.0.5").submitKeyName("key2")) - .when(submitMessageTo("topic2") + return hapiTest( + cryptoCreate("acct4").via("txnId4").setNode("0.0.3"), + newKeyNamed("key2"), + createTopic("topic2").setNode("0.0.5").submitKeyName("key2"), + submitMessageTo("topic2") .message("Hello world") .payingWith("acct4") .txnId("txnId4") - .hasPrecheck(DUPLICATE_TRANSACTION)) - .then(getTxnRecord("txnId4").logged()); + .hasPrecheck(DUPLICATE_TRANSACTION), + getTxnRecord("txnId4").logged()); } @HapiTest final Stream keepsRecordOfPayerIBE() { final var payer = "payer"; - return defaultHapiSpec("KeepsRecordOfPayerIBE") - .given( - cryptoCreate(CIVILIAN_PAYER), - cryptoTransfer(tinyBarsFromTo(GENESIS, FUNDING, 1L)) - .payingWith(CIVILIAN_PAYER) - .via("referenceTxn"), 
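The keepsRecordOfPayerIBE conversion that continues below is one place where the flat ordering really matters: the withOpContext block that measures the reference fee must run before the cryptoCreate that sizes its balance from the registry. A compressed sketch of that hand-off in the new form; "underfundedPayer" is a hypothetical name, while the getTxnRecord, allRunFor, and registry calls are exactly the ones used in the hunk.

    @HapiTest
    final Stream<DynamicTest> registryHandOffSketch() {
        return hapiTest(
                cryptoCreate(CIVILIAN_PAYER),
                cryptoTransfer(tinyBarsFromTo(GENESIS, FUNDING, 1L))
                        .payingWith(CIVILIAN_PAYER)
                        .via("referenceTxn"),
                // measure the fee of the reference transfer and stash it in the spec registry
                withOpContext((spec, ctxLog) -> {
                    final var lookup = getTxnRecord("referenceTxn");
                    allRunFor(spec, lookup);
                    spec.registry().saveAmount("fee", lookup.getResponseRecord().getTransactionFee());
                }),
                // later ops read the stashed value lazily through a spec -> ... supplier,
                // so the flat ordering above is what guarantees the value exists by then
                cryptoCreate("underfundedPayer").balance(spec -> spec.registry().getAmount("fee")));
    }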
- UtilVerbs.withOpContext((spec, ctxLog) -> { - HapiGetTxnRecord subOp = getTxnRecord("referenceTxn"); - allRunFor(spec, subOp); - TransactionRecord record = subOp.getResponseRecord(); - long fee = record.getTransactionFee(); - spec.registry().saveAmount("fee", fee); - spec.registry().saveAmount("balance", fee * 2); - })) - .when(cryptoCreate(payer).balance(spec -> spec.registry().getAmount("balance"))) - .then( - UtilVerbs.inParallel( - cryptoTransfer(tinyBarsFromTo(payer, FUNDING, spec -> spec.registry() - .getAmount("fee"))) - .payingWith(payer) - .via("txnA") - .hasAnyKnownStatus(), - cryptoTransfer(tinyBarsFromTo(payer, FUNDING, spec -> spec.registry() - .getAmount("fee"))) - .payingWith(payer) - .via("txnB") - .hasAnyKnownStatus()), - getTxnRecord("txnA").logged(), - getTxnRecord("txnB").logged()); + return hapiTest( + cryptoCreate(CIVILIAN_PAYER), + cryptoTransfer(tinyBarsFromTo(GENESIS, FUNDING, 1L)) + .payingWith(CIVILIAN_PAYER) + .via("referenceTxn"), + UtilVerbs.withOpContext((spec, ctxLog) -> { + HapiGetTxnRecord subOp = getTxnRecord("referenceTxn"); + allRunFor(spec, subOp); + TransactionRecord record = subOp.getResponseRecord(); + long fee = record.getTransactionFee(); + spec.registry().saveAmount("fee", fee); + spec.registry().saveAmount("balance", fee * 2); + }), + cryptoCreate(payer).balance(spec -> spec.registry().getAmount("balance")), + UtilVerbs.inParallel( + cryptoTransfer(tinyBarsFromTo( + payer, FUNDING, spec -> spec.registry().getAmount("fee"))) + .payingWith(payer) + .via("txnA") + .hasAnyKnownStatus(), + cryptoTransfer(tinyBarsFromTo( + payer, FUNDING, spec -> spec.registry().getAmount("fee"))) + .payingWith(payer) + .via("txnB") + .hasAnyKnownStatus()), + getTxnRecord("txnA").logged(), + getTxnRecord("txnB").logged()); } @HapiTest final Stream tbdCanPayForItsOwnDeletion() { - return defaultHapiSpec("TbdCanPayForItsOwnDeletion") - .given(cryptoCreate("tbd"), cryptoCreate(TRANSFER)) - .when() - .then( - cryptoDelete("tbd") - .via("selfFinanced") - .payingWith("tbd") - .transfer(TRANSFER), - getTxnRecord("selfFinanced").logged()); + return hapiTest( + cryptoCreate("tbd"), + cryptoCreate(TRANSFER), + cryptoDelete("tbd").via("selfFinanced").payingWith("tbd").transfer(TRANSFER), + getTxnRecord("selfFinanced").logged()); } @HapiTest final Stream transferAccountCannotBeDeleted() { - return defaultHapiSpec("TransferAccountCannotBeDeleted") - .given(cryptoCreate(PAYER), cryptoCreate(TRANSFER), cryptoCreate("tbd")) - .when(cryptoDelete(TRANSFER)) - .then( - balanceSnapshot(SNAPSHOT, PAYER), - cryptoDelete("tbd") - .via(DELETE_TXN) - .payingWith(PAYER) - .transfer(TRANSFER) - .hasKnownStatus(ACCOUNT_DELETED), - getTxnRecord(DELETE_TXN).logged(), - getAccountBalance(PAYER).hasTinyBars(approxChangeFromSnapshot(SNAPSHOT, -9384399, 10000))); + return hapiTest( + cryptoCreate(PAYER), + cryptoCreate(TRANSFER), + cryptoCreate("tbd"), + cryptoDelete(TRANSFER), + balanceSnapshot(SNAPSHOT, PAYER), + cryptoDelete("tbd") + .via(DELETE_TXN) + .payingWith(PAYER) + .transfer(TRANSFER) + .hasKnownStatus(ACCOUNT_DELETED), + getTxnRecord(DELETE_TXN).logged(), + getAccountBalance(PAYER).hasTinyBars(approxChangeFromSnapshot(SNAPSHOT, -9384399, 10000))); } @HapiTest final Stream transferAccountCannotBeDeletedForContractTarget() { - return defaultHapiSpec("TransferAccountCannotBeDeletedForContractTarget") - .given( - uploadInitCode("CreateTrivial"), - uploadInitCode("PayReceivable"), - cryptoCreate(TRANSFER), - contractCreate("CreateTrivial"), - contractCreate("PayReceivable")) - 
.when(cryptoDelete(TRANSFER), contractDelete("PayReceivable")) - .then( - balanceSnapshot(SNAPSHOT, GENESIS), - contractDelete("CreateTrivial") - .via(DELETE_TXN) - .transferAccount(TRANSFER) - .hasKnownStatus(OBTAINER_DOES_NOT_EXIST), - contractDelete("CreateTrivial") - .via(DELETE_TXN) - .transferContract("PayReceivable") - .hasKnownStatus(INVALID_CONTRACT_ID)); + return hapiTest( + uploadInitCode("CreateTrivial"), + uploadInitCode("PayReceivable"), + cryptoCreate(TRANSFER), + contractCreate("CreateTrivial"), + contractCreate("PayReceivable"), + cryptoDelete(TRANSFER), + contractDelete("PayReceivable"), + balanceSnapshot(SNAPSHOT, GENESIS), + contractDelete("CreateTrivial") + .via(DELETE_TXN) + .transferAccount(TRANSFER) + .hasKnownStatus(OBTAINER_DOES_NOT_EXIST), + contractDelete("CreateTrivial") + .via(DELETE_TXN) + .transferContract("PayReceivable") + .hasKnownStatus(INVALID_CONTRACT_ID)); } @HapiTest @@ -340,30 +325,22 @@ final Stream multiKeyNonPayerEntityVerifiedAsync() { KeyShape LARGE_THRESH_SHAPE = KeyShape.threshOf(1, 10); SigControl firstOnly = LARGE_THRESH_SHAPE.signedWith(sigs(ON, OFF, OFF, OFF, OFF, OFF, OFF, OFF, OFF, OFF)); - return defaultHapiSpec("MultiKeyNonPayerEntityVerifiedAsync") - .given( - newKeyNamed("payerKey").shape(LARGE_THRESH_SHAPE), - newKeyNamed("receiverKey").shape(LARGE_THRESH_SHAPE), - cryptoCreate(PAYER).keyShape(LARGE_THRESH_SHAPE), - cryptoCreate(RECEIVER).keyShape(LARGE_THRESH_SHAPE).receiverSigRequired(true)) - .when() - .then(cryptoTransfer(tinyBarsFromTo(PAYER, RECEIVER, 1L)) + return hapiTest( + newKeyNamed("payerKey").shape(LARGE_THRESH_SHAPE), + newKeyNamed("receiverKey").shape(LARGE_THRESH_SHAPE), + cryptoCreate(PAYER).keyShape(LARGE_THRESH_SHAPE), + cryptoCreate(RECEIVER).keyShape(LARGE_THRESH_SHAPE).receiverSigRequired(true), + cryptoTransfer(tinyBarsFromTo(PAYER, RECEIVER, 1L)) .sigControl(forKey(PAYER, firstOnly), forKey(RECEIVER, firstOnly))); } @HapiTest final Stream discoversExpectedVersions() { - return defaultHapiSpec("discoversExpectedVersions") - .given() - .when() - .then(getVersionInfo().logged().hasNoDegenerateSemvers()); + return hapiTest(getVersionInfo().logged().hasNoDegenerateSemvers()); } @HapiTest final Stream idVariantsTreatedAsExpected() { - return defaultHapiSpec("idVariantsTreatedAsExpected") - .given() - .when() - .then(sendModified(withSuccessivelyVariedQueryIds(), QueryVerbs::getVersionInfo)); + return hapiTest(sendModified(withSuccessivelyVariedQueryIds(), QueryVerbs::getVersionInfo)); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/CannotDeleteSystemEntitiesSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/CannotDeleteSystemEntitiesSuite.java index a38749e5db21..d1d74fcebda7 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/CannotDeleteSystemEntitiesSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/CannotDeleteSystemEntitiesSuite.java @@ -16,7 +16,7 @@ package com.hedera.services.bdd.suites.misc; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoDelete; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; @@ -117,14 +117,12 @@ final Stream systemDeleteAdminCannotSystemFileDeleteFileIds() { } final 
Stream systemUserCannotDeleteSystemAccounts(int firstAccount, int lastAccount, String sysUser) { - return defaultHapiSpec("systemUserCannotDeleteSystemAccounts") - .given( - cryptoCreate("unluckyReceiver").balance(0L), - cryptoTransfer(movingHbar(100 * ONE_HUNDRED_HBARS) - .distributing(GENESIS, SYSTEM_ADMIN, SYSTEM_DELETE_ADMIN)) - .payingWith(GENESIS)) - .when() - .then(inParallel(IntStream.rangeClosed(firstAccount, lastAccount) + return hapiTest( + cryptoCreate("unluckyReceiver").balance(0L), + cryptoTransfer(movingHbar(100 * ONE_HUNDRED_HBARS) + .distributing(GENESIS, SYSTEM_ADMIN, SYSTEM_DELETE_ADMIN)) + .payingWith(GENESIS), + inParallel(IntStream.rangeClosed(firstAccount, lastAccount) .mapToObj(id -> cryptoDelete("0.0." + id) .transfer("unluckyReceiver") .payingWith(sysUser) @@ -134,10 +132,11 @@ final Stream systemUserCannotDeleteSystemAccounts(int firstAccount, } final Stream normalUserCannotDeleteSystemAccounts(int firstAccount, int lastAccount) { - return defaultHapiSpec("normalUserCannotDeleteSystemAccounts") - .given(newKeyNamed("normalKey"), cryptoCreate("unluckyReceiver").balance(0L)) - .when(cryptoCreate("normalUser").key("normalKey").balance(1_000_000_000L)) - .then(inParallel(IntStream.rangeClosed(firstAccount, lastAccount) + return hapiTest( + newKeyNamed("normalKey"), + cryptoCreate("unluckyReceiver").balance(0L), + cryptoCreate("normalUser").key("normalKey").balance(1_000_000_000L), + inParallel(IntStream.rangeClosed(firstAccount, lastAccount) .mapToObj(id -> cryptoDelete("0.0." + id) .transfer("unluckyReceiver") .payingWith("normalUser") @@ -147,12 +146,11 @@ final Stream normalUserCannotDeleteSystemAccounts(int firstAccount, } final Stream systemUserCannotDeleteSystemFiles(int[] fileIds, String sysUser) { - return defaultHapiSpec("systemUserCannotDeleteSystemFiles") - .given(cryptoTransfer(movingHbar(100 * ONE_HUNDRED_HBARS) + return hapiTest( + cryptoTransfer(movingHbar(100 * ONE_HUNDRED_HBARS) .distributing(GENESIS, SYSTEM_ADMIN, SYSTEM_DELETE_ADMIN)) - .payingWith(GENESIS)) - .when() - .then(inParallel(Arrays.stream(fileIds) + .payingWith(GENESIS), + inParallel(Arrays.stream(fileIds) .mapToObj(id -> cryptoDelete("0.0." + id) .payingWith(sysUser) .signedBy(sysUser) @@ -161,10 +159,10 @@ final Stream systemUserCannotDeleteSystemFiles(int[] fileIds, Strin } final Stream normalUserCannotDeleteSystemFiles(int[] fileIds) { - return defaultHapiSpec("normalUserCannotDeleteSystemFiles") - .given(newKeyNamed("normalKey")) - .when(cryptoCreate("normalUser").key("normalKey").balance(1_000_000_000L)) - .then(inParallel(Arrays.stream(fileIds) + return hapiTest( + newKeyNamed("normalKey"), + cryptoCreate("normalUser").key("normalKey").balance(1_000_000_000L), + inParallel(Arrays.stream(fileIds) .mapToObj(id -> fileDelete("0.0." + id) .payingWith("normalUser") .signedBy("normalKey") @@ -173,12 +171,11 @@ final Stream normalUserCannotDeleteSystemFiles(int[] fileIds) { } final Stream systemDeleteCannotDeleteSystemFiles(int[] fileIds, String sysUser) { - return defaultHapiSpec("systemDeleteCannotDeleteSystemFiles") - .given(cryptoTransfer(movingHbar(100 * ONE_HUNDRED_HBARS) + return hapiTest( + cryptoTransfer(movingHbar(100 * ONE_HUNDRED_HBARS) .distributing(GENESIS, SYSTEM_ADMIN, SYSTEM_DELETE_ADMIN)) - .payingWith(GENESIS)) - .when() - .then(inParallel(Arrays.stream(fileIds) + .payingWith(GENESIS), + inParallel(Arrays.stream(fileIds) .mapToObj(id -> systemFileDelete("0.0." 
+ id) .payingWith(sysUser) .signedBy(sysUser) diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/ConsensusQueriesStressTests.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/ConsensusQueriesStressTests.java index f3b4f3e6a15e..bbece3f56c74 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/ConsensusQueriesStressTests.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/ConsensusQueriesStressTests.java @@ -17,7 +17,7 @@ package com.hedera.services.bdd.suites.misc; import static com.hedera.services.bdd.junit.TestTags.NOT_REPEATABLE; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTopicInfo; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.createTopic; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.runWithProvider; @@ -44,20 +44,17 @@ @Tag(NOT_REPEATABLE) public class ConsensusQueriesStressTests { - private AtomicLong duration = new AtomicLong(10); - private AtomicReference unit = new AtomicReference<>(SECONDS); - private AtomicInteger maxOpsPerSec = new AtomicInteger(10); + private final AtomicLong duration = new AtomicLong(10); + private final AtomicReference unit = new AtomicReference<>(SECONDS); + private final AtomicInteger maxOpsPerSec = new AtomicInteger(10); @HapiTest final Stream getTopicInfoStress() { - return defaultHapiSpec("GetTopicInfoStress") - .given() - .when() - .then( - withOpContext((spec, opLog) -> configureFromCi(spec)), - runWithProvider(getTopicInfoFactory()) - .lasting(duration::get, unit::get) - .maxOpsPerSec(maxOpsPerSec::get)); + return hapiTest( + withOpContext((spec, opLog) -> configureFromCi(spec)), + runWithProvider(getTopicInfoFactory()) + .lasting(duration::get, unit::get) + .maxOpsPerSec(maxOpsPerSec::get)); } private Function getTopicInfoFactory() { diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/CryptoQueriesStressTests.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/CryptoQueriesStressTests.java index 4f979d4879f9..6a07ebf04139 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/CryptoQueriesStressTests.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/CryptoQueriesStressTests.java @@ -17,7 +17,7 @@ package com.hedera.services.bdd.suites.misc; import static com.hedera.services.bdd.junit.TestTags.NOT_REPEATABLE; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.assertions.AssertUtils.inOrder; import static com.hedera.services.bdd.spec.assertions.TransactionRecordAsserts.recordWith; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; @@ -51,44 +51,35 @@ @Tag(NOT_REPEATABLE) public class CryptoQueriesStressTests { - private AtomicLong duration = new AtomicLong(10); - private AtomicReference unit = new AtomicReference<>(SECONDS); - private AtomicInteger maxOpsPerSec = new AtomicInteger(10); + private final AtomicLong duration = new AtomicLong(10); + private final AtomicReference unit = new AtomicReference<>(SECONDS); + private final AtomicInteger maxOpsPerSec = new AtomicInteger(10); @HapiTest final Stream getAccountBalanceStress() { - return 
defaultHapiSpec("getAccountBalanceStress") - .given() - .when() - .then( - withOpContext((spec, opLog) -> configureFromCi(spec)), - runWithProvider(getAccountBalanceFactory()) - .lasting(duration::get, unit::get) - .maxOpsPerSec(maxOpsPerSec::get)); + return hapiTest( + withOpContext((spec, opLog) -> configureFromCi(spec)), + runWithProvider(getAccountBalanceFactory()) + .lasting(duration::get, unit::get) + .maxOpsPerSec(maxOpsPerSec::get)); } @HapiTest final Stream getAccountInfoStress() { - return defaultHapiSpec("getAccountInfoStress") - .given() - .when() - .then( - withOpContext((spec, opLog) -> configureFromCi(spec)), - runWithProvider(getAccountInfoFactory()) - .lasting(duration::get, unit::get) - .maxOpsPerSec(maxOpsPerSec::get)); + return hapiTest( + withOpContext((spec, opLog) -> configureFromCi(spec)), + runWithProvider(getAccountInfoFactory()) + .lasting(duration::get, unit::get) + .maxOpsPerSec(maxOpsPerSec::get)); } @HapiTest final Stream getAccountRecordsStress() { - return defaultHapiSpec("getAccountRecordsStress") - .given() - .when() - .then( - withOpContext((spec, opLog) -> configureFromCi(spec)), - runWithProvider(getAccountRecordsFactory()) - .lasting(duration::get, unit::get) - .maxOpsPerSec(maxOpsPerSec::get)); + return hapiTest( + withOpContext((spec, opLog) -> configureFromCi(spec)), + runWithProvider(getAccountRecordsFactory()) + .lasting(duration::get, unit::get) + .maxOpsPerSec(maxOpsPerSec::get)); } private Function getAccountRecordsFactory() { diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/FileQueriesStressTests.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/FileQueriesStressTests.java index b62348829f81..edaf044af3e8 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/FileQueriesStressTests.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/FileQueriesStressTests.java @@ -17,7 +17,7 @@ package com.hedera.services.bdd.suites.misc; import static com.hedera.services.bdd.junit.TestTags.NOT_REPEATABLE; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getFileContents; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getFileInfo; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.fileCreate; @@ -45,32 +45,26 @@ @Tag(NOT_REPEATABLE) public class FileQueriesStressTests { - private AtomicLong duration = new AtomicLong(10); - private AtomicReference unit = new AtomicReference<>(SECONDS); - private AtomicInteger maxOpsPerSec = new AtomicInteger(10); + private final AtomicLong duration = new AtomicLong(10); + private final AtomicReference unit = new AtomicReference<>(SECONDS); + private final AtomicInteger maxOpsPerSec = new AtomicInteger(10); @HapiTest final Stream getFileContentsStress() { - return defaultHapiSpec("getFileContentsStress") - .given() - .when() - .then( - withOpContext((spec, opLog) -> configureFromCi(spec)), - runWithProvider(getFileContentsFactory()) - .lasting(duration::get, unit::get) - .maxOpsPerSec(maxOpsPerSec::get)); + return hapiTest( + withOpContext((spec, opLog) -> configureFromCi(spec)), + runWithProvider(getFileContentsFactory()) + .lasting(duration::get, unit::get) + .maxOpsPerSec(maxOpsPerSec::get)); } @HapiTest final Stream getFileInfoStress() { - return defaultHapiSpec("getFileInfoStress") - .given() - .when() - .then( - 
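The *QueriesStressTests and PerpetualTransfers conversions in this patch all collapse to the same two-op body, so one sketch covers them. The generic parameters on the atomics and on the return type (TimeUnit, DynamicTest) are restored below by the suites' usual convention, since the flattened diff text drops them; marking the fields final is safe because they are only ever updated through set(...) on the atomics, never reassigned.

    private final AtomicLong duration = new AtomicLong(10);
    private final AtomicReference<TimeUnit> unit = new AtomicReference<>(SECONDS);
    private final AtomicInteger maxOpsPerSec = new AtomicInteger(10);

    @HapiTest
    final Stream<DynamicTest> queryStressSketch() {
        return hapiTest(
                // configureFromCi presumably copies any CI-provided duration/unit/rate into the atomics
                withOpContext((spec, opLog) -> configureFromCi(spec)),
                // then exercise the query provider for the configured window at the configured rate
                runWithProvider(getFileInfoFactory())
                        .lasting(duration::get, unit::get)
                        .maxOpsPerSec(maxOpsPerSec::get));
    }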
withOpContext((spec, opLog) -> configureFromCi(spec)), - runWithProvider(getFileInfoFactory()) - .lasting(duration::get, unit::get) - .maxOpsPerSec(maxOpsPerSec::get)); + return hapiTest( + withOpContext((spec, opLog) -> configureFromCi(spec)), + runWithProvider(getFileInfoFactory()) + .lasting(duration::get, unit::get) + .maxOpsPerSec(maxOpsPerSec::get)); } private Function getFileContentsFactory() { diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/InvalidgRPCValuesTest.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/InvalidgRPCValuesTest.java index 79c39fab4904..65ef5b044029 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/InvalidgRPCValuesTest.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/InvalidgRPCValuesTest.java @@ -16,7 +16,7 @@ package com.hedera.services.bdd.suites.misc; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountBalance; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getAccountInfo; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTokenInfo; @@ -52,65 +52,58 @@ public class InvalidgRPCValuesTest { final Stream invalidIdCheck() { final long MAX_NUM_ALLOWED = 0xFFFFFFFFL; final String invalidMaxId = MAX_NUM_ALLOWED + 1 + ".2.3"; - return defaultHapiSpec("TransferWithInvalidAccount") - .given() - .when() - .then( - // sample queries - getAccountBalance(invalidMaxId).hasAnswerOnlyPrecheck(INVALID_ACCOUNT_ID), - getAccountInfo(invalidMaxId).hasCostAnswerPrecheck(INVALID_ACCOUNT_ID), - getTopicInfo(invalidMaxId).hasCostAnswerPrecheck(INVALID_TOPIC_ID), - getTokenInfo(invalidMaxId).hasCostAnswerPrecheck(INVALID_TOKEN_ID), + return hapiTest( + // sample queries + getAccountBalance(invalidMaxId).hasAnswerOnlyPrecheck(INVALID_ACCOUNT_ID), + getAccountInfo(invalidMaxId).hasCostAnswerPrecheck(INVALID_ACCOUNT_ID), + getTopicInfo(invalidMaxId).hasCostAnswerPrecheck(INVALID_TOPIC_ID), + getTokenInfo(invalidMaxId).hasCostAnswerPrecheck(INVALID_TOKEN_ID), - // sample transactions - scheduleSign(invalidMaxId).hasKnownStatus(INVALID_SCHEDULE_ID), - scheduleDelete(invalidMaxId).hasKnownStatus(INVALID_SCHEDULE_ID)); + // sample transactions + scheduleSign(invalidMaxId).hasKnownStatus(INVALID_SCHEDULE_ID), + scheduleDelete(invalidMaxId).hasKnownStatus(INVALID_SCHEDULE_ID)); } @HapiTest final Stream transactionsWithOnlySigMap() { final var contract = "BalanceLookup"; - return defaultHapiSpec("TransactionsWithOnlySigMap") - .given( - cryptoTransfer(tinyBarsFromTo(GENESIS, SYSTEM_ADMIN, 1L)) - .via(FAILED_CRYPTO_TRANSACTION) - .asTxnWithOnlySigMap() - .hasPrecheck(INVALID_TRANSACTION_BODY), - uploadInitCode(contract), - fileUpdate(contract) - .via("failedFileTransaction") - .asTxnWithOnlySigMap() - .hasPrecheck(INVALID_TRANSACTION_BODY)) - .when(contractCreate(contract) + return hapiTest( + cryptoTransfer(tinyBarsFromTo(GENESIS, SYSTEM_ADMIN, 1L)) + .via(FAILED_CRYPTO_TRANSACTION) + .asTxnWithOnlySigMap() + .hasPrecheck(INVALID_TRANSACTION_BODY), + uploadInitCode(contract), + fileUpdate(contract) + .via("failedFileTransaction") + .asTxnWithOnlySigMap() + .hasPrecheck(INVALID_TRANSACTION_BODY), + contractCreate(contract) .balance(1_000L) .via("failedContractTransaction") .asTxnWithOnlySigMap() - .hasPrecheck(INVALID_TRANSACTION_BODY)) - .then( - 
getTxnRecord(FAILED_CRYPTO_TRANSACTION).hasCostAnswerPrecheck(INVALID_ACCOUNT_ID), - getTxnRecord("failedFileTransaction").hasCostAnswerPrecheck(INVALID_ACCOUNT_ID), - getTxnRecord("failedContractTransaction").hasCostAnswerPrecheck(INVALID_ACCOUNT_ID)); + .hasPrecheck(INVALID_TRANSACTION_BODY), + getTxnRecord(FAILED_CRYPTO_TRANSACTION).hasCostAnswerPrecheck(INVALID_ACCOUNT_ID), + getTxnRecord("failedFileTransaction").hasCostAnswerPrecheck(INVALID_ACCOUNT_ID), + getTxnRecord("failedContractTransaction").hasCostAnswerPrecheck(INVALID_ACCOUNT_ID)); } @HapiTest final Stream transactionsWithSignedTxnBytesAndSigMap() { - return defaultHapiSpec("TransactionsWithSignedTxnBytesAndSigMap") - .given() - .when(createTopic("testTopic") + return hapiTest( + createTopic("testTopic") .via("failedConsensusTransaction") .asTxnWithSignedTxnBytesAndSigMap() - .hasPrecheck(INVALID_TRANSACTION)) - .then(getTxnRecord("failedConsensusTransaction").hasAnswerOnlyPrecheck(RECORD_NOT_FOUND)); + .hasPrecheck(INVALID_TRANSACTION), + getTxnRecord("failedConsensusTransaction").hasAnswerOnlyPrecheck(RECORD_NOT_FOUND)); } @HapiTest final Stream transactionsWithSignedTxnBytesAndBodyBytes() { - return defaultHapiSpec("TransactionsWithSignedTxnBytesAndBodyBytes") - .given() - .when(cryptoCreate("testAccount") + return hapiTest( + cryptoCreate("testAccount") .via(FAILED_CRYPTO_TRANSACTION) .asTxnWithSignedTxnBytesAndBodyBytes() - .hasPrecheck(INVALID_TRANSACTION)) - .then(getTxnRecord(FAILED_CRYPTO_TRANSACTION).hasAnswerOnlyPrecheck(RECORD_NOT_FOUND)); + .hasPrecheck(INVALID_TRANSACTION), + getTxnRecord(FAILED_CRYPTO_TRANSACTION).hasAnswerOnlyPrecheck(RECORD_NOT_FOUND)); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/PerpetualTransfers.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/PerpetualTransfers.java index 3c3751b01b08..50a233f8aae2 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/PerpetualTransfers.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/misc/PerpetualTransfers.java @@ -17,7 +17,7 @@ package com.hedera.services.bdd.suites.misc; import static com.hedera.services.bdd.junit.TestTags.NOT_REPEATABLE; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; import static com.hedera.services.bdd.spec.transactions.crypto.HapiCryptoTransfer.tinyBarsFromTo; @@ -43,18 +43,15 @@ @Tag(NOT_REPEATABLE) public class PerpetualTransfers { - private AtomicLong duration = new AtomicLong(30); - private AtomicReference unit = new AtomicReference<>(SECONDS); - private AtomicInteger maxOpsPerSec = new AtomicInteger(500); + private final AtomicLong duration = new AtomicLong(30); + private final AtomicReference unit = new AtomicReference<>(SECONDS); + private final AtomicInteger maxOpsPerSec = new AtomicInteger(500); @HapiTest final Stream canTransferBackAndForthForever() { - return defaultHapiSpec("CanTransferBackAndForthForever") - .given() - .when() - .then(runWithProvider(transfersFactory()) - .lasting(duration::get, unit::get) - .maxOpsPerSec(maxOpsPerSec::get)); + return hapiTest(runWithProvider(transfersFactory()) + .lasting(duration::get, unit::get) + .maxOpsPerSec(maxOpsPerSec::get)); } private Function transfersFactory() { diff --git 
a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/ContractRecordsSanityCheckSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/ContractRecordsSanityCheckSuite.java index dafcfd26c89c..1bf914f0b48b 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/ContractRecordsSanityCheckSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/ContractRecordsSanityCheckSuite.java @@ -18,7 +18,7 @@ import static com.hedera.services.bdd.junit.ContextRequirement.SYSTEM_ACCOUNT_BALANCES; import static com.hedera.services.bdd.junit.TestTags.SMART_CONTRACT; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCall; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCallWithFunctionAbi; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.contractCallWithTuple; @@ -62,51 +62,42 @@ public class ContractRecordsSanityCheckSuite { @LeakyHapiTest(requirement = SYSTEM_ACCOUNT_BALANCES) final Stream contractDeleteRecordSanityChecks() { - return defaultHapiSpec("ContractDeleteRecordSanityChecks") - .given(flattened( - uploadInitCode(BALANCE_LOOKUP), - contractCreate(BALANCE_LOOKUP).balance(1_000L), - takeBalanceSnapshots( - BALANCE_LOOKUP, FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER))) - .when(contractDelete(BALANCE_LOOKUP).via("txn").transferAccount(DEFAULT_PAYER)) - .then( - validateTransferListForBalances( - "txn", - List.of(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER, BALANCE_LOOKUP), - Set.of(BALANCE_LOOKUP)), - validateRecordTransactionFees("txn")); + return hapiTest(flattened( + uploadInitCode(BALANCE_LOOKUP), + contractCreate(BALANCE_LOOKUP).balance(1_000L), + takeBalanceSnapshots(BALANCE_LOOKUP, FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER), + contractDelete(BALANCE_LOOKUP).via("txn").transferAccount(DEFAULT_PAYER), + validateTransferListForBalances( + "txn", + List.of(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER, BALANCE_LOOKUP), + Set.of(BALANCE_LOOKUP)), + validateRecordTransactionFees("txn"))); } @LeakyHapiTest(requirement = SYSTEM_ACCOUNT_BALANCES) final Stream contractCreateRecordSanityChecks() { - return defaultHapiSpec("ContractCreateRecordSanityChecks") - .given(flattened( - uploadInitCode(BALANCE_LOOKUP), - takeBalanceSnapshots(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER))) - .when(contractCreate(BALANCE_LOOKUP).balance(1_000L).via("txn")) - .then( - validateTransferListForBalances( - "txn", - List.of(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER, BALANCE_LOOKUP)), - validateRecordTransactionFees("txn")); + return hapiTest(flattened( + uploadInitCode(BALANCE_LOOKUP), + takeBalanceSnapshots(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER), + contractCreate(BALANCE_LOOKUP).balance(1_000L).via("txn"), + validateTransferListForBalances( + "txn", List.of(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER, BALANCE_LOOKUP)), + validateRecordTransactionFees("txn"))); } @LeakyHapiTest(requirement = SYSTEM_ACCOUNT_BALANCES) final Stream contractCallWithSendRecordSanityChecks() { - return defaultHapiSpec("ContractCallWithSendRecordSanityChecks") - .given(flattened( - uploadInitCode(PAYABLE_CONTRACT), - contractCreate(PAYABLE_CONTRACT), - UtilVerbs.takeBalanceSnapshots( - 
PAYABLE_CONTRACT, FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER))) - .when(contractCall(PAYABLE_CONTRACT, "deposit", BigInteger.valueOf(1_000L)) + return hapiTest(flattened( + uploadInitCode(PAYABLE_CONTRACT), + contractCreate(PAYABLE_CONTRACT), + UtilVerbs.takeBalanceSnapshots( + PAYABLE_CONTRACT, FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER), + contractCall(PAYABLE_CONTRACT, "deposit", BigInteger.valueOf(1_000L)) .via("txn") - .sending(1_000L)) - .then( - validateTransferListForBalances( - "txn", - List.of(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER, PAYABLE_CONTRACT)), - validateRecordTransactionFees("txn")); + .sending(1_000L), + validateTransferListForBalances( + "txn", List.of(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER, PAYABLE_CONTRACT)), + validateRecordTransactionFees("txn"))); } @LeakyHapiTest(requirement = SYSTEM_ACCOUNT_BALANCES) @@ -122,81 +113,76 @@ final Stream circularTransfersRecordSanityChecks() { .mapToObj(i -> String.format("Altruist%s", (char) ('A' + i))) .toArray(String[]::new); - return defaultHapiSpec("CircularTransfersRecordSanityChecks") - .given(flattened( - uploadInitCode(contractName), - Stream.of(altruists) - .map(suffix -> createDefaultContract(contractName + suffix) - .bytecode(contractName)) - .toArray(HapiSpecOperation[]::new), - Stream.of(altruists) - .map(suffix -> contractCallWithTuple( - contractName + suffix, - SET_NODES_ABI, - spec -> Tuple.singleton(Stream.of(altruists) - .map(a -> BigInteger.valueOf(spec.registry() - .getContractId(contractName + a) - .getContractNum())) - .toArray(BigInteger[]::new))) - .gas(120_000) - .via("txnFor" + contractName + suffix) - .sending(initBalanceFn.applyAsLong(contractName + suffix))) - .toArray(HapiSpecOperation[]::new), - UtilVerbs.takeBalanceSnapshots(Stream.of( - Stream.of(altruists).map(suffix -> contractName + suffix), - Stream.of(canonicalAccounts)) - .flatMap(identity()) - .toArray(String[]::new)))) - .when(contractCallWithFunctionAbi( + return hapiTest(flattened( + uploadInitCode(contractName), + Stream.of(altruists) + .map(suffix -> + createDefaultContract(contractName + suffix).bytecode(contractName)) + .toArray(HapiSpecOperation[]::new), + Stream.of(altruists) + .map(suffix -> contractCallWithTuple( + contractName + suffix, + SET_NODES_ABI, + spec -> Tuple.singleton(Stream.of(altruists) + .map(a -> BigInteger.valueOf(spec.registry() + .getContractId(contractName + a) + .getContractNum())) + .toArray(BigInteger[]::new))) + .gas(120_000) + .via("txnFor" + contractName + suffix) + .sending(initBalanceFn.applyAsLong(contractName + suffix))) + .toArray(HapiSpecOperation[]::new), + UtilVerbs.takeBalanceSnapshots(Stream.of( + Stream.of(altruists).map(suffix -> contractName + suffix), Stream.of(canonicalAccounts)) + .flatMap(identity()) + .toArray(String[]::new)), + contractCallWithFunctionAbi( contractName + altruists[0], RECEIVE_AND_SEND_ABI, initKeepAmountDivisor, stopBalance) - .via(ALTRUISTIC_TXN)) - .then( - validateTransferListForBalances( - ALTRUISTIC_TXN, - Stream.concat( - Stream.of(canonicalAccounts), - Stream.of(altruists).map(suffix -> contractName + suffix)) - .toList()), - validateRecordTransactionFees(ALTRUISTIC_TXN), - addLogInfo((spec, infoLog) -> { - long[] finalBalances = IntStream.range(0, numAltruists) - .mapToLong(ignore -> initBalanceFn.applyAsLong("")) - .toArray(); - int i = 0; - long divisor = initKeepAmountDivisor; - while (true) { - long toKeep = finalBalances[i] / divisor; - if (toKeep < stopBalance.longValue()) { - break; - } 
- int j = (i + 1) % numAltruists; - finalBalances[j] += (finalBalances[i] - toKeep); - finalBalances[i] = toKeep; - i = j; - divisor++; - } + .via(ALTRUISTIC_TXN), + validateTransferListForBalances( + ALTRUISTIC_TXN, + Stream.concat( + Stream.of(canonicalAccounts), + Stream.of(altruists).map(suffix -> contractName + suffix)) + .toList()), + validateRecordTransactionFees(ALTRUISTIC_TXN), + addLogInfo((spec, infoLog) -> { + long[] finalBalances = IntStream.range(0, numAltruists) + .mapToLong(ignore -> initBalanceFn.applyAsLong("")) + .toArray(); + int i = 0; + long divisor = initKeepAmountDivisor; + while (true) { + long toKeep = finalBalances[i] / divisor; + if (toKeep < stopBalance.longValue()) { + break; + } + int j = (i + 1) % numAltruists; + finalBalances[j] += (finalBalances[i] - toKeep); + finalBalances[i] = toKeep; + i = j; + divisor++; + } - infoLog.info("Expected Final Balances"); - infoLog.info("-----------------------"); - for (i = 0; i < numAltruists; i++) { - infoLog.info(" {} = {} tinyBars", i, finalBalances[i]); - } - })); + infoLog.info("Expected Final Balances"); + infoLog.info("-----------------------"); + for (i = 0; i < numAltruists; i++) { + infoLog.info(" {} = {} tinyBars", i, finalBalances[i]); + } + }))); } @LeakyHapiTest(requirement = SYSTEM_ACCOUNT_BALANCES) final Stream contractUpdateRecordSanityChecks() { - return defaultHapiSpec("ContractUpdateRecordSanityChecks") - .given(flattened( - newKeyNamed("newKey").type(KeyFactory.KeyType.SIMPLE), - uploadInitCode(BALANCE_LOOKUP), - contractCreate(BALANCE_LOOKUP).balance(1_000L), - takeBalanceSnapshots(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER))) - .when(contractUpdate(BALANCE_LOOKUP).newKey("newKey").via("txn").fee(95_000_000L)) - .then( - validateTransferListForBalances( - "txn", List.of(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER)), - validateRecordTransactionFees("txn")); + return hapiTest(flattened( + newKeyNamed("newKey").type(KeyFactory.KeyType.SIMPLE), + uploadInitCode(BALANCE_LOOKUP), + contractCreate(BALANCE_LOOKUP).balance(1_000L), + takeBalanceSnapshots(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER), + contractUpdate(BALANCE_LOOKUP).newKey("newKey").via("txn").fee(95_000_000L), + validateTransferListForBalances( + "txn", List.of(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER)), + validateRecordTransactionFees("txn"))); } private static final String SET_NODES_ABI = diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/DuplicateManagementTest.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/DuplicateManagementTest.java index 32f4be85e9ff..11b8d5e26f2f 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/DuplicateManagementTest.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/DuplicateManagementTest.java @@ -21,7 +21,6 @@ import static com.hedera.services.bdd.junit.EmbeddedReason.MUST_SKIP_INGEST; import static com.hedera.services.bdd.junit.hedera.NodeSelector.byNodeId; import static com.hedera.services.bdd.junit.hedera.embedded.SyntheticVersion.PAST; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.assertions.AccountInfoAsserts.reducedFromSnapshot; import static com.hedera.services.bdd.spec.assertions.AssertUtils.inOrder; @@ -79,62 +78,54 @@ public class DuplicateManagementTest { 
@HapiTest @SuppressWarnings("java:S5960") final Stream hasExpectedDuplicates() { - return defaultHapiSpec("HasExpectedDuplicates") - .given( - cryptoCreate(CIVILIAN).balance(ONE_HUNDRED_HBARS), - usableTxnIdNamed(TXN_ID).payerId(CIVILIAN)) - .when( - uncheckedSubmit(cryptoCreate(REPEATED) - .payingWith(CIVILIAN) - .txnId(TXN_ID)) - .payingWith(CIVILIAN) - .fee(ONE_HBAR) - .hasPrecheckFrom(NOT_SUPPORTED, BUSY), - uncheckedSubmit( - cryptoCreate(REPEATED).payingWith(CIVILIAN).txnId(TXN_ID)), - uncheckedSubmit( - cryptoCreate(REPEATED).payingWith(CIVILIAN).txnId(TXN_ID)), - uncheckedSubmit( - cryptoCreate(REPEATED).payingWith(CIVILIAN).txnId(TXN_ID)), - sleepFor(MS_TO_WAIT_FOR_CONSENSUS)) - .then( - getReceipt(TXN_ID) - .andAnyDuplicates() - .payingWith(CIVILIAN) - .hasPriorityStatus(SUCCESS) - .hasDuplicateStatuses(DUPLICATE_TRANSACTION, DUPLICATE_TRANSACTION), - getTxnRecord(TXN_ID) - .payingWith(CIVILIAN) - .via("cheapTxn") - .assertingNothingAboutHashes() - .hasPriority(recordWith().status(SUCCESS)), - getTxnRecord(TXN_ID) - .andAnyDuplicates() - .payingWith(CIVILIAN) - .via("costlyTxn") - .assertingNothingAboutHashes() - .hasPriority(recordWith().status(SUCCESS)) - .hasDuplicates(inOrder( - recordWith().status(DUPLICATE_TRANSACTION), - recordWith().status(DUPLICATE_TRANSACTION))), - sleepFor(MS_TO_WAIT_FOR_CONSENSUS), - withOpContext((spec, opLog) -> { - var cheapGet = getTxnRecord("cheapTxn").assertingNothingAboutHashes(); - var costlyGet = getTxnRecord("costlyTxn").assertingNothingAboutHashes(); - allRunFor(spec, cheapGet, costlyGet); - var cheapRecord = cheapGet.getResponseRecord(); - var costlyRecord = costlyGet.getResponseRecord(); - opLog.info("cheapRecord: {}", cheapRecord); - opLog.info("costlyRecord: {}", costlyRecord); - var cheapPrice = getNonFeeDeduction(cheapRecord).orElse(0); - var costlyPrice = getNonFeeDeduction(costlyRecord).orElse(0); - assertEquals( - 3 * cheapPrice - 1, - costlyPrice, - String.format( - "Costly (%d) should be 3x more expensive than" + " cheap (%d)!", - costlyPrice, cheapPrice)); - })); + return hapiTest( + cryptoCreate(CIVILIAN).balance(ONE_HUNDRED_HBARS), + usableTxnIdNamed(TXN_ID).payerId(CIVILIAN), + uncheckedSubmit(cryptoCreate(REPEATED).payingWith(CIVILIAN).txnId(TXN_ID)) + .payingWith(CIVILIAN) + .fee(ONE_HBAR) + .hasPrecheckFrom(NOT_SUPPORTED, BUSY), + uncheckedSubmit(cryptoCreate(REPEATED).payingWith(CIVILIAN).txnId(TXN_ID)), + uncheckedSubmit(cryptoCreate(REPEATED).payingWith(CIVILIAN).txnId(TXN_ID)), + uncheckedSubmit(cryptoCreate(REPEATED).payingWith(CIVILIAN).txnId(TXN_ID)), + sleepFor(MS_TO_WAIT_FOR_CONSENSUS), + getReceipt(TXN_ID) + .andAnyDuplicates() + .payingWith(CIVILIAN) + .hasPriorityStatus(SUCCESS) + .hasDuplicateStatuses(DUPLICATE_TRANSACTION, DUPLICATE_TRANSACTION), + getTxnRecord(TXN_ID) + .payingWith(CIVILIAN) + .via("cheapTxn") + .assertingNothingAboutHashes() + .hasPriority(recordWith().status(SUCCESS)), + getTxnRecord(TXN_ID) + .andAnyDuplicates() + .payingWith(CIVILIAN) + .via("costlyTxn") + .assertingNothingAboutHashes() + .hasPriority(recordWith().status(SUCCESS)) + .hasDuplicates(inOrder( + recordWith().status(DUPLICATE_TRANSACTION), + recordWith().status(DUPLICATE_TRANSACTION))), + sleepFor(MS_TO_WAIT_FOR_CONSENSUS), + withOpContext((spec, opLog) -> { + var cheapGet = getTxnRecord("cheapTxn").assertingNothingAboutHashes(); + var costlyGet = getTxnRecord("costlyTxn").assertingNothingAboutHashes(); + allRunFor(spec, cheapGet, costlyGet); + var cheapRecord = cheapGet.getResponseRecord(); + var costlyRecord = 
costlyGet.getResponseRecord(); + opLog.info("cheapRecord: {}", cheapRecord); + opLog.info("costlyRecord: {}", costlyRecord); + var cheapPrice = getNonFeeDeduction(cheapRecord).orElse(0); + var costlyPrice = getNonFeeDeduction(costlyRecord).orElse(0); + assertEquals( + 3 * cheapPrice - 1, + costlyPrice, + String.format( + "Costly (%d) should be 3x more expensive than" + " cheap (%d)!", + costlyPrice, cheapPrice)); + })); } @EmbeddedHapiTest(MANIPULATES_EVENT_VERSION) @@ -213,55 +204,45 @@ final Stream payerSolvencyStillCheckedEvenForDuplicateTransaction() @HapiTest final Stream usesUnclassifiableIfNoClassifiableAvailable() { - return defaultHapiSpec("UsesUnclassifiableIfNoClassifiableAvailable") - .given( - newKeyNamed("wrongKey"), - cryptoCreate(CIVILIAN), - usableTxnIdNamed(TXN_ID).payerId(CIVILIAN), - cryptoTransfer(tinyBarsFromTo(GENESIS, TO, ONE_HBAR))) - .when( - uncheckedSubmit(cryptoCreate("nope") - .payingWith(CIVILIAN) - .txnId(TXN_ID) - .signedBy("wrongKey")), - sleepFor(MS_TO_WAIT_FOR_CONSENSUS)) - .then( - getReceipt(TXN_ID).hasPriorityStatus(INVALID_PAYER_SIGNATURE), - getTxnRecord(TXN_ID) - .assertingNothingAboutHashes() - .hasPriority(recordWith() - .status(INVALID_PAYER_SIGNATURE) - .transfers(includingDeduction("node payment", TO)))); + return hapiTest( + newKeyNamed("wrongKey"), + cryptoCreate(CIVILIAN), + usableTxnIdNamed(TXN_ID).payerId(CIVILIAN), + cryptoTransfer(tinyBarsFromTo(GENESIS, TO, ONE_HBAR)), + uncheckedSubmit( + cryptoCreate("nope").payingWith(CIVILIAN).txnId(TXN_ID).signedBy("wrongKey")), + sleepFor(MS_TO_WAIT_FOR_CONSENSUS), + getReceipt(TXN_ID).hasPriorityStatus(INVALID_PAYER_SIGNATURE), + getTxnRecord(TXN_ID) + .assertingNothingAboutHashes() + .hasPriority(recordWith() + .status(INVALID_PAYER_SIGNATURE) + .transfers(includingDeduction("node payment", TO)))); } @HapiTest final Stream classifiableTakesPriorityOverUnclassifiable() { - return defaultHapiSpec("ClassifiableTakesPriorityOverUnclassifiable") - .given( - cryptoCreate(CIVILIAN).balance(100 * 100_000_000L), - usableTxnIdNamed(TXN_ID).payerId(CIVILIAN), - cryptoTransfer(tinyBarsFromTo(GENESIS, TO, 100_000_000L))) - .when( - uncheckedSubmit(cryptoCreate("nope") - .txnId(TXN_ID) - .payingWith(CIVILIAN) - .setNode("0.0.4")) - .logged(), - uncheckedSubmit(cryptoCreate("sure") + return hapiTest( + cryptoCreate(CIVILIAN).balance(100 * 100_000_000L), + usableTxnIdNamed(TXN_ID).payerId(CIVILIAN), + cryptoTransfer(tinyBarsFromTo(GENESIS, TO, 100_000_000L)), + uncheckedSubmit(cryptoCreate("nope") .txnId(TXN_ID) .payingWith(CIVILIAN) - .setNode(TO)), - sleepFor(MS_TO_WAIT_FOR_CONSENSUS)) - .then( - getReceipt(TXN_ID) - .andAnyDuplicates() - .logged() - .hasPriorityStatus(SUCCESS) - .hasDuplicateStatuses(INVALID_NODE_ACCOUNT), - getTxnRecord(TXN_ID) - .assertingNothingAboutHashes() - .andAnyDuplicates() - .hasPriority(recordWith().status(SUCCESS)) - .hasDuplicates(inOrder(recordWith().status(INVALID_NODE_ACCOUNT)))); + .setNode("0.0.4")) + .logged(), + uncheckedSubmit( + cryptoCreate("sure").txnId(TXN_ID).payingWith(CIVILIAN).setNode(TO)), + sleepFor(MS_TO_WAIT_FOR_CONSENSUS), + getReceipt(TXN_ID) + .andAnyDuplicates() + .logged() + .hasPriorityStatus(SUCCESS) + .hasDuplicateStatuses(INVALID_NODE_ACCOUNT), + getTxnRecord(TXN_ID) + .assertingNothingAboutHashes() + .andAnyDuplicates() + .hasPriority(recordWith().status(SUCCESS)) + .hasDuplicates(inOrder(recordWith().status(INVALID_NODE_ACCOUNT)))); } } diff --git 
a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/FileRecordsSanityCheckSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/FileRecordsSanityCheckSuite.java index b7a2992ffb60..7b6a245a131e 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/FileRecordsSanityCheckSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/FileRecordsSanityCheckSuite.java @@ -16,7 +16,7 @@ package com.hedera.services.bdd.suites.records; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.queries.QueryVerbs.getFileInfo; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.fileAppend; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.fileCreate; @@ -42,55 +42,48 @@ public class FileRecordsSanityCheckSuite { @HapiTest final Stream fileAppendRecordSanityChecks() { - return defaultHapiSpec("FileAppendRecordSanityChecks") - .given(flattened( - fileCreate("test"), - takeBalanceSnapshots(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER))) - .when(fileAppend("test").via("txn").fee(95_000_000L)) - .then( - validateTransferListForBalances( - "txn", List.of(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER)), - validateRecordTransactionFees("txn")); + return hapiTest(flattened( + fileCreate("test"), + takeBalanceSnapshots(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER), + fileAppend("test").via("txn").fee(95_000_000L), + validateTransferListForBalances( + "txn", List.of(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER)), + validateRecordTransactionFees("txn"))); } @HapiTest final Stream fileCreateRecordSanityChecks() { - return defaultHapiSpec("FileCreateRecordSanityChecks") - .given(takeBalanceSnapshots(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER)) - .when(fileCreate("test").via("txn")) - .then( - validateTransferListForBalances( - "txn", List.of(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER)), - validateRecordTransactionFees("txn")); + return hapiTest(flattened( + takeBalanceSnapshots(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER), + fileCreate("test").via("txn"), + validateTransferListForBalances( + "txn", List.of(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER)), + validateRecordTransactionFees("txn"))); } @HapiTest final Stream fileDeleteRecordSanityChecks() { - return defaultHapiSpec("FileDeleteRecordSanityChecks") - .given(flattened( - fileCreate("test"), - takeBalanceSnapshots(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER))) - .when(fileDelete("test").via("txn")) - .then( - validateTransferListForBalances( - "txn", List.of(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER)), - validateRecordTransactionFees("txn")); + return hapiTest(flattened( + fileCreate("test"), + takeBalanceSnapshots(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER), + fileDelete("test").via("txn"), + validateTransferListForBalances( + "txn", List.of(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER)), + validateRecordTransactionFees("txn"))); } @HapiTest final Stream fileUpdateRecordSanityChecks() { - return defaultHapiSpec("FileUpdateRecordSanityChecks") - .given(flattened( - fileCreate("test"), - takeBalanceSnapshots(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER))) - .when(fileUpdate("test") + return 
hapiTest(flattened( + fileCreate("test"), + takeBalanceSnapshots(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER), + fileUpdate("test") .contents("Here are some new contents!") .via("txn") - .fee(95_000_000L)) - .then( - withStrictCostAnswerValidation(() -> getFileInfo("test").payingWith(EXCHANGE_RATE_CONTROL)), - validateTransferListForBalances( - "txn", List.of(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER)), - validateRecordTransactionFees("txn")); + .fee(95_000_000L), + withStrictCostAnswerValidation(() -> getFileInfo("test").payingWith(EXCHANGE_RATE_CONTROL)), + validateTransferListForBalances( + "txn", List.of(FUNDING, NODE, STAKING_REWARD, NODE_REWARD, DEFAULT_PAYER)), + validateRecordTransactionFees("txn"))); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/RecordCreationSuite.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/RecordCreationSuite.java index 1f6cceecb4b6..d3b88bfb58b9 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/RecordCreationSuite.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/RecordCreationSuite.java @@ -17,7 +17,6 @@ package com.hedera.services.bdd.suites.records; import static com.hedera.services.bdd.junit.ContextRequirement.SYSTEM_ACCOUNT_BALANCES; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.assertions.AccountInfoAsserts.changeFromSnapshot; import static com.hedera.services.bdd.spec.assertions.AssertUtils.inOrder; @@ -60,8 +59,6 @@ public class RecordCreationSuite { private static final String THIS_IS_OK_IT_S_FINE_IT_S_WHATEVER = "This is ok, it's fine, it's whatever."; private static final String TO_ACCOUNT = "0.0.3"; private static final String TXN_ID = "txnId"; - public static final String STAKING_FEES_NODE_REWARD_PERCENTAGE = "staking.fees.nodeRewardPercentage"; - public static final String STAKING_FEES_STAKING_REWARD_PERCENTAGE = "staking.fees.stakingRewardPercentage"; @LeakyHapiTest(requirement = SYSTEM_ACCOUNT_BALANCES) final Stream submittingNodeStillPaidIfServiceFeesOmitted() { @@ -209,11 +206,11 @@ final Stream submittingNodeChargedNetworkFeeForIgnoringPayerUnwilli final Stream accountsGetPayerRecordsIfSoConfigured() { final var txn = "ofRecord"; - return defaultHapiSpec("AccountsGetPayerRecordsIfSoConfigured") - .given(cryptoCreate(PAYER)) - .when(cryptoTransfer(tinyBarsFromTo(GENESIS, FUNDING, 1_000L)) + return hapiTest( + cryptoCreate(PAYER), + cryptoTransfer(tinyBarsFromTo(GENESIS, FUNDING, 1_000L)) .payingWith(PAYER) - .via(txn)) - .then(getAccountRecords(PAYER).has(inOrder(recordWith().txnId(txn)))); + .via(txn), + getAccountRecords(PAYER).has(inOrder(recordWith().txnId(txn)))); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/TokenBalanceValidation.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/TokenBalanceValidation.java index cc8255b4a3ed..72d37a362b8a 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/TokenBalanceValidation.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/records/TokenBalanceValidation.java @@ -16,7 +16,7 @@ package com.hedera.services.bdd.suites.records; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static 
com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoCreate; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.cryptoTransfer; import static com.hedera.services.bdd.spec.transactions.TxnVerbs.tokenAssociate; @@ -166,10 +166,9 @@ private HapiSpecOperation[] getHapiSpecsForTransferTxs() { * @return HAPI queries to execute */ final Stream validateTokenBalances() { - return defaultHapiSpec("ValidateTokenBalances") - .given(getHapiSpecsForTransferTxs()) // set up transfers if needed - .when() - .then(inParallel(expectedTokenBalances.entrySet().stream() + return hapiTest(flattened( + getHapiSpecsForTransferTxs(), // set up transfers if needed + inParallel(expectedTokenBalances.entrySet().stream() .map( entry -> { // for each expectedTokenBalance final var accountNum = @@ -177,7 +176,8 @@ final Stream validateTokenBalances() { final var tokenNum = entry.getKey().tokenNum(); final var tokenAmt = entry.getValue(); - // validate that the transfer worked and the receiver account has the tokens + // validate that the transfer worked and the receiver account + // has the tokens return QueryVerbs.getAccountBalance( "0.0." + accountNum, accountClassifier.isContract(accountNum)) @@ -191,7 +191,7 @@ final Stream validateTokenBalances() { .includeTokenMemoOnError(); }) .toArray(HapiSpecOperation[]::new)) - .failOnErrors()); + .failOnErrors())); } @Override diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/AddressAliasIdFuzzing.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/AddressAliasIdFuzzing.java index 75082e8ed3ff..4c007f8ccae1 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/AddressAliasIdFuzzing.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/AddressAliasIdFuzzing.java @@ -17,7 +17,6 @@ package com.hedera.services.bdd.suites.regression; import static com.hedera.services.bdd.junit.TestTags.NOT_REPEATABLE; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.infrastructure.OpProvider.UNIQUE_PAYER_ACCOUNT; import static com.hedera.services.bdd.spec.infrastructure.OpProvider.UNIQUE_PAYER_ACCOUNT_INITIAL_BALANCE; @@ -61,11 +60,10 @@ final Stream addressAliasIdFuzzing() { @HapiTest final Stream transferToKeyFuzzing() { - return defaultHapiSpec("TransferToKeyFuzzing") - .given(cryptoCreate(UNIQUE_PAYER_ACCOUNT) + return hapiTest( + cryptoCreate(UNIQUE_PAYER_ACCOUNT) .balance(UNIQUE_PAYER_ACCOUNT_INITIAL_BALANCE) - .withRecharging()) - .when() - .then(runWithProvider(idTransferToRandomKeyWith(PROPERTIES)).lasting(10L, TimeUnit.SECONDS)); + .withRecharging(), + runWithProvider(idTransferToRandomKeyWith(PROPERTIES)).lasting(10L, TimeUnit.SECONDS)); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/CompletedHollowAccountOperationsFuzzing.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/CompletedHollowAccountOperationsFuzzing.java index 3576eb2c54ac..7e5333246a1f 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/CompletedHollowAccountOperationsFuzzing.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/CompletedHollowAccountOperationsFuzzing.java @@ -17,8 +17,9 @@ package 
com.hedera.services.bdd.suites.regression; import static com.hedera.services.bdd.junit.TestTags.NOT_REPEATABLE; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.runWithProvider; +import static com.hedera.services.bdd.suites.HapiSuite.flattened; import static com.hedera.services.bdd.suites.regression.factories.HollowAccountCompletedFuzzingFactory.hollowAccountFuzzingWith; import static com.hedera.services.bdd.suites.regression.factories.HollowAccountCompletedFuzzingFactory.initOperations; @@ -37,12 +38,11 @@ public class CompletedHollowAccountOperationsFuzzing { @HapiTest final Stream completedHollowAccountOperationsFuzzing() { - return defaultHapiSpec("CompletedHollowAccountOperationsFuzzing") - .given(initOperations()) - .when() - .then(runWithProvider(hollowAccountFuzzingWith(PROPERTIES)) + return hapiTest(flattened( + initOperations(), + runWithProvider(hollowAccountFuzzingWith(PROPERTIES)) .maxOpsPerSec(10) .loggingOff() - .lasting(10L, TimeUnit.SECONDS)); + .lasting(10L, TimeUnit.SECONDS))); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/HollowAccountCompletionFuzzing.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/HollowAccountCompletionFuzzing.java index 53e420b9cc06..f00cded9896f 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/HollowAccountCompletionFuzzing.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/HollowAccountCompletionFuzzing.java @@ -17,8 +17,9 @@ package com.hedera.services.bdd.suites.regression; import static com.hedera.services.bdd.junit.TestTags.NOT_REPEATABLE; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.runWithProvider; +import static com.hedera.services.bdd.suites.HapiSuite.flattened; import static com.hedera.services.bdd.suites.regression.factories.AccountCompletionFuzzingFactory.hollowAccountFuzzingWith; import static com.hedera.services.bdd.suites.regression.factories.AccountCompletionFuzzingFactory.initOperations; @@ -37,11 +38,10 @@ public class HollowAccountCompletionFuzzing { @HapiTest final Stream hollowAccountCompletionFuzzing() { - return defaultHapiSpec("HollowAccountCompletionFuzzing") - .given(initOperations()) - .when() - .then(runWithProvider(hollowAccountFuzzingWith(PROPERTIES)) + return hapiTest(flattened( + initOperations(), + runWithProvider(hollowAccountFuzzingWith(PROPERTIES)) .maxOpsPerSec(10) - .lasting(10L, TimeUnit.SECONDS)); + .lasting(10L, TimeUnit.SECONDS))); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/HollowAccountFuzzing.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/HollowAccountFuzzing.java index c6f1689fdce3..c669003c9fb2 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/HollowAccountFuzzing.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/HollowAccountFuzzing.java @@ -17,8 +17,9 @@ package com.hedera.services.bdd.suites.regression; import static com.hedera.services.bdd.junit.TestTags.NOT_REPEATABLE; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static 
com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.runWithProvider; +import static com.hedera.services.bdd.suites.HapiSuite.flattened; import static com.hedera.services.bdd.suites.regression.factories.HollowAccountFuzzingFactory.hollowAccountFuzzingTest; import static com.hedera.services.bdd.suites.regression.factories.HollowAccountFuzzingFactory.initOperations; @@ -34,11 +35,10 @@ public class HollowAccountFuzzing { @HapiTest final Stream hollowAccountFuzzing() { - return defaultHapiSpec("HollowAccountFuzzing") - .given(initOperations()) - .when() - .then(runWithProvider(hollowAccountFuzzingTest(PROPERTIES)) + return hapiTest(flattened( + initOperations(), + runWithProvider(hollowAccountFuzzingTest(PROPERTIES)) .maxOpsPerSec(10) - .lasting(10L, TimeUnit.SECONDS)); + .lasting(10L, TimeUnit.SECONDS))); } } diff --git a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/UmbrellaRedux.java b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/UmbrellaRedux.java index 20b88fcda233..8767c3323c71 100644 --- a/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/UmbrellaRedux.java +++ b/hedera-node/test-clients/src/main/java/com/hedera/services/bdd/suites/regression/UmbrellaRedux.java @@ -17,7 +17,7 @@ package com.hedera.services.bdd.suites.regression; import static com.hedera.services.bdd.junit.TestTags.NOT_REPEATABLE; -import static com.hedera.services.bdd.spec.HapiSpec.defaultHapiSpec; +import static com.hedera.services.bdd.spec.HapiSpec.hapiTest; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.runWithProvider; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.sourcing; import static com.hedera.services.bdd.spec.utilops.UtilVerbs.withOpContext; @@ -39,27 +39,26 @@ public class UmbrellaRedux { public static final String DEFAULT_PROPERTIES = "regression-mixed_ops.properties"; - private AtomicLong duration = new AtomicLong(10); - private AtomicInteger maxOpsPerSec = new AtomicInteger(Integer.MAX_VALUE); - private AtomicInteger maxPendingOps = new AtomicInteger(Integer.MAX_VALUE); - private AtomicInteger backoffSleepSecs = new AtomicInteger(1); - private AtomicInteger statusTimeoutSecs = new AtomicInteger(5); - private AtomicReference props = new AtomicReference<>(DEFAULT_PROPERTIES); - private AtomicReference unit = new AtomicReference<>(SECONDS); + private final AtomicLong duration = new AtomicLong(10); + private final AtomicInteger maxOpsPerSec = new AtomicInteger(Integer.MAX_VALUE); + private final AtomicInteger maxPendingOps = new AtomicInteger(Integer.MAX_VALUE); + private final AtomicInteger backoffSleepSecs = new AtomicInteger(1); + private final AtomicInteger statusTimeoutSecs = new AtomicInteger(5); + private final AtomicReference props = new AtomicReference<>(DEFAULT_PROPERTIES); + private final AtomicReference unit = new AtomicReference<>(SECONDS); @HapiTest @Tag(NOT_REPEATABLE) final Stream umbrellaRedux() { - return defaultHapiSpec("UmbrellaRedux") - .given(withOpContext((spec, opLog) -> { + return hapiTest( + withOpContext((spec, opLog) -> { configureFromCi(spec); // use ci property statusTimeoutSecs to overwrite default value // of status.wait.timeout.ms spec.addOverrideProperties( Map.of("status.wait.timeout.ms", Integer.toString(1_000 * statusTimeoutSecs.get()))); - })) - .when() - .then(sourcing(() -> runWithProvider(factoryFrom(props::get)) + }), + sourcing(() -> runWithProvider(factoryFrom(props::get)) 
.lasting(duration::get, unit::get) .maxOpsPerSec(maxOpsPerSec::get) .maxPendingOps(maxPendingOps::get)