diff --git a/CHANGELOG.md b/CHANGELOG.md index 7b2c97295b0..25a486f96f6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -37,12 +37,21 @@ Ref: https://keepachangelog.com/en/1.0.0/ ## [Unreleased] ### Improvements + +* (logging) [\#8072](https://github.com/cosmos/cosmos-sdk/pull/8072) Refactor logging: + * Use [zerolog](https://github.com/rs/zerolog) over Tendermint's go-kit logging wrapper. + * Introduce Tendermint's `--log_format=plain|json` flag. Using format `json` allows for emitting structured JSON + logs which can be consumed by an external logging facility (e.g. Loggly). Both formats log to STDERR. + * The existing `--log_level` flag and its default value now solely relate to the global logging + level (e.g. `info`, `debug`, etc...) instead of `<module>:<level>`. +* (crypto) [\#7987](https://github.com/cosmos/cosmos-sdk/pull/7987) Fix the inconsistency of CryptoCdc, only use `codec/legacy.Cdc`. * (SDK) [\#7925](https://github.com/cosmos/cosmos-sdk/pull/7925) Updated dependencies to use gRPC v1.33.2 * Updated gRPC dependency to v1.33.2 * Updated iavl dependency to v0.15-rc2 * (version) [\#7848](https://github.com/cosmos/cosmos-sdk/pull/7848) [\#7941](https://github.com/cosmos/cosmos-sdk/pull/7941) `version --long` output now shows the list of build dependencies and replaced build dependencies. ### State Machine Breaking Changes + * (x/upgrade) [\#7979](https://github.com/cosmos/cosmos-sdk/pull/7979) keeper pubkey storage serialization migration from bech32 to protobuf. ### Bug Fixes @@ -53,9 +62,11 @@ Ref: https://keepachangelog.com/en/1.0.0/ ### Client Breaking +* (crypto) [\#7419](https://github.com/cosmos/cosmos-sdk/pull/7419) The SDK doesn't use Tendermint's `crypto.PubKey` interface anymore, and instead uses its own `PubKey` interface, defined in `crypto/types`. Replace all instances of `crypto.PubKey` with `cryptotypes.PubKey`. * (x/staking) [\#7419](https://github.com/cosmos/cosmos-sdk/pull/7419) The `TmConsPubKey` method on ValidatorI has been removed and replaced instead by `ConsPubKey` (which returns an SDK `cryptotypes.PubKey`) and `TmConsPublicKey` (which returns a Tendermint proto PublicKey). ### Improvements + * (tendermint) [\#7828](https://github.com/cosmos/cosmos-sdk/pull/7828) Update tendermint dependency to v0.34.0-rc6 ## [v0.40.0-rc2](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.40.0-rc2) - 2020-11-02 @@ -88,18 +99,16 @@ Ref: https://keepachangelog.com/en/1.0.0/ * __Modules__ * `x/crisis` has a new function: `AddModuleInitFlags`, which will register optional crisis module flags for the start command. - ### Bug Fixes * (client) [\#7699](https://github.com/cosmos/cosmos-sdk/pull/7699) Fix panic in context when setting invalid nodeURI. `WithNodeURI` does not set the `Client` in the context. * (x/gov) [#7641](https://github.com/cosmos/cosmos-sdk/pull/7641) Fix tally calculation precision error. 
-### Improvements +### Improvements * (rest) [#7649](https://github.com/cosmos/cosmos-sdk/pull/7649) Return an unsigned tx in legacy GET /tx endpoint when signature conversion fails * (cli) [#7764](https://github.com/cosmos/cosmos-sdk/pull/7764) Update x/banking and x/crisis InitChain to improve node startup time - ## [v0.40.0-rc1](https://github.com/cosmos/cosmos-sdk/releases/tag/v0.40.0-rc1) - 2020-10-19 ### Client Breaking Changes diff --git a/Makefile b/Makefile index baace1ee14b..f9011d13d3b 100644 --- a/Makefile +++ b/Makefile @@ -357,7 +357,7 @@ devdoc-update: ### Protobuf ### ############################################################################### -proto-all: proto-format proto-lint proto-check-breaking proto-gen +proto-all: proto-format proto-lint proto-gen proto-gen: @echo "Generating Protobuf files" diff --git a/baseapp/abci.go b/baseapp/abci.go index d988ead0fcf..aaa48bb8c0a 100644 --- a/baseapp/abci.go +++ b/baseapp/abci.go @@ -291,7 +291,7 @@ func (app *BaseApp) Commit() (res abci.ResponseCommit) { // MultiStore (app.cms) so when Commit() is called is persists those values. app.deliverState.ms.Write() commitID := app.cms.Commit() - app.logger.Debug("Commit synced", "commit", fmt.Sprintf("%X", commitID)) + app.logger.Info("commit synced", "commit", fmt.Sprintf("%X", commitID)) // Reset the Check state to the latest committed. // @@ -358,30 +358,43 @@ func (app *BaseApp) snapshot(height int64) { app.logger.Info("snapshot manager not configured") return } - app.logger.Info("Creating state snapshot", "height", height) + + app.logger.Info("creating state snapshot", "height", height) + snapshot, err := app.snapshotManager.Create(uint64(height)) if err != nil { - app.logger.Error("Failed to create state snapshot", "height", height, "err", err) + app.logger.Error("failed to create state snapshot", "height", height, "err", err) return } - app.logger.Info("Completed state snapshot", "height", height, "format", snapshot.Format) + + app.logger.Info("completed state snapshot", "height", height, "format", snapshot.Format) if app.snapshotKeepRecent > 0 { - app.logger.Debug("Pruning state snapshots") + app.logger.Debug("pruning state snapshots") + pruned, err := app.snapshotManager.Prune(app.snapshotKeepRecent) if err != nil { app.logger.Error("Failed to prune state snapshots", "err", err) return } - app.logger.Debug("Pruned state snapshots", "pruned", pruned) + + app.logger.Debug("pruned state snapshots", "pruned", pruned) } } // Query implements the ABCI interface. It delegates to CommitMultiStore if it // implements Queryable. -func (app *BaseApp) Query(req abci.RequestQuery) abci.ResponseQuery { +func (app *BaseApp) Query(req abci.RequestQuery) (res abci.ResponseQuery) { defer telemetry.MeasureSince(time.Now(), "abci", "query") + // Add panic recovery for all queries. 
+ // ref: https://github.com/cosmos/cosmos-sdk/pull/8039 + defer func() { + if r := recover(); r != nil { + res = sdkerrors.QueryResult(sdkerrors.Wrapf(sdkerrors.ErrPanic, "%v", r)) + } + }() + // when a client did not provide a query height, manually inject the latest if req.Height == 0 { req.Height = app.LastBlockHeight() @@ -425,13 +438,14 @@ func (app *BaseApp) ListSnapshots(req abci.RequestListSnapshots) abci.ResponseLi snapshots, err := app.snapshotManager.List() if err != nil { - app.logger.Error("Failed to list snapshots", "err", err) + app.logger.Error("failed to list snapshots", "err", err) return resp } + for _, snapshot := range snapshots { abciSnapshot, err := snapshot.ToABCI() if err != nil { - app.logger.Error("Failed to list snapshots", "err", err) + app.logger.Error("failed to list snapshots", "err", err) return resp } resp.Snapshots = append(resp.Snapshots, &abciSnapshot) @@ -447,8 +461,13 @@ func (app *BaseApp) LoadSnapshotChunk(req abci.RequestLoadSnapshotChunk) abci.Re } chunk, err := app.snapshotManager.LoadChunk(req.Height, req.Format, req.Chunk) if err != nil { - app.logger.Error("Failed to load snapshot chunk", "height", req.Height, "format", req.Format, - "chunk", req.Chunk, "err") + app.logger.Error( + "failed to load snapshot chunk", + "height", req.Height, + "format", req.Format, + "chunk", req.Chunk, + "err", err, + ) return abci.ResponseLoadSnapshotChunk{} } return abci.ResponseLoadSnapshotChunk{Chunk: chunk} @@ -462,15 +481,16 @@ func (app *BaseApp) OfferSnapshot(req abci.RequestOfferSnapshot) abci.ResponseOf } if req.Snapshot == nil { - app.logger.Error("Received nil snapshot") + app.logger.Error("received nil snapshot") return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_REJECT} } snapshot, err := snapshottypes.SnapshotFromABCI(req.Snapshot) if err != nil { - app.logger.Error("Failed to decode snapshot metadata", "err", err) + app.logger.Error("failed to decode snapshot metadata", "err", err) return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_REJECT} } + err = app.snapshotManager.Restore(snapshot) switch { case err == nil: @@ -480,13 +500,22 @@ func (app *BaseApp) OfferSnapshot(req abci.RequestOfferSnapshot) abci.ResponseOf return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_REJECT_FORMAT} case errors.Is(err, snapshottypes.ErrInvalidMetadata): - app.logger.Error("Rejecting invalid snapshot", "height", req.Snapshot.Height, - "format", req.Snapshot.Format, "err", err) + app.logger.Error( + "rejecting invalid snapshot", + "height", req.Snapshot.Height, + "format", req.Snapshot.Format, + "err", err, + ) return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_REJECT} default: - app.logger.Error("Failed to restore snapshot", "height", req.Snapshot.Height, - "format", req.Snapshot.Format, "err", err) + app.logger.Error( + "failed to restore snapshot", + "height", req.Snapshot.Height, + "format", req.Snapshot.Format, + "err", err, + ) + // We currently don't support resetting the IAVL stores and retrying a different snapshot, // so we ask Tendermint to abort all snapshot restoration. return abci.ResponseOfferSnapshot{Result: abci.ResponseOfferSnapshot_ABORT} @@ -506,8 +535,12 @@ func (app *BaseApp) ApplySnapshotChunk(req abci.RequestApplySnapshotChunk) abci. 
return abci.ResponseApplySnapshotChunk{Result: abci.ResponseApplySnapshotChunk_ACCEPT} case errors.Is(err, snapshottypes.ErrChunkHashMismatch): - app.logger.Error("Chunk checksum mismatch, rejecting sender and requesting refetch", - "chunk", req.Index, "sender", req.Sender, "err", err) + app.logger.Error( + "chunk checksum mismatch; rejecting sender and requesting refetch", + "chunk", req.Index, + "sender", req.Sender, + "err", err, + ) return abci.ResponseApplySnapshotChunk{ Result: abci.ResponseApplySnapshotChunk_RETRY, RefetchChunks: []uint32{req.Index}, @@ -515,7 +548,7 @@ func (app *BaseApp) ApplySnapshotChunk(req abci.RequestApplySnapshotChunk) abci. } default: - app.logger.Error("Failed to restore snapshot", "err", err) + app.logger.Error("failed to restore snapshot", "err", err) return abci.ResponseApplySnapshotChunk{Result: abci.ResponseApplySnapshotChunk_ABORT} } } diff --git a/baseapp/grpcserver.go b/baseapp/grpcserver.go index 74e8b8a21ca..a4342e0b895 100644 --- a/baseapp/grpcserver.go +++ b/baseapp/grpcserver.go @@ -5,6 +5,8 @@ import ( "strconv" gogogrpc "github.com/gogo/protobuf/grpc" + grpcmiddleware "github.com/grpc-ecosystem/go-grpc-middleware" + grpcrecovery "github.com/grpc-ecosystem/go-grpc-middleware/recovery" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/metadata" @@ -74,7 +76,10 @@ func (app *BaseApp) RegisterGRPCServer(server gogogrpc.Server) { newMethods[i] = grpc.MethodDesc{ MethodName: method.MethodName, Handler: func(srv interface{}, ctx context.Context, dec func(interface{}) error, _ grpc.UnaryServerInterceptor) (interface{}, error) { - return methodHandler(srv, ctx, dec, interceptor) + return methodHandler(srv, ctx, dec, grpcmiddleware.ChainUnaryServer( + grpcrecovery.UnaryServerInterceptor(), + interceptor, + )) }, } } diff --git a/client/broadcast.go b/client/broadcast.go index e28d76c54f3..e21bc080c28 100644 --- a/client/broadcast.go +++ b/client/broadcast.go @@ -7,10 +7,13 @@ import ( "github.com/tendermint/tendermint/crypto/tmhash" "github.com/tendermint/tendermint/mempool" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" "github.com/cosmos/cosmos-sdk/client/flags" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/cosmos/cosmos-sdk/types/tx" ) // BroadcastTx broadcasts a transactions either synchronously or asynchronously @@ -142,3 +145,36 @@ func (ctx Context) BroadcastTxAsync(txBytes []byte) (*sdk.TxResponse, error) { return sdk.NewResponseFormatBroadcastTx(res), err } + +// TxServiceBroadcast is a helper function to broadcast a Tx with the correct gRPC types +// from the tx service. Calls `clientCtx.BroadcastTx` under the hood. +func TxServiceBroadcast(grpcCtx context.Context, clientCtx Context, req *tx.BroadcastTxRequest) (*tx.BroadcastTxResponse, error) { + if req == nil || req.TxBytes == nil { + return nil, status.Error(codes.InvalidArgument, "invalid empty tx") + } + + clientCtx = clientCtx.WithBroadcastMode(normalizeBroadcastMode(req.Mode)) + resp, err := clientCtx.BroadcastTx(req.TxBytes) + if err != nil { + return nil, err + } + + return &tx.BroadcastTxResponse{ + TxResponse: resp, + }, nil +} + +// normalizeBroadcastMode converts a broadcast mode into a normalized string +// to be passed into the clientCtx. 
+func normalizeBroadcastMode(mode tx.BroadcastMode) string { + switch mode { + case tx.BroadcastMode_BROADCAST_MODE_ASYNC: + return "async" + case tx.BroadcastMode_BROADCAST_MODE_BLOCK: + return "block" + case tx.BroadcastMode_BROADCAST_MODE_SYNC: + return "sync" + default: + return "unspecified" + } +} diff --git a/client/flags/flags.go b/client/flags/flags.go index 2131c376397..72bcabcd60d 100644 --- a/client/flags/flags.go +++ b/client/flags/flags.go @@ -65,6 +65,10 @@ const ( FlagCountTotal = "count-total" FlagTimeoutHeight = "timeout-height" FlagKeyAlgorithm = "algo" + + // Tendermint logging flags + FlagLogLevel = "log_level" + FlagLogFormat = "log_format" ) // LineBreak can be included in a command list to provide a blank line diff --git a/types/query/query.pb.go b/client/grpc/tmservice/query.pb.go similarity index 93% rename from types/query/query.pb.go rename to client/grpc/tmservice/query.pb.go index c04a6bde32b..c08d33dc2d4 100644 --- a/types/query/query.pb.go +++ b/client/grpc/tmservice/query.pb.go @@ -1,11 +1,12 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. // source: cosmos/base/tendermint/v1beta1/query.proto -package query +package tmservice import ( context "context" fmt "fmt" + query "github.com/cosmos/cosmos-sdk/types/query" _ "github.com/gogo/protobuf/gogoproto" grpc1 "github.com/gogo/protobuf/grpc" proto "github.com/gogo/protobuf/proto" @@ -35,7 +36,7 @@ const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package type GetValidatorSetByHeightRequest struct { Height int64 `protobuf:"varint,1,opt,name=height,proto3" json:"height,omitempty"` // pagination defines an pagination for the request. - Pagination *PageRequest `protobuf:"bytes,2,opt,name=pagination,proto3" json:"pagination,omitempty"` + Pagination *query.PageRequest `protobuf:"bytes,2,opt,name=pagination,proto3" json:"pagination,omitempty"` } func (m *GetValidatorSetByHeightRequest) Reset() { *m = GetValidatorSetByHeightRequest{} } @@ -78,7 +79,7 @@ func (m *GetValidatorSetByHeightRequest) GetHeight() int64 { return 0 } -func (m *GetValidatorSetByHeightRequest) GetPagination() *PageRequest { +func (m *GetValidatorSetByHeightRequest) GetPagination() *query.PageRequest { if m != nil { return m.Pagination } @@ -90,7 +91,7 @@ type GetValidatorSetByHeightResponse struct { BlockHeight int64 `protobuf:"varint,1,opt,name=block_height,json=blockHeight,proto3" json:"block_height,omitempty"` Validators []*Validator `protobuf:"bytes,2,rep,name=validators,proto3" json:"validators,omitempty"` // pagination defines an pagination for the response. - Pagination *PageResponse `protobuf:"bytes,3,opt,name=pagination,proto3" json:"pagination,omitempty"` + Pagination *query.PageResponse `protobuf:"bytes,3,opt,name=pagination,proto3" json:"pagination,omitempty"` } func (m *GetValidatorSetByHeightResponse) Reset() { *m = GetValidatorSetByHeightResponse{} } @@ -140,7 +141,7 @@ func (m *GetValidatorSetByHeightResponse) GetValidators() []*Validator { return nil } -func (m *GetValidatorSetByHeightResponse) GetPagination() *PageResponse { +func (m *GetValidatorSetByHeightResponse) GetPagination() *query.PageResponse { if m != nil { return m.Pagination } @@ -150,7 +151,7 @@ func (m *GetValidatorSetByHeightResponse) GetPagination() *PageResponse { // GetLatestValidatorSetRequest is the request type for the Query/GetValidatorSetByHeight RPC method. type GetLatestValidatorSetRequest struct { // pagination defines an pagination for the request. 
- Pagination *PageRequest `protobuf:"bytes,1,opt,name=pagination,proto3" json:"pagination,omitempty"` + Pagination *query.PageRequest `protobuf:"bytes,1,opt,name=pagination,proto3" json:"pagination,omitempty"` } func (m *GetLatestValidatorSetRequest) Reset() { *m = GetLatestValidatorSetRequest{} } @@ -186,7 +187,7 @@ func (m *GetLatestValidatorSetRequest) XXX_DiscardUnknown() { var xxx_messageInfo_GetLatestValidatorSetRequest proto.InternalMessageInfo -func (m *GetLatestValidatorSetRequest) GetPagination() *PageRequest { +func (m *GetLatestValidatorSetRequest) GetPagination() *query.PageRequest { if m != nil { return m.Pagination } @@ -198,7 +199,7 @@ type GetLatestValidatorSetResponse struct { BlockHeight int64 `protobuf:"varint,1,opt,name=block_height,json=blockHeight,proto3" json:"block_height,omitempty"` Validators []*Validator `protobuf:"bytes,2,rep,name=validators,proto3" json:"validators,omitempty"` // pagination defines an pagination for the response. - Pagination *PageResponse `protobuf:"bytes,3,opt,name=pagination,proto3" json:"pagination,omitempty"` + Pagination *query.PageResponse `protobuf:"bytes,3,opt,name=pagination,proto3" json:"pagination,omitempty"` } func (m *GetLatestValidatorSetResponse) Reset() { *m = GetLatestValidatorSetResponse{} } @@ -248,7 +249,7 @@ func (m *GetLatestValidatorSetResponse) GetValidators() []*Validator { return nil } -func (m *GetLatestValidatorSetResponse) GetPagination() *PageResponse { +func (m *GetLatestValidatorSetResponse) GetPagination() *query.PageResponse { if m != nil { return m.Pagination } @@ -783,7 +784,7 @@ type Module struct { Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` // module version Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` - //checksum + // checksum Sum string `protobuf:"bytes,3,opt,name=sum,proto3" json:"sum,omitempty"` } @@ -864,72 +865,72 @@ func init() { } var fileDescriptor_40c93fb3ef485c5d = []byte{ - // 1031 bytes of a gzipped FileDescriptorProto + // 1040 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x56, 0x4f, 0x6f, 0x1b, 0x45, - 0x14, 0xcf, 0xc6, 0x6d, 0x1c, 0x3f, 0x57, 0x90, 0x4c, 0x4a, 0xb3, 0xb5, 0x52, 0x37, 0xec, 0xa1, - 0x4d, 0x88, 0xb2, 0x2b, 0x3b, 0x84, 0x82, 0xf8, 0x27, 0x85, 0x40, 0x1a, 0x15, 0xaa, 0x68, 0x83, - 0x38, 0x20, 0xa4, 0xd5, 0xd8, 0x3b, 0xd9, 0x8c, 0x62, 0xef, 0x4c, 0x77, 0xc6, 0x41, 0x16, 0xaa, - 0x40, 0xfd, 0x00, 0x08, 0x89, 0xaf, 0xc0, 0x85, 0x2f, 0xc0, 0x11, 0x71, 0xe4, 0x46, 0x25, 0x24, - 0xe8, 0x11, 0x25, 0x7c, 0x0a, 0x4e, 0x68, 0x67, 0x66, 0xed, 0xdd, 0x26, 0xa9, 0xed, 0x1c, 0x90, - 0x38, 0x79, 0xf6, 0xbd, 0xf7, 0x7b, 0xf3, 0xfb, 0xbd, 0x79, 0x33, 0x7e, 0xf0, 0x5a, 0x9b, 0x89, - 0x2e, 0x13, 0x5e, 0x0b, 0x0b, 0xe2, 0x49, 0x12, 0x87, 0x24, 0xe9, 0xd2, 0x58, 0x7a, 0xc7, 0x8d, - 0x16, 0x91, 0xb8, 0xe1, 0x3d, 0xea, 0x91, 0xa4, 0xef, 0xf2, 0x84, 0x49, 0x86, 0xea, 0x3a, 0xd6, - 0x4d, 0x63, 0xdd, 0x61, 0xac, 0x6b, 0x62, 0x6b, 0xd7, 0x23, 0x16, 0x31, 0x15, 0xea, 0xa5, 0x2b, - 0x8d, 0xaa, 0x2d, 0x45, 0x8c, 0x45, 0x1d, 0xe2, 0x61, 0x4e, 0x3d, 0x1c, 0xc7, 0x4c, 0x62, 0x49, - 0x59, 0x2c, 0x8c, 0xb7, 0x96, 0xdb, 0x93, 0x37, 0xb9, 0x27, 0xfb, 0x9c, 0x64, 0xbe, 0xa5, 0x9c, - 0x4f, 0xd9, 0xbd, 0x56, 0x87, 0xb5, 0x8f, 0x2e, 0xf4, 0xe6, 0xb1, 0x05, 0x5d, 0x4a, 0xc4, 0x40, - 0x12, 0xc7, 0x11, 0x8d, 0x15, 0x09, 0x1d, 0xeb, 0x7c, 0x63, 0x41, 0x7d, 0x87, 0xc8, 0xcf, 0x70, - 0x87, 0x86, 0x58, 0xb2, 0x64, 0x9f, 0xc8, 0xad, 0xfe, 0x7d, 0x42, 0xa3, 0x43, 0xe9, 0x93, 0x47, - 0x3d, 0x22, 
0x24, 0xba, 0x01, 0x33, 0x87, 0xca, 0x60, 0x5b, 0xcb, 0xd6, 0x4a, 0xc9, 0x37, 0x5f, - 0xe8, 0x23, 0x80, 0x61, 0x3a, 0x7b, 0x7a, 0xd9, 0x5a, 0xa9, 0x36, 0xef, 0xb8, 0xf9, 0x3a, 0xe9, - 0x02, 0x9a, 0xbd, 0xdd, 0x3d, 0x1c, 0x11, 0x93, 0xd3, 0xcf, 0x21, 0x9d, 0x67, 0x16, 0xdc, 0xbe, - 0x90, 0x82, 0xe0, 0x2c, 0x16, 0x04, 0xbd, 0x0a, 0xd7, 0x94, 0xfe, 0xa0, 0xc0, 0xa4, 0xaa, 0x6c, - 0x3a, 0x14, 0xed, 0x02, 0x1c, 0x67, 0x29, 0x84, 0x3d, 0xbd, 0x5c, 0x5a, 0xa9, 0x36, 0x57, 0xdd, - 0x17, 0x1f, 0x9b, 0x3b, 0xd8, 0xd4, 0xcf, 0x81, 0xd1, 0x4e, 0x41, 0x59, 0x49, 0x29, 0xbb, 0x3b, - 0x52, 0x99, 0xa6, 0x5a, 0x90, 0x76, 0x00, 0x4b, 0x3b, 0x44, 0x7e, 0x8c, 0x25, 0x11, 0x05, 0x7d, - 0x59, 0x69, 0x8b, 0x25, 0xb4, 0x2e, 0x5d, 0xc2, 0x3f, 0x2c, 0xb8, 0x75, 0xc1, 0x46, 0xff, 0xef, - 0x02, 0x7e, 0x6b, 0x41, 0x65, 0xb0, 0x05, 0xb2, 0xa1, 0x8c, 0xc3, 0x30, 0x21, 0x42, 0x28, 0xfe, - 0xd7, 0xfc, 0xec, 0x13, 0x2d, 0x42, 0x99, 0xf7, 0x5a, 0xc1, 0x11, 0xe9, 0xab, 0x46, 0xac, 0xf8, - 0x33, 0xbc, 0xd7, 0x7a, 0x40, 0xfa, 0xa9, 0xee, 0x63, 0x26, 0x69, 0x1c, 0x05, 0x9c, 0x7d, 0x49, - 0x12, 0xc5, 0xa5, 0xe4, 0x57, 0xb5, 0x6d, 0x2f, 0x35, 0xa1, 0x35, 0x98, 0xe7, 0x09, 0xe3, 0x4c, - 0x90, 0x24, 0xe0, 0x09, 0x65, 0x09, 0x95, 0x7d, 0xfb, 0x8a, 0x8a, 0x9b, 0xcb, 0x1c, 0x7b, 0xc6, - 0xee, 0x34, 0x60, 0x71, 0x87, 0xc8, 0xad, 0xb4, 0x6c, 0x63, 0xde, 0x13, 0xe7, 0x6b, 0xb0, 0xcf, - 0x42, 0xcc, 0xb1, 0xbc, 0x0e, 0xb3, 0xfa, 0x58, 0x68, 0x68, 0x8e, 0xff, 0x66, 0xbe, 0xca, 0xfa, - 0x56, 0x2b, 0xe8, 0xee, 0xb6, 0x5f, 0x56, 0xa1, 0xbb, 0x21, 0x5a, 0x87, 0xab, 0x6a, 0x69, 0x2e, - 0xdd, 0xe2, 0x05, 0x10, 0x5f, 0x47, 0x39, 0x8b, 0xf0, 0xca, 0xa0, 0x39, 0xb4, 0x43, 0x33, 0x76, - 0x1e, 0xc3, 0x8d, 0xe7, 0x1d, 0xff, 0x25, 0xaf, 0x05, 0x98, 0xdf, 0x21, 0x72, 0xbf, 0x1f, 0xb7, - 0x69, 0x1c, 0x65, 0x9c, 0x5c, 0x40, 0x79, 0xa3, 0xe1, 0x63, 0x43, 0x59, 0x68, 0x93, 0xa2, 0x33, - 0xeb, 0x67, 0x9f, 0xce, 0x75, 0x15, 0xff, 0x90, 0x85, 0x64, 0x37, 0x3e, 0x60, 0x59, 0x96, 0x5f, - 0x2c, 0x58, 0x28, 0x98, 0x4d, 0x9e, 0x07, 0x30, 0x1f, 0x92, 0x03, 0xdc, 0xeb, 0xc8, 0x20, 0x66, - 0x21, 0x09, 0x68, 0x7c, 0xc0, 0x8c, 0xc0, 0xdb, 0x79, 0xb6, 0xbc, 0xc9, 0xdd, 0x6d, 0x1d, 0x38, - 0xc8, 0xf1, 0x72, 0x58, 0x34, 0xa0, 0x2f, 0x60, 0x01, 0x73, 0xde, 0xa1, 0x6d, 0xd5, 0xab, 0xc1, - 0x31, 0x49, 0xc4, 0xf0, 0x25, 0x5c, 0x1b, 0x79, 0x73, 0x74, 0xb8, 0x4a, 0x8d, 0x72, 0x79, 0x8c, - 0xdd, 0xf9, 0xc7, 0x82, 0x6a, 0x2e, 0x06, 0x21, 0xb8, 0x12, 0xe3, 0x2e, 0x51, 0x6c, 0x2b, 0xbe, - 0x5a, 0xa3, 0x9b, 0x30, 0x8b, 0x39, 0x0f, 0x94, 0x5d, 0xf7, 0x7d, 0x19, 0x73, 0xfe, 0x30, 0x75, - 0xd9, 0x50, 0xce, 0x08, 0x95, 0xb4, 0xc7, 0x7c, 0xa2, 0x5b, 0x00, 0x11, 0x95, 0x41, 0x9b, 0x75, - 0xbb, 0x54, 0xaa, 0x46, 0xaf, 0xf8, 0x95, 0x88, 0xca, 0x0f, 0x94, 0x21, 0x75, 0xb7, 0x7a, 0xb4, - 0x13, 0x06, 0x12, 0x47, 0xc2, 0xbe, 0xaa, 0xdd, 0xca, 0xf2, 0x29, 0x8e, 0x84, 0x42, 0xb3, 0x81, - 0xd6, 0x19, 0x83, 0x66, 0x86, 0x29, 0xfa, 0x30, 0x43, 0x87, 0x84, 0x0b, 0xbb, 0xac, 0x1e, 0x91, - 0x3b, 0xa3, 0x4a, 0xf1, 0x09, 0x0b, 0x7b, 0x1d, 0x62, 0x76, 0xd9, 0x26, 0x5c, 0x38, 0xf7, 0x61, - 0x46, 0x1b, 0x53, 0xd9, 0x1c, 0xcb, 0xc3, 0x4c, 0x76, 0xba, 0xce, 0x6b, 0x9b, 0x2e, 0x6a, 0x9b, - 0x83, 0x92, 0xe8, 0x75, 0x8d, 0xe2, 0x74, 0xd9, 0x7c, 0x52, 0x81, 0xf2, 0x3e, 0x49, 0x8e, 0x69, - 0x9b, 0xa0, 0x1f, 0x2d, 0xa8, 0xe6, 0xba, 0x02, 0x35, 0x47, 0x11, 0x3b, 0xdb, 0x59, 0xb5, 0x8d, - 0x89, 0x30, 0xba, 0xed, 0x9c, 0xc6, 0x93, 0xdf, 0xff, 0xfe, 0x7e, 0x7a, 0x0d, 0xad, 0x7a, 0x23, - 0x46, 0x8e, 0x41, 0x53, 0xa2, 0x1f, 0x2c, 0x80, 0xe1, 0x45, 0x40, 0x8d, 0x31, 0xb6, 0x2d, 0xde, - 0xa4, 0x5a, 0x73, 0x12, 0x88, 0x21, 
0xea, 0x29, 0xa2, 0xab, 0xe8, 0xee, 0x28, 0xa2, 0xe6, 0xfa, - 0xa1, 0x9f, 0x2c, 0x78, 0xa9, 0xf8, 0x86, 0xa0, 0xcd, 0x31, 0xf6, 0x3d, 0xfb, 0x18, 0xd5, 0xde, - 0x98, 0x14, 0x66, 0x28, 0x6f, 0x2a, 0xca, 0x1e, 0x5a, 0x1f, 0x45, 0x59, 0x3d, 0x3a, 0xc2, 0xeb, - 0xa8, 0x1c, 0xe8, 0x67, 0x0b, 0xe6, 0x9e, 0x7f, 0x96, 0xd1, 0xbd, 0x31, 0x38, 0x9c, 0xf7, 0xf6, - 0xd7, 0xde, 0x9c, 0x1c, 0x68, 0xe8, 0xdf, 0x53, 0xf4, 0x1b, 0xc8, 0x1b, 0x93, 0xfe, 0x57, 0xfa, - 0x5f, 0xe5, 0x31, 0xfa, 0xcd, 0xca, 0x3d, 0xeb, 0xf9, 0xff, 0x7c, 0xf4, 0xce, 0xd8, 0x95, 0x3c, - 0x67, 0x26, 0xa9, 0xbd, 0x7b, 0x49, 0xb4, 0xd1, 0xf3, 0x96, 0xd2, 0xb3, 0x81, 0x1a, 0xa3, 0xf4, - 0x0c, 0xc7, 0x85, 0xec, 0x48, 0xfe, 0xb4, 0xd4, 0x9f, 0xeb, 0x79, 0x83, 0x20, 0x7a, 0x6f, 0x0c, - 0x56, 0x2f, 0x18, 0x62, 0x6b, 0xef, 0x5f, 0x1a, 0x6f, 0x74, 0xbd, 0xad, 0x74, 0x6d, 0xa2, 0x8d, - 0x09, 0x74, 0x65, 0x67, 0xb5, 0xb5, 0xf5, 0xeb, 0x49, 0xdd, 0x7a, 0x7a, 0x52, 0xb7, 0xfe, 0x3a, - 0xa9, 0x5b, 0xdf, 0x9d, 0xd6, 0xa7, 0x9e, 0x9e, 0xd6, 0xa7, 0x9e, 0x9d, 0xd6, 0xa7, 0x3e, 0x5f, - 0x89, 0xa8, 0x3c, 0xec, 0xb5, 0xdc, 0x36, 0xeb, 0x66, 0x89, 0xf5, 0xcf, 0xba, 0x08, 0x8f, 0xcc, - 0x6c, 0xaf, 0x66, 0xa5, 0xd6, 0x8c, 0x1a, 0xd8, 0x37, 0xfe, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x20, - 0x61, 0xbc, 0x54, 0xb6, 0x0c, 0x00, 0x00, + 0x14, 0xcf, 0xc6, 0xad, 0x1d, 0x3f, 0x57, 0x90, 0x4c, 0x4a, 0xb3, 0xb5, 0x52, 0x37, 0xec, 0xa1, + 0x4d, 0x88, 0xb2, 0x2b, 0x3b, 0x84, 0x82, 0xf8, 0x27, 0x85, 0x80, 0x1b, 0xb5, 0x54, 0xd1, 0x06, + 0x71, 0x40, 0x48, 0xab, 0xb5, 0x77, 0xb2, 0x19, 0xc5, 0xde, 0x99, 0xee, 0x8c, 0x8d, 0x2c, 0x54, + 0x81, 0xfa, 0x01, 0x10, 0x12, 0x5f, 0x81, 0x0b, 0x5f, 0x80, 0x23, 0xe2, 0xc8, 0x8d, 0x4a, 0x48, + 0xd0, 0x23, 0x4a, 0xf8, 0x14, 0x9c, 0xd0, 0xce, 0xcc, 0xda, 0xbb, 0xcd, 0x1f, 0xdb, 0x39, 0x20, + 0xf5, 0xe4, 0xd9, 0xf7, 0xde, 0xef, 0xcd, 0xef, 0xf7, 0xe6, 0xcd, 0xf3, 0xc0, 0x1b, 0x6d, 0xca, + 0xbb, 0x94, 0x3b, 0x2d, 0x9f, 0x63, 0x47, 0xe0, 0x28, 0xc0, 0x71, 0x97, 0x44, 0xc2, 0xe9, 0xd7, + 0x5b, 0x58, 0xf8, 0x75, 0xe7, 0x71, 0x0f, 0xc7, 0x03, 0x9b, 0xc5, 0x54, 0x50, 0x54, 0x53, 0xb1, + 0x76, 0x12, 0x6b, 0x8f, 0x62, 0x6d, 0x1d, 0x5b, 0xbd, 0x1e, 0xd2, 0x90, 0xca, 0x50, 0x27, 0x59, + 0x29, 0x54, 0x75, 0x39, 0xa4, 0x34, 0xec, 0x60, 0xc7, 0x67, 0xc4, 0xf1, 0xa3, 0x88, 0x0a, 0x5f, + 0x10, 0x1a, 0x71, 0xed, 0xad, 0x66, 0xf6, 0x64, 0x0d, 0xe6, 0x88, 0x01, 0xc3, 0xa9, 0x6f, 0x39, + 0xe3, 0x93, 0x76, 0xa7, 0xd5, 0xa1, 0xed, 0xa3, 0x73, 0xbd, 0x59, 0x6c, 0x4e, 0x97, 0x14, 0x31, + 0x94, 0xc4, 0xfc, 0x90, 0x44, 0x92, 0x84, 0x8a, 0xb5, 0xbe, 0x35, 0xa0, 0xd6, 0xc4, 0xe2, 0x73, + 0xbf, 0x43, 0x02, 0x5f, 0xd0, 0x78, 0x1f, 0x8b, 0xed, 0xc1, 0x7d, 0x4c, 0xc2, 0x43, 0xe1, 0xe2, + 0xc7, 0x3d, 0xcc, 0x05, 0xba, 0x01, 0xc5, 0x43, 0x69, 0x30, 0x8d, 0x15, 0x63, 0xb5, 0xe0, 0xea, + 0x2f, 0xf4, 0x09, 0xc0, 0x28, 0x9d, 0x39, 0xbb, 0x62, 0xac, 0x56, 0x1a, 0x77, 0xec, 0x6c, 0x9d, + 0x54, 0x01, 0xf5, 0xde, 0xf6, 0x9e, 0x1f, 0x62, 0x9d, 0xd3, 0xcd, 0x20, 0xad, 0xe7, 0x06, 0xdc, + 0x3e, 0x97, 0x02, 0x67, 0x34, 0xe2, 0x18, 0xbd, 0x0e, 0xd7, 0xa4, 0x7e, 0x2f, 0xc7, 0xa4, 0x22, + 0x6d, 0x2a, 0x14, 0xed, 0x02, 0xf4, 0xd3, 0x14, 0xdc, 0x9c, 0x5d, 0x29, 0xac, 0x56, 0x1a, 0x6b, + 0xf6, 0xc5, 0xc7, 0x66, 0x0f, 0x37, 0x75, 0x33, 0x60, 0xd4, 0xcc, 0x29, 0x2b, 0x48, 0x65, 0x77, + 0xc7, 0x2a, 0x53, 0x54, 0x73, 0xd2, 0x0e, 0x60, 0xb9, 0x89, 0xc5, 0x43, 0x5f, 0x60, 0x9e, 0xd3, + 0x97, 0x96, 0x36, 0x5f, 0x42, 0xe3, 0xd2, 0x25, 0xfc, 0xd3, 0x80, 0x5b, 0xe7, 0x6c, 0xf4, 0x72, + 0x17, 0xf0, 0x3b, 0x03, 0xca, 0xc3, 0x2d, 0x90, 0x09, 0x25, 0x3f, 0x08, 0x62, 0xcc, 0xb9, 0xe4, + 0x7f, 0xcd, 0x4d, 
0x3f, 0xd1, 0x12, 0x94, 0x58, 0xaf, 0xe5, 0x1d, 0xe1, 0x81, 0x6c, 0xc4, 0xb2, + 0x5b, 0x64, 0xbd, 0xd6, 0x03, 0x3c, 0x48, 0x74, 0xf7, 0xa9, 0x20, 0x51, 0xe8, 0x31, 0xfa, 0x15, + 0x8e, 0x25, 0x97, 0x82, 0x5b, 0x51, 0xb6, 0xbd, 0xc4, 0x84, 0xd6, 0x61, 0x81, 0xc5, 0x94, 0x51, + 0x8e, 0x63, 0x8f, 0xc5, 0x84, 0xc6, 0x44, 0x0c, 0xcc, 0x2b, 0x32, 0x6e, 0x3e, 0x75, 0xec, 0x69, + 0xbb, 0x55, 0x87, 0xa5, 0x26, 0x16, 0xdb, 0x49, 0xd9, 0x26, 0xbc, 0x27, 0xd6, 0x37, 0x60, 0x9e, + 0x86, 0xe8, 0x63, 0x79, 0x13, 0xe6, 0xd4, 0xb1, 0x90, 0x40, 0x1f, 0xff, 0xcd, 0x6c, 0x95, 0xd5, + 0xad, 0x96, 0xd0, 0xdd, 0x1d, 0xb7, 0x24, 0x43, 0x77, 0x03, 0xb4, 0x01, 0x57, 0xe5, 0x52, 0x5f, + 0xba, 0xa5, 0x73, 0x20, 0xae, 0x8a, 0xb2, 0x96, 0xe0, 0xb5, 0x61, 0x73, 0x28, 0x87, 0x62, 0x6c, + 0x3d, 0x81, 0x1b, 0x2f, 0x3a, 0xfe, 0x4f, 0x5e, 0x8b, 0xb0, 0xd0, 0xc4, 0x62, 0x7f, 0x10, 0xb5, + 0x49, 0x14, 0xa6, 0x9c, 0x6c, 0x40, 0x59, 0xa3, 0xe6, 0x63, 0x42, 0x89, 0x2b, 0x93, 0xa4, 0x33, + 0xe7, 0xa6, 0x9f, 0xd6, 0x75, 0x19, 0xff, 0x88, 0x06, 0x78, 0x37, 0x3a, 0xa0, 0x69, 0x96, 0x5f, + 0x0d, 0x58, 0xcc, 0x99, 0x75, 0x9e, 0x07, 0xb0, 0x10, 0xe0, 0x03, 0xbf, 0xd7, 0x11, 0x5e, 0x44, + 0x03, 0xec, 0x91, 0xe8, 0x80, 0x6a, 0x81, 0xb7, 0xb3, 0x6c, 0x59, 0x83, 0xd9, 0x3b, 0x2a, 0x70, + 0x98, 0xe3, 0xd5, 0x20, 0x6f, 0x40, 0x5f, 0xc2, 0xa2, 0xcf, 0x58, 0x87, 0xb4, 0x65, 0xaf, 0x7a, + 0x7d, 0x1c, 0xf3, 0xd1, 0x24, 0x5c, 0x1f, 0x7b, 0x73, 0x54, 0xb8, 0x4c, 0x8d, 0x32, 0x79, 0xb4, + 0xdd, 0xfa, 0xd7, 0x80, 0x4a, 0x26, 0x06, 0x21, 0xb8, 0x12, 0xf9, 0x5d, 0x2c, 0xd9, 0x96, 0x5d, + 0xb9, 0x46, 0x37, 0x61, 0xce, 0x67, 0xcc, 0x93, 0x76, 0xd5, 0xf7, 0x25, 0x9f, 0xb1, 0x47, 0x89, + 0xcb, 0x84, 0x52, 0x4a, 0xa8, 0xa0, 0x3c, 0xfa, 0x13, 0xdd, 0x02, 0x08, 0x89, 0xf0, 0xda, 0xb4, + 0xdb, 0x25, 0x42, 0x36, 0x7a, 0xd9, 0x2d, 0x87, 0x44, 0x7c, 0x24, 0x0d, 0x89, 0xbb, 0xd5, 0x23, + 0x9d, 0xc0, 0x13, 0x7e, 0xc8, 0xcd, 0xab, 0xca, 0x2d, 0x2d, 0x9f, 0xf9, 0x21, 0x97, 0x68, 0x3a, + 0xd4, 0x5a, 0xd4, 0x68, 0xaa, 0x99, 0xa2, 0x8f, 0x53, 0x74, 0x80, 0x19, 0x37, 0x4b, 0x72, 0x88, + 0xdc, 0x19, 0x57, 0x8a, 0x4f, 0x69, 0xd0, 0xeb, 0x60, 0xbd, 0xcb, 0x0e, 0x66, 0xdc, 0xba, 0x0f, + 0x45, 0x65, 0x4c, 0x64, 0x33, 0x5f, 0x1c, 0xa6, 0xb2, 0x93, 0x75, 0x56, 0xdb, 0x6c, 0x5e, 0xdb, + 0x3c, 0x14, 0x78, 0xaf, 0xab, 0x15, 0x27, 0xcb, 0xc6, 0xd3, 0x32, 0x94, 0xf6, 0x71, 0xdc, 0x27, + 0x6d, 0x8c, 0x7e, 0x32, 0xa0, 0x92, 0xe9, 0x0a, 0xd4, 0x18, 0x47, 0xec, 0x74, 0x67, 0x55, 0x37, + 0xa7, 0xc2, 0xa8, 0xb6, 0xb3, 0xea, 0x4f, 0xff, 0xf8, 0xe7, 0x87, 0xd9, 0x75, 0xb4, 0xe6, 0x8c, + 0x79, 0x72, 0x0c, 0x9b, 0x12, 0xfd, 0x68, 0x00, 0x8c, 0x2e, 0x02, 0xaa, 0x4f, 0xb0, 0x6d, 0xfe, + 0x26, 0x55, 0x1b, 0xd3, 0x40, 0x34, 0x51, 0x47, 0x12, 0x5d, 0x43, 0x77, 0xc7, 0x11, 0xd5, 0xd7, + 0x0f, 0xfd, 0x6c, 0xc0, 0x2b, 0xf9, 0x19, 0x82, 0xb6, 0x26, 0xd8, 0xf7, 0xf4, 0x30, 0xaa, 0xbe, + 0x35, 0x2d, 0x4c, 0x53, 0xde, 0x92, 0x94, 0x1d, 0xb4, 0x31, 0x8e, 0xb2, 0x1c, 0x3a, 0xdc, 0xe9, + 0xc8, 0x1c, 0xe8, 0x17, 0x03, 0xe6, 0x5f, 0x1c, 0xcb, 0xe8, 0xde, 0x04, 0x1c, 0xce, 0x9a, 0xfd, + 0xd5, 0xb7, 0xa7, 0x07, 0x6a, 0xfa, 0xf7, 0x24, 0xfd, 0x3a, 0x72, 0x26, 0xa4, 0xff, 0xb5, 0xfa, + 0x57, 0x79, 0x82, 0x7e, 0x37, 0x32, 0x63, 0x3d, 0xfb, 0x9f, 0x8f, 0xde, 0x9b, 0xb8, 0x92, 0x67, + 0xbc, 0x49, 0xaa, 0xef, 0x5f, 0x12, 0xad, 0xf5, 0xbc, 0x23, 0xf5, 0x6c, 0xa2, 0xfa, 0x38, 0x3d, + 0xa3, 0xe7, 0x42, 0x7a, 0x24, 0x7f, 0x19, 0xf2, 0xcf, 0xf5, 0xac, 0x87, 0x20, 0xfa, 0x60, 0x02, + 0x56, 0x17, 0x3c, 0x62, 0xab, 0x1f, 0x5e, 0x1a, 0xaf, 0x75, 0xbd, 0x2b, 0x75, 0x6d, 0xa1, 0xcd, + 0x29, 0x74, 0xa5, 0x67, 0xb5, 0xfd, 0xf0, 
0xb7, 0xe3, 0x9a, 0xf1, 0xec, 0xb8, 0x66, 0xfc, 0x7d, + 0x5c, 0x33, 0xbe, 0x3f, 0xa9, 0xcd, 0x3c, 0x3b, 0xa9, 0xcd, 0x3c, 0x3f, 0xa9, 0xcd, 0x7c, 0xd1, + 0x08, 0x89, 0x38, 0xec, 0xb5, 0xec, 0x36, 0xed, 0xa6, 0x89, 0xd5, 0xcf, 0x06, 0x0f, 0x8e, 0x9c, + 0x76, 0x87, 0xe0, 0x48, 0x38, 0x61, 0xcc, 0xda, 0x8e, 0xe8, 0x72, 0x35, 0xc6, 0x5a, 0x45, 0xf9, + 0x74, 0xdf, 0xfc, 0x2f, 0x00, 0x00, 0xff, 0xff, 0x82, 0x6b, 0xb0, 0xbd, 0xc0, 0x0c, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -2181,7 +2182,7 @@ func (m *GetValidatorSetByHeightRequest) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Pagination == nil { - m.Pagination = &PageRequest{} + m.Pagination = &query.PageRequest{} } if err := m.Pagination.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -2323,7 +2324,7 @@ func (m *GetValidatorSetByHeightResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Pagination == nil { - m.Pagination = &PageResponse{} + m.Pagination = &query.PageResponse{} } if err := m.Pagination.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -2412,7 +2413,7 @@ func (m *GetLatestValidatorSetRequest) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Pagination == nil { - m.Pagination = &PageRequest{} + m.Pagination = &query.PageRequest{} } if err := m.Pagination.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err @@ -2554,7 +2555,7 @@ func (m *GetLatestValidatorSetResponse) Unmarshal(dAtA []byte) error { return io.ErrUnexpectedEOF } if m.Pagination == nil { - m.Pagination = &PageResponse{} + m.Pagination = &query.PageResponse{} } if err := m.Pagination.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err diff --git a/types/query/query.pb.gw.go b/client/grpc/tmservice/query.pb.gw.go similarity index 99% rename from types/query/query.pb.gw.go rename to client/grpc/tmservice/query.pb.gw.go index 01b943030d1..1fcd8526821 100644 --- a/types/query/query.pb.gw.go +++ b/client/grpc/tmservice/query.pb.gw.go @@ -2,11 +2,11 @@ // source: cosmos/base/tendermint/v1beta1/query.proto /* -Package query is a reverse proxy. +Package tmservice is a reverse proxy. It translates gRPC into RESTful JSON APIs. */ -package query +package tmservice import ( "context" diff --git a/client/grpc/tmservice/service.go b/client/grpc/tmservice/service.go index b1198eb6733..b4d9f8c9526 100644 --- a/client/grpc/tmservice/service.go +++ b/client/grpc/tmservice/service.go @@ -21,10 +21,10 @@ type queryServer struct { interfaceRegistry codectypes.InterfaceRegistry } -var _ qtypes.ServiceServer = queryServer{} +var _ ServiceServer = queryServer{} // NewQueryServer creates a new tendermint query server. 
-func NewQueryServer(clientCtx client.Context, interfaceRegistry codectypes.InterfaceRegistry) qtypes.ServiceServer { +func NewQueryServer(clientCtx client.Context, interfaceRegistry codectypes.InterfaceRegistry) ServiceServer { return queryServer{ clientCtx: clientCtx, interfaceRegistry: interfaceRegistry, @@ -32,18 +32,18 @@ func NewQueryServer(clientCtx client.Context, interfaceRegistry codectypes.Inter } // GetSyncing implements ServiceServer.GetSyncing -func (s queryServer) GetSyncing(_ context.Context, _ *qtypes.GetSyncingRequest) (*qtypes.GetSyncingResponse, error) { +func (s queryServer) GetSyncing(_ context.Context, _ *GetSyncingRequest) (*GetSyncingResponse, error) { status, err := getNodeStatus(s.clientCtx) if err != nil { return nil, err } - return &qtypes.GetSyncingResponse{ + return &GetSyncingResponse{ Syncing: status.SyncInfo.CatchingUp, }, nil } // GetLatestBlock implements ServiceServer.GetLatestBlock -func (s queryServer) GetLatestBlock(context.Context, *qtypes.GetLatestBlockRequest) (*qtypes.GetLatestBlockResponse, error) { +func (s queryServer) GetLatestBlock(context.Context, *GetLatestBlockRequest) (*GetLatestBlockResponse, error) { status, err := getBlock(s.clientCtx, nil) if err != nil { return nil, err @@ -55,14 +55,14 @@ func (s queryServer) GetLatestBlock(context.Context, *qtypes.GetLatestBlockReque return nil, err } - return &qtypes.GetLatestBlockResponse{ + return &GetLatestBlockResponse{ BlockId: &protoBlockID, Block: protoBlock, }, nil } // GetBlockByHeight implements ServiceServer.GetBlockByHeight -func (s queryServer) GetBlockByHeight(_ context.Context, req *qtypes.GetBlockByHeightRequest) (*qtypes.GetBlockByHeightResponse, error) { +func (s queryServer) GetBlockByHeight(_ context.Context, req *GetBlockByHeightRequest) (*GetBlockByHeightResponse, error) { chainHeight, err := rpc.GetChainHeight(s.clientCtx) if err != nil { return nil, err @@ -81,14 +81,14 @@ func (s queryServer) GetBlockByHeight(_ context.Context, req *qtypes.GetBlockByH if err != nil { return nil, err } - return &qtypes.GetBlockByHeightResponse{ + return &GetBlockByHeightResponse{ BlockId: &protoBlockID, Block: protoBlock, }, nil } // GetLatestValidatorSet implements ServiceServer.GetLatestValidatorSet -func (s queryServer) GetLatestValidatorSet(ctx context.Context, req *qtypes.GetLatestValidatorSetRequest) (*qtypes.GetLatestValidatorSetResponse, error) { +func (s queryServer) GetLatestValidatorSet(ctx context.Context, req *GetLatestValidatorSetRequest) (*GetLatestValidatorSetResponse, error) { page, limit, err := qtypes.ParsePagination(req.Pagination) if err != nil { return nil, err @@ -99,13 +99,13 @@ func (s queryServer) GetLatestValidatorSet(ctx context.Context, req *qtypes.GetL return nil, err } - outputValidatorsRes := &qtypes.GetLatestValidatorSetResponse{ + outputValidatorsRes := &GetLatestValidatorSetResponse{ BlockHeight: validatorsRes.BlockHeight, - Validators: make([]*qtypes.Validator, len(validatorsRes.Validators)), + Validators: make([]*Validator, len(validatorsRes.Validators)), } for i, validator := range validatorsRes.Validators { - outputValidatorsRes.Validators[i] = &qtypes.Validator{ + outputValidatorsRes.Validators[i] = &Validator{ Address: validator.Address, ProposerPriority: validator.ProposerPriority, PubKey: validator.PubKey, @@ -116,7 +116,7 @@ func (s queryServer) GetLatestValidatorSet(ctx context.Context, req *qtypes.GetL } // GetValidatorSetByHeight implements ServiceServer.GetValidatorSetByHeight -func (s queryServer) GetValidatorSetByHeight(ctx context.Context, 
req *qtypes.GetValidatorSetByHeightRequest) (*qtypes.GetValidatorSetByHeightResponse, error) { +func (s queryServer) GetValidatorSetByHeight(ctx context.Context, req *GetValidatorSetByHeightRequest) (*GetValidatorSetByHeightResponse, error) { page, limit, err := qtypes.ParsePagination(req.Pagination) if err != nil { return nil, err @@ -136,13 +136,13 @@ func (s queryServer) GetValidatorSetByHeight(ctx context.Context, req *qtypes.Ge return nil, err } - outputValidatorsRes := &qtypes.GetValidatorSetByHeightResponse{ + outputValidatorsRes := &GetValidatorSetByHeightResponse{ BlockHeight: validatorsRes.BlockHeight, - Validators: make([]*qtypes.Validator, len(validatorsRes.Validators)), + Validators: make([]*Validator, len(validatorsRes.Validators)), } for i, validator := range validatorsRes.Validators { - outputValidatorsRes.Validators[i] = &qtypes.Validator{ + outputValidatorsRes.Validators[i] = &Validator{ Address: validator.Address, ProposerPriority: validator.ProposerPriority, PubKey: validator.PubKey, @@ -153,7 +153,7 @@ func (s queryServer) GetValidatorSetByHeight(ctx context.Context, req *qtypes.Ge } // GetNodeInfo implements ServiceServer.GetNodeInfo -func (s queryServer) GetNodeInfo(ctx context.Context, req *qtypes.GetNodeInfoRequest) (*qtypes.GetNodeInfoResponse, error) { +func (s queryServer) GetNodeInfo(ctx context.Context, req *GetNodeInfoRequest) (*GetNodeInfoResponse, error) { status, err := getNodeStatus(s.clientCtx) if err != nil { return nil, err @@ -162,19 +162,19 @@ func (s queryServer) GetNodeInfo(ctx context.Context, req *qtypes.GetNodeInfoReq protoNodeInfo := status.NodeInfo.ToProto() nodeInfo := version.NewInfo() - deps := make([]*qtypes.Module, len(nodeInfo.BuildDeps)) + deps := make([]*Module, len(nodeInfo.BuildDeps)) for i, dep := range nodeInfo.BuildDeps { - deps[i] = &qtypes.Module{ + deps[i] = &Module{ Path: dep.Path, Sum: dep.Sum, Version: dep.Version, } } - resp := qtypes.GetNodeInfoResponse{ + resp := GetNodeInfoResponse{ DefaultNodeInfo: protoNodeInfo, - ApplicationVersion: &qtypes.VersionInfo{ + ApplicationVersion: &VersionInfo{ AppName: nodeInfo.AppName, Name: nodeInfo.Name, GitCommit: nodeInfo.GitCommit, @@ -193,7 +193,7 @@ func RegisterTendermintService( clientCtx client.Context, interfaceRegistry codectypes.InterfaceRegistry, ) { - qtypes.RegisterServiceServer( + RegisterServiceServer( qrt, NewQueryServer(clientCtx, interfaceRegistry), ) @@ -202,5 +202,5 @@ func RegisterTendermintService( // RegisterGRPCGatewayRoutes mounts the tendermint service's GRPC-gateway routes on the // given Mux. 
func RegisterGRPCGatewayRoutes(clientConn gogogrpc.ClientConn, mux *runtime.ServeMux) { - qtypes.RegisterServiceHandlerClient(context.Background(), mux, qtypes.NewServiceClient(clientConn)) + RegisterServiceHandlerClient(context.Background(), mux, NewServiceClient(clientConn)) } diff --git a/client/grpc/tmservice/service_test.go b/client/grpc/tmservice/service_test.go index bb145e6e21c..2210486f442 100644 --- a/client/grpc/tmservice/service_test.go +++ b/client/grpc/tmservice/service_test.go @@ -7,6 +7,7 @@ import ( "github.com/stretchr/testify/suite" + "github.com/cosmos/cosmos-sdk/client/grpc/tmservice" "github.com/cosmos/cosmos-sdk/testutil/network" qtypes "github.com/cosmos/cosmos-sdk/types/query" "github.com/cosmos/cosmos-sdk/types/rest" @@ -19,7 +20,7 @@ type IntegrationTestSuite struct { cfg network.Config network *network.Network - queryClient qtypes.ServiceClient + queryClient tmservice.ServiceClient } func (s *IntegrationTestSuite) SetupSuite() { @@ -36,7 +37,7 @@ func (s *IntegrationTestSuite) SetupSuite() { _, err := s.network.WaitForHeight(1) s.Require().NoError(err) - s.queryClient = qtypes.NewServiceClient(s.network.Validators[0].ClientCtx) + s.queryClient = tmservice.NewServiceClient(s.network.Validators[0].ClientCtx) } func (s *IntegrationTestSuite) TearDownSuite() { @@ -47,13 +48,13 @@ func (s *IntegrationTestSuite) TearDownSuite() { func (s IntegrationTestSuite) TestQueryNodeInfo() { val := s.network.Validators[0] - res, err := s.queryClient.GetNodeInfo(context.Background(), &qtypes.GetNodeInfoRequest{}) + res, err := s.queryClient.GetNodeInfo(context.Background(), &tmservice.GetNodeInfoRequest{}) s.Require().NoError(err) s.Require().Equal(res.ApplicationVersion.AppName, version.NewInfo().AppName) restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/node_info", val.APIAddress)) s.Require().NoError(err) - var getInfoRes qtypes.GetNodeInfoResponse + var getInfoRes tmservice.GetNodeInfoResponse s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &getInfoRes)) s.Require().Equal(getInfoRes.ApplicationVersion.AppName, version.NewInfo().AppName) } @@ -61,35 +62,35 @@ func (s IntegrationTestSuite) TestQueryNodeInfo() { func (s IntegrationTestSuite) TestQuerySyncing() { val := s.network.Validators[0] - _, err := s.queryClient.GetSyncing(context.Background(), &qtypes.GetSyncingRequest{}) + _, err := s.queryClient.GetSyncing(context.Background(), &tmservice.GetSyncingRequest{}) s.Require().NoError(err) restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/syncing", val.APIAddress)) s.Require().NoError(err) - var syncingRes qtypes.GetSyncingResponse + var syncingRes tmservice.GetSyncingResponse s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &syncingRes)) } func (s IntegrationTestSuite) TestQueryLatestBlock() { val := s.network.Validators[0] - _, err := s.queryClient.GetLatestBlock(context.Background(), &qtypes.GetLatestBlockRequest{}) + _, err := s.queryClient.GetLatestBlock(context.Background(), &tmservice.GetLatestBlockRequest{}) s.Require().NoError(err) restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/blocks/latest", val.APIAddress)) s.Require().NoError(err) - var blockInfoRes qtypes.GetLatestBlockResponse + var blockInfoRes tmservice.GetLatestBlockResponse s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &blockInfoRes)) } func (s IntegrationTestSuite) TestQueryBlockByHeight() { val := s.network.Validators[0] - _, err := 
s.queryClient.GetBlockByHeight(context.Background(), &qtypes.GetBlockByHeightRequest{Height: 1}) + _, err := s.queryClient.GetBlockByHeight(context.Background(), &tmservice.GetBlockByHeightRequest{Height: 1}) s.Require().NoError(err) restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/blocks/%d", val.APIAddress, 1)) s.Require().NoError(err) - var blockInfoRes qtypes.GetBlockByHeightResponse + var blockInfoRes tmservice.GetBlockByHeightResponse s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &blockInfoRes)) } @@ -97,13 +98,13 @@ func (s IntegrationTestSuite) TestQueryLatestValidatorSet() { val := s.network.Validators[0] // nil pagination - _, err := s.queryClient.GetLatestValidatorSet(context.Background(), &qtypes.GetLatestValidatorSetRequest{ + _, err := s.queryClient.GetLatestValidatorSet(context.Background(), &tmservice.GetLatestValidatorSetRequest{ Pagination: nil, }) s.Require().NoError(err) //with pagination - _, err = s.queryClient.GetLatestValidatorSet(context.Background(), &qtypes.GetLatestValidatorSetRequest{Pagination: &qtypes.PageRequest{ + _, err = s.queryClient.GetLatestValidatorSet(context.Background(), &tmservice.GetLatestValidatorSetRequest{Pagination: &qtypes.PageRequest{ Offset: 0, Limit: 10, }}) @@ -116,7 +117,7 @@ func (s IntegrationTestSuite) TestQueryLatestValidatorSet() { // rest request with pagination restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validators/latest?pagination.offset=%d&pagination.limit=%d", val.APIAddress, 0, 1)) s.Require().NoError(err) - var validatorSetRes qtypes.GetLatestValidatorSetResponse + var validatorSetRes tmservice.GetLatestValidatorSetResponse s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &validatorSetRes)) } @@ -124,13 +125,13 @@ func (s IntegrationTestSuite) TestQueryValidatorSetByHeight() { val := s.network.Validators[0] // nil pagination - _, err := s.queryClient.GetValidatorSetByHeight(context.Background(), &qtypes.GetValidatorSetByHeightRequest{ + _, err := s.queryClient.GetValidatorSetByHeight(context.Background(), &tmservice.GetValidatorSetByHeightRequest{ Height: 1, Pagination: nil, }) s.Require().NoError(err) - _, err = s.queryClient.GetValidatorSetByHeight(context.Background(), &qtypes.GetValidatorSetByHeightRequest{ + _, err = s.queryClient.GetValidatorSetByHeight(context.Background(), &tmservice.GetValidatorSetByHeightRequest{ Height: 1, Pagination: &qtypes.PageRequest{ Offset: 0, @@ -144,7 +145,7 @@ func (s IntegrationTestSuite) TestQueryValidatorSetByHeight() { // rest query with pagination restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/base/tendermint/v1beta1/validators/%d?pagination.offset=%d&pagination.limit=%d", val.APIAddress, 1, 0, 1)) - var validatorSetRes qtypes.GetValidatorSetByHeightResponse + var validatorSetRes tmservice.GetValidatorSetByHeightResponse s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &validatorSetRes)) } diff --git a/client/grpc_query.go b/client/grpc_query.go index 979333f6413..fc8cdeeb80d 100644 --- a/client/grpc_query.go +++ b/client/grpc_query.go @@ -3,6 +3,7 @@ package client import ( gocontext "context" "fmt" + "reflect" "strconv" gogogrpc "github.com/gogo/protobuf/grpc" @@ -12,10 +13,10 @@ import ( "google.golang.org/grpc/encoding/proto" "google.golang.org/grpc/metadata" - grpctypes "github.com/cosmos/cosmos-sdk/types/grpc" - "github.com/cosmos/cosmos-sdk/codec/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + grpctypes 
"github.com/cosmos/cosmos-sdk/types/grpc" + "github.com/cosmos/cosmos-sdk/types/tx" ) var _ gogogrpc.ClientConn = Context{} @@ -23,7 +24,37 @@ var protoCodec = encoding.GetCodec(proto.Name) // Invoke implements the grpc ClientConn.Invoke method -func (ctx Context) Invoke(grpcCtx gocontext.Context, method string, args, reply interface{}, opts ...grpc.CallOption) error { +func (ctx Context) Invoke(grpcCtx gocontext.Context, method string, args, reply interface{}, opts ...grpc.CallOption) (err error) { + // Two things can happen here: + // 1. either we're broadcasting a Tx, in which case we call Tendermint's broadcast endpoint directly, + // 2. or we are querying for state, in which case we call ABCI's Query. + + // In both cases, we don't allow empty request args (it will panic unexpectedly). + if reflect.ValueOf(args).IsNil() { + return sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "request cannot be nil") + } + + // Case 1. Broadcasting a Tx. + if isBroadcast(method) { + req, ok := args.(*tx.BroadcastTxRequest) + if !ok { + return sdkerrors.Wrapf(sdkerrors.ErrInvalidRequest, "expected %T, got %T", (*tx.BroadcastTxRequest)(nil), args) + } + res, ok := reply.(*tx.BroadcastTxResponse) + if !ok { + return sdkerrors.Wrapf(sdkerrors.ErrInvalidRequest, "expected %T, got %T", (*tx.BroadcastTxResponse)(nil), reply) + } + + broadcastRes, err := TxServiceBroadcast(grpcCtx, ctx, req) + if err != nil { + return err + } + *res = *broadcastRes + + return err + } + + // Case 2. Querying state. reqBz, err := protoCodec.Marshal(args) if err != nil { return err @@ -86,3 +117,7 @@ func (ctx Context) Invoke(grpcCtx gocontext.Context, method string, args, reply func (Context) NewStream(gocontext.Context, *grpc.StreamDesc, string, ...grpc.CallOption) (grpc.ClientStream, error) { return nil, fmt.Errorf("streaming rpc not supported") } + +func isBroadcast(method string) bool { + return method == "/cosmos.tx.v1beta1.Service/BroadcastTx" +} diff --git a/client/keys/export.go b/client/keys/export.go index 3f75ee2e52e..ac2b6b47683 100644 --- a/client/keys/export.go +++ b/client/keys/export.go @@ -2,23 +2,45 @@ package keys import ( "bufio" + "fmt" "github.com/spf13/cobra" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/input" + "github.com/cosmos/cosmos-sdk/crypto/keyring" +) + +const ( + flagUnarmoredHex = "unarmored-hex" + flagUnsafe = "unsafe" ) // ExportKeyCommand exports private keys from the key store. func ExportKeyCommand() *cobra.Command { - return &cobra.Command{ + cmd := &cobra.Command{ Use: "export <name>", Short: "Export private keys", - Long: `Export a private key from the local keybase in ASCII-armored encrypted format.`, - Args: cobra.ExactArgs(1), + Long: `Export a private key from the local keyring in ASCII-armored encrypted format. + +When both the --unarmored-hex and --unsafe flags are selected, cryptographic +private key material is exported in an INSECURE fashion that is designed to +allow users to import their keys in hot wallets. This feature is for advanced +users only who are confident that they know how to handle private keys and are +FULLY AWARE OF THE RISKS. 
If you are unsure, you may want to do some research +and export your keys in ASCII-armored encrypted format.`, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { buf := bufio.NewReader(cmd.InOrStdin()) clientCtx := client.GetClientContextFromCmd(cmd) + unarmored, _ := cmd.Flags().GetBool(flagUnarmoredHex) + unsafe, _ := cmd.Flags().GetBool(flagUnsafe) + + if unarmored && unsafe { + return exportUnsafeUnarmored(cmd, args[0], buf, clientCtx.Keyring) + } else if unarmored || unsafe { + return fmt.Errorf("the flags %s and %s must be used together", flagUnsafe, flagUnarmoredHex) + } encryptPassword, err := input.GetPassword("Enter passphrase to encrypt the exported key:", buf) if err != nil { @@ -31,7 +53,31 @@ func ExportKeyCommand() *cobra.Command { } cmd.Println(armored) + return nil }, } + + cmd.Flags().Bool(flagUnarmoredHex, false, "Export unarmored hex privkey. Requires --unsafe.") + cmd.Flags().Bool(flagUnsafe, false, "Enable unsafe operations. This flag must be switched on along with all unsafe operation-specific options.") + + return cmd +} + +func exportUnsafeUnarmored(cmd *cobra.Command, uid string, buf *bufio.Reader, kr keyring.Keyring) error { + // ask the user to confirm the unarmored export; abort silently if they decline + if yes, err := input.GetConfirmation("WARNING: The private key will be exported as an unarmored hexadecimal string. USE AT YOUR OWN RISK. Continue?", buf, cmd.ErrOrStderr()); err != nil { + return err + } else if !yes { + return nil + } + + hexPrivKey, err := keyring.NewUnsafe(kr).UnsafeExportPrivKeyHex(uid) + if err != nil { + return err + } + + cmd.Println(hexPrivKey) + + return nil } diff --git a/client/keys/export_test.go b/client/keys/export_test.go index 4276db1c124..b01bbf82302 100644 --- a/client/keys/export_test.go +++ b/client/keys/export_test.go @@ -36,15 +36,34 @@ func Test_runExportCmd(t *testing.T) { require.NoError(t, err) // Now enter password - mockIn.Reset("123456789\n123456789\n") - cmd.SetArgs([]string{ + args := []string{ "keyname1", fmt.Sprintf("--%s=%s", flags.FlagHome, kbHome), fmt.Sprintf("--%s=%s", flags.FlagKeyringBackend, keyring.BackendTest), - }) + } + + mockIn.Reset("123456789\n123456789\n") + cmd.SetArgs(args) clientCtx := client.Context{}.WithKeyring(kb) ctx := context.WithValue(context.Background(), client.ClientContextKey, &clientCtx) require.NoError(t, cmd.ExecuteContext(ctx)) + + argsUnsafeOnly := append(args, "--unsafe") + cmd.SetArgs(argsUnsafeOnly) + require.Error(t, cmd.ExecuteContext(ctx)) + + argsUnarmoredHexOnly := append(args, "--unarmored-hex") + cmd.SetArgs(argsUnarmoredHexOnly) + require.Error(t, cmd.ExecuteContext(ctx)) + + argsUnsafeUnarmoredHex := append(args, "--unsafe", "--unarmored-hex") + cmd.SetArgs(argsUnsafeUnarmoredHex) + require.Error(t, cmd.ExecuteContext(ctx)) + + mockIn, mockOut := testutil.ApplyMockIO(cmd) + mockIn.Reset("y\n") + require.NoError(t, cmd.ExecuteContext(ctx)) + require.Equal(t, "2485e33678db4175dc0ecef2d6e1fc493d4a0d7f7ce83324b6ed70afe77f3485\n", mockOut.String()) } diff --git a/codec/legacy/codec.go b/codec/legacy/codec.go index 09225998e0b..5ec6b2976ca 100644 --- a/codec/legacy/codec.go +++ b/codec/legacy/codec.go @@ -3,6 +3,7 @@ package legacy import ( "github.com/cosmos/cosmos-sdk/codec" cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" + cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" ) // Cdc defines a global generic sealed Amino codec to be used throughout sdk. 
It @@ -15,5 +16,16 @@ func init() { Cdc = codec.NewLegacyAmino() cryptocodec.RegisterCrypto(Cdc) codec.RegisterEvidences(Cdc) - Cdc.Seal() +} + +// PrivKeyFromBytes unmarshals private key bytes and returns a PrivKey +func PrivKeyFromBytes(privKeyBytes []byte) (privKey cryptotypes.PrivKey, err error) { + err = Cdc.UnmarshalBinaryBare(privKeyBytes, &privKey) + return +} + +// PubKeyFromBytes unmarshals public key bytes and returns a PubKey +func PubKeyFromBytes(pubKeyBytes []byte) (pubKey cryptotypes.PubKey, err error) { + err = Cdc.UnmarshalBinaryBare(pubKeyBytes, &pubKey) + return } diff --git a/codec/types/any.go b/codec/types/any.go index 38fe4b42aa7..4a30ddad4d0 100644 --- a/codec/types/any.go +++ b/codec/types/any.go @@ -1,8 +1,9 @@ package types import ( - sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/gogo/protobuf/proto" + + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" ) type Any struct { diff --git a/contrib/images/simd-env/Dockerfile b/contrib/images/simd-env/Dockerfile index 3be7eb5c847..1ccaeac657d 100644 --- a/contrib/images/simd-env/Dockerfile +++ b/contrib/images/simd-env/Dockerfile @@ -12,7 +12,7 @@ VOLUME [ /simd ] WORKDIR /simd EXPOSE 26656 26657 ENTRYPOINT ["/usr/bin/wrapper.sh"] -CMD ["start"] +CMD ["start", "--log_format", "plain"] STOPSIGNAL SIGTERM COPY wrapper.sh /usr/bin/wrapper.sh diff --git a/crypto/armor.go b/crypto/armor.go index 3ee472707a7..35deb107798 100644 --- a/crypto/armor.go +++ b/crypto/armor.go @@ -10,7 +10,6 @@ import ( "github.com/tendermint/tendermint/crypto/xsalsa20symmetric" "github.com/cosmos/cosmos-sdk/codec/legacy" - cryptoAmino "github.com/cosmos/cosmos-sdk/crypto/codec" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" ) @@ -153,7 +152,7 @@ func encryptPrivKey(privKey cryptotypes.PrivKey, passphrase string) (saltBytes [ } key = crypto.Sha256(key) // get 32 bytes - privKeyBytes := legacy.Cdc.Amino.MustMarshalBinaryBare(privKey) + privKeyBytes := legacy.Cdc.MustMarshalBinaryBare(privKey) return saltBytes, xsalsa20symmetric.EncryptSymmetric(privKeyBytes, key) } @@ -206,5 +205,5 @@ func decryptPrivKey(saltBytes []byte, encBytes []byte, passphrase string) (privK return privKey, err } - return cryptoAmino.PrivKeyFromBytes(privKeyBytes) + return legacy.PrivKeyFromBytes(privKeyBytes) } diff --git a/crypto/armor_test.go b/crypto/armor_test.go index 3267ca98a78..abbc7870aae 100644 --- a/crypto/armor_test.go +++ b/crypto/armor_test.go @@ -15,7 +15,6 @@ import ( "github.com/cosmos/cosmos-sdk/codec/legacy" "github.com/cosmos/cosmos-sdk/crypto" - cryptoAmino "github.com/cosmos/cosmos-sdk/crypto/codec" "github.com/cosmos/cosmos-sdk/crypto/hd" "github.com/cosmos/cosmos-sdk/crypto/keyring" "github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1" @@ -79,7 +78,7 @@ func TestArmorUnarmorPubKey(t *testing.T) { armored := crypto.ArmorPubKeyBytes(legacy.Cdc.Amino.MustMarshalBinaryBare(info.GetPubKey()), "") pubBytes, algo, err := crypto.UnarmorPubKeyBytes(armored) require.NoError(t, err) - pub, err := cryptoAmino.PubKeyFromBytes(pubBytes) + pub, err := legacy.PubKeyFromBytes(pubBytes) require.NoError(t, err) require.Equal(t, string(hd.Secp256k1Type), algo) require.True(t, pub.Equals(info.GetPubKey())) @@ -87,7 +86,7 @@ func TestArmorUnarmorPubKey(t *testing.T) { armored = crypto.ArmorPubKeyBytes(legacy.Cdc.Amino.MustMarshalBinaryBare(info.GetPubKey()), "unknown") pubBytes, algo, err = crypto.UnarmorPubKeyBytes(armored) require.NoError(t, err) - pub, err = 
cryptoAmino.PubKeyFromBytes(pubBytes) + pub, err = legacy.PubKeyFromBytes(pubBytes) require.NoError(t, err) require.Equal(t, "unknown", algo) require.True(t, pub.Equals(info.GetPubKey())) diff --git a/crypto/codec/amino.go b/crypto/codec/amino.go index 5cd61c3dd31..d50a08864c2 100644 --- a/crypto/codec/amino.go +++ b/crypto/codec/amino.go @@ -10,13 +10,6 @@ import ( cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" ) -var amino *codec.LegacyAmino - -func init() { - amino = codec.NewLegacyAmino() - RegisterCrypto(amino) -} - // RegisterCrypto registers all crypto dependency types with the provided Amino // codec. func RegisterCrypto(cdc *codec.LegacyAmino) { @@ -38,15 +31,3 @@ func RegisterCrypto(cdc *codec.LegacyAmino) { cdc.RegisterConcrete(&secp256k1.PrivKey{}, secp256k1.PrivKeyName, nil) } - -// PrivKeyFromBytes unmarshals private key bytes and returns a PrivKey -func PrivKeyFromBytes(privKeyBytes []byte) (privKey cryptotypes.PrivKey, err error) { - err = amino.UnmarshalBinaryBare(privKeyBytes, &privKey) - return -} - -// PubKeyFromBytes unmarshals public key bytes and returns a PubKey -func PubKeyFromBytes(pubKeyBytes []byte) (pubKey cryptotypes.PubKey, err error) { - err = amino.UnmarshalBinaryBare(pubKeyBytes, &pubKey) - return -} diff --git a/crypto/keyring/codec.go b/crypto/keyring/codec.go index 6e6a254c87e..558f377752d 100644 --- a/crypto/keyring/codec.go +++ b/crypto/keyring/codec.go @@ -2,18 +2,12 @@ package keyring import ( "github.com/cosmos/cosmos-sdk/codec" - cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" + "github.com/cosmos/cosmos-sdk/codec/legacy" "github.com/cosmos/cosmos-sdk/crypto/hd" ) -// CryptoCdc defines the codec required for keys and info -var CryptoCdc *codec.LegacyAmino - func init() { - CryptoCdc = codec.NewLegacyAmino() - cryptocodec.RegisterCrypto(CryptoCdc) - RegisterLegacyAminoCodec(CryptoCdc) - CryptoCdc.Seal() + RegisterLegacyAminoCodec(legacy.Cdc) } // RegisterLegacyAminoCodec registers concrete types and interfaces on the given codec. 
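The codec/legacy, crypto/armor, crypto/codec/amino.go, and crypto/keyring/codec.go hunks above all route Amino key (de)serialization through the single `codec/legacy.Cdc` instead of the removed package-level codecs (`CryptoCdc` and the private `amino` variable). A minimal round-trip sketch of the resulting call pattern, not part of the patch and assuming only packages already referenced in this diff:

```go
package main

import (
	"fmt"

	"github.com/cosmos/cosmos-sdk/codec/legacy"
	"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
	cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
)

func main() {
	// A concrete key held behind the SDK's own PrivKey interface.
	var priv cryptotypes.PrivKey = secp256k1.GenPrivKey()

	// Marshal with the shared legacy.Cdc, as keystore.writeLocalKey now does.
	bz := legacy.Cdc.MustMarshalBinaryBare(priv)

	// Decode with the helper that replaced cryptoAmino.PrivKeyFromBytes.
	decoded, err := legacy.PrivKeyFromBytes(bz)
	if err != nil {
		panic(err)
	}

	fmt.Println(decoded.PubKey().Equals(priv.PubKey())) // expected: true
}
```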
diff --git a/crypto/keyring/info.go b/crypto/keyring/info.go index f05e2f0ddef..24024599bba 100644 --- a/crypto/keyring/info.go +++ b/crypto/keyring/info.go @@ -3,6 +3,7 @@ package keyring import ( "fmt" + "github.com/cosmos/cosmos-sdk/codec/legacy" codectypes "github.com/cosmos/cosmos-sdk/codec/types" "github.com/cosmos/cosmos-sdk/crypto/hd" "github.com/cosmos/cosmos-sdk/crypto/keys/multisig" @@ -246,12 +247,12 @@ func (i multiInfo) UnpackInterfaces(unpacker codectypes.AnyUnpacker) error { // encoding info func marshalInfo(i Info) []byte { - return CryptoCdc.MustMarshalBinaryLengthPrefixed(i) + return legacy.Cdc.MustMarshalBinaryLengthPrefixed(i) } // decoding info func unmarshalInfo(bz []byte) (info Info, err error) { - err = CryptoCdc.UnmarshalBinaryLengthPrefixed(bz, &info) + err = legacy.Cdc.UnmarshalBinaryLengthPrefixed(bz, &info) if err != nil { return nil, err } @@ -266,7 +267,7 @@ func unmarshalInfo(bz []byte) (info Info, err error) { _, ok := info.(multiInfo) if ok { var multi multiInfo - err = CryptoCdc.UnmarshalBinaryLengthPrefixed(bz, &multi) + err = legacy.Cdc.UnmarshalBinaryLengthPrefixed(bz, &multi) return multi, err } diff --git a/crypto/keyring/keyring.go b/crypto/keyring/keyring.go index d3220b5b43e..f88ffdc3701 100644 --- a/crypto/keyring/keyring.go +++ b/crypto/keyring/keyring.go @@ -18,8 +18,8 @@ import ( tmcrypto "github.com/tendermint/tendermint/crypto" "github.com/cosmos/cosmos-sdk/client/input" + "github.com/cosmos/cosmos-sdk/codec/legacy" "github.com/cosmos/cosmos-sdk/crypto" - cryptoamino "github.com/cosmos/cosmos-sdk/crypto/codec" "github.com/cosmos/cosmos-sdk/crypto/hd" "github.com/cosmos/cosmos-sdk/crypto/ledger" "github.com/cosmos/cosmos-sdk/crypto/types" @@ -88,6 +88,13 @@ type Keyring interface { Exporter } +// UnsafeKeyring exposes unsafe operations such as unsafe unarmored export in +// addition to those that are made available by the Keyring interface. +type UnsafeKeyring interface { + Keyring + UnsafeExporter +} + // Signer is implemented by key stores that want to provide signing capabilities. type Signer interface { // Sign sign byte messages with a user key. @@ -110,12 +117,20 @@ type Exporter interface { // Export public key ExportPubKeyArmor(uid string) (string, error) ExportPubKeyArmorByAddress(address sdk.Address) (string, error) + // ExportPrivKey returns a private key in ASCII armored format. // It returns an error if the key does not exist or a wrong encryption passphrase is supplied. ExportPrivKeyArmor(uid, encryptPassphrase string) (armor string, err error) ExportPrivKeyArmorByAddress(address sdk.Address, encryptPassphrase string) (armor string, err error) } +// UnsafeExporter is implemented by key stores that support unsafe export +// of private keys' material. +type UnsafeExporter interface { + // UnsafeExportPrivKeyHex returns a private key in unarmored hex format + UnsafeExportPrivKeyHex(uid string) (string, error) +} + // Option overrides keyring configuration options. 
type Option func(options *Options) @@ -198,7 +213,7 @@ func (ks keystore) ExportPubKeyArmor(uid string) (string, error) { return "", fmt.Errorf("no key to export with name: %s", uid) } - return crypto.ArmorPubKeyBytes(CryptoCdc.MustMarshalBinaryBare(bz.GetPubKey()), string(bz.GetAlgo())), nil + return crypto.ArmorPubKeyBytes(legacy.Cdc.MustMarshalBinaryBare(bz.GetPubKey()), string(bz.GetAlgo())), nil } func (ks keystore) ExportPubKeyArmorByAddress(address sdk.Address) (string, error) { @@ -240,7 +255,7 @@ func (ks keystore) ExportPrivateKeyObject(uid string) (types.PrivKey, error) { return nil, err } - priv, err = cryptoamino.PrivKeyFromBytes([]byte(linfo.PrivKeyArmor)) + priv, err = legacy.PrivKeyFromBytes([]byte(linfo.PrivKeyArmor)) if err != nil { return nil, err } @@ -289,7 +304,7 @@ func (ks keystore) ImportPubKey(uid string, armor string) error { return err } - pubKey, err := cryptoamino.PubKeyFromBytes(pubBytes) + pubKey, err := legacy.PubKeyFromBytes(pubBytes) if err != nil { return err } @@ -316,7 +331,7 @@ func (ks keystore) Sign(uid string, msg []byte) ([]byte, types.PubKey, error) { return nil, nil, fmt.Errorf("private key not available") } - priv, err = cryptoamino.PrivKeyFromBytes([]byte(i.PrivKeyArmor)) + priv, err = legacy.PrivKeyFromBytes([]byte(i.PrivKeyArmor)) if err != nil { return nil, nil, err } @@ -696,7 +711,7 @@ func (ks keystore) writeLocalKey(name string, priv types.PrivKey, algo hd.PubKey // encrypt private key using keyring pub := priv.PubKey() - info := newLocalInfo(name, pub, string(CryptoCdc.MustMarshalBinaryBare(priv)), algo) + info := newLocalInfo(name, pub, string(legacy.Cdc.MustMarshalBinaryBare(priv)), algo) if err := ks.writeInfo(info); err != nil { return nil, err } @@ -774,6 +789,29 @@ func (ks keystore) writeMultisigKey(name string, pub types.PubKey) (Info, error) return info, nil } +type unsafeKeystore struct { + keystore +} + +// NewUnsafe returns a new keyring that provides support for unsafe operations. +func NewUnsafe(kr Keyring) UnsafeKeyring { + // The type assertion is against the only keystore + // implementation that is currently provided. + ks := kr.(keystore) + + return unsafeKeystore{ks} +} + +// UnsafeExportPrivKeyHex exports private keys in unarmored hexadecimal format. 
+func (ks unsafeKeystore) UnsafeExportPrivKeyHex(uid string) (privkey string, err error) { + priv, err := ks.ExportPrivateKeyObject(uid) + if err != nil { + return "", err + } + + return hex.EncodeToString(priv.Bytes()), nil +} + func addrHexKeyAsString(address sdk.Address) string { return fmt.Sprintf("%s.%s", hex.EncodeToString(address.Bytes()), addressSuffix) } diff --git a/crypto/keyring/keyring_test.go b/crypto/keyring/keyring_test.go index 540ea57bf3f..bcd7eddf39f 100644 --- a/crypto/keyring/keyring_test.go +++ b/crypto/keyring/keyring_test.go @@ -1,6 +1,7 @@ package keyring import ( + "encoding/hex" "fmt" "strings" "testing" @@ -1092,6 +1093,29 @@ func TestAltKeyring_ImportExportPubKey_ByAddress(t *testing.T) { require.EqualError(t, err, fmt.Sprintf("cannot overwrite key: %s", newUID)) } +func TestAltKeyring_UnsafeExportPrivKeyHex(t *testing.T) { + keyring, err := New(t.Name(), BackendTest, t.TempDir(), nil) + require.NoError(t, err) + + uid := theID + + _, _, err = keyring.NewMnemonic(uid, English, sdk.FullFundraiserPath, hd.Secp256k1) + require.NoError(t, err) + + unsafeKeyring := NewUnsafe(keyring) + privKey, err := unsafeKeyring.UnsafeExportPrivKeyHex(uid) + + require.NoError(t, err) + require.Equal(t, 64, len(privKey)) + + _, err = hex.DecodeString(privKey) + require.NoError(t, err) + + // test error on non existing key + _, err = unsafeKeyring.UnsafeExportPrivKeyHex("non-existing") + require.Error(t, err) +} + func TestAltKeyring_ConstructorSupportedAlgos(t *testing.T) { keyring, err := New(t.Name(), BackendTest, t.TempDir(), nil) require.NoError(t, err) diff --git a/crypto/keyring/types_test.go b/crypto/keyring/types_test.go index 8c68bce670a..b04aa454796 100644 --- a/crypto/keyring/types_test.go +++ b/crypto/keyring/types_test.go @@ -4,10 +4,11 @@ import ( "encoding/hex" "testing" + "github.com/stretchr/testify/require" + "github.com/cosmos/cosmos-sdk/crypto/hd" "github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1" sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/stretchr/testify/require" ) func Test_writeReadLedgerInfo(t *testing.T) { diff --git a/crypto/ledger/ledger_test.go b/crypto/ledger/ledger_test.go index 5fe9da8f55f..ec1b8dbedc3 100644 --- a/crypto/ledger/ledger_test.go +++ b/crypto/ledger/ledger_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/require" - cryptoAmino "github.com/cosmos/cosmos-sdk/crypto/codec" + "github.com/cosmos/cosmos-sdk/codec/legacy" "github.com/cosmos/cosmos-sdk/crypto/hd" "github.com/cosmos/cosmos-sdk/crypto/types" "github.com/cosmos/cosmos-sdk/testutil" @@ -238,7 +238,7 @@ func TestRealDeviceSecp256k1(t *testing.T) { // now, let's serialize the public key and make sure it still works bs := cdc.Amino.MustMarshalBinaryBare(priv.PubKey()) - pub2, err := cryptoAmino.PubKeyFromBytes(bs) + pub2, err := legacy.PubKeyFromBytes(bs) require.Nil(t, err, "%+v", err) // make sure we get the same pubkey when we load from disk @@ -251,8 +251,8 @@ func TestRealDeviceSecp256k1(t *testing.T) { require.True(t, valid) // make sure pubkeys serialize properly as well - bs = cdc.Amino.MustMarshalBinaryBare(pub) - bpub, err := cryptoAmino.PubKeyFromBytes(bs) + bs = legacy.Cdc.MustMarshalBinaryBare(pub) + bpub, err := legacy.PubKeyFromBytes(bs) require.NoError(t, err) require.Equal(t, pub, bpub) } diff --git a/docs/migrations/rest.md b/docs/migrations/rest.md index d418587d086..82881669533 100644 --- a/docs/migrations/rest.md +++ b/docs/migrations/rest.md @@ -14,12 +14,15 @@ Some important information concerning all legacy REST endpoints: ## 
Breaking Changes in Legacy REST Endpoints -| Legacy REST Endpoint | Description | Breaking Change | -| ------------------------- | ------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `POST /txs` | Query tx by hash | Endpoint will error when trying to broadcast transactions that don't support Amino serialization (e.g. IBC txs)1. | -| `GET /txs/{hash}` | Query tx by hash | Endpoint will error when trying to output transactions that don't support Amino serialization (e.g. IBC txs)1. | -| `GET /txs` | Query tx by events | Endpoint will error when trying to output transactions that don't support Amino serialization (e.g. IBC txs)1. | -| `GET /staking/validators` | Get all validators | BondStatus is now a protobuf enum instead of an int32, and JSON serialized using its protobuf name, so expect query parameters like `?status=BOND_STATUS_{BONDED,UNBONDED,UNBONDING}` as opposed to `?status={bonded,unbonded,unbonding}`. | +| Legacy REST Endpoint | Description | Breaking Change | +| ------------------------------------------------------------------------ | ------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `POST /txs` | Broadcast tx | Endpoint will error when trying to broadcast transactions that don't support Amino serialization (e.g. IBC txs)1. | +| `POST /txs/encode`, `POST /txs/decode` | Encode/decode Amino txs from JSON to binary | Endpoint will error when trying to encode/decode transactions that don't support Amino serialization (e.g. IBC txs)1. | +| `GET /txs/{hash}` | Query tx by hash | Endpoint will error when trying to output transactions that don't support Amino serialization (e.g. IBC txs)1. | +| `GET /txs` | Query tx by events | Endpoint will error when trying to output transactions that don't support Amino serialization (e.g. IBC txs)1. | +| `GET /gov/proposals/{id}/votes`, `GET /gov/proposals/{id}/votes/{voter}` | Gov endpoints for querying votes | All gov endpoints which return votes return int32 in the `option` field instead of string: `1=VOTE_OPTION_YES, 2=VOTE_OPTION_ABSTAIN, 3=VOTE_OPTION_NO, 4=VOTE_OPTION_NO_WITH_VETO`. | +| `GET /staking/*` | Staking query endpoints | All staking endpoints which return validators have two breaking changes. First, the validator's `consensus_pubkey` field returns an Amino-encoded struct representing an `Any` instead of a bech32-encoded string representing the pubkey. The `value` field of the `Any` is the pubkey's raw key as base64-encoded bytes. Second, the validator's `status` field now returns an int32 instead of string: `1=BOND_STATUS_UNBONDED`, `2=BOND_STATUS_UNBONDING`, `3=BOND_STATUS_BONDED`. 
| +| `GET /staking/validators` | Get all validators | BondStatus is now a protobuf enum instead of an int32, and JSON serialized using its protobuf name, so expect query parameters like `?status=BOND_STATUS_{BONDED,UNBONDED,UNBONDING}` as opposed to `?status={bonded,unbonded,unbonding}`. | 1: Transactions that don't support Amino serialization are the ones that contain one or more `Msg`s that are not registered with the Amino codec. Currently in the SDK, only IBC `Msg`s fall into this case. @@ -31,7 +34,7 @@ Some modules expose legacy `POST` endpoints to generate unsigned transactions fo | Legacy REST Endpoint | Description | New gGPC-gateway REST Endpoint | | ------------------------------------------------------------------------------- | ------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- | -| `GET /txs/{hash}` | Query tx by hash | `GET /cosmos/tx/v1beta1/tx/{hash}` | +| `GET /txs/{hash}` | Query tx by hash | `GET /cosmos/tx/v1beta1/txs/{hash}` | | `GET /txs` | Query tx by events | `GET /cosmos/tx/v1beta1/txs` | | `POST /txs` | Broadcast tx | `POST /cosmos/tx/v1beta1/txs` | | `POST /txs/encode` | Encodes an Amino JSON tx to an Amino binary tx | N/A, use Protobuf directly | diff --git a/docs/package-lock.json b/docs/package-lock.json index 98bf43081d1..935f8aedc0a 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -5,118 +5,118 @@ "requires": true, "dependencies": { "@algolia/cache-browser-local-storage": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.6.0.tgz", - "integrity": "sha512-3ObeNwZ5gfDvKPp9NXdtbBrCtz/yR1oyDu/AReG73Oanua3y30Y11p7VQzzpLe2R/gDCLOGdRgr17h11lGy1Hg==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.8.2.tgz", + "integrity": "sha512-X2528jVZk+iPmsA4gF2AxH7RnREF10O98yV8QWwXcXcEYD7qjCsidPUGXcRsZCWOkCdZPA2IMJBiPDxZqfrQqA==", "requires": { - "@algolia/cache-common": "4.6.0" + "@algolia/cache-common": "4.8.2" } }, "@algolia/cache-common": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.6.0.tgz", - "integrity": "sha512-mEedrPb2O3WwtiIHggFoIhTbHVCMNikxMiiN9kqmwZkdDfClfxm435OUGZfAl67rBZfc0DNs/jmPM2mUoefM9A==" + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.8.2.tgz", + "integrity": "sha512-ER3QxHH2vmatfO4rRv504ByAiqqoj6kg0RcoBEetQflxRcRznmX7uFBXI3Zo42OoPKM3NMzFted50YO0Um5VLA==" }, "@algolia/cache-in-memory": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.6.0.tgz", - "integrity": "sha512-J7ayGokVWEFkuLxzgrIsPS4k1/ZndyGVpG/qPrG9RHVrs7ZogEhUSY1tbEyUlW3mGy7diIh+/52dtohDL/nbGQ==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.8.2.tgz", + "integrity": "sha512-CYse8/ZNPr/pMo6inQ0Uu+HWFFN9OcfJw67YCvU+1yz8NaS3rQ2HxU+zu1M/BCKMA89/dYF0jjBMT5rm6E4cdw==", "requires": { - "@algolia/cache-common": "4.6.0" + "@algolia/cache-common": "4.8.2" } }, "@algolia/client-account": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.6.0.tgz", - "integrity": "sha512-0t2yU6wNBNJgAmrARHrM1llhANyPT4Q/1wu6yEzv2WfPXlfsHwMhtKYNti4/k8eswwUt9wAri10WFV6TJI48rg==", + "version": "4.8.2", + "resolved": 
"https://registry.npmjs.org/@algolia/client-account/-/client-account-4.8.2.tgz", + "integrity": "sha512-cRtZ2xiLUfsanrpjYkxyNwE+4SbyUvbe8CL9HwpTJPsP0Jsv69H4H71lL7v0pQY5OWkFxKMsqVxCMH7Px3740w==", "requires": { - "@algolia/client-common": "4.6.0", - "@algolia/client-search": "4.6.0", - "@algolia/transporter": "4.6.0" + "@algolia/client-common": "4.8.2", + "@algolia/client-search": "4.8.2", + "@algolia/transporter": "4.8.2" } }, "@algolia/client-analytics": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.6.0.tgz", - "integrity": "sha512-7yfn9pabA21Uw2iZjW1MNN4IJUT5y/YSg+ZJ+3HqBB6SgzOOqY0N3fATsPeGuN9EqSfVnqvnIrJMS8mI0b5FzQ==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.8.2.tgz", + "integrity": "sha512-+vnFokDGxi0vAaumbAgvDuvXWs0VvLk3gDkjkegXD8MMUTs3ByTZApCM4NPnIdbcUroFAJxbyzZQT9/CRZHgcA==", "requires": { - "@algolia/client-common": "4.6.0", - "@algolia/client-search": "4.6.0", - "@algolia/requester-common": "4.6.0", - "@algolia/transporter": "4.6.0" + "@algolia/client-common": "4.8.2", + "@algolia/client-search": "4.8.2", + "@algolia/requester-common": "4.8.2", + "@algolia/transporter": "4.8.2" } }, "@algolia/client-common": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.6.0.tgz", - "integrity": "sha512-60jK0LK5H+6q6HyyMyoBBD0fIs8zZzJt6BiyJGQG90o3gUV/SnjiNxO9Bx0RRlqdkE5s0OYFu1L7P9Y5TX7oAw==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.8.2.tgz", + "integrity": "sha512-jO9RvC0FPxxhe/nynGxVEYmNltE5xgYV1Y6zviwl/80PwsrGfWp/rVDh4CVZaBOntmsOp+y0aqQwNYjLMVWXBg==", "requires": { - "@algolia/requester-common": "4.6.0", - "@algolia/transporter": "4.6.0" + "@algolia/requester-common": "4.8.2", + "@algolia/transporter": "4.8.2" } }, "@algolia/client-recommendation": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/client-recommendation/-/client-recommendation-4.6.0.tgz", - "integrity": "sha512-j+Yb1z5QeIRDCCO+9hS9oZS3KNqRogPHDbJJsLTt6pkrs4CG2UVLVV67M977B1nzJ9OzaEki3VbpGQhRhPGNfQ==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/client-recommendation/-/client-recommendation-4.8.2.tgz", + "integrity": "sha512-evngF6Odrw93gXkXrOYPXxTWwDQ2K01sadB3Xpa1hQb+vjiBwcA/54w6nKyE4aiII1loT5q+Uj+G1f8HwBuksw==", "requires": { - "@algolia/client-common": "4.6.0", - "@algolia/requester-common": "4.6.0", - "@algolia/transporter": "4.6.0" + "@algolia/client-common": "4.8.2", + "@algolia/requester-common": "4.8.2", + "@algolia/transporter": "4.8.2" } }, "@algolia/client-search": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.6.0.tgz", - "integrity": "sha512-+qA1NA88YnXvuCKifegfgts1RQs8IzcwccQqyurz8ins4hypZL1tXN2BkrOqqDIgvYIrUvFyhv+gLO6U9PpDUA==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.8.2.tgz", + "integrity": "sha512-JtmhdBKsA3Ll9ITvBfvMjsfuOY5oOPlaS9ahBGeb2OFfC1Myb6kbjXl73VtSVh4Bh0MpTsT4SdBdYCJFctRsQg==", "requires": { - "@algolia/client-common": "4.6.0", - "@algolia/requester-common": "4.6.0", - "@algolia/transporter": "4.6.0" + "@algolia/client-common": "4.8.2", + "@algolia/requester-common": "4.8.2", + "@algolia/transporter": "4.8.2" } }, "@algolia/logger-common": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.6.0.tgz", - "integrity": 
"sha512-F+0HTGSQzJfWsX/cJq2l4eG2Y5JA6pqZ0YETyo5XJhZX4JaDrGszVKuOqp8kovZF/Ifebywxb8JdCiSUskmbig==" + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.8.2.tgz", + "integrity": "sha512-Sse29WFBZH4CSCnbMTh8t6uAFaJtNyRRcpDjFfvkSNdPAN/pxLAY9GYUzJmP4J+ILdJn6ZWMNpvwhNQ8p2I+mg==" }, "@algolia/logger-console": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.6.0.tgz", - "integrity": "sha512-ers7OhfU6qBQl6s7MOe5gNUkcpa7LGrhEzDWnD0cUwLSd5BvWt7zEN69O2CZVbvAUZYlZ5zJTzMMa49s0VXrKQ==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.8.2.tgz", + "integrity": "sha512-hpZvy708iOeX6tcgy9qXVzlH8Avd3UA7AMwd1wAK5dG8PwAcrhO9wRQuE1AemvuVIEhshbWGQl9pDGXsejO+4g==", "requires": { - "@algolia/logger-common": "4.6.0" + "@algolia/logger-common": "4.8.2" } }, "@algolia/requester-browser-xhr": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.6.0.tgz", - "integrity": "sha512-ugrJT25VUkoKrl5vJVFclMdogbhTiDZ38Gss4xfTiSsP/SGE/0ei5VEOMEcj/bjkurJjPky1HfJZ3ykJhIsfCA==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.8.2.tgz", + "integrity": "sha512-Vdv38BtgwAeVPThwOVRVrR8mDiRLADwqXt1c87dnHHL1Rs3/FMRQ9ogKMKnaJMAH+OeXf+yzNxh+QCISPKaMkQ==", "requires": { - "@algolia/requester-common": "4.6.0" + "@algolia/requester-common": "4.8.2" } }, "@algolia/requester-common": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.6.0.tgz", - "integrity": "sha512-DJ5iIGBGrRudimaaFnpBFM19pv8SsXiMYuukn9q1GgQh2mPPBCBBJiezKc7+OzE1UyCVrAFBpR/hrJnflZnRdQ==" + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.8.2.tgz", + "integrity": "sha512-dN6MuKQQTp7+IBZNIRC9KUCrWVQRM3LaSLLB9lM7evjt++2jJTlhUu2Vncd78VbSy2kviojelxZ/mXTITRRxoA==" }, "@algolia/requester-node-http": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.6.0.tgz", - "integrity": "sha512-MPZK3oZz0jSBsqrGiPxv7LOKMUNknlaRNyRDy0v/ASIYG+GvLhGTdEzG5Eyw5tgSvBr8CWrWM5tDC31EH40Ndw==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.8.2.tgz", + "integrity": "sha512-pnpDRzIfibJ67rPQvq1me+bqhfflS2w9MlbVMhKdPsSuO8GKAZQ4GJgvIphvpSmhVnB7drdbZZ3J0KVP/y7jeg==", "requires": { - "@algolia/requester-common": "4.6.0" + "@algolia/requester-common": "4.8.2" } }, "@algolia/transporter": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.6.0.tgz", - "integrity": "sha512-xp+HI8sB8gLCvP00scaOVPQEk5H7nboWUxrwLKyVUvtUO4o003bOfFPsH86NRyu5Dv7fzX9b8EH3rVxcLOhjqg==", + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.8.2.tgz", + "integrity": "sha512-r3ecEn+4GWW8ntydDmGGlZ5Iqds080bt2RtAUVNbPPwyuXAs9HUqwkYQiTIHSmeYtAlQ6YOYVnX3W6W8FhbhaA==", "requires": { - "@algolia/cache-common": "4.6.0", - "@algolia/logger-common": "4.6.0", - "@algolia/requester-common": "4.6.0" + "@algolia/cache-common": "4.8.2", + "@algolia/logger-common": "4.8.2", + "@algolia/requester-common": "4.8.2" } }, "@babel/code-frame": { @@ -128,23 +128,23 @@ } }, "@babel/compat-data": { - "version": "7.12.5", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.12.5.tgz", - 
"integrity": "sha512-DTsS7cxrsH3by8nqQSpFSyjSfSYl57D6Cf4q8dW3LK83tBKBDCkfcay1nYkXq1nIHXnpX8WMMb/O25HOy3h1zg==" + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.12.7.tgz", + "integrity": "sha512-YaxPMGs/XIWtYqrdEOZOCPsVWfEoriXopnsz3/i7apYPXQ3698UFhS6dVT1KN5qOsWmVgw/FOrmQgpRaZayGsw==" }, "@babel/core": { - "version": "7.12.3", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.12.3.tgz", - "integrity": "sha512-0qXcZYKZp3/6N2jKYVxZv0aNCsxTSVCiK72DTiTYZAu7sjg73W0/aynWjMbiGd87EQL4WyA8reiJVh92AVla9g==", + "version": "7.12.9", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.12.9.tgz", + "integrity": "sha512-gTXYh3M5wb7FRXQy+FErKFAv90BnlOuNn1QkCK2lREoPAjrQCO49+HVSrFoe5uakFAF5eenS75KbO2vQiLrTMQ==", "requires": { "@babel/code-frame": "^7.10.4", - "@babel/generator": "^7.12.1", + "@babel/generator": "^7.12.5", "@babel/helper-module-transforms": "^7.12.1", - "@babel/helpers": "^7.12.1", - "@babel/parser": "^7.12.3", - "@babel/template": "^7.10.4", - "@babel/traverse": "^7.12.1", - "@babel/types": "^7.12.1", + "@babel/helpers": "^7.12.5", + "@babel/parser": "^7.12.7", + "@babel/template": "^7.12.7", + "@babel/traverse": "^7.12.9", + "@babel/types": "^7.12.7", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.1", @@ -156,9 +156,9 @@ }, "dependencies": { "debug": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", - "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", "requires": { "ms": "2.1.2" } @@ -253,12 +253,11 @@ } }, "@babel/helper-create-regexp-features-plugin": { - "version": "7.12.1", - "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.12.1.tgz", - "integrity": "sha512-rsZ4LGvFTZnzdNZR5HZdmJVuXK8834R5QkF3WvcnBhrlVtF0HSIUC6zbreL9MgjTywhKokn8RIYRiq99+DLAxA==", + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.12.7.tgz", + "integrity": "sha512-idnutvQPdpbduutvi3JVfEgcVIHooQnhvhx0Nk9isOINOIGYkZea1Pk2JlJRiUnMefrlvr0vkByATBY/mB4vjQ==", "requires": { "@babel/helper-annotate-as-pure": "^7.10.4", - "@babel/helper-regex": "^7.10.4", "regexpu-core": "^4.7.1" } }, @@ -307,11 +306,11 @@ } }, "@babel/helper-member-expression-to-functions": { - "version": "7.12.1", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.12.1.tgz", - "integrity": "sha512-k0CIe3tXUKTRSoEx1LQEPFU9vRQfqHtl+kf8eNnDqb4AUJEy5pz6aIiog+YWtVm2jpggjS1laH68bPsR+KWWPQ==", + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.12.7.tgz", + "integrity": "sha512-DCsuPyeWxeHgh1Dus7APn7iza42i/qXqiFPWyBDdOFtvS581JQePsc1F/nD+fHrcswhLlRc2UpYS1NwERxZhHw==", "requires": { - "@babel/types": "^7.12.1" + "@babel/types": "^7.12.7" } }, "@babel/helper-module-imports": { @@ -339,11 +338,11 @@ } }, "@babel/helper-optimise-call-expression": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.10.4.tgz", - "integrity": 
"sha512-n3UGKY4VXwXThEiKrgRAoVPBMqeoPgHVqiHZOanAJCG9nQUL2pLRQirUzl0ioKclHGpGqRgIOkgcIJaIWLpygg==", + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.12.7.tgz", + "integrity": "sha512-I5xc9oSJ2h59OwyUqjv95HRyzxj53DAubUERgQMrpcCEYQyToeHA+NEcUEsVWB4j53RDeskeBJ0SgRAYHDBckw==", "requires": { - "@babel/types": "^7.10.4" + "@babel/types": "^7.12.7" } }, "@babel/helper-plugin-utils": { @@ -351,14 +350,6 @@ "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz", "integrity": "sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg==" }, - "@babel/helper-regex": { - "version": "7.10.5", - "resolved": "https://registry.npmjs.org/@babel/helper-regex/-/helper-regex-7.10.5.tgz", - "integrity": "sha512-68kdUAzDrljqBrio7DYAEgCoJHxppJOERHOgOrDN7WjOzP0ZQ1LsSDRXcemzVZaLvjaJsJEESb6qt+znNuENDg==", - "requires": { - "lodash": "^4.17.19" - } - }, "@babel/helper-remap-async-to-generator": { "version": "7.12.1", "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.12.1.tgz", @@ -446,9 +437,9 @@ } }, "@babel/parser": { - "version": "7.12.5", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.12.5.tgz", - "integrity": "sha512-FVM6RZQ0mn2KCf1VUED7KepYeUWoVShczewOCfm3nzoBybaih51h+sYVVGthW9M6lPByEPTQf+xm27PBdlpwmQ==" + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.12.7.tgz", + "integrity": "sha512-oWR02Ubp4xTLCAqPRiNIuMVgNO5Aif/xpXtabhzW2HWUD47XJsAB4Zd/Rg30+XeQA3juXigV7hlquOTmwqLiwg==" }, "@babel/plugin-proposal-async-generator-functions": { "version": "7.12.1", @@ -525,9 +516,9 @@ } }, "@babel/plugin-proposal-numeric-separator": { - "version": "7.12.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.12.5.tgz", - "integrity": "sha512-UiAnkKuOrCyjZ3sYNHlRlfuZJbBHknMQ9VMwVeX97Ofwx7RpD6gS2HfqTCh8KNUQgcOm8IKt103oR4KIjh7Q8g==", + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.12.7.tgz", + "integrity": "sha512-8c+uy0qmnRTeukiGsjLGy6uVs/TFjJchGXUeBqlG4VWYOdJWkhhVPdQ3uHwbmalfJwv2JsV0qffXP4asRfL2SQ==", "requires": { "@babel/helper-plugin-utils": "^7.10.4", "@babel/plugin-syntax-numeric-separator": "^7.10.4" @@ -553,9 +544,9 @@ } }, "@babel/plugin-proposal-optional-chaining": { - "version": "7.12.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.12.1.tgz", - "integrity": "sha512-c2uRpY6WzaVDzynVY9liyykS+kVU+WRZPMPYpkelXH8KBt1oXoI89kPbZKKG/jDT5UK92FTW2fZkZaJhdiBabw==", + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.12.7.tgz", + "integrity": "sha512-4ovylXZ0PWmwoOvhU2vhnzVNnm88/Sm9nx7V8BPgMvAzn5zDou3/Awy0EjglyubVHasJj+XCEkr/r1X3P5elCA==", "requires": { "@babel/helper-plugin-utils": "^7.10.4", "@babel/helper-skip-transparent-expression-wrappers": "^7.12.1", @@ -951,12 +942,11 @@ } }, "@babel/plugin-transform-sticky-regex": { - "version": "7.12.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.12.1.tgz", - "integrity": "sha512-CiUgKQ3AGVk7kveIaPEET1jNDhZZEl1RPMWdTBE1799bdz++SwqDHStmxfCtDfBhQgCl38YRiSnrMuUMZIWSUQ==", + "version": "7.12.7", + 
"resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.12.7.tgz", + "integrity": "sha512-VEiqZL5N/QvDbdjfYQBhruN0HYjSPjC4XkeqW4ny/jNtH9gcbgaqBIXYEZCNnESMAGs0/K/R7oFGMhOyu/eIxg==", "requires": { - "@babel/helper-plugin-utils": "^7.10.4", - "@babel/helper-regex": "^7.10.4" + "@babel/helper-plugin-utils": "^7.10.4" } }, "@babel/plugin-transform-template-literals": { @@ -993,13 +983,13 @@ } }, "@babel/preset-env": { - "version": "7.12.1", - "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.12.1.tgz", - "integrity": "sha512-H8kxXmtPaAGT7TyBvSSkoSTUK6RHh61So05SyEbpmr0MCZrsNYn7mGMzzeYoOUCdHzww61k8XBft2TaES+xPLg==", + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.12.7.tgz", + "integrity": "sha512-OnNdfAr1FUQg7ksb7bmbKoby4qFOHw6DKWWUNB9KqnnCldxhxJlP+21dpyaWFmf2h0rTbOkXJtAGevY3XW1eew==", "requires": { - "@babel/compat-data": "^7.12.1", - "@babel/helper-compilation-targets": "^7.12.1", - "@babel/helper-module-imports": "^7.12.1", + "@babel/compat-data": "^7.12.7", + "@babel/helper-compilation-targets": "^7.12.5", + "@babel/helper-module-imports": "^7.12.5", "@babel/helper-plugin-utils": "^7.10.4", "@babel/helper-validator-option": "^7.12.1", "@babel/plugin-proposal-async-generator-functions": "^7.12.1", @@ -1009,10 +999,10 @@ "@babel/plugin-proposal-json-strings": "^7.12.1", "@babel/plugin-proposal-logical-assignment-operators": "^7.12.1", "@babel/plugin-proposal-nullish-coalescing-operator": "^7.12.1", - "@babel/plugin-proposal-numeric-separator": "^7.12.1", + "@babel/plugin-proposal-numeric-separator": "^7.12.7", "@babel/plugin-proposal-object-rest-spread": "^7.12.1", "@babel/plugin-proposal-optional-catch-binding": "^7.12.1", - "@babel/plugin-proposal-optional-chaining": "^7.12.1", + "@babel/plugin-proposal-optional-chaining": "^7.12.7", "@babel/plugin-proposal-private-methods": "^7.12.1", "@babel/plugin-proposal-unicode-property-regex": "^7.12.1", "@babel/plugin-syntax-async-generators": "^7.8.0", @@ -1054,14 +1044,14 @@ "@babel/plugin-transform-reserved-words": "^7.12.1", "@babel/plugin-transform-shorthand-properties": "^7.12.1", "@babel/plugin-transform-spread": "^7.12.1", - "@babel/plugin-transform-sticky-regex": "^7.12.1", + "@babel/plugin-transform-sticky-regex": "^7.12.7", "@babel/plugin-transform-template-literals": "^7.12.1", "@babel/plugin-transform-typeof-symbol": "^7.12.1", "@babel/plugin-transform-unicode-escapes": "^7.12.1", "@babel/plugin-transform-unicode-regex": "^7.12.1", "@babel/preset-modules": "^0.1.3", - "@babel/types": "^7.12.1", - "core-js-compat": "^3.6.2", + "@babel/types": "^7.12.7", + "core-js-compat": "^3.7.0", "semver": "^5.5.0" }, "dependencies": { @@ -1100,35 +1090,35 @@ } }, "@babel/template": { - "version": "7.10.4", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.10.4.tgz", - "integrity": "sha512-ZCjD27cGJFUB6nmCB1Enki3r+L5kJveX9pq1SvAUKoICy6CZ9yD8xO086YXdYhvNjBdnekm4ZnaP5yC8Cs/1tA==", + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.12.7.tgz", + "integrity": "sha512-GkDzmHS6GV7ZeXfJZ0tLRBhZcMcY0/Lnb+eEbXDBfCAcZCjrZKe6p3J4we/D24O9Y8enxWAg1cWwof59yLh2ow==", "requires": { "@babel/code-frame": "^7.10.4", - "@babel/parser": "^7.10.4", - "@babel/types": "^7.10.4" + "@babel/parser": "^7.12.7", + "@babel/types": "^7.12.7" } }, "@babel/traverse": { - "version": "7.12.5", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.12.5.tgz", - "integrity": 
"sha512-xa15FbQnias7z9a62LwYAA5SZZPkHIXpd42C6uW68o8uTuua96FHZy1y61Va5P/i83FAAcMpW8+A/QayntzuqA==", + "version": "7.12.9", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.12.9.tgz", + "integrity": "sha512-iX9ajqnLdoU1s1nHt36JDI9KG4k+vmI8WgjK5d+aDTwQbL2fUnzedNedssA645Ede3PM2ma1n8Q4h2ohwXgMXw==", "requires": { "@babel/code-frame": "^7.10.4", "@babel/generator": "^7.12.5", "@babel/helper-function-name": "^7.10.4", "@babel/helper-split-export-declaration": "^7.11.0", - "@babel/parser": "^7.12.5", - "@babel/types": "^7.12.5", + "@babel/parser": "^7.12.7", + "@babel/types": "^7.12.7", "debug": "^4.1.0", "globals": "^11.1.0", "lodash": "^4.17.19" }, "dependencies": { "debug": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", - "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", "requires": { "ms": "2.1.2" } @@ -1141,9 +1131,9 @@ } }, "@babel/types": { - "version": "7.12.5", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.12.5.tgz", - "integrity": "sha512-gyTcvz7JFa4V45C0Zklv//GmFOAal5fL23OWpBLqc4nZ4Yrz67s4kCNwSK1Gu0MXGTU8mRY3zJYtacLdKXlzig==", + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.12.7.tgz", + "integrity": "sha512-MNyI92qZq6jrQkXvtIiykvl4WtoRrVV9MPn+ZfsoEENjiWcBQ3ZSHrkxnJWgWtLX3XXqX5hrSQ+X69wkmesXuQ==", "requires": { "@babel/helper-validator-identifier": "^7.10.4", "lodash": "^4.17.19", @@ -1263,9 +1253,9 @@ "integrity": "sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA==" }, "@types/node": { - "version": "14.14.6", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.6.tgz", - "integrity": "sha512-6QlRuqsQ/Ox/aJEQWBEJG7A9+u7oSYl3mem/K8IzxXG/kAGbV1YPD9Bg9Zw3vyxC/YP+zONKwy8hGkSt1jxFMw==" + "version": "14.14.10", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.10.tgz", + "integrity": "sha512-J32dgx2hw8vXrSbu4ZlVhn1Nm3GbeCFNw2FWL8S5QKucHGY0cyNwjdQdO+KMBZ4wpmC7KhLCiNsdk1RFRIYUQQ==" }, "@types/q": { "version": "1.5.4", @@ -1325,9 +1315,9 @@ } }, "@vue/babel-preset-app": { - "version": "4.5.8", - "resolved": "https://registry.npmjs.org/@vue/babel-preset-app/-/babel-preset-app-4.5.8.tgz", - "integrity": "sha512-efCBo2HY8Jcs6+SyCnvWl8jGeF1Fl38reFL35AjO4SBcro0ol/qjPkeeJLjzvXUxrHAsM9DMfL/DvPa/hBmZwQ==", + "version": "4.5.9", + "resolved": "https://registry.npmjs.org/@vue/babel-preset-app/-/babel-preset-app-4.5.9.tgz", + "integrity": "sha512-d2H4hFnJsGnZtJAAZIbo1dmQJ2SI1MYix1Tc9/etlnJtCDPRHeCNodCSeuLgDwnoAyT3unzyHmTtaO56KRDuOQ==", "requires": { "@babel/core": "^7.11.0", "@babel/helper-compilation-targets": "^7.9.6", @@ -1348,9 +1338,9 @@ }, "dependencies": { "core-js": { - "version": "3.6.5", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.6.5.tgz", - "integrity": "sha512-vZVEEwZoIsI+vPEuoF9Iqf5H7/M3eeQqWlQnYa8FSKKePuYTf5MWnxb5SDAzCa60b3JBRS5g9b+Dq7b1y/RCrA==" + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.8.0.tgz", + "integrity": "sha512-W2VYNB0nwQQE7tKS7HzXd7r2y/y2SVJl4ga6oH/dnaLFzM0o2lB2P3zCkWj5Wc/zyMYjtgd5Hmhk0ObkQFZOIA==" } } }, @@ -1521,9 +1511,9 @@ }, "dependencies": { "core-js": { - "version": "3.6.5", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.6.5.tgz", - 
"integrity": "sha512-vZVEEwZoIsI+vPEuoF9Iqf5H7/M3eeQqWlQnYa8FSKKePuYTf5MWnxb5SDAzCa60b3JBRS5g9b+Dq7b1y/RCrA==" + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.8.0.tgz", + "integrity": "sha512-W2VYNB0nwQQE7tKS7HzXd7r2y/y2SVJl4ga6oH/dnaLFzM0o2lB2P3zCkWj5Wc/zyMYjtgd5Hmhk0ObkQFZOIA==" } } }, @@ -1872,24 +1862,24 @@ "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==" }, "algoliasearch": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.6.0.tgz", - "integrity": "sha512-f4QVfUYnWIGZwOupZh0RAqW8zEfpZAcZG6ZT0p6wDMztEyKBrjjbTXBk9p9uEaJqoIhFUm6TtApOxodTdHbqvw==", - "requires": { - "@algolia/cache-browser-local-storage": "4.6.0", - "@algolia/cache-common": "4.6.0", - "@algolia/cache-in-memory": "4.6.0", - "@algolia/client-account": "4.6.0", - "@algolia/client-analytics": "4.6.0", - "@algolia/client-common": "4.6.0", - "@algolia/client-recommendation": "4.6.0", - "@algolia/client-search": "4.6.0", - "@algolia/logger-common": "4.6.0", - "@algolia/logger-console": "4.6.0", - "@algolia/requester-browser-xhr": "4.6.0", - "@algolia/requester-common": "4.6.0", - "@algolia/requester-node-http": "4.6.0", - "@algolia/transporter": "4.6.0" + "version": "4.8.2", + "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.8.2.tgz", + "integrity": "sha512-wQg1UpiXO6iXMXXyrmhKopjd3K4GGq5N/0qEjPB5OYzdvj4ju9rDIW8bYL9ghv9jD5IDrcyFsqCzlSKqn/RVXw==", + "requires": { + "@algolia/cache-browser-local-storage": "4.8.2", + "@algolia/cache-common": "4.8.2", + "@algolia/cache-in-memory": "4.8.2", + "@algolia/client-account": "4.8.2", + "@algolia/client-analytics": "4.8.2", + "@algolia/client-common": "4.8.2", + "@algolia/client-recommendation": "4.8.2", + "@algolia/client-search": "4.8.2", + "@algolia/logger-common": "4.8.2", + "@algolia/logger-console": "4.8.2", + "@algolia/requester-browser-xhr": "4.8.2", + "@algolia/requester-common": "4.8.2", + "@algolia/requester-node-http": "4.8.2", + "@algolia/transporter": "4.8.2" } }, "align-text": { @@ -2167,25 +2157,14 @@ } }, "babel-loader": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.1.0.tgz", - "integrity": "sha512-7q7nC1tYOrqvUrN3LQK4GwSk/TQorZSOlO9C+RZDZpODgyN4ZlCqE5q9cDsyWOliN+aU9B4JX01xK9eJXowJLw==", + "version": "8.2.2", + "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.2.2.tgz", + "integrity": "sha512-JvTd0/D889PQBtUXJ2PXaKU/pjZDMtHA9V2ecm+eNRmmBCMR09a+fmpGTNwnJtFmFl5Ei7Vy47LjBb+L0wQ99g==", "requires": { - "find-cache-dir": "^2.1.0", + "find-cache-dir": "^3.3.1", "loader-utils": "^1.4.0", - "mkdirp": "^0.5.3", - "pify": "^4.0.1", + "make-dir": "^3.1.0", "schema-utils": "^2.6.5" - }, - "dependencies": { - "mkdirp": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", - "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", - "requires": { - "minimist": "^1.2.5" - } - } } }, "babel-plugin-dynamic-import-node": { @@ -2277,9 +2256,9 @@ } }, "base64-js": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.1.tgz", - "integrity": "sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g==" + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": 
"sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" }, "batch": { "version": "0.6.1", @@ -2542,19 +2521,12 @@ } }, "browserify-rsa": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.0.1.tgz", - "integrity": "sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ=", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.1.0.tgz", + "integrity": "sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog==", "requires": { - "bn.js": "^4.1.0", + "bn.js": "^5.0.0", "randombytes": "^2.0.1" - }, - "dependencies": { - "bn.js": { - "version": "4.11.9", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz", - "integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==" - } } }, "browserify-sign": { @@ -2582,14 +2554,15 @@ } }, "browserslist": { - "version": "4.14.6", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.14.6.tgz", - "integrity": "sha512-zeFYcUo85ENhc/zxHbiIp0LGzzTrE2Pv2JhxvS7kpUb9Q9D38kUX6Bie7pGutJ/5iF5rOxE7CepAuWD56xJ33A==", + "version": "4.14.7", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.14.7.tgz", + "integrity": "sha512-BSVRLCeG3Xt/j/1cCGj1019Wbty0H+Yvu2AOuZSuoaUWn3RatbL33Cxk+Q4jRMRAbOm0p7SLravLjpnT6s0vzQ==", "requires": { - "caniuse-lite": "^1.0.30001154", - "electron-to-chromium": "^1.3.585", + "caniuse-lite": "^1.0.30001157", + "colorette": "^1.2.1", + "electron-to-chromium": "^1.3.591", "escalade": "^3.1.1", - "node-releases": "^1.1.65" + "node-releases": "^1.1.66" } }, "buffer": { @@ -2698,6 +2671,42 @@ "schema-utils": "^1.0.0" }, "dependencies": { + "find-cache-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", + "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "requires": { + "commondir": "^1.0.1", + "make-dir": "^2.0.0", + "pkg-dir": "^3.0.0" + } + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + } + }, "mkdirp": { "version": "0.5.5", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", @@ -2706,6 +2715,27 @@ "minimist": "^1.2.5" } }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": 
"sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + }, + "pkg-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "requires": { + "find-up": "^3.0.0" + } + }, "schema-utils": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", @@ -2715,6 +2745,11 @@ "ajv-errors": "^1.0.0", "ajv-keywords": "^3.1.0" } + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" } } }, @@ -2813,9 +2848,9 @@ } }, "caniuse-lite": { - "version": "1.0.30001154", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001154.tgz", - "integrity": "sha512-y9DvdSti8NnYB9Be92ddMZQrcOe04kcQtcxtBx4NkB04+qZ+JUWotnXBJTmxlKudhxNTQ3RRknMwNU2YQl/Org==" + "version": "1.0.30001163", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001163.tgz", + "integrity": "sha512-QQbOGkHWnvhn3Dlf4scPlXTZVhGOK+2qCOP5gPxqzXHhtn3tZHwNdH9qNcQRWN0f3tDYrsyXFJCFiP/GLzI5Vg==" }, "caseless": { "version": "0.12.0", @@ -3154,16 +3189,6 @@ "unique-string": "^2.0.0", "write-file-atomic": "^3.0.0", "xdg-basedir": "^4.0.0" - }, - "dependencies": { - "make-dir": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", - "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", - "requires": { - "semver": "^6.0.0" - } - } } }, "connect-history-api-fallback": { @@ -3297,6 +3322,24 @@ "webpack-log": "^2.0.0" }, "dependencies": { + "find-cache-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", + "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "requires": { + "commondir": "^1.0.1", + "make-dir": "^2.0.0", + "pkg-dir": "^3.0.0" + } + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "requires": { + "locate-path": "^3.0.0" + } + }, "globby": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/globby/-/globby-7.1.1.tgz", @@ -3308,6 +3351,13 @@ "ignore": "^3.3.5", "pify": "^3.0.0", "slash": "^1.0.0" + }, + "dependencies": { + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + } } }, "ignore": { @@ -3315,10 +3365,44 @@ "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==" }, - "pify": { + "locate-path": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": 
"sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + }, + "pkg-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "requires": { + "find-up": "^3.0.0" + } }, "schema-utils": { "version": "1.0.0", @@ -3330,6 +3414,11 @@ "ajv-keywords": "^3.1.0" } }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + }, "slash": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz", @@ -3338,16 +3427,16 @@ } }, "core-js": { - "version": "2.6.11", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.11.tgz", - "integrity": "sha512-5wjnpaT/3dV+XB4borEsnAYQchn00XSgTAWKDkEqv+K8KevjbzmofK6hfJ9TZIlpj2N0xQpazy7PiRQiWHqzWg==" + "version": "2.6.12", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.12.tgz", + "integrity": "sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==" }, "core-js-compat": { - "version": "3.6.5", - "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.6.5.tgz", - "integrity": "sha512-7ItTKOhOZbznhXAQ2g/slGg1PJV5zDO/WdkTwi7UEOJmkvsE32PWvx6mKtDjiMpjnR2CNf6BAD6sSxIlv7ptng==", + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.8.0.tgz", + "integrity": "sha512-o9QKelQSxQMYWHXc/Gc4L8bx/4F7TTraE5rhuN8I7mKBt5dBIUpXpIR3omv70ebr8ST5R3PqbDQr+ZI3+Tt1FQ==", "requires": { - "browserslist": "^4.8.5", + "browserslist": "^4.14.7", "semver": "7.0.0" }, "dependencies": { @@ -3637,26 +3726,26 @@ "integrity": "sha512-WcKx5OY+KoSIAxBW6UBBRay1U6vkYheCdjyVNDm85zt5K9mHoGOfsOsqIszfAqrQQFIIKgjh2+FDgIj/zsl21Q==" }, "csso": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/csso/-/csso-4.1.0.tgz", - "integrity": "sha512-h+6w/W1WqXaJA4tb1dk7r5tVbOm97MsKxzwnvOR04UQ6GILroryjMWu3pmCCtL2mLaEStQ0fZgeGiy99mo7iyg==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/csso/-/csso-4.2.0.tgz", + "integrity": "sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==", "requires": { - "css-tree": "^1.0.0" + "css-tree": "^1.1.2" }, "dependencies": { "css-tree": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.0.0.tgz", - "integrity": "sha512-CdVYz/Yuqw0VdKhXPBIgi8DO3NicJVYZNWeX9XcIuSp9ZoFT5IcleVRW07O5rMjdcx1mb+MEJPknTTEW7DdsYw==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.1.2.tgz", + "integrity": "sha512-wCoWush5Aeo48GLhfHPbmvZs59Z+M7k5+B1xDnXbdWNcEF423DoFdqSWE0PM5aNk5nI5cp1q7ms36zGApY/sKQ==", "requires": { - "mdn-data": "2.0.12", + "mdn-data": "2.0.14", "source-map": "^0.6.1" } }, "mdn-data": { - "version": "2.0.12", - "resolved": 
"https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.12.tgz", - "integrity": "sha512-ULbAlgzVb8IqZ0Hsxm6hHSlQl3Jckst2YEQS7fODu9ilNWy2LvcoSY7TRFIktABP2mdppBioc66va90T+NUs8Q==" + "version": "2.0.14", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz", + "integrity": "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==" } } }, @@ -4093,9 +4182,9 @@ "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" }, "electron-to-chromium": { - "version": "1.3.587", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.587.tgz", - "integrity": "sha512-8XFNxzNj0R8HpTQslWAw6UWpGSuOKSP3srhyFHVbGUGb8vTHckZGCyWi+iQlaXJx5DNeTQTQLd6xN11WSckkmA==" + "version": "1.3.612", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.612.tgz", + "integrity": "sha512-CdrdX1B6mQqxfw+51MPWB5qA6TKWjza9f5voBtUlRfEZEwZiFaxJLrhFI8zHE9SBAuGt4h84rQU6Ho9Bauo1LA==" }, "elliptic": { "version": "6.5.3", @@ -4225,15 +4314,16 @@ } }, "es-abstract": { - "version": "1.17.7", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.7.tgz", - "integrity": "sha512-VBl/gnfcJ7OercKA9MVaegWsBHFjV492syMudcnQZvt/Dw8ezpcOHYZXa/J96O8vx+g4x65YKhxOwDUh63aS5g==", + "version": "1.18.0-next.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.1.tgz", + "integrity": "sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA==", "requires": { "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", "has": "^1.0.3", "has-symbols": "^1.0.1", "is-callable": "^1.2.2", + "is-negative-zero": "^2.0.0", "is-regex": "^1.1.1", "object-inspect": "^1.8.0", "object-keys": "^1.1.1", @@ -4632,21 +4722,22 @@ } }, "find-cache-dir": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", - "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.1.tgz", + "integrity": "sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ==", "requires": { "commondir": "^1.0.1", - "make-dir": "^2.0.0", - "pkg-dir": "^3.0.0" + "make-dir": "^3.0.2", + "pkg-dir": "^4.1.0" } }, "find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "requires": { - "locate-path": "^3.0.0" + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" } }, "flush-write-stream": { @@ -5413,6 +5504,46 @@ "requires": { "pkg-dir": "^3.0.0", "resolve-cwd": "^2.0.0" + }, + "dependencies": { + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "requires": { + 
"p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + }, + "pkg-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "requires": { + "find-up": "^3.0.0" + } + } } }, "imurmurhash": { @@ -5553,9 +5684,9 @@ } }, "is-core-module": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.0.0.tgz", - "integrity": "sha512-jq1AH6C8MuteOoBPwkxHafmByhL9j5q4OaPGdbuD+ZtQJVzH+i6E3BJDQcBA09k57i2Hh2yQbEG8yObZ0jdlWw==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.2.0.tgz", + "integrity": "sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ==", "requires": { "has": "^1.0.3" } @@ -5998,12 +6129,11 @@ } }, "locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "requires": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" + "p-locate": "^4.1.0" } }, "lodash": { @@ -6074,9 +6204,9 @@ "integrity": "sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=" }, "loglevel": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.7.0.tgz", - "integrity": "sha512-i2sY04nal5jDcagM3FMfG++T69GEEM8CYuOfeOIvmXzOIcwE9a/CJPR0MFM97pYMj/u10lzz7/zd7+qwhrBTqQ==" + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.7.1.tgz", + "integrity": "sha512-Hesni4s5UkWkwCGJMQGAh71PaLUmKFM60dHvq0zi/vDhhrzuk+4GgNbTXJ12YYQJn6ZKBDNIjYcuQGKudvqrIw==" }, "longest": { "version": "1.0.1", @@ -6102,19 +6232,11 @@ } }, "make-dir": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", - "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", "requires": { - "pify": "^4.0.1", - "semver": "^5.6.0" - }, - "dependencies": { - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - } + "semver": "^6.0.0" } }, "map-cache": { @@ -6645,9 +6767,9 @@ } }, "node-releases": { - "version": "1.1.65", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.65.tgz", - "integrity": "sha512-YpzJOe2WFIW0V4ZkJQd/DGR/zdVwc/pI4Nl1CZrBO19FdRcSTmsuhdttw9rsTzzJLrNcSloLiBbEYx1C4f6gpA==" + "version": "1.1.67", + 
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.67.tgz", + "integrity": "sha512-V5QF9noGFl3EymEwUYzO+3NTDpGfQB4ve6Qfnzf3UNydMhjQRVPR1DZTuvWiLzaFJYw2fmDwAfnRNEVb64hSIg==" }, "nopt": { "version": "1.0.10", @@ -6747,33 +6869,12 @@ "integrity": "sha512-jLdtEOB112fORuypAyl/50VRVIBIdVQOSUUGQHzJ4xBSbit81zRarz7GThkEFZy1RceYrWYcPcBFPQwHyAc1gA==" }, "object-is": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.3.tgz", - "integrity": "sha512-teyqLvFWzLkq5B9ki8FVWA902UER2qkxmdA4nLf+wjOLAWgxzCWZNCxpDq9MvE8MmhWNr+I8w3BN49Vx36Y6Xg==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.4.tgz", + "integrity": "sha512-1ZvAZ4wlF7IyPVOcE1Omikt7UpaFlOQq0HlSti+ZvDH3UiD2brwGMwDbyV43jao2bKJ+4+WdPJHSd7kgzKYVqg==", "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.18.0-next.1" - }, - "dependencies": { - "es-abstract": { - "version": "1.18.0-next.1", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.1.tgz", - "integrity": "sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA==", - "requires": { - "es-to-primitive": "^1.2.1", - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.1", - "is-callable": "^1.2.2", - "is-negative-zero": "^2.0.0", - "is-regex": "^1.1.1", - "object-inspect": "^1.8.0", - "object-keys": "^1.1.1", - "object.assign": "^4.1.1", - "string.prototype.trimend": "^1.0.1", - "string.prototype.trimstart": "^1.0.1" - } - } + "call-bind": "^1.0.0", + "define-properties": "^1.1.3" } }, "object-keys": { @@ -6801,12 +6902,13 @@ } }, "object.getownpropertydescriptors": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.0.tgz", - "integrity": "sha512-Z53Oah9A3TdLoblT7VKJaTDdXdT+lQO+cNpKVnya5JDe9uLvzu1YyY1yFDFrcxrlRgWrEFH0jJtD/IbuwjcEVg==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.1.tgz", + "integrity": "sha512-6DtXgZ/lIZ9hqx4GtZETobXLR/ZLaa0aqV0kzbn80Rf8Z2e/XFnhA0I7p07N2wH8bBBltr2xQPi6sbKWAY2Eng==", "requires": { + "call-bind": "^1.0.0", "define-properties": "^1.1.3", - "es-abstract": "^1.17.0-next.1" + "es-abstract": "^1.18.0-next.1" } }, "object.pick": { @@ -6818,13 +6920,13 @@ } }, "object.values": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.1.tgz", - "integrity": "sha512-WTa54g2K8iu0kmS/us18jEmdv1a4Wi//BZ/DTVYEcH0XhLM5NYdpDHja3gt57VrZLcNAO2WGA+KpWsDBaHt6eA==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.2.tgz", + "integrity": "sha512-MYC0jvJopr8EK6dPBiO8Nb9mvjdypOachO5REGk6MXzujbBrAisKo3HmdEI6kZDL6fC31Mwee/5YbtMebixeag==", "requires": { + "call-bind": "^1.0.0", "define-properties": "^1.1.3", - "es-abstract": "^1.17.0-next.1", - "function-bind": "^1.1.1", + "es-abstract": "^1.18.0-next.1", "has": "^1.0.3" } }, @@ -6908,11 +7010,11 @@ } }, "p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "requires": { - "p-limit": "^2.0.0" + 
"p-limit": "^2.2.0" } }, "p-map": { @@ -7046,9 +7148,9 @@ "integrity": "sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA=" }, "path-exists": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==" }, "path-is-absolute": { "version": "1.0.1", @@ -7132,11 +7234,11 @@ } }, "pkg-dir": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", - "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", "requires": { - "find-up": "^3.0.0" + "find-up": "^4.0.0" } }, "portfinder": { @@ -7150,9 +7252,9 @@ }, "dependencies": { "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "requires": { "ms": "^2.1.1" } @@ -8187,6 +8289,26 @@ "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.0-next.1" + }, + "dependencies": { + "es-abstract": { + "version": "1.17.7", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.7.tgz", + "integrity": "sha512-VBl/gnfcJ7OercKA9MVaegWsBHFjV492syMudcnQZvt/Dw8ezpcOHYZXa/J96O8vx+g4x65YKhxOwDUh63aS5g==", + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + } + } } }, "regexpu-core": { @@ -8203,9 +8325,9 @@ } }, "registry-auth-token": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.0.tgz", - "integrity": "sha512-P+lWzPrsgfN+UEpDS3U8AQKg/UjZX6mQSJueZj3EK+vNESoqBSpBUD3gmu4sF9lOsjXWjF11dQKUqemf3veq1w==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.1.tgz", + "integrity": "sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw==", "requires": { "rc": "^1.2.8" } @@ -8320,11 +8442,11 @@ "integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8=" }, "resolve": { - "version": "1.18.1", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.18.1.tgz", - "integrity": "sha512-lDfCPaMKfOJXjy0dPayzPdF1phampNWr3qFCjAu+rw/qbQmr5jWH5xN2hwh9QKfw9E5v4hwV7A+jrCmL8yjjqA==", + "version": "1.19.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.19.0.tgz", + "integrity": "sha512-rArEXAgsBG4UgRGcynxWIWKFvh/XZCcS8UJdHhwy91zwAvCZIbcs+vAbflgBnNjYMs/i/i+/Ux6IZhML1yPvxg==", "requires": { - "is-core-module": "^2.0.0", + "is-core-module": "^2.1.0", "path-parse": "^1.0.6" } }, @@ -8807,9 +8929,9 @@ }, "dependencies": { "debug": { - "version": "3.2.6", - "resolved": 
"https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "requires": { "ms": "^2.1.1" } @@ -8886,9 +9008,9 @@ }, "dependencies": { "debug": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", - "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", "requires": { "ms": "2.1.2" } @@ -8914,9 +9036,9 @@ }, "dependencies": { "debug": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", - "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", "requires": { "ms": "2.1.2" } @@ -8990,9 +9112,19 @@ "integrity": "sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==" }, "stack-utils": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-1.0.2.tgz", - "integrity": "sha512-MTX+MeG5U994cazkjd/9KNAapsHnibjMLnfXodlkXw76JEea0UiNzrqidzo1emMwk7w5Qhc9jd4Bn9TBb1MFwA==" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-1.0.4.tgz", + "integrity": "sha512-IPDJfugEGbfizBwBZRZ3xpccMdRyP5lqsBWXGQWimVjua/ccLCeMOAVjlc1R7LxFjo5sEDhyNIXd8mo/AiDS9w==", + "requires": { + "escape-string-regexp": "^2.0.0" + }, + "dependencies": { + "escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==" + } + } }, "static-extend": { "version": "0.1.2", @@ -9150,63 +9282,21 @@ } }, "string.prototype.trimend": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.2.tgz", - "integrity": "sha512-8oAG/hi14Z4nOVP0z6mdiVZ/wqjDtWSLygMigTzAb+7aPEDTleeFf+WrF+alzecxIRkckkJVn+dTlwzJXORATw==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.3.tgz", + "integrity": "sha512-ayH0pB+uf0U28CtjlLvL7NaohvR1amUvVZk+y3DYb0Ey2PUV5zPkkKy9+U1ndVEIXO8hNg18eIv9Jntbii+dKw==", "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.18.0-next.1" - }, - "dependencies": { - "es-abstract": { - "version": "1.18.0-next.1", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.1.tgz", - "integrity": "sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA==", - "requires": { - "es-to-primitive": "^1.2.1", - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.1", - "is-callable": "^1.2.2", - "is-negative-zero": "^2.0.0", - "is-regex": "^1.1.1", - "object-inspect": "^1.8.0", - "object-keys": "^1.1.1", - "object.assign": "^4.1.1", - 
"string.prototype.trimend": "^1.0.1", - "string.prototype.trimstart": "^1.0.1" - } - } + "call-bind": "^1.0.0", + "define-properties": "^1.1.3" } }, "string.prototype.trimstart": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.2.tgz", - "integrity": "sha512-7F6CdBTl5zyu30BJFdzSTlSlLPwODC23Od+iLoVH8X6+3fvDPPuBVVj9iaB1GOsSTSIgVfsfm27R2FGrAPznWg==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.3.tgz", + "integrity": "sha512-oBIBUy5lea5tt0ovtOFiEQaBkoBBkyJhZXzJYrSmDo5IUUqbOPvVezuRs/agBIdZ2p2Eo1FD6bD9USyBLfl3xg==", "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.18.0-next.1" - }, - "dependencies": { - "es-abstract": { - "version": "1.18.0-next.1", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.1.tgz", - "integrity": "sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA==", - "requires": { - "es-to-primitive": "^1.2.1", - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.1", - "is-callable": "^1.2.2", - "is-negative-zero": "^2.0.0", - "is-regex": "^1.1.1", - "object-inspect": "^1.8.0", - "object-keys": "^1.1.1", - "object.assign": "^4.1.1", - "string.prototype.trimend": "^1.0.1", - "string.prototype.trimstart": "^1.0.1" - } - } + "call-bind": "^1.0.0", + "define-properties": "^1.1.3" } }, "string_decoder": { @@ -9405,6 +9495,63 @@ "worker-farm": "^1.7.0" }, "dependencies": { + "find-cache-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", + "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "requires": { + "commondir": "^1.0.1", + "make-dir": "^2.0.0", + "pkg-dir": "^3.0.0" + } + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + }, + "pkg-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "requires": { + "find-up": "^3.0.0" + } + }, "schema-utils": { "version": "1.0.0", "resolved": 
"https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", @@ -9414,6 +9561,11 @@ "ajv-errors": "^1.0.0", "ajv-keywords": "^3.1.0" } + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" } } }, @@ -9983,6 +10135,26 @@ "es-abstract": "^1.17.2", "has-symbols": "^1.0.1", "object.getownpropertydescriptors": "^2.1.0" + }, + "dependencies": { + "es-abstract": { + "version": "1.17.7", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.7.tgz", + "integrity": "sha512-VBl/gnfcJ7OercKA9MVaegWsBHFjV492syMudcnQZvt/Dw8ezpcOHYZXa/J96O8vx+g4x65YKhxOwDUh63aS5g==", + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + } + } } }, "utila": { @@ -10058,9 +10230,9 @@ } }, "vue-router": { - "version": "3.4.8", - "resolved": "https://registry.npmjs.org/vue-router/-/vue-router-3.4.8.tgz", - "integrity": "sha512-3BsR84AqarcmweXjItxw3jwQsiYNssYg090yi4rlzTnCJxmHtkyCvhNz9Z7qRSOkmiV485KkUCReTp5AjNY4wg==" + "version": "3.4.9", + "resolved": "https://registry.npmjs.org/vue-router/-/vue-router-3.4.9.tgz", + "integrity": "sha512-CGAKWN44RqXW06oC+u4mPgHLQQi2t6vLD/JbGRDAXm0YpMv0bgpKuU5bBd7AvMgfTz9kXVRIWKHqRwGEb8xFkA==" }, "vue-server-renderer": { "version": "2.6.12", @@ -10227,9 +10399,9 @@ } }, "vuepress-theme-cosmos": { - "version": "1.0.175", - "resolved": "https://registry.npmjs.org/vuepress-theme-cosmos/-/vuepress-theme-cosmos-1.0.175.tgz", - "integrity": "sha512-QwVVaU1cMEl+j11trOEp2Vw+C3TAU+DQQIK4rcezHwMCsIYm9Wj4yDhz6rZVYd/Rg+KaCgZ1OCiZlcH/CXdu2A==", + "version": "1.0.176", + "resolved": "https://registry.npmjs.org/vuepress-theme-cosmos/-/vuepress-theme-cosmos-1.0.176.tgz", + "integrity": "sha512-9PNjU+AjI0FAzPthKeaEpTuoNbKfm8p5mzchkF8gXZoLCZPAN5fPaDRN3rfuE4oXHgn1INaZA7fNGaU4MgZzxg==", "requires": { "@cosmos-ui/vue": "^0.35.0", "@vuepress/plugin-google-analytics": "1.7.1", @@ -10244,7 +10416,7 @@ "jsonp": "^0.2.1", "markdown-it": "^12.0.0", "markdown-it-attrs": "^3.0.3", - "prismjs": "^1.21.0", + "prismjs": "^1.22.0", "pug": "^2.0.4", "pug-plain-loader": "^1.0.0", "stylus": "^0.54.8", @@ -10256,14 +10428,14 @@ } }, "watchpack": { - "version": "1.7.4", - "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-1.7.4.tgz", - "integrity": "sha512-aWAgTW4MoSJzZPAicljkO1hsi1oKj/RRq/OJQh2PKI2UKL04c2Bs+MBOB+BBABHTXJpf9mCwHN7ANCvYsvY2sg==", + "version": "1.7.5", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-1.7.5.tgz", + "integrity": "sha512-9P3MWk6SrKjHsGkLT2KHXdQ/9SNkyoJbabxnKOoJepsvJjJG8uYTR3yTPxPQvNDI3w4Nz1xnE0TLHK4RIVe/MQ==", "requires": { "chokidar": "^3.4.1", "graceful-fs": "^4.1.2", "neo-async": "^2.5.0", - "watchpack-chokidar2": "^2.0.0" + "watchpack-chokidar2": "^2.0.1" }, "dependencies": { "anymatch": { @@ -10367,9 +10539,9 @@ } }, "watchpack-chokidar2": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/watchpack-chokidar2/-/watchpack-chokidar2-2.0.0.tgz", - "integrity": "sha512-9TyfOyN/zLUbA288wZ8IsMZ+6cbzvsNyEzSBp6e/zkifi6xxbl8SmQ/CxQq32k8NNqrdVEVUVSEf56L4rQ/ZxA==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/watchpack-chokidar2/-/watchpack-chokidar2-2.0.1.tgz", + 
"integrity": "sha512-nCFfBIPKr5Sh61s4LPpy1Wtfi0HE8isJ3d2Yb5/Ppw2P2B/3eVSEBjKfN0fmHJSK14+31KwMKmcrzs2GM4P0Ww==", "optional": true, "requires": { "chokidar": "^2.1.8" @@ -10547,23 +10719,53 @@ } }, "debug": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", - "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", "requires": { "ms": "2.1.2" } }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "requires": { + "locate-path": "^3.0.0" + } + }, "is-absolute-url": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/is-absolute-url/-/is-absolute-url-3.0.3.tgz", "integrity": "sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q==" }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + }, "schema-utils": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", @@ -10818,9 +11020,9 @@ "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" }, "y18n": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", - "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==" + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.1.tgz", + "integrity": "sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==" }, "yallist": { "version": "3.1.1", diff --git a/docs/package.json b/docs/package.json index 3b3fa4b1fcb..a9221ed061f 100644 --- a/docs/package.json +++ b/docs/package.json @@ -14,6 +14,6 @@ "author": "", "license": "ISC", "dependencies": { - "vuepress-theme-cosmos": "^1.0.175" + "vuepress-theme-cosmos": "^1.0.176" } } diff --git a/go.mod b/go.mod index 39e28042489..b437a100634 100644 --- a/go.mod +++ b/go.mod @@ -23,6 +23,7 @@ require ( github.com/golang/snappy v0.0.2 // indirect github.com/gorilla/handlers v1.5.1 github.com/gorilla/mux v1.8.0 + github.com/grpc-ecosystem/go-grpc-middleware v1.2.2 github.com/grpc-ecosystem/grpc-gateway v1.16.0 github.com/hashicorp/golang-lru v0.5.4 github.com/magiconair/properties v1.8.4 @@ -34,6 +35,7 @@ require ( 
github.com/prometheus/common v0.15.0 github.com/rakyll/statik v0.1.7 github.com/regen-network/cosmos-proto v0.3.0 + github.com/rs/zerolog v1.20.0 github.com/spf13/afero v1.2.2 // indirect github.com/spf13/cast v1.3.1 github.com/spf13/cobra v1.1.1 diff --git a/go.sum b/go.sum index da0703bd69d..4542327b0e4 100644 --- a/go.sum +++ b/go.sum @@ -247,6 +247,7 @@ github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/ad github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-middleware v1.2.1/go.mod h1:EaizFBKfUKtMIF5iaDEhniwNedqGo9FuLFzppDr3uwI= +github.com/grpc-ecosystem/go-grpc-middleware v1.2.2 h1:FlFbCRLd5Jr4iYXZufAvgWN6Ao0JrI5chLINnUXDDr0= github.com/grpc-ecosystem/go-grpc-middleware v1.2.2/go.mod h1:EaizFBKfUKtMIF5iaDEhniwNedqGo9FuLFzppDr3uwI= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= github.com/grpc-ecosystem/grpc-gateway v1.8.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= @@ -480,6 +481,9 @@ github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6L github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik= github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= +github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= +github.com/rs/zerolog v1.20.0 h1:38k9hgtUBdxFwE34yS8rTHmHBa4eN16E4DJlv177LNs= +github.com/rs/zerolog v1.20.0/go.mod h1:IzD0RJ65iWH0w97OQQebJEvTZYvsCUm9WVLWBQrJRjo= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= @@ -725,6 +729,7 @@ golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgw golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190828213141-aed303cbaa74/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= diff --git a/proto/cosmos/base/tendermint/v1beta1/query.proto b/proto/cosmos/base/tendermint/v1beta1/query.proto index b5220231ac0..640522e7a9f 100644 --- a/proto/cosmos/base/tendermint/v1beta1/query.proto +++ b/proto/cosmos/base/tendermint/v1beta1/query.proto @@ -8,7 +8,7 @@ import "tendermint/types/block.proto"; import "tendermint/types/types.proto"; import "cosmos/base/query/v1beta1/pagination.proto"; -option go_package = "github.com/cosmos/cosmos-sdk/types/query"; +option go_package = "github.com/cosmos/cosmos-sdk/client/grpc/tmservice"; // Service defines the 
gRPC querier service for tendermint queries. service Service { diff --git a/proto/cosmos/tx/v1beta1/service.proto b/proto/cosmos/tx/v1beta1/service.proto index d78216d031e..59df75bab12 100644 --- a/proto/cosmos/tx/v1beta1/service.proto +++ b/proto/cosmos/tx/v1beta1/service.proto @@ -4,6 +4,7 @@ package cosmos.tx.v1beta1; import "google/api/annotations.proto"; import "cosmos/base/abci/v1beta1/abci.proto"; import "cosmos/tx/v1beta1/tx.proto"; +import "gogoproto/gogo.proto"; import "cosmos/base/query/v1beta1/pagination.proto"; option go_package = "github.com/cosmos/cosmos-sdk/types/tx"; @@ -12,13 +13,22 @@ option go_package = "github.com/cosmos/cosmos-sdk/types/tx"; service Service { // Simulate simulates executing a transaction for estimating gas usage. rpc Simulate(SimulateRequest) returns (SimulateResponse) { - option (google.api.http).post = "/cosmos/tx/v1beta1/simulate"; + option (google.api.http) = { + post: "/cosmos/tx/v1beta1/simulate" + body: "*" + }; } // GetTx fetches a tx by hash. rpc GetTx(GetTxRequest) returns (GetTxResponse) { - option (google.api.http).get = "/cosmos/tx/v1beta1/tx/{hash}"; + option (google.api.http).get = "/cosmos/tx/v1beta1/txs/{hash}"; + } + // BroadcastTx broadcast transaction. + rpc BroadcastTx(BroadcastTxRequest) returns (BroadcastTxResponse) { + option (google.api.http) = { + post: "/cosmos/tx/v1beta1/txs" + body: "*" + }; } - // GetTxsEvent fetches txs by event. rpc GetTxsEvent(GetTxsEventRequest) returns (GetTxsEventResponse) { option (google.api.http).get = "/cosmos/tx/v1beta1/txs"; @@ -28,8 +38,8 @@ service Service { // GetTxsEventRequest is the request type for the Service.TxsByEvents // RPC method. message GetTxsEventRequest { - // event is the transaction event type. - string event = 1; + // events is the list of transaction event type. + repeated string events = 1; // pagination defines an pagination for the request. cosmos.base.query.v1beta1.PageRequest pagination = 2; } @@ -45,6 +55,36 @@ message GetTxsEventResponse { cosmos.base.query.v1beta1.PageResponse pagination = 3; } +// BroadcastTxRequest is the request type for the Service.BroadcastTxRequest +// RPC method. +message BroadcastTxRequest { + // tx_bytes is the raw transaction. + bytes tx_bytes = 1; + BroadcastMode mode = 2; +} + +// BroadcastMode specifies the broadcast mode for the TxService.Broadcast RPC method. +enum BroadcastMode { + // zero-value for mode ordering + BROADCAST_MODE_UNSPECIFIED = 0; + // BROADCAST_MODE_BLOCK defines a tx broadcasting mode where the client waits for + // the tx to be committed in a block. + BROADCAST_MODE_BLOCK = 1; + // BROADCAST_MODE_SYNC defines a tx broadcasting mode where the client waits for + // a CheckTx execution response only. + BROADCAST_MODE_SYNC = 2; + // BROADCAST_MODE_ASYNC defines a tx broadcasting mode where the client returns + // immediately. + BROADCAST_MODE_ASYNC = 3; +} + +// BroadcastTxResponse is the response type for the +// Service.BroadcastTx method. +message BroadcastTxResponse { + // tx_response is the queried TxResponses. + cosmos.base.abci.v1beta1.TxResponse tx_response = 1; +} + // SimulateRequest is the request type for the Service.Simulate // RPC method. 
message SimulateRequest { diff --git a/proto/ibc/core/client/v1/genesis.proto b/proto/ibc/core/client/v1/genesis.proto index b1c4247c94a..06b4bbd0648 100644 --- a/proto/ibc/core/client/v1/genesis.proto +++ b/proto/ibc/core/client/v1/genesis.proto @@ -20,4 +20,6 @@ message GenesisState { Params params = 3 [(gogoproto.nullable) = false]; // create localhost on initialization bool create_localhost = 4 [(gogoproto.moretags) = "yaml:\"create_localhost\""]; + // the sequence for the next generated client identifier + uint64 next_client_sequence = 5 [(gogoproto.moretags) = "yaml:\"next_client_sequence\""]; } diff --git a/proto/ibc/core/client/v1/tx.proto b/proto/ibc/core/client/v1/tx.proto index 1019e15a034..a30ec8bbf10 100644 --- a/proto/ibc/core/client/v1/tx.proto +++ b/proto/ibc/core/client/v1/tx.proto @@ -27,15 +27,13 @@ message MsgCreateClient { option (gogoproto.equal) = false; option (gogoproto.goproto_getters) = false; - // client unique identifier - string client_id = 1 [(gogoproto.moretags) = "yaml:\"client_id\""]; // light client state - google.protobuf.Any client_state = 2 [(gogoproto.moretags) = "yaml:\"client_state\""]; + google.protobuf.Any client_state = 1 [(gogoproto.moretags) = "yaml:\"client_state\""]; // consensus state associated with the client that corresponds to a given // height. - google.protobuf.Any consensus_state = 3 [(gogoproto.moretags) = "yaml:\"consensus_state\""]; + google.protobuf.Any consensus_state = 2 [(gogoproto.moretags) = "yaml:\"consensus_state\""]; // signer address - string signer = 4; + string signer = 3; } // MsgCreateClientResponse defines the Msg/CreateClient response type. diff --git a/server/grpc/server_test.go b/server/grpc/server_test.go index bfa39d81852..5648233f8f3 100644 --- a/server/grpc/server_test.go +++ b/server/grpc/server_test.go @@ -11,55 +11,66 @@ import ( "github.com/stretchr/testify/suite" "google.golang.org/grpc" "google.golang.org/grpc/metadata" - rpb "google.golang.org/grpc/reflection/grpc_reflection_v1alpha" + clienttx "github.com/cosmos/cosmos-sdk/client/tx" "github.com/cosmos/cosmos-sdk/testutil/network" "github.com/cosmos/cosmos-sdk/testutil/testdata" sdk "github.com/cosmos/cosmos-sdk/types" grpctypes "github.com/cosmos/cosmos-sdk/types/grpc" + "github.com/cosmos/cosmos-sdk/types/tx" + txtypes "github.com/cosmos/cosmos-sdk/types/tx" + "github.com/cosmos/cosmos-sdk/types/tx/signing" + authclient "github.com/cosmos/cosmos-sdk/x/auth/client" banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" ) type IntegrationTestSuite struct { suite.Suite + cfg network.Config network *network.Network + conn *grpc.ClientConn } func (s *IntegrationTestSuite) SetupSuite() { s.T().Log("setting up integration test suite") - s.network = network.New(s.T(), network.DefaultConfig()) + s.cfg = network.DefaultConfig() + s.network = network.New(s.T(), s.cfg) s.Require().NotNil(s.network) _, err := s.network.WaitForHeight(2) s.Require().NoError(err) -} -func (s *IntegrationTestSuite) TearDownSuite() { - s.T().Log("tearing down integration test suite") - s.network.Cleanup() -} - -func (s *IntegrationTestSuite) TestGRPCServer() { val0 := s.network.Validators[0] - conn, err := grpc.Dial( + s.conn, err = grpc.Dial( val0.AppConfig.GRPC.Address, grpc.WithInsecure(), // Or else we get "no transport security set" ) s.Require().NoError(err) - defer conn.Close() +} +func (s *IntegrationTestSuite) TearDownSuite() { + s.T().Log("tearing down integration test suite") + s.conn.Close() + s.network.Cleanup() +} + +func (s *IntegrationTestSuite) 
TestGRPCServer_TestService() { // gRPC query to test service should work - testClient := testdata.NewQueryClient(conn) + testClient := testdata.NewQueryClient(s.conn) testRes, err := testClient.Echo(context.Background(), &testdata.EchoRequest{Message: "hello"}) s.Require().NoError(err) s.Require().Equal("hello", testRes.Message) +} + +func (s *IntegrationTestSuite) TestGRPCServer_BankBalance() { + val0 := s.network.Validators[0] // gRPC query to bank service should work denom := fmt.Sprintf("%stoken", val0.Moniker) - bankClient := banktypes.NewQueryClient(conn) + bankClient := banktypes.NewQueryClient(s.conn) var header metadata.MD bankRes, err := bankClient.Balance( context.Background(), @@ -82,9 +93,11 @@ func (s *IntegrationTestSuite) TestGRPCServer() { ) blockHeight = header.Get(grpctypes.GRPCBlockHeightHeader) s.Require().Equal([]string{"1"}, blockHeight) +} +func (s *IntegrationTestSuite) TestGRPCServer_Reflection() { // Test server reflection - reflectClient := rpb.NewServerReflectionClient(conn) + reflectClient := rpb.NewServerReflectionClient(s.conn) stream, err := reflectClient.ServerReflectionInfo(context.Background(), grpc.WaitForReady(true)) s.Require().NoError(err) s.Require().NoError(stream.Send(&rpb.ServerReflectionRequest{ @@ -101,6 +114,65 @@ func (s *IntegrationTestSuite) TestGRPCServer() { s.Require().True(servicesMap["cosmos.bank.v1beta1.Query"]) } +func (s *IntegrationTestSuite) TestGRPCServer_GetTxsEvent() { + // Query the tx via gRPC without pagination. This used to panic, see + // https://github.com/cosmos/cosmos-sdk/issues/8038. + txServiceClient := txtypes.NewServiceClient(s.conn) + _, err := txServiceClient.GetTxsEvent( + context.Background(), + &tx.GetTxsEventRequest{ + Events: []string{"message.action=send"}, + }, + ) + // TODO Once https://github.com/cosmos/cosmos-sdk/pull/8029 is merged, this + // should not error anymore. + s.Require().NoError(err) +} + +func (s *IntegrationTestSuite) TestGRPCServer_BroadcastTx() { + val0 := s.network.Validators[0] + + // prepare txBuilder with msg + txBuilder := val0.ClientCtx.TxConfig.NewTxBuilder() + feeAmount := sdk.Coins{sdk.NewInt64Coin(s.cfg.BondDenom, 10)} + gasLimit := testdata.NewTestGasLimit() + s.Require().NoError( + txBuilder.SetMsgs(&banktypes.MsgSend{ + FromAddress: val0.Address.String(), + ToAddress: val0.Address.String(), + Amount: sdk.Coins{sdk.NewInt64Coin(s.cfg.BondDenom, 10)}, + }), + ) + txBuilder.SetFeeAmount(feeAmount) + txBuilder.SetGasLimit(gasLimit) + + // setup txFactory + txFactory := clienttx.Factory{}. + WithChainID(val0.ClientCtx.ChainID). + WithKeybase(val0.ClientCtx.Keyring). + WithTxConfig(val0.ClientCtx.TxConfig). + WithSignMode(signing.SignMode_SIGN_MODE_DIRECT) + + // Sign Tx. + err := authclient.SignTx(txFactory, val0.ClientCtx, val0.Moniker, txBuilder, false) + s.Require().NoError(err) + + txBytes, err := val0.ClientCtx.TxConfig.TxEncoder()(txBuilder.GetTx()) + s.Require().NoError(err) + + // Broadcast the tx via gRPC. + queryClient := tx.NewServiceClient(s.conn) + grpcRes, err := queryClient.BroadcastTx( + context.Background(), + &tx.BroadcastTxRequest{ + Mode: tx.BroadcastMode_BROADCAST_MODE_SYNC, + TxBytes: txBytes, + }, + ) + s.Require().NoError(err) + s.Require().Equal(uint32(0), grpcRes.TxResponse.Code) +} + // Test and enforce that we upfront reject any connections to baseapp containing // invalid initial x-cosmos-block-height that aren't positive and in the range [0, max(int64)] // See issue https://github.com/cosmos/cosmos-sdk/issues/7662. 
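As a companion to the TestGRPCServer_BroadcastTx test above, here is a minimal client-side sketch of how the new BroadcastTx gRPC endpoint could be exercised against a running node from outside the test suite. It is an illustration only: the localhost:9090 address and the pre-signed txBytes are assumptions (in the test the address comes from val0.AppConfig.GRPC.Address and the bytes from TxConfig.TxEncoder()); the service client, request type and broadcast mode are the ones introduced in this change.

package main

import (
	"context"
	"fmt"
	"log"

	"google.golang.org/grpc"

	"github.com/cosmos/cosmos-sdk/types/tx"
)

func main() {
	// Dial the node's gRPC endpoint. The address is an assumption here;
	// use whatever address the node actually exposes.
	conn, err := grpc.Dial("localhost:9090", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// txBytes is assumed to hold a transaction that was already built,
	// signed and encoded elsewhere (e.g. via TxConfig.TxEncoder(), as in
	// the test above).
	var txBytes []byte

	// Broadcast the raw bytes in SYNC mode, i.e. wait only for the
	// CheckTx response.
	client := tx.NewServiceClient(conn)
	res, err := client.BroadcastTx(
		context.Background(),
		&tx.BroadcastTxRequest{
			Mode:    tx.BroadcastMode_BROADCAST_MODE_SYNC,
			TxBytes: txBytes,
		},
	)
	if err != nil {
		log.Fatal(err)
	}

	// A zero code in the response means the tx passed CheckTx and was
	// accepted into the mempool.
	fmt.Println("code:", res.TxResponse.Code)
}

BROADCAST_MODE_BLOCK or BROADCAST_MODE_ASYNC can be substituted for SYNC, matching the BroadcastMode enum added in cosmos/tx/v1beta1/service.proto above.
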
diff --git a/server/logger.go b/server/logger.go new file mode 100644 index 00000000000..e6f6f8c1110 --- /dev/null +++ b/server/logger.go @@ -0,0 +1,55 @@ +package server + +import ( + "github.com/rs/zerolog" + tmlog "github.com/tendermint/tendermint/libs/log" +) + +var _ tmlog.Logger = (*ZeroLogWrapper)(nil) + +// ZeroLogWrapper provides a wrapper around a zerolog.Logger instance. It implements +// Tendermint's Logger interface. +type ZeroLogWrapper struct { + zerolog.Logger +} + +// Info implements Tendermint's Logger interface and logs with level INFO. A set +// of key/value tuples may be provided to add context to the log. The number of +// tuples must be even and the key of the tuple must be a string. +func (z ZeroLogWrapper) Info(msg string, keyVals ...interface{}) { + z.Logger.Info().Fields(getLogFields(keyVals...)).Msg(msg) +} + +// Error implements Tendermint's Logger interface and logs with level ERR. A set +// of key/value tuples may be provided to add context to the log. The number of +// tuples must be even and the key of the tuple must be a string. +func (z ZeroLogWrapper) Error(msg string, keyVals ...interface{}) { + z.Logger.Error().Fields(getLogFields(keyVals...)).Msg(msg) +} + +// Debug implements Tendermint's Logger interface and logs with level DEBUG. A set +// of key/value tuples may be provided to add context to the log. The number of +// tuples must be even and the key of the tuple must be a string. +func (z ZeroLogWrapper) Debug(msg string, keyVals ...interface{}) { + z.Logger.Debug().Fields(getLogFields(keyVals...)).Msg(msg) +} + +// With returns a new wrapped logger with additional context provided by a set +// of key/value tuples. The number of tuples must be even and the key of the +// tuple must be a string. +func (z ZeroLogWrapper) With(keyVals ...interface{}) tmlog.Logger { + return ZeroLogWrapper{z.Logger.With().Fields(getLogFields(keyVals...)).Logger()} +} + +func getLogFields(keyVals ...interface{}) map[string]interface{} { + if len(keyVals)%2 != 0 { + return nil + } + + fields := make(map[string]interface{}) + for i := 0; i < len(keyVals); i += 2 { + fields[keyVals[i].(string)] = keyVals[i+1] + } + + return fields +} diff --git a/server/start.go b/server/start.go index e4ecd47de60..d3af7db42ca 100644 --- a/server/start.go +++ b/server/start.go @@ -240,17 +240,17 @@ func startInProcess(ctx *Context, clientCtx client.Context, appCreator types.App genDocProvider, node.DefaultDBProvider, node.DefaultMetricsProvider(cfg.Instrumentation), - ctx.Logger.With("module", "node"), + ctx.Logger, ) if err != nil { return err } - ctx.Logger.Debug("Initialization: tmNode created") + ctx.Logger.Debug("initialization: tmNode created") if err := tmNode.Start(); err != nil { return err } - ctx.Logger.Debug("Initialization: tmNode started") + ctx.Logger.Debug("initialization: tmNode started") config := config.GetConfig(ctx.Viper) diff --git a/server/util.go b/server/util.go index 7118d5b8d55..e461a12fb84 100644 --- a/server/util.go +++ b/server/util.go @@ -14,12 +14,12 @@ import ( "syscall" "time" + "github.com/rs/zerolog" + "github.com/rs/zerolog/log" "github.com/spf13/cobra" "github.com/spf13/viper" tmcfg "github.com/tendermint/tendermint/config" - tmcli "github.com/tendermint/tendermint/libs/cli" - tmflags "github.com/tendermint/tendermint/libs/cli/flags" - "github.com/tendermint/tendermint/libs/log" + tmlog "github.com/tendermint/tendermint/libs/log" dbm "github.com/tendermint/tm-db" "github.com/cosmos/cosmos-sdk/client/flags" @@ -39,7 +39,7 @@ const ServerContextKey = 
sdk.ContextKey("server.context") type Context struct { Viper *viper.Viper Config *tmcfg.Config - Logger log.Logger + Logger tmlog.Logger } // ErrorCode contains the exit code for server exit. @@ -52,10 +52,14 @@ func (e ErrorCode) Error() string { } func NewDefaultContext() *Context { - return NewContext(viper.New(), tmcfg.DefaultConfig(), log.NewTMLogger(log.NewSyncWriter(os.Stdout))) + return NewContext( + viper.New(), + tmcfg.DefaultConfig(), + ZeroLogWrapper{log.Logger}, + ) } -func NewContext(v *viper.Viper, config *tmcfg.Config, logger log.Logger) *Context { +func NewContext(v *viper.Viper, config *tmcfg.Config, logger tmlog.Logger) *Context { return &Context{v, config, logger} } @@ -86,27 +90,29 @@ func InterceptConfigsPreRunHandler(cmd *cobra.Command) error { serverCtx.Viper.SetEnvKeyReplacer(strings.NewReplacer(".", "_", "-", "_")) serverCtx.Viper.AutomaticEnv() - // Intercept configuration files, using both Viper instances separately + // intercept configuration files, using both Viper instances separately config, err := interceptConfigs(serverCtx.Viper) if err != nil { return err } - // Return value is a tendermint configuration object + + // return value is a tendermint configuration object serverCtx.Config = config - logger := log.NewTMLogger(log.NewSyncWriter(os.Stdout)) - logger, err = tmflags.ParseLogLevel(config.LogLevel, logger, tmcfg.DefaultLogLevel()) - if err != nil { - return err + var logWriter io.Writer + if strings.ToLower(serverCtx.Viper.GetString(flags.FlagLogFormat)) == tmcfg.LogFormatPlain { + logWriter = zerolog.ConsoleWriter{Out: os.Stderr} + } else { + logWriter = os.Stderr } - // Check if the tendermint flag for trace logging is set - // if it is then setup a tracing logger in this app as well - if serverCtx.Viper.GetBool(tmcli.TraceFlag) { - logger = log.NewTracingLogger(logger) + logLvlStr := serverCtx.Viper.GetString(flags.FlagLogLevel) + logLvl, err := zerolog.ParseLevel(logLvlStr) + if err != nil { + return fmt.Errorf("failed to parse log level (%s): %w", logLvlStr, err) } - serverCtx.Logger = logger.With("module", "main") + serverCtx.Logger = ZeroLogWrapper{zerolog.New(logWriter).Level(logLvl).With().Timestamp().Logger()} return SetCmdServerContext(cmd, serverCtx) } diff --git a/simapp/simd/cmd/root.go b/simapp/simd/cmd/root.go index dd6132095c8..847a31f2116 100644 --- a/simapp/simd/cmd/root.go +++ b/simapp/simd/cmd/root.go @@ -6,8 +6,10 @@ import ( "os" "path/filepath" + "github.com/rs/zerolog" "github.com/spf13/cast" "github.com/spf13/cobra" + tmcfg "github.com/tendermint/tendermint/config" tmcli "github.com/tendermint/tendermint/libs/cli" "github.com/tendermint/tendermint/libs/log" dbm "github.com/tendermint/tm-db" @@ -79,7 +81,8 @@ func Execute(rootCmd *cobra.Command) error { ctx = context.WithValue(ctx, client.ClientContextKey, &client.Context{}) ctx = context.WithValue(ctx, server.ServerContextKey, srvCtx) - rootCmd.PersistentFlags().String("log_level", srvCtx.Config.LogLevel, "The logging level in the format of :,...") + rootCmd.PersistentFlags().String(flags.FlagLogLevel, zerolog.InfoLevel.String(), "The logging level (trace|debug|info|warn|error|fatal|panic)") + rootCmd.PersistentFlags().String(flags.FlagLogFormat, tmcfg.LogFormatJSON, "The logging format (json|plain)") executor := tmcli.PrepareBaseCmd(rootCmd, "", simapp.DefaultNodeHome) return executor.ExecuteContext(ctx) diff --git a/store/rootmulti/store.go b/store/rootmulti/store.go index 72238cbb1e6..61378a36614 100644 --- a/store/rootmulti/store.go +++ b/store/rootmulti/store.go @@ 
-549,9 +549,12 @@ func (rs *Store) SetInitialVersion(version int64) error { // Loop through all the stores, if it's an IAVL store, then set initial // version on it. - for _, commitKVStore := range rs.stores { - if storeWithVersion, ok := commitKVStore.(types.StoreWithInitialVersion); ok { - storeWithVersion.SetInitialVersion(version) + for key, store := range rs.stores { + if store.GetStoreType() == types.StoreTypeIAVL { + // If the store is wrapped with an inter-block cache, we must first unwrap + // it to get the underlying IAVL store. + store = rs.GetCommitKVStore(key) + store.(*iavl.Store).SetInitialVersion(version) } } diff --git a/store/rootmulti/store_test.go b/store/rootmulti/store_test.go index efe2618779a..7934b514d16 100644 --- a/store/rootmulti/store_test.go +++ b/store/rootmulti/store_test.go @@ -657,11 +657,18 @@ func TestSetInitialVersion(t *testing.T) { db := dbm.NewMemDB() multi := newMultiStoreWithMounts(db, types.PruneNothing) + require.NoError(t, multi.LoadLatestVersion()) + multi.SetInitialVersion(5) require.Equal(t, int64(5), multi.initialVersion) multi.Commit() require.Equal(t, int64(5), multi.LastCommitID().Version) + + ckvs := multi.GetCommitKVStore(multi.keysByName["store1"]) + iavlStore, ok := ckvs.(*iavl.Store) + require.True(t, ok) + require.True(t, iavlStore.VersionExists(5)) } func BenchmarkMultistoreSnapshot100K(b *testing.B) { diff --git a/types/address.go b/types/address.go index 7e0408a8498..ba9e5b303b6 100644 --- a/types/address.go +++ b/types/address.go @@ -11,7 +11,6 @@ import ( yaml "gopkg.in/yaml.v2" "github.com/cosmos/cosmos-sdk/codec/legacy" - cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" "github.com/cosmos/cosmos-sdk/types/bech32" ) @@ -663,7 +662,7 @@ func GetPubKeyFromBech32(pkt Bech32PubKeyType, pubkeyStr string) (cryptotypes.Pu return nil, err } - return cryptocodec.PubKeyFromBytes(bz) + return legacy.PubKeyFromBytes(bz) } // MustGetPubKeyFromBech32 calls GetPubKeyFromBech32 except it panics on error. diff --git a/types/errors/abci.go b/types/errors/abci.go index f297e4ca698..df85f6bc89d 100644 --- a/types/errors/abci.go +++ b/types/errors/abci.go @@ -151,12 +151,14 @@ func errIsNil(err error) bool { return false } +var errPanicWithMsg = Wrapf(ErrPanic, "panic message redacted to hide potentially sensitive system info") + // Redact replaces an error that is not initialized as an ABCI Error with a // generic internal error instance. If the error is an ABCI Error, that error is // simply returned. 
func Redact(err error) error { if ErrPanic.Is(err) { - return ErrPanic + return errPanicWithMsg } if abciCode(err) == internalABCICode { return errInternal diff --git a/types/errors/abci_test.go b/types/errors/abci_test.go index 18d6b1f2e69..02c12e7bbdd 100644 --- a/types/errors/abci_test.go +++ b/types/errors/abci_test.go @@ -171,7 +171,7 @@ func (s *abciTestSuite) TestRedact() { }{ "panic looses message": { err: Wrap(ErrPanic, "some secret stack trace"), - changed: ErrPanic, + changed: errPanicWithMsg, }, "sdk errors untouched": { err: Wrap(ErrUnauthorized, "cannot drop db"), @@ -233,7 +233,7 @@ func (s *abciTestSuite) TestABCIInfoSerializeErr() { }, "redact in default encoder": { src: myPanic, - exp: "panic", + exp: "panic message redacted to hide potentially sensitive system info: panic", }, "do not redact in debug encoder": { src: myPanic, diff --git a/types/tx/service.pb.go b/types/tx/service.pb.go index 2302593c5c7..57f1f627635 100644 --- a/types/tx/service.pb.go +++ b/types/tx/service.pb.go @@ -8,6 +8,7 @@ import ( fmt "fmt" types "github.com/cosmos/cosmos-sdk/types" query "github.com/cosmos/cosmos-sdk/types/query" + _ "github.com/gogo/protobuf/gogoproto" grpc1 "github.com/gogo/protobuf/grpc" proto "github.com/gogo/protobuf/proto" _ "google.golang.org/genproto/googleapis/api/annotations" @@ -30,11 +31,50 @@ var _ = math.Inf // proto package needs to be updated. const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package +// BroadcastMode specifies the broadcast mode for the TxService.Broadcast RPC method. +type BroadcastMode int32 + +const ( + // zero-value for mode ordering + BroadcastMode_BROADCAST_MODE_UNSPECIFIED BroadcastMode = 0 + // BROADCAST_MODE_BLOCK defines a tx broadcasting mode where the client waits for + // the tx to be committed in a block. + BroadcastMode_BROADCAST_MODE_BLOCK BroadcastMode = 1 + // BROADCAST_MODE_SYNC defines a tx broadcasting mode where the client waits for + // a CheckTx execution response only. + BroadcastMode_BROADCAST_MODE_SYNC BroadcastMode = 2 + // BROADCAST_MODE_ASYNC defines a tx broadcasting mode where the client returns + // immediately. + BroadcastMode_BROADCAST_MODE_ASYNC BroadcastMode = 3 +) + +var BroadcastMode_name = map[int32]string{ + 0: "BROADCAST_MODE_UNSPECIFIED", + 1: "BROADCAST_MODE_BLOCK", + 2: "BROADCAST_MODE_SYNC", + 3: "BROADCAST_MODE_ASYNC", +} + +var BroadcastMode_value = map[string]int32{ + "BROADCAST_MODE_UNSPECIFIED": 0, + "BROADCAST_MODE_BLOCK": 1, + "BROADCAST_MODE_SYNC": 2, + "BROADCAST_MODE_ASYNC": 3, +} + +func (x BroadcastMode) String() string { + return proto.EnumName(BroadcastMode_name, int32(x)) +} + +func (BroadcastMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_e0b00a618705eca7, []int{0} +} + // GetTxsEventRequest is the request type for the Service.TxsByEvents // RPC method. type GetTxsEventRequest struct { - // event is the transaction event type. - Event string `protobuf:"bytes,1,opt,name=event,proto3" json:"event,omitempty"` + // events is the list of transaction event type. + Events []string `protobuf:"bytes,1,rep,name=events,proto3" json:"events,omitempty"` // pagination defines an pagination for the request. 
Pagination *query.PageRequest `protobuf:"bytes,2,opt,name=pagination,proto3" json:"pagination,omitempty"` } @@ -72,11 +112,11 @@ func (m *GetTxsEventRequest) XXX_DiscardUnknown() { var xxx_messageInfo_GetTxsEventRequest proto.InternalMessageInfo -func (m *GetTxsEventRequest) GetEvent() string { +func (m *GetTxsEventRequest) GetEvents() []string { if m != nil { - return m.Event + return m.Events } - return "" + return nil } func (m *GetTxsEventRequest) GetPagination() *query.PageRequest { @@ -151,6 +191,108 @@ func (m *GetTxsEventResponse) GetPagination() *query.PageResponse { return nil } +// BroadcastTxRequest is the request type for the Service.BroadcastTxRequest +// RPC method. +type BroadcastTxRequest struct { + // tx_bytes is the raw transaction. + TxBytes []byte `protobuf:"bytes,1,opt,name=tx_bytes,json=txBytes,proto3" json:"tx_bytes,omitempty"` + Mode BroadcastMode `protobuf:"varint,2,opt,name=mode,proto3,enum=cosmos.tx.v1beta1.BroadcastMode" json:"mode,omitempty"` +} + +func (m *BroadcastTxRequest) Reset() { *m = BroadcastTxRequest{} } +func (m *BroadcastTxRequest) String() string { return proto.CompactTextString(m) } +func (*BroadcastTxRequest) ProtoMessage() {} +func (*BroadcastTxRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_e0b00a618705eca7, []int{2} +} +func (m *BroadcastTxRequest) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *BroadcastTxRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_BroadcastTxRequest.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *BroadcastTxRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BroadcastTxRequest.Merge(m, src) +} +func (m *BroadcastTxRequest) XXX_Size() int { + return m.Size() +} +func (m *BroadcastTxRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BroadcastTxRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BroadcastTxRequest proto.InternalMessageInfo + +func (m *BroadcastTxRequest) GetTxBytes() []byte { + if m != nil { + return m.TxBytes + } + return nil +} + +func (m *BroadcastTxRequest) GetMode() BroadcastMode { + if m != nil { + return m.Mode + } + return BroadcastMode_BROADCAST_MODE_UNSPECIFIED +} + +// BroadcastTxResponse is the response type for the +// Service.BroadcastTx method. +type BroadcastTxResponse struct { + // tx_response is the queried TxResponses. 
+ TxResponse *types.TxResponse `protobuf:"bytes,1,opt,name=tx_response,json=txResponse,proto3" json:"tx_response,omitempty"` +} + +func (m *BroadcastTxResponse) Reset() { *m = BroadcastTxResponse{} } +func (m *BroadcastTxResponse) String() string { return proto.CompactTextString(m) } +func (*BroadcastTxResponse) ProtoMessage() {} +func (*BroadcastTxResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_e0b00a618705eca7, []int{3} +} +func (m *BroadcastTxResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *BroadcastTxResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_BroadcastTxResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *BroadcastTxResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BroadcastTxResponse.Merge(m, src) +} +func (m *BroadcastTxResponse) XXX_Size() int { + return m.Size() +} +func (m *BroadcastTxResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BroadcastTxResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BroadcastTxResponse proto.InternalMessageInfo + +func (m *BroadcastTxResponse) GetTxResponse() *types.TxResponse { + if m != nil { + return m.TxResponse + } + return nil +} + // SimulateRequest is the request type for the Service.Simulate // RPC method. type SimulateRequest struct { @@ -162,7 +304,7 @@ func (m *SimulateRequest) Reset() { *m = SimulateRequest{} } func (m *SimulateRequest) String() string { return proto.CompactTextString(m) } func (*SimulateRequest) ProtoMessage() {} func (*SimulateRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_e0b00a618705eca7, []int{2} + return fileDescriptor_e0b00a618705eca7, []int{4} } func (m *SimulateRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -211,7 +353,7 @@ func (m *SimulateResponse) Reset() { *m = SimulateResponse{} } func (m *SimulateResponse) String() string { return proto.CompactTextString(m) } func (*SimulateResponse) ProtoMessage() {} func (*SimulateResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_e0b00a618705eca7, []int{3} + return fileDescriptor_e0b00a618705eca7, []int{5} } func (m *SimulateResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -265,7 +407,7 @@ func (m *GetTxRequest) Reset() { *m = GetTxRequest{} } func (m *GetTxRequest) String() string { return proto.CompactTextString(m) } func (*GetTxRequest) ProtoMessage() {} func (*GetTxRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_e0b00a618705eca7, []int{4} + return fileDescriptor_e0b00a618705eca7, []int{6} } func (m *GetTxRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -313,7 +455,7 @@ func (m *GetTxResponse) Reset() { *m = GetTxResponse{} } func (m *GetTxResponse) String() string { return proto.CompactTextString(m) } func (*GetTxResponse) ProtoMessage() {} func (*GetTxResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_e0b00a618705eca7, []int{5} + return fileDescriptor_e0b00a618705eca7, []int{7} } func (m *GetTxResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -357,8 +499,11 @@ func (m *GetTxResponse) GetTxResponse() *types.TxResponse { } func init() { + proto.RegisterEnum("cosmos.tx.v1beta1.BroadcastMode", BroadcastMode_name, BroadcastMode_value) proto.RegisterType((*GetTxsEventRequest)(nil), "cosmos.tx.v1beta1.GetTxsEventRequest") proto.RegisterType((*GetTxsEventResponse)(nil), 
"cosmos.tx.v1beta1.GetTxsEventResponse") + proto.RegisterType((*BroadcastTxRequest)(nil), "cosmos.tx.v1beta1.BroadcastTxRequest") + proto.RegisterType((*BroadcastTxResponse)(nil), "cosmos.tx.v1beta1.BroadcastTxResponse") proto.RegisterType((*SimulateRequest)(nil), "cosmos.tx.v1beta1.SimulateRequest") proto.RegisterType((*SimulateResponse)(nil), "cosmos.tx.v1beta1.SimulateResponse") proto.RegisterType((*GetTxRequest)(nil), "cosmos.tx.v1beta1.GetTxRequest") @@ -368,43 +513,54 @@ func init() { func init() { proto.RegisterFile("cosmos/tx/v1beta1/service.proto", fileDescriptor_e0b00a618705eca7) } var fileDescriptor_e0b00a618705eca7 = []byte{ - // 563 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x54, 0xcd, 0x6e, 0xd3, 0x40, - 0x10, 0xae, 0x1d, 0xfa, 0xc3, 0xa4, 0x08, 0x58, 0x7e, 0x14, 0x99, 0xe2, 0x06, 0xa7, 0x69, 0x23, - 0x24, 0xbc, 0x6a, 0xb8, 0xf4, 0x80, 0x84, 0x84, 0x54, 0x22, 0x6e, 0xc8, 0xed, 0x89, 0x4b, 0xb5, - 0x09, 0x5b, 0xc7, 0x22, 0xf1, 0xba, 0xde, 0x4d, 0xb4, 0x15, 0xf4, 0xc2, 0x91, 0x13, 0x12, 0x2f, - 0xc5, 0x31, 0x12, 0x17, 0x8e, 0x28, 0xe1, 0x0d, 0x78, 0x01, 0xe4, 0xf5, 0x3a, 0x71, 0xa8, 0x4d, - 0x7b, 0xf2, 0xae, 0xfc, 0xfd, 0xcc, 0x37, 0xe3, 0x31, 0x6c, 0xf7, 0x18, 0x1f, 0x32, 0x8e, 0x85, - 0xc4, 0xe3, 0xfd, 0x2e, 0x15, 0x64, 0x1f, 0x73, 0x1a, 0x8f, 0x83, 0x1e, 0x75, 0xa3, 0x98, 0x09, - 0x86, 0xee, 0xa6, 0x00, 0x57, 0x48, 0x57, 0x03, 0xac, 0x2d, 0x9f, 0x31, 0x7f, 0x40, 0x31, 0x89, - 0x02, 0x4c, 0xc2, 0x90, 0x09, 0x22, 0x02, 0x16, 0xf2, 0x94, 0x60, 0x35, 0xb4, 0x62, 0x97, 0x70, - 0x8a, 0x49, 0xb7, 0x17, 0xcc, 0x85, 0x93, 0x8b, 0x06, 0x59, 0x97, 0x6d, 0x85, 0xd4, 0xef, 0x9e, - 0xe6, 0x05, 0xce, 0x46, 0x34, 0x3e, 0x9f, 0x63, 0x22, 0xe2, 0x07, 0xa1, 0x72, 0x4b, 0xb1, 0x4e, - 0x0c, 0xa8, 0x43, 0xc5, 0xb1, 0xe4, 0x87, 0x63, 0x1a, 0x0a, 0x8f, 0x9e, 0x8d, 0x28, 0x17, 0xe8, - 0x3e, 0xac, 0xd2, 0xe4, 0x5e, 0x33, 0xea, 0x46, 0xeb, 0xa6, 0x97, 0x5e, 0xd0, 0x6b, 0x80, 0x05, - 0xbf, 0x66, 0xd6, 0x8d, 0x56, 0xb5, 0xbd, 0xeb, 0xea, 0x78, 0x89, 0x99, 0xab, 0xcc, 0xb2, 0x98, - 0xee, 0x5b, 0xe2, 0x53, 0xad, 0xe8, 0xe5, 0x98, 0xce, 0xc4, 0x80, 0x7b, 0x4b, 0xa6, 0x3c, 0x62, - 0x21, 0xa7, 0x68, 0x0f, 0x2a, 0x42, 0xf2, 0x9a, 0x51, 0xaf, 0xb4, 0xaa, 0xed, 0x07, 0xee, 0xa5, - 0xbe, 0xb9, 0xc7, 0xd2, 0x4b, 0x10, 0xa8, 0x03, 0x9b, 0x42, 0x9e, 0xc4, 0x9a, 0xc7, 0x6b, 0xa6, - 0x62, 0xec, 0x2c, 0x95, 0xa2, 0x7a, 0x95, 0x23, 0x6a, 0xb0, 0x57, 0x15, 0xf3, 0x73, 0x22, 0x94, - 0x4f, 0x54, 0x51, 0x89, 0xf6, 0xae, 0x4c, 0xa4, 0x95, 0xf2, 0x91, 0x0e, 0xe0, 0xf6, 0x51, 0x30, - 0x1c, 0x0d, 0x88, 0xc8, 0x12, 0xa3, 0x26, 0x98, 0x42, 0xaa, 0x06, 0x96, 0x86, 0x31, 0x85, 0x74, - 0xbe, 0x18, 0x70, 0x67, 0x41, 0xd5, 0x9d, 0x78, 0x01, 0x1b, 0x3e, 0xe1, 0x27, 0x41, 0x78, 0xca, - 0xb4, 0xc2, 0x93, 0xf2, 0x70, 0x1d, 0xc2, 0xdf, 0x84, 0xa7, 0xcc, 0x5b, 0xf7, 0xd3, 0x03, 0x3a, - 0x80, 0xb5, 0x98, 0xf2, 0xd1, 0x40, 0xe8, 0x19, 0xd5, 0xcb, 0xb9, 0x9e, 0xc2, 0x79, 0x1a, 0xef, - 0x38, 0xb0, 0xa9, 0x06, 0x93, 0x65, 0x40, 0x70, 0xa3, 0x4f, 0x78, 0x5f, 0x7f, 0x06, 0xea, 0xec, - 0x5c, 0xc0, 0x2d, 0x8d, 0xd1, 0xc5, 0x5e, 0x2f, 0x28, 0x3a, 0x84, 0x6a, 0x6e, 0x68, 0xba, 0xb4, - 0xeb, 0xcd, 0x0c, 0x16, 0x33, 0x6b, 0xff, 0x31, 0x61, 0xfd, 0x28, 0x5d, 0x30, 0x24, 0x61, 0x23, - 0x6b, 0x1d, 0x72, 0x0a, 0x9c, 0xff, 0x19, 0x89, 0xd5, 0xf8, 0x2f, 0x26, 0x35, 0x70, 0x1a, 0x9f, - 0x7f, 0xfc, 0xfe, 0x66, 0x3e, 0x76, 0x1e, 0xe1, 0x82, 0xcd, 0xce, 0xdc, 0x22, 0x58, 0x55, 0x4d, - 0x40, 0xdb, 0x05, 0x92, 0xf9, 0x16, 0x5a, 0xf5, 0x72, 0x80, 0x36, 0xdc, 0x51, 0x86, 0x36, 0xda, - 0xc2, 0x45, 0x3b, 
0x8d, 0x3f, 0x26, 0x5d, 0xbf, 0x40, 0x9f, 0xa0, 0x9a, 0xdb, 0x19, 0xd4, 0x2c, - 0x93, 0x5d, 0x5a, 0x64, 0x6b, 0xf7, 0x2a, 0x98, 0xae, 0xc1, 0x56, 0x35, 0xd4, 0xd0, 0xc3, 0xc2, - 0x1a, 0xf8, 0xab, 0x97, 0xdf, 0xa7, 0xb6, 0x31, 0x99, 0xda, 0xc6, 0xaf, 0xa9, 0x6d, 0x7c, 0x9d, - 0xd9, 0x2b, 0x93, 0x99, 0xbd, 0xf2, 0x73, 0x66, 0xaf, 0xbc, 0x6b, 0xfa, 0x81, 0xe8, 0x8f, 0xba, - 0x6e, 0x8f, 0x0d, 0x33, 0x6e, 0xfa, 0x78, 0xc6, 0xdf, 0x7f, 0xc0, 0xe2, 0x3c, 0xa2, 0x89, 0x58, - 0x77, 0x4d, 0xfd, 0x6e, 0x9e, 0xff, 0x0d, 0x00, 0x00, 0xff, 0xff, 0xae, 0x63, 0xf9, 0x38, 0x2f, - 0x05, 0x00, 0x00, + // 737 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x55, 0xcd, 0x4f, 0x13, 0x41, + 0x14, 0xef, 0xb6, 0xc8, 0xc7, 0x2b, 0x68, 0x1d, 0x10, 0x6b, 0xd1, 0xa5, 0x2c, 0x16, 0x08, 0x89, + 0xbb, 0xa1, 0x7a, 0x20, 0xc6, 0xc4, 0xd0, 0x52, 0x08, 0x51, 0x3e, 0xb2, 0xc5, 0x83, 0xc6, 0xa4, + 0x99, 0xb6, 0xc3, 0xb2, 0x91, 0xee, 0x94, 0xce, 0x94, 0x2c, 0x01, 0x62, 0xe2, 0xd1, 0x93, 0x89, + 0xff, 0x94, 0x47, 0x12, 0x2f, 0x1e, 0x0d, 0xf8, 0x47, 0x78, 0x34, 0x3b, 0x3b, 0x6d, 0xb7, 0x65, + 0x0b, 0xc4, 0x13, 0x33, 0xcc, 0xef, 0xfd, 0x3e, 0xde, 0x9b, 0x9d, 0xc2, 0x74, 0x85, 0xb2, 0x1a, + 0x65, 0x06, 0x77, 0x8d, 0xa3, 0xa5, 0x32, 0xe1, 0x78, 0xc9, 0x60, 0xa4, 0x71, 0x64, 0x57, 0x88, + 0x5e, 0x6f, 0x50, 0x4e, 0xd1, 0x7d, 0x1f, 0xa0, 0x73, 0x57, 0x97, 0x80, 0xd4, 0x63, 0x8b, 0x52, + 0xeb, 0x80, 0x18, 0xb8, 0x6e, 0x1b, 0xd8, 0x71, 0x28, 0xc7, 0xdc, 0xa6, 0x0e, 0xf3, 0x0b, 0x52, + 0xb3, 0x92, 0xb1, 0x8c, 0x19, 0x31, 0x70, 0xb9, 0x62, 0xb7, 0x89, 0xbd, 0x8d, 0x04, 0xa5, 0xae, + 0xca, 0x72, 0x57, 0x9e, 0x4d, 0x58, 0xd4, 0xa2, 0x62, 0x69, 0x78, 0x2b, 0xf9, 0xdf, 0xc5, 0x20, + 0xed, 0x61, 0x93, 0x34, 0x8e, 0xdb, 0x95, 0x75, 0x6c, 0xd9, 0x8e, 0xf0, 0xe0, 0x63, 0x35, 0x0e, + 0x68, 0x9d, 0xf0, 0x5d, 0x97, 0x15, 0x8e, 0x88, 0xc3, 0x4d, 0x72, 0xd8, 0x24, 0x8c, 0xa3, 0x49, + 0x18, 0x24, 0xde, 0x9e, 0x25, 0x95, 0x74, 0x6c, 0x61, 0xc4, 0x94, 0x3b, 0xb4, 0x06, 0xd0, 0x61, + 0x48, 0x46, 0xd3, 0xca, 0x42, 0x3c, 0x3b, 0xa7, 0xcb, 0xd8, 0x9e, 0x9c, 0x2e, 0xe4, 0x5a, 0xf1, + 0xf5, 0x1d, 0x6c, 0x11, 0xc9, 0x69, 0x06, 0x2a, 0xb5, 0x73, 0x05, 0xc6, 0xbb, 0x64, 0x59, 0x9d, + 0x3a, 0x8c, 0xa0, 0x79, 0x88, 0x71, 0xd7, 0x17, 0x8d, 0x67, 0x1f, 0xe8, 0x57, 0xfa, 0xa9, 0xef, + 0xba, 0xa6, 0x87, 0x40, 0xeb, 0x30, 0xca, 0xdd, 0x52, 0x43, 0xd6, 0xb1, 0x64, 0x54, 0x54, 0x3c, + 0xed, 0xb2, 0x22, 0x7a, 0x18, 0x28, 0x94, 0x60, 0x33, 0xce, 0xdb, 0x6b, 0x8f, 0x28, 0x98, 0x28, + 0x26, 0x12, 0xcd, 0xdf, 0x98, 0x48, 0x32, 0x05, 0x23, 0x11, 0x40, 0xb9, 0x06, 0xc5, 0xd5, 0x0a, + 0x66, 0xdc, 0x13, 0xf3, 0x1b, 0xf9, 0x08, 0x86, 0xb9, 0x5b, 0x2a, 0x1f, 0x73, 0xe2, 0xa5, 0x52, + 0x16, 0x46, 0xcd, 0x21, 0xee, 0xe6, 0xbc, 0x2d, 0x7a, 0x01, 0x03, 0x35, 0x5a, 0x25, 0xa2, 0x8b, + 0x77, 0xb3, 0xe9, 0x90, 0xb0, 0x6d, 0xbe, 0x4d, 0x5a, 0x25, 0xa6, 0x40, 0x6b, 0x1f, 0x61, 0xbc, + 0x4b, 0x46, 0x36, 0xae, 0x00, 0xf1, 0x40, 0x3f, 0x84, 0xd4, 0x6d, 0xdb, 0x01, 0x9d, 0x76, 0x68, + 0xcb, 0x70, 0xaf, 0x68, 0xd7, 0x9a, 0x07, 0x98, 0xb7, 0xc6, 0x86, 0x32, 0x10, 0xe5, 0xae, 0x24, + 0xec, 0x33, 0x91, 0x28, 0x77, 0xb5, 0xaf, 0x0a, 0x24, 0x3a, 0xa5, 0xd2, 0xd5, 0x2b, 0x18, 0xb6, + 0x30, 0x2b, 0xd9, 0xce, 0x1e, 0x95, 0x0c, 0x33, 0xfd, 0x2d, 0xad, 0x63, 0xb6, 0xe1, 0xec, 0x51, + 0x73, 0xc8, 0xf2, 0x17, 0x68, 0x19, 0x06, 0x1b, 0x84, 0x35, 0x0f, 0xb8, 0xbc, 0x68, 0xe9, 0xfe, + 0xb5, 0xa6, 0xc0, 0x99, 0x12, 0xaf, 0x69, 0x30, 0x2a, 0x6e, 0x57, 0x2b, 0x03, 0x82, 0x81, 0x7d, + 0xcc, 0xf6, 0x85, 0x87, 0x11, 0x53, 0xac, 0xb5, 0x33, 0x18, 0x93, 0x18, 
0x69, 0xf6, 0x76, 0x41, + 0x7b, 0x3b, 0x1d, 0xfd, 0xbf, 0x4e, 0x2f, 0x9e, 0xc2, 0x58, 0xd7, 0x78, 0x91, 0x0a, 0xa9, 0x9c, + 0xb9, 0xbd, 0xb2, 0x9a, 0x5f, 0x29, 0xee, 0x96, 0x36, 0xb7, 0x57, 0x0b, 0xa5, 0x77, 0x5b, 0xc5, + 0x9d, 0x42, 0x7e, 0x63, 0x6d, 0xa3, 0xb0, 0x9a, 0x88, 0xa0, 0x24, 0x4c, 0xf4, 0x9c, 0xe7, 0xde, + 0x6e, 0xe7, 0xdf, 0x24, 0x14, 0xf4, 0x10, 0xc6, 0x7b, 0x4e, 0x8a, 0xef, 0xb7, 0xf2, 0x89, 0x68, + 0x48, 0xc9, 0x8a, 0x38, 0x89, 0x65, 0xff, 0xc6, 0x60, 0xa8, 0xe8, 0xbf, 0x5d, 0xe8, 0x04, 0x86, + 0x5b, 0x83, 0x43, 0x5a, 0x48, 0xee, 0x9e, 0x0b, 0x91, 0x9a, 0xbd, 0x16, 0x23, 0x2f, 0xd2, 0xdc, + 0x97, 0x9f, 0x7f, 0xbe, 0x47, 0xd3, 0xda, 0x94, 0x11, 0xf2, 0x68, 0x4a, 0xf0, 0x4b, 0x65, 0x11, + 0x1d, 0xc2, 0x1d, 0x31, 0x05, 0x34, 0x1d, 0xc2, 0x1a, 0x9c, 0x61, 0x2a, 0xdd, 0x1f, 0x20, 0x35, + 0x33, 0x42, 0x73, 0x1a, 0x3d, 0x31, 0xc2, 0x5e, 0x4c, 0x66, 0x9c, 0x78, 0x73, 0x3f, 0x43, 0x9f, + 0x21, 0x1e, 0xf8, 0x82, 0x50, 0xe6, 0xba, 0x0f, 0xaf, 0x23, 0x3f, 0x77, 0x13, 0x4c, 0x9a, 0x98, + 0x11, 0x26, 0xa6, 0xb4, 0xc9, 0x70, 0x13, 0x5e, 0xe6, 0x53, 0x88, 0x07, 0xde, 0xbe, 0x50, 0x03, + 0x57, 0x9f, 0xe4, 0x50, 0x03, 0x21, 0x4f, 0xa8, 0xa6, 0x0a, 0x03, 0x49, 0xd4, 0xc7, 0x40, 0xee, + 0xf5, 0x8f, 0x0b, 0x55, 0x39, 0xbf, 0x50, 0x95, 0xdf, 0x17, 0xaa, 0xf2, 0xed, 0x52, 0x8d, 0x9c, + 0x5f, 0xaa, 0x91, 0x5f, 0x97, 0x6a, 0xe4, 0x43, 0xc6, 0xb2, 0xf9, 0x7e, 0xb3, 0xac, 0x57, 0x68, + 0xad, 0x55, 0xeb, 0xff, 0x79, 0xc6, 0xaa, 0x9f, 0x0c, 0x7e, 0x5c, 0x27, 0x1e, 0x59, 0x79, 0x50, + 0xfc, 0x70, 0x3c, 0xff, 0x17, 0x00, 0x00, 0xff, 0xff, 0x96, 0xba, 0xfb, 0xcb, 0x0f, 0x07, 0x00, + 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -423,6 +579,8 @@ type ServiceClient interface { Simulate(ctx context.Context, in *SimulateRequest, opts ...grpc.CallOption) (*SimulateResponse, error) // GetTx fetches a tx by hash. GetTx(ctx context.Context, in *GetTxRequest, opts ...grpc.CallOption) (*GetTxResponse, error) + // BroadcastTx broadcast transaction. + BroadcastTx(ctx context.Context, in *BroadcastTxRequest, opts ...grpc.CallOption) (*BroadcastTxResponse, error) // GetTxsEvent fetches txs by event. GetTxsEvent(ctx context.Context, in *GetTxsEventRequest, opts ...grpc.CallOption) (*GetTxsEventResponse, error) } @@ -453,6 +611,15 @@ func (c *serviceClient) GetTx(ctx context.Context, in *GetTxRequest, opts ...grp return out, nil } +func (c *serviceClient) BroadcastTx(ctx context.Context, in *BroadcastTxRequest, opts ...grpc.CallOption) (*BroadcastTxResponse, error) { + out := new(BroadcastTxResponse) + err := c.cc.Invoke(ctx, "/cosmos.tx.v1beta1.Service/BroadcastTx", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + func (c *serviceClient) GetTxsEvent(ctx context.Context, in *GetTxsEventRequest, opts ...grpc.CallOption) (*GetTxsEventResponse, error) { out := new(GetTxsEventResponse) err := c.cc.Invoke(ctx, "/cosmos.tx.v1beta1.Service/GetTxsEvent", in, out, opts...) @@ -468,6 +635,8 @@ type ServiceServer interface { Simulate(context.Context, *SimulateRequest) (*SimulateResponse, error) // GetTx fetches a tx by hash. GetTx(context.Context, *GetTxRequest) (*GetTxResponse, error) + // BroadcastTx broadcast transaction. + BroadcastTx(context.Context, *BroadcastTxRequest) (*BroadcastTxResponse, error) // GetTxsEvent fetches txs by event. 
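// A minimal client-side sketch of the new BroadcastTx RPC added above, assuming `conn` is
// anything satisfying grpc.ClientConnInterface (a *grpc.ClientConn, or a client.Context as
// in the SDK tests) and `txBytes` holds a signed, proto-encoded transaction:
//
//	svc := tx.NewServiceClient(conn)
//	res, err := svc.BroadcastTx(context.Background(), &tx.BroadcastTxRequest{
//		TxBytes: txBytes,
//		Mode:    tx.BroadcastMode_BROADCAST_MODE_SYNC,
//	})
//	// on success, res.TxResponse.Code == 0 means the node accepted the tx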
GetTxsEvent(context.Context, *GetTxsEventRequest) (*GetTxsEventResponse, error) } @@ -482,6 +651,9 @@ func (*UnimplementedServiceServer) Simulate(ctx context.Context, req *SimulateRe func (*UnimplementedServiceServer) GetTx(ctx context.Context, req *GetTxRequest) (*GetTxResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method GetTx not implemented") } +func (*UnimplementedServiceServer) BroadcastTx(ctx context.Context, req *BroadcastTxRequest) (*BroadcastTxResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method BroadcastTx not implemented") +} func (*UnimplementedServiceServer) GetTxsEvent(ctx context.Context, req *GetTxsEventRequest) (*GetTxsEventResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method GetTxsEvent not implemented") } @@ -526,6 +698,24 @@ func _Service_GetTx_Handler(srv interface{}, ctx context.Context, dec func(inter return interceptor(ctx, in, info, handler) } +func _Service_BroadcastTx_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BroadcastTxRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceServer).BroadcastTx(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/cosmos.tx.v1beta1.Service/BroadcastTx", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceServer).BroadcastTx(ctx, req.(*BroadcastTxRequest)) + } + return interceptor(ctx, in, info, handler) +} + func _Service_GetTxsEvent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(GetTxsEventRequest) if err := dec(in); err != nil { @@ -556,6 +746,10 @@ var _Service_serviceDesc = grpc.ServiceDesc{ MethodName: "GetTx", Handler: _Service_GetTx_Handler, }, + { + MethodName: "BroadcastTx", + Handler: _Service_BroadcastTx_Handler, + }, { MethodName: "GetTxsEvent", Handler: _Service_GetTxsEvent_Handler, @@ -597,12 +791,14 @@ func (m *GetTxsEventRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x12 } - if len(m.Event) > 0 { - i -= len(m.Event) - copy(dAtA[i:], m.Event) - i = encodeVarintService(dAtA, i, uint64(len(m.Event))) - i-- - dAtA[i] = 0xa + if len(m.Events) > 0 { + for iNdEx := len(m.Events) - 1; iNdEx >= 0; iNdEx-- { + i -= len(m.Events[iNdEx]) + copy(dAtA[i:], m.Events[iNdEx]) + i = encodeVarintService(dAtA, i, uint64(len(m.Events[iNdEx]))) + i-- + dAtA[i] = 0xa + } } return len(dAtA) - i, nil } @@ -670,6 +866,76 @@ func (m *GetTxsEventResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *BroadcastTxRequest) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *BroadcastTxRequest) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *BroadcastTxRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Mode != 0 { + i = encodeVarintService(dAtA, i, uint64(m.Mode)) + i-- + dAtA[i] = 0x10 + } + if len(m.TxBytes) > 0 { + i -= len(m.TxBytes) + copy(dAtA[i:], m.TxBytes) + i = encodeVarintService(dAtA, i, uint64(len(m.TxBytes))) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m 
*BroadcastTxResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *BroadcastTxResponse) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *BroadcastTxResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.TxResponse != nil { + { + size, err := m.TxResponse.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintService(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + func (m *SimulateRequest) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -846,9 +1112,11 @@ func (m *GetTxsEventRequest) Size() (n int) { } var l int _ = l - l = len(m.Event) - if l > 0 { - n += 1 + l + sovService(uint64(l)) + if len(m.Events) > 0 { + for _, s := range m.Events { + l = len(s) + n += 1 + l + sovService(uint64(l)) + } } if m.Pagination != nil { l = m.Pagination.Size() @@ -882,6 +1150,35 @@ func (m *GetTxsEventResponse) Size() (n int) { return n } +func (m *BroadcastTxRequest) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.TxBytes) + if l > 0 { + n += 1 + l + sovService(uint64(l)) + } + if m.Mode != 0 { + n += 1 + sovService(uint64(m.Mode)) + } + return n +} + +func (m *BroadcastTxResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.TxResponse != nil { + l = m.TxResponse.Size() + n += 1 + l + sovService(uint64(l)) + } + return n +} + func (m *SimulateRequest) Size() (n int) { if m == nil { return 0 @@ -979,7 +1276,7 @@ func (m *GetTxsEventRequest) Unmarshal(dAtA []byte) error { switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Event", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field Events", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { @@ -1007,7 +1304,7 @@ func (m *GetTxsEventRequest) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Event = string(dAtA[iNdEx:postIndex]) + m.Events = append(m.Events, string(dAtA[iNdEx:postIndex])) iNdEx = postIndex case 2: if wireType != 2 { @@ -1226,6 +1523,201 @@ func (m *GetTxsEventResponse) Unmarshal(dAtA []byte) error { } return nil } +func (m *BroadcastTxRequest) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowService + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: BroadcastTxRequest: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: BroadcastTxRequest: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field TxBytes", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowService + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if 
byteLen < 0 { + return ErrInvalidLengthService + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return ErrInvalidLengthService + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.TxBytes = append(m.TxBytes[:0], dAtA[iNdEx:postIndex]...) + if m.TxBytes == nil { + m.TxBytes = []byte{} + } + iNdEx = postIndex + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Mode", wireType) + } + m.Mode = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowService + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Mode |= BroadcastMode(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + iNdEx = preIndex + skippy, err := skipService(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthService + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthService + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *BroadcastTxResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowService + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: BroadcastTxResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: BroadcastTxResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field TxResponse", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowService + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthService + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthService + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.TxResponse == nil { + m.TxResponse = &types.TxResponse{} + } + if err := m.TxResponse.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipService(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthService + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthService + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func (m *SimulateRequest) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 diff --git a/types/tx/service.pb.gw.go b/types/tx/service.pb.gw.go index a7d5b56c5e0..6d94c423ecb 100644 --- a/types/tx/service.pb.gw.go +++ b/types/tx/service.pb.gw.go @@ -31,18 +31,15 @@ var _ = runtime.String var _ = utilities.NewDoubleArray var _ = descriptor.ForMessage -var ( - filter_Service_Simulate_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} -) - func request_Service_Simulate_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceClient, req *http.Request, pathParams map[string]string) 
(proto.Message, runtime.ServerMetadata, error) { var protoReq SimulateRequest var metadata runtime.ServerMetadata - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Service_Simulate_0); err != nil { + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -55,10 +52,11 @@ func local_request_Service_Simulate_0(ctx context.Context, marshaler runtime.Mar var protoReq SimulateRequest var metadata runtime.ServerMetadata - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Service_Simulate_0); err != nil { + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } @@ -121,6 +119,40 @@ func local_request_Service_GetTx_0(ctx context.Context, marshaler runtime.Marsha } +func request_Service_BroadcastTx_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq BroadcastTxRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.BroadcastTx(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_Service_BroadcastTx_0(ctx context.Context, marshaler runtime.Marshaler, server ServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq BroadcastTxRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := server.BroadcastTx(ctx, &protoReq) + return msg, metadata, err + +} + var ( filter_Service_GetTxsEvent_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} ) @@ -203,6 +235,26 @@ func RegisterServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, se }) + mux.Handle("POST", pattern_Service_BroadcastTx_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, 
w, req, err) + return + } + resp, md, err := local_request_Service_BroadcastTx_0(rctx, inboundMarshaler, server, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Service_BroadcastTx_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + mux.Handle("GET", pattern_Service_GetTxsEvent_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() @@ -304,6 +356,26 @@ func RegisterServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, cl }) + mux.Handle("POST", pattern_Service_BroadcastTx_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Service_BroadcastTx_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Service_BroadcastTx_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + mux.Handle("GET", pattern_Service_GetTxsEvent_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() @@ -330,7 +402,9 @@ func RegisterServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, cl var ( pattern_Service_Simulate_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"cosmos", "tx", "v1beta1", "simulate"}, "", runtime.AssumeColonVerbOpt(true))) - pattern_Service_GetTx_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 1, 1, 0, 4, 1, 5, 3}, []string{"cosmos", "tx", "v1beta1", "hash"}, "", runtime.AssumeColonVerbOpt(true))) + pattern_Service_GetTx_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"cosmos", "tx", "v1beta1", "txs", "hash"}, "", runtime.AssumeColonVerbOpt(true))) + + pattern_Service_BroadcastTx_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"cosmos", "tx", "v1beta1", "txs"}, "", runtime.AssumeColonVerbOpt(true))) pattern_Service_GetTxsEvent_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"cosmos", "tx", "v1beta1", "txs"}, "", runtime.AssumeColonVerbOpt(true))) ) @@ -340,5 +414,7 @@ var ( forward_Service_GetTx_0 = runtime.ForwardResponseMessage + forward_Service_BroadcastTx_0 = runtime.ForwardResponseMessage + forward_Service_GetTxsEvent_0 = runtime.ForwardResponseMessage ) diff --git a/x/auth/client/cli/cli_test.go b/x/auth/client/cli/cli_test.go index 4ba383139c3..e6cc5230ea8 100644 --- a/x/auth/client/cli/cli_test.go +++ b/x/auth/client/cli/cli_test.go @@ -735,6 +735,71 @@ func (s *IntegrationTestSuite) TestCLIMultisign() { s.Require().NoError(s.network.WaitForNextBlock()) } +func (s *IntegrationTestSuite) TestSignBatchMultisig() { + val := s.network.Validators[0] + + // Fetch 2 accounts and a multisig. 
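// The steps below roughly mirror this CLI flow (command and flag spellings abbreviated;
// treat it as a sketch of the intent rather than exact invocations):
//
//	simd tx bank send <val> <multisig-addr> 10<denom> ...                                   # fund the multisig
//	simd tx bank send <multisig-addr> <val> 1<denom> --generate-only > unsigned.json
//	simd tx sign-batch unsigned.json --multisig <multisig-addr> --from newAccount1 > sig1.json
//	simd tx sign-batch unsigned.json --multisig <multisig-addr> --from newAccount2 > sig2.json
//	simd tx multisign unsigned.json multi sig1.json sig2.json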
+ account1, err := val.ClientCtx.Keyring.Key("newAccount1") + s.Require().NoError(err) + account2, err := val.ClientCtx.Keyring.Key("newAccount2") + s.Require().NoError(err) + multisigInfo, err := val.ClientCtx.Keyring.Key("multi") + + // Send coins from validator to multisig. + sendTokens := sdk.NewInt64Coin(s.cfg.BondDenom, 10) + _, err = bankcli.MsgSendExec( + val.ClientCtx, + val.Address, + multisigInfo.GetAddress(), + sdk.NewCoins(sendTokens), + fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), + fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastBlock), + fmt.Sprintf("--%s=%s", flags.FlagFees, sdk.NewCoins(sdk.NewCoin(s.cfg.BondDenom, sdk.NewInt(10))).String()), + fmt.Sprintf("--gas=%d", flags.DefaultGasLimit), + ) + s.Require().NoError(err) + s.Require().NoError(s.network.WaitForNextBlock()) + + generatedStd, err := bankcli.MsgSendExec( + val.ClientCtx, + multisigInfo.GetAddress(), + val.Address, + sdk.NewCoins( + sdk.NewCoin(s.cfg.BondDenom, sdk.NewInt(1)), + ), + fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), + fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastBlock), + fmt.Sprintf("--%s=%s", flags.FlagFees, sdk.NewCoins(sdk.NewCoin(s.cfg.BondDenom, sdk.NewInt(10))).String()), + fmt.Sprintf("--%s=true", flags.FlagGenerateOnly), + ) + s.Require().NoError(err) + + // Write the output to disk + filename, cleanup1 := testutil.WriteToNewTempFile(s.T(), strings.Repeat(generatedStd.String(), 1)) + defer cleanup1() + + val.ClientCtx.HomeDir = strings.Replace(val.ClientCtx.HomeDir, "simd", "simcli", 1) + + // sign-batch file + res, err := authtest.TxSignBatchExec(val.ClientCtx, account1.GetAddress(), filename.Name(), fmt.Sprintf("--%s=%s", flags.FlagChainID, val.ClientCtx.ChainID), "--multisig", multisigInfo.GetAddress().String()) + s.Require().NoError(err) + s.Require().Equal(1, len(strings.Split(strings.Trim(res.String(), "\n"), "\n"))) + // write sigs to file + file1, cleanup2 := testutil.WriteToNewTempFile(s.T(), res.String()) + defer cleanup2() + + // sign-batch file with account2 + res, err = authtest.TxSignBatchExec(val.ClientCtx, account2.GetAddress(), filename.Name(), fmt.Sprintf("--%s=%s", flags.FlagChainID, val.ClientCtx.ChainID), "--multisig", multisigInfo.GetAddress().String()) + s.Require().NoError(err) + s.Require().Equal(1, len(strings.Split(strings.Trim(res.String(), "\n"), "\n"))) + + // write sigs to file2 + file2, cleanup3 := testutil.WriteToNewTempFile(s.T(), res.String()) + defer cleanup3() + res, err = authtest.TxMultiSignExec(val.ClientCtx, multisigInfo.GetName(), filename.Name(), file1.Name(), file2.Name()) + s.Require().NoError(err) +} + func (s *IntegrationTestSuite) TestGetAccountCmd() { val := s.network.Validators[0] _, _, addr1 := testdata.KeyTestPubAddr() diff --git a/x/auth/client/cli/tx_sign.go b/x/auth/client/cli/tx_sign.go index 7046c495d66..a9c69807828 100644 --- a/x/auth/client/cli/tx_sign.go +++ b/x/auth/client/cli/tx_sign.go @@ -119,7 +119,10 @@ func makeSignBatchCmd() func(cmd *cobra.Command, args []string) error { return err } } else { - err = authclient.SignTxWithSignerAddress(txFactory, clientCtx, multisigAddr, clientCtx.GetFromName(), txBuilder, true) + if txFactory.SignMode() == signing.SignMode_SIGN_MODE_UNSPECIFIED { + txFactory = txFactory.WithSignMode(signing.SignMode_SIGN_MODE_LEGACY_AMINO_JSON) + } + err = authclient.SignTxWithSignerAddress(txFactory, clientCtx, multisigAddr, clientCtx.GetFromName(), txBuilder, clientCtx.Offline) } if err != nil { diff --git a/x/auth/client/rest/broadcast.go 
b/x/auth/client/rest/broadcast.go index e0020515d80..4fd4dcb1a90 100644 --- a/x/auth/client/rest/broadcast.go +++ b/x/auth/client/rest/broadcast.go @@ -1,9 +1,11 @@ package rest import ( + "fmt" "io/ioutil" "net/http" + clientrest "github.com/cosmos/cosmos-sdk/client/rest" "github.com/cosmos/cosmos-sdk/client/tx" "github.com/cosmos/cosmos-sdk/client" @@ -30,8 +32,15 @@ func BroadcastTxRequest(clientCtx client.Context) http.HandlerFunc { } // NOTE: amino is used intentionally here, don't migrate it! - if err := clientCtx.LegacyAmino.UnmarshalJSON(body, &req); rest.CheckBadRequestError(w, err) { - return + err = clientCtx.LegacyAmino.UnmarshalJSON(body, &req) + if err != nil { + err := fmt.Errorf("this transaction cannot be broadcasted via legacy REST endpoints, because it does not support"+ + " Amino serialization. Please either use CLI, gRPC, gRPC-gateway, or directly query the Tendermint RPC"+ + " endpoint to broadcast this transaction. The new REST endpoint (via gRPC-gateway) is POST /cosmos/tx/v1beta1/txs."+ + " Please also see the REST endpoints migration guide at %s for more info", clientrest.DeprecationURL) + if rest.CheckBadRequestError(w, err) { + return + } } txBytes, err := tx.ConvertAndEncodeStdTx(clientCtx.TxConfig, req.Tx) diff --git a/x/auth/client/rest/decode.go b/x/auth/client/rest/decode.go index 3061d71cf48..5b732fa0a19 100644 --- a/x/auth/client/rest/decode.go +++ b/x/auth/client/rest/decode.go @@ -55,7 +55,7 @@ func DecodeTxRequestHandlerFn(clientCtx client.Context) http.HandlerFunc { response := DecodeResp(stdTx) - err = checkSignModeError(clientCtx, response, "/cosmos/tx/v1beta1/txs/decode") + err = checkAminoMarshalError(clientCtx, response, "/cosmos/tx/v1beta1/txs/decode") if err != nil { rest.WriteErrorResponse(w, http.StatusInternalServerError, err.Error()) diff --git a/x/auth/client/rest/encode.go b/x/auth/client/rest/encode.go index 1fbc3b94f43..63881780153 100644 --- a/x/auth/client/rest/encode.go +++ b/x/auth/client/rest/encode.go @@ -2,12 +2,13 @@ package rest import ( "encoding/base64" + "fmt" "io/ioutil" "net/http" - "github.com/cosmos/cosmos-sdk/client/tx" - "github.com/cosmos/cosmos-sdk/client" + clientrest "github.com/cosmos/cosmos-sdk/client/rest" + "github.com/cosmos/cosmos-sdk/client/tx" "github.com/cosmos/cosmos-sdk/types/rest" "github.com/cosmos/cosmos-sdk/x/auth/legacy/legacytx" ) @@ -17,6 +18,12 @@ type EncodeResp struct { Tx string `json:"tx" yaml:"tx"` } +// ErrEncodeDecode is the error to show when encoding/decoding txs that are not +// amino-serializable (e.g. IBC txs). +var ErrEncodeDecode error = fmt.Errorf("this endpoint does not support txs that are not serializable"+ + " via Amino, such as txs that contain IBC `Msg`s. For more info, please refer to our"+ + " REST migration guide at %s", clientrest.DeprecationURL) + // EncodeTxRequestHandlerFn returns the encode tx REST handler. In particular, // it takes a json-formatted transaction, encodes it to the Amino wire protocol, // and responds with base64-encoded bytes. @@ -31,8 +38,12 @@ func EncodeTxRequestHandlerFn(clientCtx client.Context) http.HandlerFunc { // NOTE: amino is used intentionally here, don't migrate it err = clientCtx.LegacyAmino.UnmarshalJSON(body, &req) - if rest.CheckBadRequestError(w, err) { - return + // If there's an unmarshalling error, we assume that it's because we're + // using amino to unmarshal a non-amino tx. 
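// The deprecation errors in this package point clients at the gRPC-gateway replacement; a
// broadcast request against the new endpoint would look roughly like this (field names
// follow the proto definitions, values shown only for illustration):
//
//	POST {API}/cosmos/tx/v1beta1/txs
//	{"tx_bytes": "<base64 proto-encoded signed tx>", "mode": "BROADCAST_MODE_SYNC"}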
+ if err != nil { + if rest.CheckBadRequestError(w, ErrEncodeDecode) { + return + } } // re-encode it in the chain's native binary format diff --git a/x/auth/client/rest/query.go b/x/auth/client/rest/query.go index f9abae6b71f..d11d4b3416c 100644 --- a/x/auth/client/rest/query.go +++ b/x/auth/client/rest/query.go @@ -108,7 +108,7 @@ func QueryTxsRequestHandlerFn(clientCtx client.Context) http.HandlerFunc { packStdTxResponse(w, clientCtx, txRes) } - err = checkSignModeError(clientCtx, searchResult, "/cosmos/tx/v1beta1/txs") + err = checkAminoMarshalError(clientCtx, searchResult, "/cosmos/tx/v1beta1/txs") if err != nil { rest.WriteErrorResponse(w, http.StatusInternalServerError, err.Error()) @@ -151,7 +151,7 @@ func QueryTxRequestHandlerFn(clientCtx client.Context) http.HandlerFunc { rest.WriteErrorResponse(w, http.StatusNotFound, fmt.Sprintf("no transaction found with hash %s", hashHexStr)) } - err = checkSignModeError(clientCtx, output, "/cosmos/tx/v1beta1/tx/{txhash}") + err = checkAminoMarshalError(clientCtx, output, "/cosmos/tx/v1beta1/txs/{txhash}") if err != nil { rest.WriteErrorResponse(w, http.StatusInternalServerError, err.Error()) @@ -198,9 +198,9 @@ func packStdTxResponse(w http.ResponseWriter, clientCtx client.Context, txRes *s return nil } -// checkSignModeError checks if there are errors with marshalling non-amino +// checkAminoMarshalError checks if there are errors with marshalling non-amino // txs with amino. -func checkSignModeError(ctx client.Context, resp interface{}, grpcEndPoint string) error { +func checkAminoMarshalError(ctx client.Context, resp interface{}, grpcEndPoint string) error { // LegacyAmino used intentionally here to handle the SignMode errors marshaler := ctx.LegacyAmino diff --git a/x/auth/client/rest/rest_test.go b/x/auth/client/rest/rest_test.go index 1d7cac6848b..fb9dd73d75f 100644 --- a/x/auth/client/rest/rest_test.go +++ b/x/auth/client/rest/rest_test.go @@ -21,7 +21,7 @@ import ( "github.com/cosmos/cosmos-sdk/types/tx/signing" authclient "github.com/cosmos/cosmos-sdk/x/auth/client" authcli "github.com/cosmos/cosmos-sdk/x/auth/client/cli" - rest2 "github.com/cosmos/cosmos-sdk/x/auth/client/rest" + authrest "github.com/cosmos/cosmos-sdk/x/auth/client/rest" "github.com/cosmos/cosmos-sdk/x/auth/legacy/legacytx" bankcli "github.com/cosmos/cosmos-sdk/x/bank/client/testutil" "github.com/cosmos/cosmos-sdk/x/bank/types" @@ -72,7 +72,7 @@ func (s *IntegrationTestSuite) TearDownSuite() { s.network.Cleanup() } -func mkTx() legacytx.StdTx { +func mkStdTx() legacytx.StdTx { // NOTE: this uses StdTx explicitly, don't migrate it! return legacytx.StdTx{ Msgs: []sdk.Msg{&types.MsgSend{}}, @@ -84,10 +84,49 @@ func mkTx() legacytx.StdTx { } } +// Create an IBC tx that's encoded as amino-JSON. Since we can't amino-marshal +// a tx with "cosmos-sdk/MsgTransfer" using the SDK, we just hardcode the tx +// here. But external clients might, see https://github.com/cosmos/cosmos-sdk/issues/8022. 
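// The fixture is posted verbatim to the legacy endpoints in the tests below, and the
// responses are expected to carry the Amino deprecation errors, e.g. (sketch):
//
//	res, err := rest.PostRequest(fmt.Sprintf("%s/txs/encode", val.APIAddress), "application/json", mkIBCStdTx())
//	s.Require().NoError(err)
//	s.Require().Contains(string(res), authrest.ErrEncodeDecode.Error())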
+func mkIBCStdTx() []byte { + ibcTx := `{ + "account_number": "68", + "chain_id": "stargate-4", + "fee": { + "amount": [ + { + "amount": "3500", + "denom": "umuon" + } + ], + "gas": "350000" + }, + "memo": "", + "msg": [ + { + "type": "cosmos-sdk/MsgTransfer", + "value": { + "receiver": "cosmos1q9wtnlwdjrhwtcjmt2uq77jrgx7z3usrq2yz7z", + "sender": "cosmos1q9wtnlwdjrhwtcjmt2uq77jrgx7z3usrq2yz7z", + "source_channel": "THEslipperCHANNEL", + "source_port": "transfer", + "token": { + "amount": "1000000", + "denom": "umuon" + } + } + } + ], + "sequence": "24" + }` + req := fmt.Sprintf(`{"tx":%s,"mode":"async"}`, ibcTx) + + return []byte(req) +} + func (s *IntegrationTestSuite) TestEncodeDecode() { var require = s.Require() val := s.network.Validators[0] - stdTx := mkTx() + stdTx := mkStdTx() // NOTE: this uses amino explicitly, don't migrate it! cdc := val.ClientCtx.LegacyAmino @@ -98,11 +137,11 @@ func (s *IntegrationTestSuite) TestEncodeDecode() { res, err := rest.PostRequest(fmt.Sprintf("%s/txs/encode", val.APIAddress), "application/json", bz) require.NoError(err) - var encodeResp rest2.EncodeResp + var encodeResp authrest.EncodeResp err = cdc.UnmarshalJSON(res, &encodeResp) require.NoError(err) - bz, err = cdc.MarshalJSON(rest2.DecodeReq{Tx: encodeResp.Tx}) + bz, err = cdc.MarshalJSON(authrest.DecodeReq{Tx: encodeResp.Tx}) require.NoError(err) res, err = rest.PostRequest(fmt.Sprintf("%s/txs/decode", val.APIAddress), "application/json", bz) @@ -111,14 +150,24 @@ func (s *IntegrationTestSuite) TestEncodeDecode() { var respWithHeight rest.ResponseWithHeight err = cdc.UnmarshalJSON(res, &respWithHeight) require.NoError(err) - var decodeResp rest2.DecodeResp + var decodeResp authrest.DecodeResp err = cdc.UnmarshalJSON(respWithHeight.Result, &decodeResp) require.NoError(err) require.Equal(stdTx, legacytx.StdTx(decodeResp)) } +func (s *IntegrationTestSuite) TestEncodeIBCTx() { + val := s.network.Validators[0] + + req := mkIBCStdTx() + res, err := rest.PostRequest(fmt.Sprintf("%s/txs/encode", val.APIAddress), "application/json", []byte(req)) + s.Require().NoError(err) + + s.Require().Contains(string(res), authrest.ErrEncodeDecode.Error()) +} + func (s *IntegrationTestSuite) TestBroadcastTxRequest() { - stdTx := mkTx() + stdTx := mkStdTx() // we just test with async mode because this tx will fail - all we care about is that it got encoded and broadcast correctly res, err := s.broadcastReq(stdTx, "async") @@ -130,6 +179,17 @@ func (s *IntegrationTestSuite) TestBroadcastTxRequest() { s.Require().NotEmpty(txRes.TxHash) } +func (s *IntegrationTestSuite) TestBroadcastIBCTxRequest() { + val := s.network.Validators[0] + + req := mkIBCStdTx() + res, err := rest.PostRequest(fmt.Sprintf("%s/txs", val.APIAddress), "application/json", []byte(req)) + s.Require().NoError(err) + + // Make sure the error message is correct. + s.Require().Contains(string(res), "this transaction cannot be broadcasted via legacy REST endpoints") +} + // Helper function to test querying txs. We will use it to query StdTx and service `Msg`s. func (s *IntegrationTestSuite) testQueryTx(txHeight int64, txHash, txRecipient string) { val0 := s.network.Validators[0] @@ -332,7 +392,7 @@ func (s *IntegrationTestSuite) broadcastReq(stdTx legacytx.StdTx, mode string) ( // NOTE: this uses amino explicitly, don't migrate it! 
cdc := val.ClientCtx.LegacyAmino - req := rest2.BroadcastReq{ + req := authrest.BroadcastReq{ Tx: stdTx, Mode: mode, } @@ -345,7 +405,7 @@ func (s *IntegrationTestSuite) broadcastReq(stdTx legacytx.StdTx, mode string) ( // testQueryIBCTx is a helper function to test querying txs which: // - show an error message on legacy REST endpoints // - succeed using gRPC -// In practise, we call this function on IBC txs. +// In practice, we call this function on IBC txs. func (s *IntegrationTestSuite) testQueryIBCTx(txRes sdk.TxResponse, cmd *cobra.Command, args []string) { val := s.network.Validators[0] @@ -381,7 +441,7 @@ func (s *IntegrationTestSuite) testQueryIBCTx(txRes sdk.TxResponse, cmd *cobra.C } // try fetching the txn using gRPC req, it will fetch info since it has proto codec. - grpcJSON, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/tx/v1beta1/tx/%s", val.APIAddress, txRes.TxHash)) + grpcJSON, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/tx/v1beta1/txs/%s", val.APIAddress, txRes.TxHash)) s.Require().NoError(err) var getTxRes txtypes.GetTxResponse @@ -401,7 +461,7 @@ func (s *IntegrationTestSuite) testQueryIBCTx(txRes sdk.TxResponse, cmd *cobra.C out, err = clitestutil.ExecTestCLICmd(val.ClientCtx, authcli.GetEncodeCommand(), []string{txFileName}) s.Require().NoError(err) - bz, err := val.ClientCtx.LegacyAmino.MarshalJSON(rest2.DecodeReq{Tx: string(out.Bytes())}) + bz, err := val.ClientCtx.LegacyAmino.MarshalJSON(authrest.DecodeReq{Tx: string(out.Bytes())}) s.Require().NoError(err) // try to decode the txn using legacy rest, it fails. @@ -453,7 +513,6 @@ func (s *IntegrationTestSuite) TestLegacyRestErrMessages() { "Successful IBC message", ibcsolomachinecli.NewCreateClientCmd(), []string{ - "21212121212", // dummy client-id "1", // dummy sequence consensusJSON.Name(), // path to consensus json, fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), diff --git a/x/auth/keeper/keeper.go b/x/auth/keeper/keeper.go index 75724f7d3c8..abc1cdabfc2 100644 --- a/x/auth/keeper/keeper.go +++ b/x/auth/keeper/keeper.go @@ -86,7 +86,7 @@ func NewAccountKeeper( // Logger returns a module-specific logger. func (ak AccountKeeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) + return ctx.Logger().With("module", "x/"+types.ModuleName) } // GetPubKey Returns the PubKey of the account at address diff --git a/x/auth/tx/service.go b/x/auth/tx/service.go index b12808a1640..853d401becc 100644 --- a/x/auth/tx/service.go +++ b/x/auth/tx/service.go @@ -49,32 +49,21 @@ const ( // TxsByEvents implements the ServiceServer.TxsByEvents RPC method. 
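// A minimal request sketch for the events-based query handled below (each entry must be of
// the form "{eventType}.{eventAttribute}={value}"; `svc` is a ServiceClient and `query` is
// the types/query package):
//
//	res, err := svc.GetTxsEvent(context.Background(), &txtypes.GetTxsEventRequest{
//		Events:     []string{"message.action=send", "message.module=bank"},
//		Pagination: &query.PageRequest{Limit: 10},
//	})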
func (s txServer) GetTxsEvent(ctx context.Context, req *txtypes.GetTxsEventRequest) (*txtypes.GetTxsEventResponse, error) { - offset := int(req.Pagination.Offset) - limit := int(req.Pagination.Limit) - if offset < 0 { - return nil, status.Error(codes.InvalidArgument, "offset must greater than 0") - } - if len(req.Event) == 0 { - return nil, status.Error(codes.InvalidArgument, "must declare at least one event to search") + if req == nil { + return nil, status.Error(codes.InvalidArgument, "request cannot be nil") } - if limit < 0 { - return nil, status.Error(codes.InvalidArgument, "limit must greater than 0") - } else if limit == 0 { - limit = pagination.DefaultLimit + page, limit, err := pagination.ParsePagination(req.Pagination) + if err != nil { + return nil, err } - page := offset/limit + 1 - - var events []string - - if strings.Contains(req.Event, "&") { - events = strings.Split(req.Event, "&") - } else { - events = append(events, req.Event) + if len(req.Events) == 0 { + return nil, status.Error(codes.InvalidArgument, "must declare at least one event to search") } - tmEvents := make([]string, len(events)) - for i, event := range events { + + tmEvents := make([]string, len(req.Events)) + for i, event := range req.Events { if !strings.Contains(event, "=") { return nil, status.Error(codes.InvalidArgument, fmt.Sprintf("invalid event; event %s should be of the format: %s", event, eventFormat)) } else if strings.Count(event, "=") > 1 { @@ -128,7 +117,7 @@ func (s txServer) GetTxsEvent(ctx context.Context, req *txtypes.GetTxsEventReque // Simulate implements the ServiceServer.Simulate RPC method. func (s txServer) Simulate(ctx context.Context, req *txtypes.SimulateRequest) (*txtypes.SimulateResponse, error) { - if req.Tx == nil { + if req == nil || req.Tx == nil { return nil, status.Error(codes.InvalidArgument, "invalid empty tx") } @@ -154,6 +143,10 @@ func (s txServer) Simulate(ctx context.Context, req *txtypes.SimulateRequest) (* // GetTx implements the ServiceServer.GetTx RPC method. func (s txServer) GetTx(ctx context.Context, req *txtypes.GetTxRequest) (*txtypes.GetTxResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "request cannot be nil") + } + // We get hash as a hex string in the request, convert it to bytes. hash, err := hex.DecodeString(req.Hash) if err != nil { @@ -185,6 +178,10 @@ func (s txServer) GetTx(ctx context.Context, req *txtypes.GetTxRequest) (*txtype }, nil } +func (s txServer) BroadcastTx(ctx context.Context, req *txtypes.BroadcastTxRequest) (*txtypes.BroadcastTxResponse, error) { + return client.TxServiceBroadcast(ctx, s.clientCtx, req) +} + // RegisterTxService registers the tx service on the gRPC router. 
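// Alongside the gRPC registration below, the same methods are exposed over HTTP by the
// gRPC-gateway handlers in types/tx/service.pb.gw.go; the routes are, roughly:
//
//	POST {API}/cosmos/tx/v1beta1/simulate
//	GET  {API}/cosmos/tx/v1beta1/txs/{hash}
//	GET  {API}/cosmos/tx/v1beta1/txs?events=message.action=send&pagination.limit=10
//	POST {API}/cosmos/tx/v1beta1/txs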
func RegisterTxService( qrt gogogrpc.Server, diff --git a/x/auth/tx/service_test.go b/x/auth/tx/service_test.go index a1bacb55cb4..341684acbe2 100644 --- a/x/auth/tx/service_test.go +++ b/x/auth/tx/service_test.go @@ -31,6 +31,7 @@ type IntegrationTestSuite struct { network *network.Network queryClient tx.ServiceClient + txRes sdk.TxResponse } func (s *IntegrationTestSuite) SetupSuite() { @@ -41,67 +42,14 @@ func (s *IntegrationTestSuite) SetupSuite() { s.cfg = cfg s.network = network.New(s.T(), cfg) - s.Require().NotNil(s.network) - _, err := s.network.WaitForHeight(1) - s.Require().NoError(err) - - s.queryClient = tx.NewServiceClient(s.network.Validators[0].ClientCtx) -} - -func (s *IntegrationTestSuite) TearDownSuite() { - s.T().Log("tearing down integration test suite") - s.network.Cleanup() -} - -func (s IntegrationTestSuite) TestSimulate() { val := s.network.Validators[0] - // prepare txBuilder with msg - txBuilder := val.ClientCtx.TxConfig.NewTxBuilder() - feeAmount := sdk.Coins{sdk.NewInt64Coin(s.cfg.BondDenom, 10)} - gasLimit := testdata.NewTestGasLimit() - s.Require().NoError( - txBuilder.SetMsgs(&banktypes.MsgSend{ - FromAddress: val.Address.String(), - ToAddress: val.Address.String(), - Amount: sdk.Coins{sdk.NewInt64Coin(s.cfg.BondDenom, 10)}, - }), - ) - txBuilder.SetFeeAmount(feeAmount) - txBuilder.SetGasLimit(gasLimit) - txBuilder.SetMemo("foobar") - - // setup txFactory - txFactory := clienttx.Factory{}. - WithChainID(val.ClientCtx.ChainID). - WithKeybase(val.ClientCtx.Keyring). - WithTxConfig(val.ClientCtx.TxConfig). - WithSignMode(signing.SignMode_SIGN_MODE_DIRECT) - - // Sign Tx. - err := authclient.SignTx(txFactory, val.ClientCtx, val.Moniker, txBuilder, false) - s.Require().NoError(err) - - // Convert the txBuilder to a tx.Tx. - protoTx, err := txBuilderToProtoTx(txBuilder) - s.Require().NoError(err) - - // Run the simulate gRPC query. - res, err := s.queryClient.Simulate( - context.Background(), - &tx.SimulateRequest{Tx: protoTx}, - ) + _, err := s.network.WaitForHeight(1) s.Require().NoError(err) - // Check the result and gas used are correct. - s.Require().Equal(len(res.GetResult().GetEvents()), 4) // 1 transfer, 3 messages. - s.Require().True(res.GetGasInfo().GetGasUsed() > 0) // Gas used sometimes change, just check it's not empty. -} - -func (s IntegrationTestSuite) TestGetTxEvents() { - val := s.network.Validators[0] + s.queryClient = tx.NewServiceClient(val.ClientCtx) // Create a new MsgSend tx from val to itself. out, err := bankcli.MsgSendExec( @@ -118,83 +66,394 @@ func (s IntegrationTestSuite) TestGetTxEvents() { fmt.Sprintf("--%s=foobar", flags.FlagMemo), ) s.Require().NoError(err) - var txRes sdk.TxResponse - s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(out.Bytes(), &txRes)) - s.Require().Equal(uint32(0), txRes.Code) + s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(out.Bytes(), &s.txRes)) + s.Require().Equal(uint32(0), s.txRes.Code) s.Require().NoError(s.network.WaitForNextBlock()) +} - // Query the tx via gRPC. - grpcRes, err := s.queryClient.GetTxsEvent( - context.Background(), - &tx.GetTxsEventRequest{Event: "message.action=send", - Pagination: &query.PageRequest{ - CountTotal: false, - Offset: 0, - Limit: 1, - }, - }, - ) +func (s *IntegrationTestSuite) TearDownSuite() { + s.T().Log("tearing down integration test suite") + s.network.Cleanup() +} + +func (s IntegrationTestSuite) TestSimulateTx_GRPC() { + txBuilder := s.mkTxBuilder() + // Convert the txBuilder to a tx.Tx. 
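// (txBuilderToProtoTx, defined at the bottom of this file, type-asserts the builder to
// codectypes.IntoAny in order to recover the underlying proto tx.Tx.)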
+ protoTx, err := txBuilderToProtoTx(txBuilder) s.Require().NoError(err) - s.Require().Equal(len(grpcRes.Txs), 1) - s.Require().Equal("foobar", grpcRes.Txs[0].Body.Memo) - // Query the tx via grpc-gateway. - restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/tx/v1beta1/txs?event=%s&pagination.offset=%d&pagination.limit=%d", val.APIAddress, "message.action=send", 0, 1)) + testCases := []struct { + name string + req *tx.SimulateRequest + expErr bool + expErrMsg string + }{ + {"nil request", nil, true, "request cannot be nil"}, + {"empty request", &tx.SimulateRequest{}, true, "invalid empty tx"}, + {"valid request", &tx.SimulateRequest{Tx: protoTx}, false, ""}, + } + + for _, tc := range testCases { + tc := tc + s.Run(tc.name, func() { + // Broadcast the tx via gRPC via the validator's clientCtx (which goes + // through Tendermint). + res, err := s.queryClient.Simulate(context.Background(), tc.req) + if tc.expErr { + s.Require().Error(err) + s.Require().Contains(err.Error(), tc.expErrMsg) + } else { + s.Require().NoError(err) + // Check the result and gas used are correct. + s.Require().Equal(len(res.GetResult().GetEvents()), 4) // 1 transfer, 3 messages. + s.Require().True(res.GetGasInfo().GetGasUsed() > 0) // Gas used sometimes change, just check it's not empty. + } + }) + } +} + +func (s IntegrationTestSuite) TestSimulateTx_GRPCGateway() { + val := s.network.Validators[0] + txBuilder := s.mkTxBuilder() + // Convert the txBuilder to a tx.Tx. + protoTx, err := txBuilderToProtoTx(txBuilder) s.Require().NoError(err) - var getTxRes tx.GetTxsEventResponse - s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &getTxRes)) - s.Require().Equal(len(grpcRes.Txs), 1) - s.Require().Equal("foobar", getTxRes.Txs[0].Body.Memo) - s.Require().NotZero(grpcRes.TxResponses[0].Height) + + testCases := []struct { + name string + req *tx.SimulateRequest + expErr bool + expErrMsg string + }{ + {"empty request", &tx.SimulateRequest{}, true, "invalid empty tx"}, + {"valid request", &tx.SimulateRequest{Tx: protoTx}, false, ""}, + } + + for _, tc := range testCases { + s.Run(tc.name, func() { + req, err := val.ClientCtx.JSONMarshaler.MarshalJSON(tc.req) + s.Require().NoError(err) + res, err := rest.PostRequest(fmt.Sprintf("%s/cosmos/tx/v1beta1/simulate", val.APIAddress), "application/json", req) + s.Require().NoError(err) + if tc.expErr { + s.Require().Contains(string(res), tc.expErrMsg) + } else { + var result tx.SimulateResponse + err = val.ClientCtx.JSONMarshaler.UnmarshalJSON(res, &result) + s.Require().NoError(err) + // Check the result and gas used are correct. + s.Require().Equal(len(result.GetResult().GetEvents()), 4) // 1 transfer, 3 messages. + s.Require().True(result.GetGasInfo().GetGasUsed() > 0) // Gas used sometimes change, just check it's not empty. 
+ } + }) + } } -func (s IntegrationTestSuite) TestGetTx() { +func (s IntegrationTestSuite) TestGetTxEvents_GRPC() { + testCases := []struct { + name string + req *tx.GetTxsEventRequest + expErr bool + expErrMsg string + }{ + { + "nil request", + nil, + true, "request cannot be nil", + }, + { + "empty request", + &tx.GetTxsEventRequest{}, + true, "must declare at least one event to search", + }, + { + "request with dummy event", + &tx.GetTxsEventRequest{Events: []string{"foobar"}}, + true, "event foobar should be of the format: {eventType}.{eventAttribute}={value}", + }, + { + "without pagination", + &tx.GetTxsEventRequest{ + Events: []string{"message.action=send"}, + }, + false, "", + }, + { + "with pagination", + &tx.GetTxsEventRequest{ + Events: []string{"message.action=send"}, + Pagination: &query.PageRequest{ + CountTotal: false, + Offset: 0, + Limit: 1, + }, + }, + false, "", + }, + { + "with multi events", + &tx.GetTxsEventRequest{ + Events: []string{"message.action=send", "message.module=bank"}, + }, + false, "", + }, + } + for _, tc := range testCases { + s.Run(tc.name, func() { + // Query the tx via gRPC. + grpcRes, err := s.queryClient.GetTxsEvent(context.Background(), tc.req) + if tc.expErr { + s.Require().Error(err) + s.Require().Contains(err.Error(), tc.expErrMsg) + } else { + s.Require().NoError(err) + s.Require().GreaterOrEqual(len(grpcRes.Txs), 1) + s.Require().Equal("foobar", grpcRes.Txs[0].Body.Memo) + } + }) + } +} + +func (s IntegrationTestSuite) TestGetTxEvents_GRPCGateway() { val := s.network.Validators[0] + testCases := []struct { + name string + url string + expErr bool + expErrMsg string + }{ + { + "empty params", + fmt.Sprintf("%s/cosmos/tx/v1beta1/txs", val.APIAddress), + true, + "must declare at least one event to search", + }, + { + "without pagination", + fmt.Sprintf("%s/cosmos/tx/v1beta1/txs?events=%s", val.APIAddress, "message.action=send"), + false, + "", + }, + { + "with pagination", + fmt.Sprintf("%s/cosmos/tx/v1beta1/txs?events=%s&pagination.offset=%d&pagination.limit=%d", val.APIAddress, "message.action=send", 0, 10), + false, + "", + }, + { + "expect pass with multiple-events", + fmt.Sprintf("%s/cosmos/tx/v1beta1/txs?events=%s&events=%s", val.APIAddress, "message.action=send", "message.module=bank"), + false, + "", + }, + { + "expect pass with escape event", + fmt.Sprintf("%s/cosmos/tx/v1beta1/txs?events=%s", val.APIAddress, "message.action%3Dsend"), + false, + "", + }, + } + for _, tc := range testCases { + s.Run(tc.name, func() { + res, err := rest.GetRequest(tc.url) + s.Require().NoError(err) + if tc.expErr { + s.Require().Contains(string(res), tc.expErrMsg) + } else { + var result tx.GetTxsEventResponse + err = val.ClientCtx.JSONMarshaler.UnmarshalJSON(res, &result) + s.Require().NoError(err) + s.Require().GreaterOrEqual(len(result.Txs), 1) + s.Require().Equal("foobar", result.Txs[0].Body.Memo) + s.Require().NotZero(result.TxResponses[0].Height) + } + }) + } +} - // Create a new MsgSend tx from val to itself. 
- out, err := bankcli.MsgSendExec( - val.ClientCtx, - val.Address, - val.Address, - sdk.NewCoins( - sdk.NewCoin(s.cfg.BondDenom, sdk.NewInt(10)), - ), - fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), - fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastBlock), - fmt.Sprintf("--%s=%s", flags.FlagFees, sdk.NewCoins(sdk.NewCoin(s.cfg.BondDenom, sdk.NewInt(10))).String()), - fmt.Sprintf("--gas=%d", flags.DefaultGasLimit), - fmt.Sprintf("--%s=foobar", flags.FlagMemo), - ) - s.Require().NoError(err) - var txRes sdk.TxResponse - s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(out.Bytes(), &txRes)) - s.Require().Equal(uint32(0), txRes.Code) +func (s IntegrationTestSuite) TestGetTx_GRPC() { + testCases := []struct { + name string + req *tx.GetTxRequest + expErr bool + expErrMsg string + }{ + {"nil request", nil, true, "request cannot be nil"}, + {"empty request", &tx.GetTxRequest{}, true, "transaction hash cannot be empty"}, + {"request with dummy hash", &tx.GetTxRequest{Hash: "deadbeef"}, true, "tx (DEADBEEF) not found"}, + {"good request", &tx.GetTxRequest{Hash: s.txRes.TxHash}, false, ""}, + } + for _, tc := range testCases { + s.Run(tc.name, func() { + // Query the tx via gRPC. + grpcRes, err := s.queryClient.GetTx(context.Background(), tc.req) + if tc.expErr { + s.Require().Error(err) + s.Require().Contains(err.Error(), tc.expErrMsg) + } else { + s.Require().NoError(err) + s.Require().Equal("foobar", grpcRes.Tx.Body.Memo) + } + }) + } +} - s.Require().NoError(s.network.WaitForNextBlock()) +func (s IntegrationTestSuite) TestGetTx_GRPCGateway() { + val := s.network.Validators[0] + testCases := []struct { + name string + url string + expErr bool + expErrMsg string + }{ + { + "empty params", + fmt.Sprintf("%s/cosmos/tx/v1beta1/txs/", val.APIAddress), + true, "transaction hash cannot be empty", + }, + { + "dummy hash", + fmt.Sprintf("%s/cosmos/tx/v1beta1/txs/%s", val.APIAddress, "deadbeef"), + true, "tx (DEADBEEF) not found", + }, + { + "good hash", + fmt.Sprintf("%s/cosmos/tx/v1beta1/txs/%s", val.APIAddress, s.txRes.TxHash), + false, "", + }, + } + for _, tc := range testCases { + s.Run(tc.name, func() { + res, err := rest.GetRequest(tc.url) + s.Require().NoError(err) + if tc.expErr { + s.Require().Contains(string(res), tc.expErrMsg) + } else { + var result tx.GetTxResponse + err = val.ClientCtx.JSONMarshaler.UnmarshalJSON(res, &result) + s.Require().NoError(err) + s.Require().Equal("foobar", result.Tx.Body.Memo) + s.Require().NotZero(result.TxResponse.Height) + } + }) + } +} - // Query the tx via gRPC. - grpcRes, err := s.queryClient.GetTx( - context.Background(), - &tx.GetTxRequest{Hash: txRes.TxHash}, - ) +func (s IntegrationTestSuite) TestBroadcastTx_GRPC() { + val := s.network.Validators[0] + txBuilder := s.mkTxBuilder() + txBytes, err := val.ClientCtx.TxConfig.TxEncoder()(txBuilder.GetTx()) s.Require().NoError(err) - s.Require().Equal("foobar", grpcRes.Tx.Body.Memo) - s.Require().NotZero(grpcRes.TxResponse.Height) - // Query the tx via grpc-gateway. 
- restRes, err := rest.GetRequest(fmt.Sprintf("%s/cosmos/tx/v1beta1/tx/%s", val.APIAddress, txRes.TxHash)) + testCases := []struct { + name string + req *tx.BroadcastTxRequest + expErr bool + expErrMsg string + }{ + {"nil request", nil, true, "request cannot be nil"}, + {"empty request", &tx.BroadcastTxRequest{}, true, "invalid empty tx"}, + {"no mode", &tx.BroadcastTxRequest{ + TxBytes: txBytes, + }, true, "supported types: sync, async, block"}, + {"valid request", &tx.BroadcastTxRequest{ + Mode: tx.BroadcastMode_BROADCAST_MODE_SYNC, + TxBytes: txBytes, + }, false, ""}, + } + + for _, tc := range testCases { + tc := tc + s.Run(tc.name, func() { + // Broadcast the tx via gRPC via the validator's clientCtx (which goes + // through Tendermint). + grpcRes, err := s.queryClient.BroadcastTx(context.Background(), tc.req) + if tc.expErr { + s.Require().Error(err) + s.Require().Contains(err.Error(), tc.expErrMsg) + } else { + s.Require().NoError(err) + s.Require().Equal(uint32(0), grpcRes.TxResponse.Code) + } + }) + } +} + +func (s IntegrationTestSuite) TestBroadcastTx_GRPCGateway() { + val := s.network.Validators[0] + txBuilder := s.mkTxBuilder() + txBytes, err := val.ClientCtx.TxConfig.TxEncoder()(txBuilder.GetTx()) s.Require().NoError(err) - var getTxRes tx.GetTxResponse - s.Require().NoError(val.ClientCtx.JSONMarshaler.UnmarshalJSON(restRes, &getTxRes)) - s.Require().Equal("foobar", grpcRes.Tx.Body.Memo) - s.Require().NotZero(grpcRes.TxResponse.Height) + + testCases := []struct { + name string + req *tx.BroadcastTxRequest + expErr bool + expErrMsg string + }{ + {"empty request", &tx.BroadcastTxRequest{}, true, "invalid empty tx"}, + {"no mode", &tx.BroadcastTxRequest{TxBytes: txBytes}, true, "supported types: sync, async, block"}, + {"valid request", &tx.BroadcastTxRequest{ + Mode: tx.BroadcastMode_BROADCAST_MODE_SYNC, + TxBytes: txBytes, + }, false, ""}, + } + + for _, tc := range testCases { + s.Run(tc.name, func() { + req, err := val.ClientCtx.JSONMarshaler.MarshalJSON(tc.req) + s.Require().NoError(err) + res, err := rest.PostRequest(fmt.Sprintf("%s/cosmos/tx/v1beta1/txs", val.APIAddress), "application/json", req) + s.Require().NoError(err) + if tc.expErr { + s.Require().Contains(string(res), tc.expErrMsg) + } else { + var result tx.BroadcastTxResponse + err = val.ClientCtx.JSONMarshaler.UnmarshalJSON(res, &result) + s.Require().NoError(err) + s.Require().Equal(uint32(0), result.TxResponse.Code) + } + }) + } } func TestIntegrationTestSuite(t *testing.T) { suite.Run(t, new(IntegrationTestSuite)) } +func (s IntegrationTestSuite) mkTxBuilder() client.TxBuilder { + val := s.network.Validators[0] + s.Require().NoError(s.network.WaitForNextBlock()) + + // prepare txBuilder with msg + txBuilder := val.ClientCtx.TxConfig.NewTxBuilder() + feeAmount := sdk.Coins{sdk.NewInt64Coin(s.cfg.BondDenom, 10)} + gasLimit := testdata.NewTestGasLimit() + s.Require().NoError( + txBuilder.SetMsgs(&banktypes.MsgSend{ + FromAddress: val.Address.String(), + ToAddress: val.Address.String(), + Amount: sdk.Coins{sdk.NewInt64Coin(s.cfg.BondDenom, 10)}, + }), + ) + txBuilder.SetFeeAmount(feeAmount) + txBuilder.SetGasLimit(gasLimit) + + // setup txFactory + txFactory := clienttx.Factory{}. + WithChainID(val.ClientCtx.ChainID). + WithKeybase(val.ClientCtx.Keyring). + WithTxConfig(val.ClientCtx.TxConfig). + WithSignMode(signing.SignMode_SIGN_MODE_DIRECT) + + // Sign Tx. 
+ err := authclient.SignTx(txFactory, val.ClientCtx, val.Moniker, txBuilder, false) + s.Require().NoError(err) + + return txBuilder +} + // txBuilderToProtoTx converts a txBuilder into a proto tx.Tx. func txBuilderToProtoTx(txBuilder client.TxBuilder) (*tx.Tx, error) { // nolint intoAnyTx, ok := txBuilder.(codectypes.IntoAny) diff --git a/x/bank/keeper/genesis.go b/x/bank/keeper/genesis.go index 7397d8d5b6e..d30415c6a28 100644 --- a/x/bank/keeper/genesis.go +++ b/x/bank/keeper/genesis.go @@ -8,7 +8,7 @@ import ( ) // InitGenesis initializes the bank module's state from a given genesis state. -func (k BaseKeeper) InitGenesis(ctx sdk.Context, genState types.GenesisState) { +func (k BaseKeeper) InitGenesis(ctx sdk.Context, genState *types.GenesisState) { k.SetParams(ctx, genState.Params) var totalSupply sdk.Coins @@ -32,6 +32,10 @@ func (k BaseKeeper) InitGenesis(ctx sdk.Context, genState types.GenesisState) { } k.SetSupply(ctx, types.NewSupply(genState.Supply)) + + for _, meta := range genState.DenomMetadata { + k.SetDenomMetaData(ctx, meta) + } } // ExportGenesis returns the bank module's genesis state. diff --git a/x/bank/keeper/genesis_test.go b/x/bank/keeper/genesis_test.go index 8bd46d68171..bdb8cdc304e 100644 --- a/x/bank/keeper/genesis_test.go +++ b/x/bank/keeper/genesis_test.go @@ -37,8 +37,20 @@ func (suite *IntegrationTestSuite) getTestBalances() []types.Balance { addr2, _ := sdk.AccAddressFromBech32("cosmos1f9xjhxm0plzrh9cskf4qee4pc2xwp0n0556gh0") addr1, _ := sdk.AccAddressFromBech32("cosmos1fl48vsnmsdzcv85q5d2q4z5ajdha8yu34mf0eh") return []types.Balance{ - {addr2.String(), sdk.Coins{sdk.NewInt64Coin("testcoin1", 32), sdk.NewInt64Coin("testcoin2", 34)}}, - {addr1.String(), sdk.Coins{sdk.NewInt64Coin("testcoin3", 10)}}, + {Address: addr2.String(), Coins: sdk.Coins{sdk.NewInt64Coin("testcoin1", 32), sdk.NewInt64Coin("testcoin2", 34)}}, + {Address: addr1.String(), Coins: sdk.Coins{sdk.NewInt64Coin("testcoin3", 10)}}, } } + +func (suite *IntegrationTestSuite) TestInitGenesis() { + require := suite.Require() + m := types.Metadata{Description: sdk.DefaultBondDenom, Base: sdk.DefaultBondDenom, Display: sdk.DefaultBondDenom} + g := types.DefaultGenesisState() + g.DenomMetadata = []types.Metadata{m} + bk := suite.app.BankKeeper + bk.InitGenesis(suite.ctx, g) + + m2 := bk.GetDenomMetaData(suite.ctx, m.Base) + require.Equal(m, m2) +} diff --git a/x/bank/keeper/keeper.go b/x/bank/keeper/keeper.go index 6594398aa51..7661c1d3157 100644 --- a/x/bank/keeper/keeper.go +++ b/x/bank/keeper/keeper.go @@ -1,7 +1,6 @@ package keeper import ( - "fmt" "time" "github.com/cosmos/cosmos-sdk/codec" @@ -22,7 +21,7 @@ var _ Keeper = (*BaseKeeper)(nil) type Keeper interface { SendKeeper - InitGenesis(sdk.Context, types.GenesisState) + InitGenesis(sdk.Context, *types.GenesisState) ExportGenesis(sdk.Context) *types.GenesisState GetSupply(ctx sdk.Context) exported.SupplyI @@ -186,7 +185,6 @@ func (k BaseKeeper) GetDenomMetaData(ctx sdk.Context, denom string) types.Metada store = prefix.NewStore(store, types.DenomMetadataKey(denom)) bz := store.Get([]byte(denom)) - var metadata types.Metadata k.cdc.MustUnmarshalBinaryBare(bz, &metadata) @@ -342,7 +340,7 @@ func (k BaseKeeper) MintCoins(ctx sdk.Context, moduleName string, amt sdk.Coins) k.SetSupply(ctx, supply) logger := k.Logger(ctx) - logger.Info(fmt.Sprintf("minted %s from %s module account", amt.String(), moduleName)) + logger.Info("minted coins from module account", "amount", amt.String(), "from", moduleName) return nil } @@ -370,7 +368,7 @@ func (k BaseKeeper) 
BurnCoins(ctx sdk.Context, moduleName string, amt sdk.Coins) k.SetSupply(ctx, supply) logger := k.Logger(ctx) - logger.Info(fmt.Sprintf("burned %s from %s module account", amt.String(), moduleName)) + logger.Info("burned tokens from module account", "amount", amt.String(), "from", moduleName) return nil } diff --git a/x/bank/keeper/view.go b/x/bank/keeper/view.go index 2c4c4239bc2..d4bcabad2b6 100644 --- a/x/bank/keeper/view.go +++ b/x/bank/keeper/view.go @@ -49,7 +49,7 @@ func NewBaseViewKeeper(cdc codec.BinaryMarshaler, storeKey sdk.StoreKey, ak type // Logger returns a module-specific logger. func (k BaseViewKeeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) + return ctx.Logger().With("module", "x/"+types.ModuleName) } // HasBalance returns whether or not an account has at least amt balance. diff --git a/x/bank/module.go b/x/bank/module.go index 32f7431f26f..bc998634c12 100644 --- a/x/bank/module.go +++ b/x/bank/module.go @@ -52,7 +52,7 @@ func (AppModuleBasic) DefaultGenesis(cdc codec.JSONMarshaler) json.RawMessage { } // ValidateGenesis performs genesis state validation for the bank module. -func (AppModuleBasic) ValidateGenesis(cdc codec.JSONMarshaler, config client.TxEncodingConfig, bz json.RawMessage) error { +func (AppModuleBasic) ValidateGenesis(cdc codec.JSONMarshaler, _ client.TxEncodingConfig, bz json.RawMessage) error { var data types.GenesisState if err := cdc.UnmarshalJSON(bz, &data); err != nil { return fmt.Errorf("failed to unmarshal %s genesis state: %w", types.ModuleName, err) @@ -140,7 +140,7 @@ func (am AppModule) InitGenesis(ctx sdk.Context, cdc codec.JSONMarshaler, data j cdc.MustUnmarshalJSON(data, &genesisState) telemetry.MeasureSince(start, "InitGenesis", "crisis", "unmarshal") - am.keeper.InitGenesis(ctx, genesisState) + am.keeper.InitGenesis(ctx, &genesisState) return []abci.ValidatorUpdate{} } @@ -170,7 +170,7 @@ func (AppModule) GenerateGenesisState(simState *module.SimulationState) { } // ProposalContents doesn't return any content functions for governance proposals. -func (AppModule) ProposalContents(simState module.SimulationState) []simtypes.WeightedProposalContent { +func (AppModule) ProposalContents(_ module.SimulationState) []simtypes.WeightedProposalContent { return nil } diff --git a/x/crisis/keeper/keeper.go b/x/crisis/keeper/keeper.go index 8a78c936c3f..818e272d903 100644 --- a/x/crisis/keeper/keeper.go +++ b/x/crisis/keeper/keeper.go @@ -44,7 +44,7 @@ func NewKeeper( // Logger returns a module-specific logger. func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) + return ctx.Logger().With("module", "x/"+types.ModuleName) } // RegisterRoute register the routes for each of the invariants @@ -76,7 +76,7 @@ func (k Keeper) AssertInvariants(ctx sdk.Context) { invarRoutes := k.Routes() n := len(invarRoutes) for i, ir := range invarRoutes { - logger.Debug("Asserting cirisis invariants", "inv", fmt.Sprint(i, "/", n)) + logger.Info("asserting cirisis invariants", "inv", fmt.Sprint(i, "/", n)) if res, stop := ir.Invar(ctx); stop { // TODO: Include app name as part of context to allow for this to be // variable. 
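The hunks above and below all apply the same logging change: messages built with `fmt.Sprintf` are replaced by a constant message plus key/value pairs, and the module `Logger` helpers drop `fmt.Sprintf` in favour of plain string concatenation. A minimal sketch of the pattern follows, using Tendermint's `log.Logger` interface; `exampleKeeper`, `mintCoins`, and the field names are illustrative stand-ins, not code taken from this diff.

```go
package keeper

import (
	"fmt"

	"github.com/tendermint/tendermint/libs/log"
)

// exampleKeeper is a stand-in for any module keeper; only the logger matters here.
type exampleKeeper struct {
	logger log.Logger
}

func (k exampleKeeper) mintCoins(amount, module string) {
	// Old style: the whole message is one interpolated string, so an external
	// log collector cannot filter on the amount or the module name.
	k.logger.Info(fmt.Sprintf("minted %s from %s module account", amount, module))

	// New style: a constant message plus key/value pairs, which a structured
	// (e.g. JSON) logger can emit as discrete, queryable fields.
	k.logger.Info("minted coins from module account", "amount", amount, "from", module)
}
```

The key/value form is what makes the per-module `Logger(ctx).With("module", "x/"+types.ModuleName)` helpers useful downstream: the module name arrives as its own field rather than as part of an opaque message string.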
diff --git a/x/distribution/keeper/delegation.go b/x/distribution/keeper/delegation.go index 7d1e66611b8..61631da5dc2 100644 --- a/x/distribution/keeper/delegation.go +++ b/x/distribution/keeper/delegation.go @@ -152,9 +152,13 @@ func (k Keeper) withdrawDelegationRewards(ctx sdk.Context, val stakingtypes.Vali rewards := rewardsRaw.Intersect(outstanding) if !rewards.IsEqual(rewardsRaw) { logger := k.Logger(ctx) - logger.Info(fmt.Sprintf("missing rewards rounding error, delegator %v"+ - "withdrawing rewards from validator %v, should have received %v, got %v", - val.GetOperator(), del.GetDelegatorAddr(), rewardsRaw, rewards)) + logger.Info( + "rounding error withdrawing rewards from validator", + "delegator", del.GetDelegatorAddr().String(), + "validator", val.GetOperator().String(), + "got", rewards.String(), + "expected", rewardsRaw.String(), + ) } // truncate coins, return remainder to community pool diff --git a/x/distribution/keeper/keeper.go b/x/distribution/keeper/keeper.go index 0c2f25c83af..0d5c32f78eb 100644 --- a/x/distribution/keeper/keeper.go +++ b/x/distribution/keeper/keeper.go @@ -57,7 +57,7 @@ func NewKeeper( // Logger returns a module-specific logger. func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) + return ctx.Logger().With("module", "x/"+types.ModuleName) } // SetWithdrawAddr sets a new address that will receive the rewards upon withdrawal diff --git a/x/distribution/keeper/proposal_handler.go b/x/distribution/keeper/proposal_handler.go index 9f9c4ec9e4a..d96bfc64908 100644 --- a/x/distribution/keeper/proposal_handler.go +++ b/x/distribution/keeper/proposal_handler.go @@ -1,8 +1,6 @@ package keeper import ( - "fmt" - sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/cosmos/cosmos-sdk/x/distribution/types" @@ -13,16 +11,19 @@ func HandleCommunityPoolSpendProposal(ctx sdk.Context, k Keeper, p *types.Commun if k.blockedAddrs[p.Recipient] { return sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, "%s is not allowed to receive external funds", p.Recipient) } + recipient, addrErr := sdk.AccAddressFromBech32(p.Recipient) if addrErr != nil { return addrErr } + err := k.DistributeFromFeePool(ctx, p.Amount, recipient) if err != nil { return err } logger := k.Logger(ctx) - logger.Info(fmt.Sprintf("transferred %s from the community pool to recipient %s", p.Amount, p.Recipient)) + logger.Info("transferred from the community pool to recipient", "amount", p.Amount.String(), "recipient", p.Recipient) + return nil } diff --git a/x/evidence/client/cli/cli_test.go b/x/evidence/client/cli/cli_test.go new file mode 100644 index 00000000000..50ebfaa7aa1 --- /dev/null +++ b/x/evidence/client/cli/cli_test.go @@ -0,0 +1,80 @@ +package cli_test + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/suite" + + clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" + testnet "github.com/cosmos/cosmos-sdk/testutil/network" + "github.com/cosmos/cosmos-sdk/x/evidence/client/cli" +) + +type IntegrationTestSuite struct { + suite.Suite + + cfg testnet.Config + network *testnet.Network +} + +func (s *IntegrationTestSuite) SetupSuite() { + s.T().Log("setting up integration test suite") + + cfg := testnet.DefaultConfig() + cfg.NumValidators = 1 + + s.cfg = cfg + s.network = testnet.New(s.T(), cfg) + + _, err := s.network.WaitForHeight(1) + s.Require().NoError(err) +} + +func (s *IntegrationTestSuite) TearDownSuite() { + s.T().Log("tearing down integration test 
suite") + s.network.Cleanup() +} + +func TestIntegrationTestSuite(t *testing.T) { + suite.Run(t, new(IntegrationTestSuite)) +} + +func (s *IntegrationTestSuite) TestGetQueryCmd() { + val := s.network.Validators[0] + + testCases := map[string]struct { + args []string + expectedOutput string + expectErr bool + }{ + "non-existant evidence": { + []string{"DF0C23E8634E480F84B9D5674A7CDC9816466DEC28A3358F73260F68D28D7660"}, + "evidence DF0C23E8634E480F84B9D5674A7CDC9816466DEC28A3358F73260F68D28D7660 not found", + true, + }, + "all evidence (default pagination)": { + []string{}, + "evidence: []\npagination:\n next_key: null\n total: \"0\"", + false, + }, + } + + for name, tc := range testCases { + tc := tc + + s.Run(name, func() { + cmd := cli.GetQueryCmd() + clientCtx := val.ClientCtx + + out, err := clitestutil.ExecTestCLICmd(clientCtx, cmd, tc.args) + if tc.expectErr { + s.Require().Error(err) + } else { + s.Require().NoError(err) + } + + s.Require().Contains(strings.TrimSpace(out.String()), tc.expectedOutput) + }) + } +} diff --git a/x/evidence/client/cli/query.go b/x/evidence/client/cli/query.go index 31365648558..c6ec9f51793 100644 --- a/x/evidence/client/cli/query.go +++ b/x/evidence/client/cli/query.go @@ -47,18 +47,14 @@ $ %s query %s --page=2 --limit=50 // can be queried for by hash or paginated evidence can be returned. func QueryEvidenceCmd() func(*cobra.Command, []string) error { return func(cmd *cobra.Command, args []string) error { - if err := client.ValidateCmd(cmd, args); err != nil { - return err - } - clientCtx := client.GetClientContextFromCmd(cmd) clientCtx, err := client.ReadQueryCommandFlags(clientCtx, cmd.Flags()) if err != nil { return err } - if hash := args[0]; hash != "" { - return queryEvidence(clientCtx, hash) + if len(args) > 0 { + return queryEvidence(clientCtx, args[0]) } pageReq, err := client.ReadPageRequest(cmd.Flags()) diff --git a/x/evidence/keeper/keeper.go b/x/evidence/keeper/keeper.go index e15cd74c8a0..8a2bfa68683 100644 --- a/x/evidence/keeper/keeper.go +++ b/x/evidence/keeper/keeper.go @@ -40,7 +40,7 @@ func NewKeeper( // Logger returns a module-specific logger. func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) + return ctx.Logger().With("module", "x/"+types.ModuleName) } // SetRouter sets the Evidence Handler router for the x/evidence module. 
Note, diff --git a/x/gov/abci.go b/x/gov/abci.go index f31e1506278..f9815e0fb81 100644 --- a/x/gov/abci.go +++ b/x/gov/abci.go @@ -30,13 +30,13 @@ func EndBlocker(ctx sdk.Context, keeper keeper.Keeper) { ) logger.Info( - fmt.Sprintf("proposal %d (%s) didn't meet minimum deposit of %s (had only %s); deleted", - proposal.ProposalId, - proposal.GetTitle(), - keeper.GetDepositParams(ctx).MinDeposit, - proposal.TotalDeposit, - ), + "proposal did not meet minimum deposit; deleted", + "proposal", proposal.ProposalId, + "title", proposal.GetTitle(), + "min_deposit", keeper.GetDepositParams(ctx).MinDeposit.String(), + "total_deposit", proposal.TotalDeposit.String(), ) + return false }) @@ -90,10 +90,10 @@ func EndBlocker(ctx sdk.Context, keeper keeper.Keeper) { keeper.RemoveFromActiveProposalQueue(ctx, proposal.ProposalId, proposal.VotingEndTime) logger.Info( - fmt.Sprintf( - "proposal %d (%s) tallied; result: %s", - proposal.ProposalId, proposal.GetTitle(), logMsg, - ), + "proposal tallied", + "proposal", proposal.ProposalId, + "title", proposal.GetTitle(), + "result", logMsg, ) ctx.EventManager().EmitEvent( diff --git a/x/gov/client/rest/grpc_query_test.go b/x/gov/client/rest/grpc_query_test.go index 23a282d5d31..8e2d4efa9ff 100644 --- a/x/gov/client/rest/grpc_query_test.go +++ b/x/gov/client/rest/grpc_query_test.go @@ -150,7 +150,7 @@ func (s *IntegrationTestSuite) TestGetProposalsGRPC() { func (s *IntegrationTestSuite) TestGetProposalVoteGRPC() { val := s.network.Validators[0] - voterAddressBase64 := val.Address.String() + voterAddressBech32 := val.Address.String() testCases := []struct { name string @@ -159,12 +159,12 @@ func (s *IntegrationTestSuite) TestGetProposalVoteGRPC() { }{ { "empty proposal", - fmt.Sprintf("%s/cosmos/gov/v1beta1/proposals/%s/votes/%s", val.APIAddress, "", voterAddressBase64), + fmt.Sprintf("%s/cosmos/gov/v1beta1/proposals/%s/votes/%s", val.APIAddress, "", voterAddressBech32), true, }, { "get non existing proposal", - fmt.Sprintf("%s/cosmos/gov/v1beta1/proposals/%s/votes/%s", val.APIAddress, "10", voterAddressBase64), + fmt.Sprintf("%s/cosmos/gov/v1beta1/proposals/%s/votes/%s", val.APIAddress, "10", voterAddressBech32), true, }, { @@ -174,7 +174,7 @@ func (s *IntegrationTestSuite) TestGetProposalVoteGRPC() { }, { "get proposal with id", - fmt.Sprintf("%s/cosmos/gov/v1beta1/proposals/%s/votes/%s", val.APIAddress, "1", voterAddressBase64), + fmt.Sprintf("%s/cosmos/gov/v1beta1/proposals/%s/votes/%s", val.APIAddress, "1", voterAddressBech32), false, }, } diff --git a/x/gov/client/rest/rest_test.go b/x/gov/client/rest/rest_test.go new file mode 100644 index 00000000000..0b1a274022c --- /dev/null +++ b/x/gov/client/rest/rest_test.go @@ -0,0 +1,110 @@ +// +build norace + +package rest_test + +import ( + "fmt" + + "github.com/cosmos/cosmos-sdk/types/rest" + "github.com/cosmos/cosmos-sdk/x/gov/types" +) + +func (s *IntegrationTestSuite) TestLegacyGetVote() { + val := s.network.Validators[0] + voterAddressBech32 := val.Address.String() + + testCases := []struct { + name string + url string + expErr bool + expErrMsg string + }{ + { + "get non existing proposal", + fmt.Sprintf("%s/gov/proposals/%s/votes/%s", val.APIAddress, "10", voterAddressBech32), + true, "proposalID 10 does not exist", + }, + { + "get proposal with wrong voter address", + fmt.Sprintf("%s/gov/proposals/%s/votes/%s", val.APIAddress, "1", "wrongVoterAddress"), + true, "decoding bech32 failed: string not all lowercase or all uppercase", + }, + { + "get proposal with id", + 
fmt.Sprintf("%s/gov/proposals/%s/votes/%s", val.APIAddress, "1", voterAddressBech32), + false, "", + }, + } + + for _, tc := range testCases { + tc := tc + s.Run(tc.name, func() { + respJSON, err := rest.GetRequest(tc.url) + s.Require().NoError(err) + + if tc.expErr { + var errResp rest.ErrorResponse + s.Require().NoError(val.ClientCtx.LegacyAmino.UnmarshalJSON(respJSON, &errResp)) + + s.Require().Equal(errResp.Error, tc.expErrMsg) + } else { + var resp = rest.ResponseWithHeight{} + err = val.ClientCtx.LegacyAmino.UnmarshalJSON(respJSON, &resp) + s.Require().NoError(err) + + // Check result is not empty. + var vote types.Vote + s.Require().NoError(val.ClientCtx.LegacyAmino.UnmarshalJSON(resp.Result, &vote)) + s.Require().Equal(val.Address.String(), vote.Voter) + // Note that option is now an int. + s.Require().Equal(types.VoteOption(1), vote.Option) + } + }) + } +} + +func (s *IntegrationTestSuite) TestLegacyGetVotes() { + val := s.network.Validators[0] + + testCases := []struct { + name string + url string + expErr bool + expErrMsg string + }{ + { + "votes with empty proposal id", + fmt.Sprintf("%s/gov/proposals/%s/votes", val.APIAddress, ""), + true, "'votes' is not a valid uint64", + }, + { + "get votes with valid id", + fmt.Sprintf("%s/gov/proposals/%s/votes", val.APIAddress, "1"), + false, "", + }, + } + + for _, tc := range testCases { + tc := tc + s.Run(tc.name, func() { + respJSON, err := rest.GetRequest(tc.url) + s.Require().NoError(err) + + if tc.expErr { + var errResp rest.ErrorResponse + s.Require().NoError(val.ClientCtx.LegacyAmino.UnmarshalJSON(respJSON, &errResp)) + + s.Require().Equal(errResp.Error, tc.expErrMsg) + } else { + var resp = rest.ResponseWithHeight{} + err = val.ClientCtx.LegacyAmino.UnmarshalJSON(respJSON, &resp) + s.Require().NoError(err) + + // Check result is not empty. + var votes []types.Vote + s.Require().NoError(val.ClientCtx.LegacyAmino.UnmarshalJSON(resp.Result, &votes)) + s.Require().Greater(len(votes), 0) + } + }) + } +} diff --git a/x/gov/keeper/keeper.go b/x/gov/keeper/keeper.go index 0e999d7f287..60db99d32c1 100644 --- a/x/gov/keeper/keeper.go +++ b/x/gov/keeper/keeper.go @@ -68,7 +68,7 @@ func NewKeeper( // Logger returns a module-specific logger. func (keeper Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) + return ctx.Logger().With("module", "x/"+types.ModuleName) } // Router returns the gov Keeper's Router diff --git a/x/ibc/applications/transfer/keeper/keeper.go b/x/ibc/applications/transfer/keeper/keeper.go index e1149ef9be4..a2eebb55e1e 100644 --- a/x/ibc/applications/transfer/keeper/keeper.go +++ b/x/ibc/applications/transfer/keeper/keeper.go @@ -1,8 +1,6 @@ package keeper import ( - "fmt" - tmbytes "github.com/tendermint/tendermint/libs/bytes" "github.com/tendermint/tendermint/libs/log" @@ -63,7 +61,7 @@ func NewKeeper( // Logger returns a module-specific logger. 
func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s-%s", host.ModuleName, types.ModuleName)) + return ctx.Logger().With("module", "x/"+host.ModuleName+"-"+types.ModuleName) } // GetTransferAccount returns the ICS20 - transfers ModuleAccount diff --git a/x/ibc/core/02-client/genesis.go b/x/ibc/core/02-client/genesis.go index ef00930f0b8..6e77b20e36f 100644 --- a/x/ibc/core/02-client/genesis.go +++ b/x/ibc/core/02-client/genesis.go @@ -7,7 +7,6 @@ import ( "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client/keeper" "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client/types" "github.com/cosmos/cosmos-sdk/x/ibc/core/exported" - localhosttypes "github.com/cosmos/cosmos-sdk/x/ibc/light-clients/09-localhost/types" ) // InitGenesis initializes the ibc client submodule's state from a provided genesis @@ -39,29 +38,10 @@ func InitGenesis(ctx sdk.Context, k keeper.Keeper, gs types.GenesisState) { } } - if !gs.CreateLocalhost { - return - } - - // NOTE: return if the localhost client was already imported. The chain-id and - // block height will be overwriten to the correct values during BeginBlock. - if _, found := k.GetClientState(ctx, exported.Localhost); found { - return - } - - // client id is always "localhost" - revision := types.ParseChainID(ctx.ChainID()) - clientState := localhosttypes.NewClientState( - ctx.ChainID(), types.NewHeight(revision, uint64(ctx.BlockHeight())), - ) + k.SetNextClientSequence(ctx, gs.NextClientSequence) - if err := clientState.Validate(); err != nil { - panic(err) - } - - if err := k.CreateClient(ctx, exported.Localhost, clientState, nil); err != nil { - panic(err) - } + // NOTE: localhost creation is specifically disallowed for the time being. + // Issue: https://github.com/cosmos/cosmos-sdk/issues/7871 } // ExportGenesis returns the ibc client submodule's exported genesis. diff --git a/x/ibc/core/02-client/keeper/client.go b/x/ibc/core/02-client/keeper/client.go index ddb742563c1..49d5a04eb36 100644 --- a/x/ibc/core/02-client/keeper/client.go +++ b/x/ibc/core/02-client/keeper/client.go @@ -15,20 +15,17 @@ import ( // // CONTRACT: ClientState was constructed correctly from given initial consensusState func (k Keeper) CreateClient( - ctx sdk.Context, clientID string, clientState exported.ClientState, consensusState exported.ConsensusState, -) error { + ctx sdk.Context, clientState exported.ClientState, consensusState exported.ConsensusState, +) (string, error) { params := k.GetParams(ctx) if !params.IsAllowedClient(clientState.ClientType()) { - return sdkerrors.Wrapf( + return "", sdkerrors.Wrapf( types.ErrInvalidClientType, "client state type %s is not registered in the allowlist", clientState.ClientType(), ) } - _, found := k.GetClientState(ctx, clientID) - if found { - return sdkerrors.Wrapf(types.ErrClientExists, "cannot create client with ID %s", clientID) - } + clientID := k.GenerateClientIdentifier(ctx, clientState.ClientType()) // check if consensus state is nil in case the created client is Localhost if consensusState != nil { @@ -46,7 +43,7 @@ func (k Keeper) CreateClient( ) }() - return nil + return clientID, nil } // UpdateClient updates the consensus state and the state root from a provided header. 
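With the `CreateClient` change above, callers no longer pass in a client identifier; the keeper derives one from the client type and a stored, monotonically increasing sequence (`GenerateClientIdentifier`) and returns it, so the old "client ID exists" collision check disappears. The sketch below illustrates the resulting `{client-type}-{N}` naming scheme; `formatClientID` and `parseClientID` are simplified stand-ins for the `FormatClientIdentifier` and `ParseClientIdentifier` functions added further down in this diff, which additionally validate the format with a regexp and host identifier rules.

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// formatClientID mirrors the "<client-type>-<sequence>" scheme the keeper now
// uses when assigning identifiers (the name is illustrative, not the SDK's).
func formatClientID(clientType string, sequence uint64) string {
	return fmt.Sprintf("%s-%d", clientType, sequence)
}

// parseClientID splits an identifier back into its type and sequence. The real
// ParseClientIdentifier also rejects malformed identifiers via IsClientIDFormat.
func parseClientID(clientID string) (string, uint64, error) {
	i := strings.LastIndex(clientID, "-")
	if i <= 0 {
		return "", 0, fmt.Errorf("identifier %q is not in {client-type}-{N} form", clientID)
	}
	seq, err := strconv.ParseUint(clientID[i+1:], 10, 64)
	if err != nil {
		return "", 0, err
	}
	return clientID[:i], seq, nil
}

func main() {
	// The first two tendermint clients created on a fresh chain would be named:
	fmt.Println(formatClientID("tendermint", 0)) // tendermint-0
	fmt.Println(formatClientID("tendermint", 1)) // tendermint-1

	// Client types may themselves contain dashes; only the last segment is the sequence.
	clientType, seq, _ := parseClientID("solomachine-v1-7")
	fmt.Println(clientType, seq) // solomachine-v1 7
}
```

This is also why the test constants in `keeper_test.go` change from names like `gaiachain` to `tendermint-0`, `tendermint-1`, and so on: identifiers are now always generated in this format rather than chosen by the message sender.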
diff --git a/x/ibc/core/02-client/keeper/client_test.go b/x/ibc/core/02-client/keeper/client_test.go index 736a34d1e48..3d772761cc5 100644 --- a/x/ibc/core/02-client/keeper/client_test.go +++ b/x/ibc/core/02-client/keeper/client_test.go @@ -19,36 +19,23 @@ import ( func (suite *KeeperTestSuite) TestCreateClient() { cases := []struct { - msg string - clientID string - expPass bool - expPanic bool + msg string + clientState exported.ClientState + expPass bool }{ - {"success", testClientID, true, false}, - {"client ID exists", testClientID, false, false}, + {"success", ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false), true}, + {"client type not supported", localhosttypes.NewClientState(testChainID, clienttypes.NewHeight(0, 1)), false}, } for i, tc := range cases { - tc := tc - i := i - if tc.expPanic { - suite.Require().Panics(func() { - clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - suite.keeper.CreateClient(suite.ctx, tc.clientID, clientState, suite.consensusState) - }, "Msg %d didn't panic: %s", i, tc.msg) - } else { - clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - if tc.expPass { - suite.Require().NotNil(clientState, "valid test case %d failed: %s", i, tc.msg) - } - // If we were able to NewClientState clientstate successfully, try persisting it to state - err := suite.keeper.CreateClient(suite.ctx, tc.clientID, clientState, suite.consensusState) - if tc.expPass { - suite.Require().NoError(err, "valid test case %d failed: %s", i, tc.msg) - } else { - suite.Require().Error(err, "invalid test case %d passed: %s", i, tc.msg) - } + clientID, err := suite.keeper.CreateClient(suite.ctx, tc.clientState, suite.consensusState) + if tc.expPass { + suite.Require().NoError(err, "valid test case %d failed: %s", i, tc.msg) + suite.Require().NotNil(clientID, "valid test case %d failed: %s", i, tc.msg) + } else { + suite.Require().Error(err, "invalid test case %d passed: %s", i, tc.msg) + suite.Require().Equal("", clientID, "invalid test case %d passed: %s", i, tc.msg) } } } @@ -72,6 +59,8 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { var ( updateHeader *ibctmtypes.Header clientState *ibctmtypes.ClientState + clientID string + err error ) cases := []struct { @@ -81,7 +70,7 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { }{ {"valid update", func() error { clientState = ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) // store intermediate consensus state to check that trustedHeight does not need to be highest consensus state before header height incrementedClientHeight := testClientHeight.Increment() @@ -89,17 +78,17 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { Timestamp: suite.now.Add(time.Minute), NextValidatorsHash: suite.valSetHash, } - 
suite.keeper.SetClientConsensusState(suite.ctx, testClientID, incrementedClientHeight, intermediateConsState) + suite.keeper.SetClientConsensusState(suite.ctx, clientID, incrementedClientHeight, intermediateConsState) clientState.LatestHeight = incrementedClientHeight - suite.keeper.SetClientState(suite.ctx, testClientID, clientState) + suite.keeper.SetClientState(suite.ctx, clientID, clientState) updateHeader = createFutureUpdateFn(suite) return err }, true}, {"valid past update", func() error { clientState = ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) suite.Require().NoError(err) height1 := types.NewHeight(0, 1) @@ -109,7 +98,7 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { Timestamp: suite.past, NextValidatorsHash: suite.valSetHash, } - suite.keeper.SetClientConsensusState(suite.ctx, testClientID, height1, prevConsState) + suite.keeper.SetClientConsensusState(suite.ctx, clientID, height1, prevConsState) height2 := types.NewHeight(0, 2) @@ -118,7 +107,7 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { Timestamp: suite.past.Add(time.Minute), NextValidatorsHash: suite.valSetHash, } - suite.keeper.SetClientConsensusState(suite.ctx, testClientID, height2, intermediateConsState) + suite.keeper.SetClientConsensusState(suite.ctx, clientID, height2, intermediateConsState) // updateHeader will fill in consensus state between prevConsState and suite.consState // clientState should not be updated @@ -147,7 +136,7 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { {"valid past update before client was frozen", func() error { clientState = ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) clientState.FrozenHeight = types.NewHeight(0, testClientHeight.RevisionHeight-1) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) suite.Require().NoError(err) height1 := types.NewHeight(0, 1) @@ -157,7 +146,7 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { Timestamp: suite.past, NextValidatorsHash: suite.valSetHash, } - suite.keeper.SetClientConsensusState(suite.ctx, testClientID, height1, prevConsState) + suite.keeper.SetClientConsensusState(suite.ctx, clientID, height1, prevConsState) // updateHeader will fill in consensus state between prevConsState and suite.consState // clientState should not be updated @@ -166,7 +155,7 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { }, true}, {"invalid header", func() error { clientState = ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + _, err := suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) suite.Require().NoError(err) updateHeader = createPastUpdateFn(suite) @@ -179,13 +168,14 @@ func (suite *KeeperTestSuite) 
TestUpdateClientTendermint() { i := i suite.Run(fmt.Sprintf("Case %s", tc.name), func() { suite.SetupTest() + clientID = testClientID // must be explicitly changed err := tc.malleate() suite.Require().NoError(err) suite.ctx = suite.ctx.WithBlockTime(updateHeader.Header.Time.Add(time.Minute)) - err = suite.keeper.UpdateClient(suite.ctx, testClientID, updateHeader) + err = suite.keeper.UpdateClient(suite.ctx, clientID, updateHeader) if tc.expPass { suite.Require().NoError(err, err) @@ -196,10 +186,10 @@ func (suite *KeeperTestSuite) TestUpdateClientTendermint() { NextValidatorsHash: updateHeader.Header.NextValidatorsHash, } - newClientState, found := suite.keeper.GetClientState(suite.ctx, testClientID) + newClientState, found := suite.keeper.GetClientState(suite.ctx, clientID) suite.Require().True(found, "valid test case %d failed: %s", i, tc.name) - consensusState, found := suite.keeper.GetClientConsensusState(suite.ctx, testClientID, updateHeader.GetHeight()) + consensusState, found := suite.keeper.GetClientConsensusState(suite.ctx, clientID, updateHeader.GetHeight()) suite.Require().True(found, "valid test case %d failed: %s", i, tc.name) // Determine if clientState should be updated or not @@ -399,6 +389,11 @@ func (suite *KeeperTestSuite) TestUpgradeClient() { } func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { + var ( + clientID string + err error + ) + altPrivVal := ibctestingmock.NewPV() altPubKey, err := altPrivVal.GetPubKey() suite.Require().NoError(err) @@ -437,12 +432,12 @@ func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { &ibctmtypes.Misbehaviour{ Header1: suite.chainA.CreateTMClientHeader(testChainID, int64(testClientHeight.RevisionHeight), testClientHeight, altTime, bothValSet, bothValSet, bothSigners), Header2: suite.chainA.CreateTMClientHeader(testChainID, int64(testClientHeight.RevisionHeight), testClientHeight, suite.ctx.BlockTime(), bothValSet, bothValSet, bothSigners), - ClientId: testClientID, + ClientId: clientID, }, func() error { suite.consensusState.NextValidatorsHash = bothValsHash clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) return err }, @@ -453,22 +448,22 @@ func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { &ibctmtypes.Misbehaviour{ Header1: suite.chainA.CreateTMClientHeader(testChainID, int64(heightPlus5.RevisionHeight), testClientHeight, altTime, bothValSet, valSet, bothSigners), Header2: suite.chainA.CreateTMClientHeader(testChainID, int64(heightPlus5.RevisionHeight), testClientHeight, suite.ctx.BlockTime(), bothValSet, valSet, bothSigners), - ClientId: testClientID, + ClientId: clientID, }, func() error { suite.consensusState.NextValidatorsHash = valsHash clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) // store intermediate consensus state to check that trustedHeight does not need to be highest consensus state 
before header height intermediateConsState := &ibctmtypes.ConsensusState{ Timestamp: suite.now.Add(time.Minute), NextValidatorsHash: suite.valSetHash, } - suite.keeper.SetClientConsensusState(suite.ctx, testClientID, heightPlus3, intermediateConsState) + suite.keeper.SetClientConsensusState(suite.ctx, clientID, heightPlus3, intermediateConsState) clientState.LatestHeight = heightPlus3 - suite.keeper.SetClientState(suite.ctx, testClientID, clientState) + suite.keeper.SetClientState(suite.ctx, clientID, clientState) return err }, @@ -479,22 +474,22 @@ func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { &ibctmtypes.Misbehaviour{ Header1: suite.chainA.CreateTMClientHeader(testChainID, int64(heightPlus5.RevisionHeight), testClientHeight, altTime, bothValSet, valSet, bothSigners), Header2: suite.chainA.CreateTMClientHeader(testChainID, int64(heightPlus5.RevisionHeight), heightPlus3, suite.ctx.BlockTime(), bothValSet, bothValSet, bothSigners), - ClientId: testClientID, + ClientId: clientID, }, func() error { suite.consensusState.NextValidatorsHash = valsHash clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) // store trusted consensus state for Header2 intermediateConsState := &ibctmtypes.ConsensusState{ Timestamp: suite.now.Add(time.Minute), NextValidatorsHash: bothValsHash, } - suite.keeper.SetClientConsensusState(suite.ctx, testClientID, heightPlus3, intermediateConsState) + suite.keeper.SetClientConsensusState(suite.ctx, clientID, heightPlus3, intermediateConsState) clientState.LatestHeight = heightPlus3 - suite.keeper.SetClientState(suite.ctx, testClientID, clientState) + suite.keeper.SetClientState(suite.ctx, clientID, clientState) return err }, @@ -505,12 +500,12 @@ func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { &ibctmtypes.Misbehaviour{ Header1: suite.chainA.CreateTMClientHeader(testChainID, int64(heightPlus5.RevisionHeight), heightPlus3, altTime, bothValSet, bothValSet, bothSigners), Header2: suite.chainA.CreateTMClientHeader(testChainID, int64(heightPlus5.RevisionHeight), testClientHeight, suite.ctx.BlockTime(), bothValSet, valSet, bothSigners), - ClientId: testClientID, + ClientId: clientID, }, func() error { suite.consensusState.NextValidatorsHash = valsHash clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) // intermediate consensus state at height + 3 is not created return err }, @@ -521,12 +516,12 @@ func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { &ibctmtypes.Misbehaviour{ Header1: suite.chainA.CreateTMClientHeader(testChainID, int64(heightPlus5.RevisionHeight), testClientHeight, altTime, bothValSet, valSet, bothSigners), Header2: suite.chainA.CreateTMClientHeader(testChainID, int64(heightPlus5.RevisionHeight), heightPlus3, suite.ctx.BlockTime(), bothValSet, bothValSet, bothSigners), - ClientId: testClientID, + ClientId: clientID, }, 
func() error { suite.consensusState.NextValidatorsHash = valsHash clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) // intermediate consensus state at height + 3 is not created return err }, @@ -543,15 +538,15 @@ func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { &ibctmtypes.Misbehaviour{ Header1: suite.chainA.CreateTMClientHeader(testChainID, int64(testClientHeight.RevisionHeight), testClientHeight, altTime, bothValSet, bothValSet, bothSigners), Header2: suite.chainA.CreateTMClientHeader(testChainID, int64(testClientHeight.RevisionHeight), testClientHeight, suite.ctx.BlockTime(), bothValSet, bothValSet, bothSigners), - ClientId: testClientID, + ClientId: clientID, }, func() error { suite.consensusState.NextValidatorsHash = bothValsHash clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - err := suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) clientState.FrozenHeight = types.NewHeight(0, 1) - suite.keeper.SetClientState(suite.ctx, testClientID, clientState) + suite.keeper.SetClientState(suite.ctx, clientID, clientState) return err }, @@ -562,14 +557,14 @@ func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { &ibctmtypes.Misbehaviour{ Header1: suite.chainA.CreateTMClientHeader(testChainID, int64(testClientHeight.RevisionHeight), testClientHeight, altTime, bothValSet, bothValSet, bothSigners), Header2: suite.chainA.CreateTMClientHeader(testChainID, int64(testClientHeight.RevisionHeight), testClientHeight, suite.ctx.BlockTime(), altValSet, bothValSet, altSigners), - ClientId: testClientID, + ClientId: clientID, }, func() error { clientState := ibctmtypes.NewClientState(testChainID, ibctmtypes.DefaultTrustLevel, trustingPeriod, ubdPeriod, maxClockDrift, testClientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) if err != nil { return err } - err = suite.keeper.CreateClient(suite.ctx, testClientID, clientState, suite.consensusState) + clientID, err = suite.keeper.CreateClient(suite.ctx, clientState, suite.consensusState) return err }, @@ -580,18 +575,22 @@ func (suite *KeeperTestSuite) TestCheckMisbehaviourAndUpdateState() { for i, tc := range testCases { tc := tc i := i + suite.Run(tc.name, func() { - suite.SetupTest() // reset + suite.SetupTest() // reset + clientID = testClientID // must be explicitly changed err := tc.malleate() suite.Require().NoError(err) + tc.misbehaviour.ClientId = clientID + err = suite.keeper.CheckMisbehaviourAndUpdateState(suite.ctx, tc.misbehaviour) if tc.expPass { suite.Require().NoError(err, "valid test case %d failed: %s", i, tc.name) - clientState, found := suite.keeper.GetClientState(suite.ctx, testClientID) + clientState, found := suite.keeper.GetClientState(suite.ctx, clientID) suite.Require().True(found, "valid test case %d failed: %s", i, tc.name) suite.Require().True(clientState.IsFrozen(), "valid test case %d failed: %s", i, tc.name) 
suite.Require().Equal(tc.misbehaviour.GetHeight(), clientState.GetFrozenHeight(), diff --git a/x/ibc/core/02-client/keeper/keeper.go b/x/ibc/core/02-client/keeper/keeper.go index accf48bb14c..bae7d3627cf 100644 --- a/x/ibc/core/02-client/keeper/keeper.go +++ b/x/ibc/core/02-client/keeper/keeper.go @@ -47,7 +47,17 @@ func NewKeeper(cdc codec.BinaryMarshaler, key sdk.StoreKey, paramSpace paramtype // Logger returns a module-specific logger. func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s/%s", host.ModuleName, types.SubModuleName)) + return ctx.Logger().With("module", "x/"+host.ModuleName+"/"+types.SubModuleName) +} + +// GenerateClientIdentifier returns the next client identifier. +func (k Keeper) GenerateClientIdentifier(ctx sdk.Context, clientType string) string { + nextClientSeq := k.GetNextClientSequence(ctx) + clientID := types.FormatClientIdentifier(clientType, nextClientSeq) + + nextClientSeq++ + k.SetNextClientSequence(ctx, nextClientSeq) + return clientID } // GetClientState gets a particular client from the store @@ -87,6 +97,24 @@ func (k Keeper) SetClientConsensusState(ctx sdk.Context, clientID string, height store.Set(host.ConsensusStateKey(height), k.MustMarshalConsensusState(consensusState)) } +// GetNextClientSequence gets the next client sequence from the store. +func (k Keeper) GetNextClientSequence(ctx sdk.Context) uint64 { + store := ctx.KVStore(k.storeKey) + bz := store.Get([]byte(types.KeyNextClientSequence)) + if bz == nil { + panic("next client sequence is nil") + } + + return sdk.BigEndianToUint64(bz) +} + +// SetNextClientSequence sets the next client sequence to the store. +func (k Keeper) SetNextClientSequence(ctx sdk.Context, sequence uint64) { + store := ctx.KVStore(k.storeKey) + bz := sdk.Uint64ToBigEndian(sequence) + store.Set([]byte(types.KeyNextClientSequence), bz) +} + // IterateConsensusStates provides an iterator over all stored consensus states. // objects. For each State object, cb will be called. If the cb returns true, // the iterator will close and stop. diff --git a/x/ibc/core/02-client/keeper/keeper_test.go b/x/ibc/core/02-client/keeper/keeper_test.go index 45ff4f674e3..ec3c0229ca5 100644 --- a/x/ibc/core/02-client/keeper/keeper_test.go +++ b/x/ibc/core/02-client/keeper/keeper_test.go @@ -30,9 +30,9 @@ const ( testChainID = "gaiahub-0" testChainIDRevision1 = "gaiahub-1" - testClientID = "gaiachain" - testClientID2 = "ethbridge" - testClientID3 = "ethermint" + testClientID = "tendermint-0" + testClientID2 = "tendermint-1" + testClientID3 = "tendermint-2" height = 5 diff --git a/x/ibc/core/02-client/types/client.go b/x/ibc/core/02-client/types/client.go index 305b07cec47..1c44d6e2b9c 100644 --- a/x/ibc/core/02-client/types/client.go +++ b/x/ibc/core/02-client/types/client.go @@ -2,11 +2,15 @@ package types import ( "fmt" + "math" "sort" + "strings" proto "github.com/gogo/protobuf/proto" codectypes "github.com/cosmos/cosmos-sdk/codec/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + host "github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" "github.com/cosmos/cosmos-sdk/x/ibc/core/exported" ) @@ -80,3 +84,28 @@ func NewConsensusStateWithHeight(height Height, consensusState exported.Consensu func (cswh ConsensusStateWithHeight) UnpackInterfaces(unpacker codectypes.AnyUnpacker) error { return unpacker.UnpackAny(cswh.ConsensusState, new(exported.ConsensusState)) } + +// ValidateClientType validates the client type. It cannot be blank or empty. 
It must be a valid +// client identifier when used with '0' or the maximum uint64 as the sequence. +func ValidateClientType(clientType string) error { + if strings.TrimSpace(clientType) == "" { + return sdkerrors.Wrap(ErrInvalidClientType, "client type cannot be blank") + } + + smallestPossibleClientID := FormatClientIdentifier(clientType, 0) + largestPossibleClientID := FormatClientIdentifier(clientType, math.MaxUint64) + + // IsValidClientID will check client type format and if the sequence is a uint64 + if !IsValidClientID(smallestPossibleClientID) { + return sdkerrors.Wrap(ErrInvalidClientType, "") + } + + if err := host.ClientIdentifierValidator(smallestPossibleClientID); err != nil { + return sdkerrors.Wrap(err, "client type results in smallest client identifier being invalid") + } + if err := host.ClientIdentifierValidator(largestPossibleClientID); err != nil { + return sdkerrors.Wrap(err, "client type results in largest client identifier being invalid") + } + + return nil +} diff --git a/x/ibc/core/02-client/types/client_test.go b/x/ibc/core/02-client/types/client_test.go index 409ab53050a..2dfd3967d28 100644 --- a/x/ibc/core/02-client/types/client_test.go +++ b/x/ibc/core/02-client/types/client_test.go @@ -1,6 +1,10 @@ package types_test import ( + "testing" + + "github.com/stretchr/testify/require" + "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client/types" "github.com/cosmos/cosmos-sdk/x/ibc/core/exported" ibctesting "github.com/cosmos/cosmos-sdk/x/ibc/testing" @@ -54,3 +58,30 @@ func (suite *TypesTestSuite) TestMarshalConsensusStateWithHeight() { }) } } + +func TestValidateClientType(t *testing.T) { + testCases := []struct { + name string + clientType string + expPass bool + }{ + {"valid", "tendermint", true}, + {"valid solomachine", "solomachine-v1", true}, + {"too large", "tenderminttenderminttenderminttenderminttendermintt", false}, + {"too short", "t", false}, + {"blank id", " ", false}, + {"empty id", "", false}, + {"ends with dash", "tendermint-", false}, + } + + for _, tc := range testCases { + + err := types.ValidateClientType(tc.clientType) + + if tc.expPass { + require.NoError(t, err, tc.name) + } else { + require.Error(t, err, tc.name) + } + } +} diff --git a/x/ibc/core/02-client/types/genesis.go b/x/ibc/core/02-client/types/genesis.go index 4da2f5e9206..f7939c47893 100644 --- a/x/ibc/core/02-client/types/genesis.go +++ b/x/ibc/core/02-client/types/genesis.go @@ -67,23 +67,25 @@ func (ccs ClientConsensusStates) UnpackInterfaces(unpacker codectypes.AnyUnpacke // NewGenesisState creates a GenesisState instance. func NewGenesisState( clients []IdentifiedClientState, clientsConsensus ClientsConsensusStates, - params Params, createLocalhost bool, + params Params, createLocalhost bool, nextClientSequence uint64, ) GenesisState { return GenesisState{ - Clients: clients, - ClientsConsensus: clientsConsensus, - Params: params, - CreateLocalhost: createLocalhost, + Clients: clients, + ClientsConsensus: clientsConsensus, + Params: params, + CreateLocalhost: createLocalhost, + NextClientSequence: nextClientSequence, } } // DefaultGenesisState returns the ibc client submodule's default genesis state. 
func DefaultGenesisState() GenesisState { return GenesisState{ - Clients: []IdentifiedClientState{}, - ClientsConsensus: ClientsConsensusStates{}, - Params: DefaultParams(), - CreateLocalhost: false, + Clients: []IdentifiedClientState{}, + ClientsConsensus: ClientsConsensusStates{}, + Params: DefaultParams(), + CreateLocalhost: false, + NextClientSequence: 0, } } diff --git a/x/ibc/core/02-client/types/genesis.pb.go b/x/ibc/core/02-client/types/genesis.pb.go index 4c72edfe5e7..becfa1baba1 100644 --- a/x/ibc/core/02-client/types/genesis.pb.go +++ b/x/ibc/core/02-client/types/genesis.pb.go @@ -32,6 +32,8 @@ type GenesisState struct { Params Params `protobuf:"bytes,3,opt,name=params,proto3" json:"params"` // create localhost on initialization CreateLocalhost bool `protobuf:"varint,4,opt,name=create_localhost,json=createLocalhost,proto3" json:"create_localhost,omitempty" yaml:"create_localhost"` + // the sequence for the next generated client identifier + NextClientSequence uint64 `protobuf:"varint,5,opt,name=next_client_sequence,json=nextClientSequence,proto3" json:"next_client_sequence,omitempty" yaml:"next_client_sequence"` } func (m *GenesisState) Reset() { *m = GenesisState{} } @@ -95,6 +97,13 @@ func (m *GenesisState) GetCreateLocalhost() bool { return false } +func (m *GenesisState) GetNextClientSequence() uint64 { + if m != nil { + return m.NextClientSequence + } + return 0 +} + func init() { proto.RegisterType((*GenesisState)(nil), "ibc.core.client.v1.GenesisState") } @@ -102,30 +111,32 @@ func init() { func init() { proto.RegisterFile("ibc/core/client/v1/genesis.proto", fileDescriptor_bcd0c0f1f2e6a91a) } var fileDescriptor_bcd0c0f1f2e6a91a = []byte{ - // 362 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x91, 0xc1, 0x4e, 0xea, 0x40, - 0x14, 0x86, 0xdb, 0x0b, 0xe1, 0xde, 0x94, 0x9b, 0x88, 0x8d, 0xd1, 0x06, 0x93, 0xb6, 0xe9, 0x0a, - 0x17, 0xcc, 0x08, 0x2e, 0x34, 0x2c, 0x4b, 0xa2, 0x31, 0x71, 0xa1, 0x75, 0xe7, 0x86, 0xb4, 0xc3, - 0x58, 0x26, 0xb6, 0x1d, 0xd2, 0x33, 0x10, 0x79, 0x05, 0x57, 0xc6, 0xc7, 0xf0, 0x49, 0x58, 0xb2, - 0x74, 0x85, 0x06, 0xde, 0x80, 0x27, 0x30, 0xed, 0x14, 0x17, 0x80, 0xab, 0x39, 0xf9, 0xe7, 0xff, - 0xfe, 0xff, 0x24, 0x47, 0xb3, 0x59, 0x40, 0x30, 0xe1, 0x29, 0xc5, 0x24, 0x62, 0x34, 0x11, 0x78, - 0xdc, 0xc2, 0x21, 0x4d, 0x28, 0x30, 0x40, 0xc3, 0x94, 0x0b, 0xae, 0xeb, 0x2c, 0x20, 0x28, 0x73, - 0x20, 0xe9, 0x40, 0xe3, 0x56, 0xdd, 0xda, 0x41, 0x15, 0xbf, 0x39, 0x54, 0x3f, 0x08, 0x79, 0xc8, - 0xf3, 0x11, 0x67, 0x93, 0x54, 0x9d, 0x97, 0x92, 0xf6, 0xff, 0x4a, 0x86, 0xdf, 0x0b, 0x5f, 0x50, - 0x9d, 0x68, 0x7f, 0x25, 0x06, 0x86, 0x6a, 0x97, 0x1a, 0xd5, 0xf6, 0x09, 0xda, 0x6e, 0x43, 0xd7, - 0x7d, 0x9a, 0x08, 0xf6, 0xc8, 0x68, 0xbf, 0x9b, 0x6b, 0x39, 0xeb, 0x9a, 0xd3, 0xb9, 0xa5, 0xbc, - 0x7f, 0x5a, 0x87, 0x3b, 0xbf, 0xc1, 0x5b, 0x27, 0xeb, 0x6f, 0xaa, 0xb6, 0x5f, 0xcc, 0x3d, 0xc2, - 0x13, 0xa0, 0x09, 0x8c, 0xc0, 0xf8, 0xf3, 0x7b, 0x9f, 0x8c, 0xe9, 0xae, 0xad, 0x32, 0xcf, 0xed, - 0x64, 0x7d, 0xab, 0xb9, 0x65, 0x4c, 0xfc, 0x38, 0xea, 0x38, 0x5b, 0x89, 0x4e, 0xb6, 0x8b, 0x44, - 0x61, 0x83, 0xf5, 0x6a, 0x64, 0x43, 0xd7, 0x2f, 0xb4, 0xca, 0xd0, 0x4f, 0xfd, 0x18, 0x8c, 0x92, - 0xad, 0x36, 0xaa, 0xed, 0xfa, 0xae, 0x45, 0x6e, 0x73, 0x87, 0x5b, 0xce, 0x9a, 0xbd, 0xc2, 0xaf, - 0x5f, 0x6a, 0x35, 0x92, 0x52, 0x5f, 0xd0, 0x5e, 0xc4, 0x89, 0x1f, 0x0d, 0x38, 0x08, 0xa3, 0x6c, - 0xab, 0x8d, 0x7f, 0xee, 0xf1, 0x6a, 0x6e, 0x1d, 0x15, 0xdb, 0x6d, 0x38, 0x1c, 0x6f, 0x4f, 0x4a, - 0x37, 0x6b, 0xc5, 0xbd, 0x9b, 0x2e, 0x4c, 0x75, 0xb6, 
0x30, 0xd5, 0xaf, 0x85, 0xa9, 0xbe, 0x2e, - 0x4d, 0x65, 0xb6, 0x34, 0x95, 0x8f, 0xa5, 0xa9, 0x3c, 0x9c, 0x87, 0x4c, 0x0c, 0x46, 0x01, 0x22, - 0x3c, 0xc6, 0x84, 0x43, 0xcc, 0xa1, 0x78, 0x9a, 0xd0, 0x7f, 0xc2, 0xcf, 0xf8, 0xe7, 0xf8, 0xa7, - 0xed, 0x66, 0x71, 0x7f, 0x31, 0x19, 0x52, 0x08, 0x2a, 0xf9, 0x99, 0xcf, 0xbe, 0x03, 0x00, 0x00, - 0xff, 0xff, 0x8d, 0xa4, 0x74, 0xd6, 0x55, 0x02, 0x00, 0x00, + // 400 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0xc1, 0x8e, 0x9a, 0x40, + 0x1c, 0xc6, 0x99, 0x6a, 0x6d, 0x83, 0x4d, 0x6a, 0x27, 0xa6, 0x25, 0x9a, 0x00, 0xe1, 0x44, 0x0f, + 0x32, 0xd5, 0x1e, 0xda, 0x78, 0xc4, 0xa4, 0x4d, 0x93, 0x1e, 0x2a, 0xbd, 0xf5, 0x42, 0x60, 0x9c, + 0x22, 0x29, 0x30, 0x96, 0x19, 0x8d, 0xbe, 0xc5, 0x66, 0x1f, 0x63, 0x9f, 0xc4, 0xa3, 0xc7, 0xbd, + 0x2c, 0xbb, 0xd1, 0x37, 0xf0, 0x09, 0x36, 0x30, 0xe3, 0x1e, 0x94, 0x3d, 0xf1, 0xcf, 0x37, 0xbf, + 0xef, 0xfb, 0xfe, 0x21, 0x7f, 0xd5, 0x8c, 0x43, 0x8c, 0x30, 0xcd, 0x09, 0xc2, 0x49, 0x4c, 0x32, + 0x8e, 0x56, 0x43, 0x14, 0x91, 0x8c, 0xb0, 0x98, 0x39, 0x8b, 0x9c, 0x72, 0x0a, 0x61, 0x1c, 0x62, + 0xa7, 0x24, 0x1c, 0x41, 0x38, 0xab, 0x61, 0xcf, 0xa8, 0x71, 0xc9, 0xd7, 0xca, 0xd4, 0xeb, 0x46, + 0x34, 0xa2, 0xd5, 0x88, 0xca, 0x49, 0xa8, 0xd6, 0x5d, 0x43, 0x7d, 0xf3, 0x5d, 0x84, 0xff, 0xe6, + 0x01, 0x27, 0x10, 0xab, 0xaf, 0x84, 0x8d, 0x69, 0xc0, 0x6c, 0xd8, 0xed, 0xd1, 0x47, 0xe7, 0xb2, + 0xcd, 0xf9, 0x31, 0x23, 0x19, 0x8f, 0xff, 0xc6, 0x64, 0x36, 0xa9, 0xb4, 0xca, 0xeb, 0xea, 0xdb, + 0xc2, 0x50, 0x6e, 0xee, 0x8d, 0xf7, 0xb5, 0xcf, 0xcc, 0x3b, 0x25, 0xc3, 0x6b, 0xa0, 0xbe, 0x93, + 0xb3, 0x8f, 0x69, 0xc6, 0x48, 0xc6, 0x96, 0x4c, 0x7b, 0xf1, 0x7c, 0x9f, 0x88, 0x99, 0x9c, 0x50, + 0x91, 0xe7, 0x8e, 0xcb, 0xbe, 0x63, 0x61, 0x68, 0x9b, 0x20, 0x4d, 0xc6, 0xd6, 0x45, 0xa2, 0x55, + 0xee, 0x22, 0xac, 0xec, 0xcc, 0xeb, 0x75, 0xf0, 0x99, 0x0e, 0xbf, 0xaa, 0xad, 0x45, 0x90, 0x07, + 0x29, 0xd3, 0x1a, 0x26, 0xb0, 0xdb, 0xa3, 0x5e, 0xdd, 0x22, 0xbf, 0x2a, 0xc2, 0x6d, 0x96, 0xcd, + 0x9e, 0xe4, 0xe1, 0x37, 0xb5, 0x83, 0x73, 0x12, 0x70, 0xe2, 0x27, 0x14, 0x07, 0xc9, 0x9c, 0x32, + 0xae, 0x35, 0x4d, 0x60, 0xbf, 0x76, 0xfb, 0xc7, 0xc2, 0xf8, 0x20, 0xb7, 0x3b, 0x23, 0x2c, 0xef, + 0xad, 0x90, 0x7e, 0x9e, 0x14, 0x38, 0x55, 0xbb, 0x19, 0x59, 0x73, 0x5f, 0xd4, 0xf9, 0x8c, 0xfc, + 0x5f, 0x92, 0x0c, 0x13, 0xed, 0xa5, 0x09, 0xec, 0xa6, 0x6b, 0x1c, 0x0b, 0xa3, 0x2f, 0xb2, 0xea, + 0x28, 0xcb, 0x83, 0xa5, 0x2c, 0x7f, 0xb8, 0x14, 0xdd, 0xe9, 0x76, 0xaf, 0x83, 0xdd, 0x5e, 0x07, + 0x0f, 0x7b, 0x1d, 0x5c, 0x1d, 0x74, 0x65, 0x77, 0xd0, 0x95, 0xdb, 0x83, 0xae, 0xfc, 0xf9, 0x12, + 0xc5, 0x7c, 0xbe, 0x0c, 0x1d, 0x4c, 0x53, 0x84, 0x29, 0x4b, 0x29, 0x93, 0x9f, 0x01, 0x9b, 0xfd, + 0x43, 0x6b, 0xf4, 0x74, 0x4f, 0x9f, 0x46, 0x03, 0x79, 0x52, 0x7c, 0xb3, 0x20, 0x2c, 0x6c, 0x55, + 0x97, 0xf3, 0xf9, 0x31, 0x00, 0x00, 0xff, 0xff, 0x7c, 0xcd, 0xe7, 0x85, 0xa8, 0x02, 0x00, 0x00, } func (m *GenesisState) Marshal() (dAtA []byte, err error) { @@ -148,6 +159,11 @@ func (m *GenesisState) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.NextClientSequence != 0 { + i = encodeVarintGenesis(dAtA, i, uint64(m.NextClientSequence)) + i-- + dAtA[i] = 0x28 + } if m.CreateLocalhost { i-- if m.CreateLocalhost { @@ -233,6 +249,9 @@ func (m *GenesisState) Size() (n int) { if m.CreateLocalhost { n += 2 } + if m.NextClientSequence != 0 { + n += 1 + sovGenesis(uint64(m.NextClientSequence)) + } return n } @@ -392,6 +411,25 @@ func (m *GenesisState) Unmarshal(dAtA []byte) error { } } m.CreateLocalhost = bool(v != 0) + 
case 5: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field NextClientSequence", wireType) + } + m.NextClientSequence = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.NextClientSequence |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } default: iNdEx = preIndex skippy, err := skipGenesis(dAtA[iNdEx:]) diff --git a/x/ibc/core/02-client/types/genesis_test.go b/x/ibc/core/02-client/types/genesis_test.go index 7d131bae619..81eff78335d 100644 --- a/x/ibc/core/02-client/types/genesis_test.go +++ b/x/ibc/core/02-client/types/genesis_test.go @@ -90,6 +90,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(exported.Tendermint, exported.Localhost), false, + 0, ), expPass: true, }, @@ -119,6 +120,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(exported.Tendermint), false, + 0, ), expPass: false, }, @@ -134,6 +136,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { nil, types.NewParams(exported.Tendermint), false, + 0, ), expPass: false, }, @@ -163,6 +166,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(exported.Tendermint), false, + 0, ), expPass: false, }, @@ -192,6 +196,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(exported.Tendermint), false, + 0, ), expPass: false, }, @@ -221,6 +226,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(exported.Tendermint), false, + 0, ), expPass: false, }, @@ -250,6 +256,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(exported.Solomachine), false, + 0, ), expPass: false, }, @@ -279,6 +286,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(" "), false, + 0, ), expPass: false, }, @@ -308,6 +316,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(" "), true, + 0, ), expPass: false, }, @@ -337,6 +346,7 @@ func (suite *TypesTestSuite) TestValidateGenesis() { }, types.NewParams(exported.Tendermint), true, + 0, ), expPass: false, }, diff --git a/x/ibc/core/02-client/types/height.go b/x/ibc/core/02-client/types/height.go index d507ddea8c8..9ac6fec7559 100644 --- a/x/ibc/core/02-client/types/height.go +++ b/x/ibc/core/02-client/types/height.go @@ -17,7 +17,7 @@ var _ exported.Height = (*Height)(nil) // IsRevisionFormat checks if a chainID is in the format required for parsing revisions // The chainID must be in the form: `{chainID}-{revision} // 24-host may enforce stricter checks on chainID -var IsRevisionFormat = regexp.MustCompile(`^.+[^-]-{1}[1-9][0-9]*$`).MatchString +var IsRevisionFormat = regexp.MustCompile(`^.*[^-]-{1}[1-9][0-9]*$`).MatchString // ZeroHeight is a helper function which returns an uninitialized height. 
func ZeroHeight() Height { diff --git a/x/ibc/core/02-client/types/height_test.go b/x/ibc/core/02-client/types/height_test.go index f2615c8c1d7..a455b7f58d4 100644 --- a/x/ibc/core/02-client/types/height_test.go +++ b/x/ibc/core/02-client/types/height_test.go @@ -104,6 +104,7 @@ func TestParseChainID(t *testing.T) { formatted bool }{ {"gaiamainnet-3", 3, true}, + {"a-1", 1, true}, {"gaia-mainnet-40", 40, true}, {"gaiamainnet-3-39", 39, true}, {"gaiamainnet--", 0, false}, @@ -111,10 +112,13 @@ func TestParseChainID(t *testing.T) { {"gaiamainnet--4", 0, false}, {"gaiamainnet-3.4", 0, false}, {"gaiamainnet", 0, false}, + {"a--1", 0, false}, + {"-1", 0, false}, + {"--1", 0, false}, } for i, tc := range cases { - require.Equal(t, tc.formatted, types.IsRevisionFormat(tc.chainID), "case %d does not match expected format", i) + require.Equal(t, tc.formatted, types.IsRevisionFormat(tc.chainID), "id %s does not match expected format", tc.chainID) revision := types.ParseChainID(tc.chainID) require.Equal(t, tc.revision, revision, "case %d returns incorrect revision", i) diff --git a/x/ibc/core/02-client/types/keys.go b/x/ibc/core/02-client/types/keys.go index f360393335d..321f5e3ffa3 100644 --- a/x/ibc/core/02-client/types/keys.go +++ b/x/ibc/core/02-client/types/keys.go @@ -1,5 +1,15 @@ package types +import ( + "fmt" + "regexp" + "strconv" + "strings" + + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + host "github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" +) + const ( // SubModuleName defines the IBC client name SubModuleName string = "client" @@ -9,4 +19,47 @@ const ( // QuerierRoute is the querier route for IBC client QuerierRoute string = SubModuleName + + // KeyNextClientSequence is the key used to store the next client sequence in + // the keeper. + KeyNextClientSequence = "nextClientSequence" ) + +// FormatClientIdentifier returns the client identifier with the sequence appended. +// This is a SDK specific format not enforced by IBC protocol. +func FormatClientIdentifier(clientType string, sequence uint64) string { + return fmt.Sprintf("%s-%d", clientType, sequence) +} + +// IsClientIDFormat checks if a clientID is in the format required on the SDK for +// parsing client identifiers. The client identifier must be in the form: `{client-type}-{N} +var IsClientIDFormat = regexp.MustCompile(`^.*[^-]-[0-9]{1,20}$`).MatchString + +// IsValidClientID checks if the clientID is valid and can be parsed into the client +// identifier format. +func IsValidClientID(clientID string) bool { + _, _, err := ParseClientIdentifier(clientID) + return err == nil +} + +// ParseClientIdentifier parses the client type and sequence from the client identifier. 
+func ParseClientIdentifier(clientID string) (string, uint64, error) { + if !IsClientIDFormat(clientID) { + return "", 0, sdkerrors.Wrapf(host.ErrInvalidID, "invalid client identifier %s is not in format: `{client-type}-{N}`", clientID) + } + + splitStr := strings.Split(clientID, "-") + lastIndex := len(splitStr) - 1 + + clientType := strings.Join(splitStr[:lastIndex], "-") + if strings.TrimSpace(clientType) == "" { + return "", 0, sdkerrors.Wrap(host.ErrInvalidID, "client identifier must be in format: `{client-type}-{N}` and client type cannot be blank") + } + + sequence, err := strconv.ParseUint(splitStr[lastIndex], 10, 64) + if err != nil { + return "", 0, sdkerrors.Wrap(err, "failed to parse client identifier sequence") + } + + return clientType, sequence, nil +} diff --git a/x/ibc/core/02-client/types/keys_test.go b/x/ibc/core/02-client/types/keys_test.go new file mode 100644 index 00000000000..dbe56657ad8 --- /dev/null +++ b/x/ibc/core/02-client/types/keys_test.go @@ -0,0 +1,53 @@ +package types_test + +import ( + "math" + "testing" + + "github.com/cosmos/cosmos-sdk/x/ibc/core/02-client/types" + "github.com/stretchr/testify/require" +) + +// tests ParseClientIdentifier and IsValidClientID +func TestParseClientIdentifier(t *testing.T) { + testCases := []struct { + name string + clientID string + clientType string + expSeq uint64 + expPass bool + }{ + {"valid 0", "tendermint-0", "tendermint", 0, true}, + {"valid 1", "tendermint-1", "tendermint", 1, true}, + {"valid solemachine", "solomachine-v1-1", "solomachine-v1", 1, true}, + {"valid large sequence", types.FormatClientIdentifier("tendermint", math.MaxUint64), "tendermint", math.MaxUint64, true}, + {"valid short client type", "t-0", "t", 0, true}, + // one above uint64 max + {"invalid uint64", "tendermint-18446744073709551616", "tendermint", 0, false}, + // uint64 == 20 characters + {"invalid large sequence", "tendermint-2345682193567182931243", "tendermint", 0, false}, + {"missing dash", "tendermint0", "tendermint", 0, false}, + {"blank id", " ", " ", 0, false}, + {"empty id", "", "", 0, false}, + {"negative sequence", "tendermint--1", "tendermint", 0, false}, + {"invalid format", "tendermint-tm", "tendermint", 0, false}, + {"empty clientype", " -100", "tendermint", 0, false}, + } + + for _, tc := range testCases { + + clientType, seq, err := types.ParseClientIdentifier(tc.clientID) + valid := types.IsValidClientID(tc.clientID) + require.Equal(t, tc.expSeq, seq, tc.clientID) + + if tc.expPass { + require.NoError(t, err, tc.name) + require.True(t, valid) + require.Equal(t, tc.clientType, clientType) + } else { + require.Error(t, err, tc.name, tc.clientID) + require.False(t, valid) + require.Equal(t, "", clientType) + } + } +} diff --git a/x/ibc/core/02-client/types/msgs.go b/x/ibc/core/02-client/types/msgs.go index d1d084ddadd..1e884123d74 100644 --- a/x/ibc/core/02-client/types/msgs.go +++ b/x/ibc/core/02-client/types/msgs.go @@ -31,7 +31,7 @@ var ( // NewMsgCreateClient creates a new MsgCreateClient instance //nolint:interfacer func NewMsgCreateClient( - id string, clientState exported.ClientState, consensusState exported.ConsensusState, signer sdk.AccAddress, + clientState exported.ClientState, consensusState exported.ConsensusState, signer sdk.AccAddress, ) (*MsgCreateClient, error) { anyClientState, err := PackClientState(clientState) @@ -45,7 +45,6 @@ func NewMsgCreateClient( } return &MsgCreateClient{ - ClientId: id, ClientState: anyClientState, ConsensusState: anyConsensusState, Signer: signer.String(), @@ -75,17 +74,20 @@ 
func (msg MsgCreateClient) ValidateBasic() error { if err := clientState.Validate(); err != nil { return err } - if clientState.ClientType() == exported.Localhost || msg.ClientId == exported.Localhost { + if clientState.ClientType() == exported.Localhost { return sdkerrors.Wrap(ErrInvalidClient, "localhost client can only be created on chain initialization") } consensusState, err := UnpackConsensusState(msg.ConsensusState) if err != nil { return err } - if err := consensusState.ValidateBasic(); err != nil { - return err + if clientState.ClientType() != consensusState.ClientType() { + return sdkerrors.Wrap(ErrInvalidClientType, "client type for client state and consensus state do not match") } - return host.ClientIdentifierValidator(msg.ClientId) + if err := ValidateClientType(clientState.ClientType()); err != nil { + return sdkerrors.Wrap(err, "client type does not meet naming constraints") + } + return consensusState.ValidateBasic() } // GetSignBytes implements sdk.Msg. The function will panic since it is used diff --git a/x/ibc/core/02-client/types/msgs_test.go b/x/ibc/core/02-client/types/msgs_test.go index 7d2da65d2cd..e42725bae26 100644 --- a/x/ibc/core/02-client/types/msgs_test.go +++ b/x/ibc/core/02-client/types/msgs_test.go @@ -12,7 +12,6 @@ import ( "github.com/cosmos/cosmos-sdk/x/ibc/core/exported" solomachinetypes "github.com/cosmos/cosmos-sdk/x/ibc/light-clients/06-solomachine/types" ibctmtypes "github.com/cosmos/cosmos-sdk/x/ibc/light-clients/07-tendermint/types" - localhosttypes "github.com/cosmos/cosmos-sdk/x/ibc/light-clients/09-localhost/types" ibctesting "github.com/cosmos/cosmos-sdk/x/ibc/testing" ) @@ -50,14 +49,14 @@ func (suite *TypesTestSuite) TestMarshalMsgCreateClient() { { "solo machine client", func() { soloMachine := ibctesting.NewSolomachine(suite.T(), suite.chainA.Codec, "solomachine", "", 2) - msg, err = types.NewMsgCreateClient(soloMachine.ClientID, soloMachine.ClientState(), soloMachine.ConsensusState(), suite.chainA.SenderAccount.GetAddress()) + msg, err = types.NewMsgCreateClient(soloMachine.ClientState(), soloMachine.ConsensusState(), suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) }, }, { "tendermint client", func() { tendermintClient := ibctmtypes.NewClientState(suite.chainA.ChainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - msg, err = types.NewMsgCreateClient("tendermint", tendermintClient, suite.chainA.CurrentTMClientHeader().ConsensusState(), suite.chainA.SenderAccount.GetAddress()) + msg, err = types.NewMsgCreateClient(tendermintClient, suite.chainA.CurrentTMClientHeader().ConsensusState(), suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) }, }, @@ -98,18 +97,11 @@ func (suite *TypesTestSuite) TestMsgCreateClient_ValidateBasic() { malleate func() expPass bool }{ - { - "invalid client-id", - func() { - msg.ClientId = "" - }, - false, - }, { "valid - tendermint client", func() { tendermintClient := ibctmtypes.NewClientState(suite.chainA.ChainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - msg, err = types.NewMsgCreateClient("tendermint", tendermintClient, suite.chainA.CurrentTMClientHeader().ConsensusState(), suite.chainA.SenderAccount.GetAddress()) + msg, err = types.NewMsgCreateClient(tendermintClient, 
suite.chainA.CurrentTMClientHeader().ConsensusState(), suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) }, true, @@ -117,7 +109,7 @@ func (suite *TypesTestSuite) TestMsgCreateClient_ValidateBasic() { { "invalid tendermint client", func() { - msg, err = types.NewMsgCreateClient("tendermint", &ibctmtypes.ClientState{}, suite.chainA.CurrentTMClientHeader().ConsensusState(), suite.chainA.SenderAccount.GetAddress()) + msg, err = types.NewMsgCreateClient(&ibctmtypes.ClientState{}, suite.chainA.CurrentTMClientHeader().ConsensusState(), suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) }, false, @@ -133,7 +125,7 @@ func (suite *TypesTestSuite) TestMsgCreateClient_ValidateBasic() { "failed to unpack consensus state", func() { tendermintClient := ibctmtypes.NewClientState(suite.chainA.ChainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) - msg, err = types.NewMsgCreateClient("tendermint", tendermintClient, suite.chainA.CurrentTMClientHeader().ConsensusState(), suite.chainA.SenderAccount.GetAddress()) + msg, err = types.NewMsgCreateClient(tendermintClient, suite.chainA.CurrentTMClientHeader().ConsensusState(), suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) msg.ConsensusState = nil }, @@ -150,7 +142,7 @@ func (suite *TypesTestSuite) TestMsgCreateClient_ValidateBasic() { "valid - solomachine client", func() { soloMachine := ibctesting.NewSolomachine(suite.T(), suite.chainA.Codec, "solomachine", "", 2) - msg, err = types.NewMsgCreateClient(soloMachine.ClientID, soloMachine.ClientState(), soloMachine.ConsensusState(), suite.chainA.SenderAccount.GetAddress()) + msg, err = types.NewMsgCreateClient(soloMachine.ClientState(), soloMachine.ConsensusState(), suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) }, true, @@ -159,7 +151,7 @@ func (suite *TypesTestSuite) TestMsgCreateClient_ValidateBasic() { "invalid solomachine client", func() { soloMachine := ibctesting.NewSolomachine(suite.T(), suite.chainA.Codec, "solomachine", "", 2) - msg, err = types.NewMsgCreateClient(soloMachine.ClientID, &solomachinetypes.ClientState{}, soloMachine.ConsensusState(), suite.chainA.SenderAccount.GetAddress()) + msg, err = types.NewMsgCreateClient(&solomachinetypes.ClientState{}, soloMachine.ConsensusState(), suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) }, false, @@ -168,16 +160,17 @@ func (suite *TypesTestSuite) TestMsgCreateClient_ValidateBasic() { "invalid solomachine consensus state", func() { soloMachine := ibctesting.NewSolomachine(suite.T(), suite.chainA.Codec, "solomachine", "", 2) - msg, err = types.NewMsgCreateClient(soloMachine.ClientID, soloMachine.ClientState(), &solomachinetypes.ConsensusState{}, suite.chainA.SenderAccount.GetAddress()) + msg, err = types.NewMsgCreateClient(soloMachine.ClientState(), &solomachinetypes.ConsensusState{}, suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) }, false, }, { - "unsupported - localhost client", + "invalid - client state and consensus state client types do not match", func() { - localhostClient := localhosttypes.NewClientState(suite.chainA.ChainID, types.NewHeight(0, uint64(suite.chainA.LastHeader.Header.Height))) - msg, err = types.NewMsgCreateClient("localhost", localhostClient, suite.chainA.LastHeader.ConsensusState(), suite.chainA.SenderAccount.GetAddress()) + tendermintClient := 
ibctmtypes.NewClientState(suite.chainA.ChainID, ibctesting.DefaultTrustLevel, ibctesting.TrustingPeriod, ibctesting.UnbondingPeriod, ibctesting.MaxClockDrift, clientHeight, commitmenttypes.GetSDKSpecs(), ibctesting.UpgradePath, false, false) + soloMachine := ibctesting.NewSolomachine(suite.T(), suite.chainA.Codec, "solomachine", "", 2) + msg, err = types.NewMsgCreateClient(tendermintClient, soloMachine.ConsensusState(), suite.chainA.SenderAccount.GetAddress()) suite.Require().NoError(err) }, false, diff --git a/x/ibc/core/02-client/types/tx.pb.go b/x/ibc/core/02-client/types/tx.pb.go index 2a1a6b8885c..b0bf22d0cc1 100644 --- a/x/ibc/core/02-client/types/tx.pb.go +++ b/x/ibc/core/02-client/types/tx.pb.go @@ -31,15 +31,13 @@ const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package // MsgCreateClient defines a message to create an IBC client type MsgCreateClient struct { - // client unique identifier - ClientId string `protobuf:"bytes,1,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty" yaml:"client_id"` // light client state - ClientState *types.Any `protobuf:"bytes,2,opt,name=client_state,json=clientState,proto3" json:"client_state,omitempty" yaml:"client_state"` + ClientState *types.Any `protobuf:"bytes,1,opt,name=client_state,json=clientState,proto3" json:"client_state,omitempty" yaml:"client_state"` // consensus state associated with the client that corresponds to a given // height. - ConsensusState *types.Any `protobuf:"bytes,3,opt,name=consensus_state,json=consensusState,proto3" json:"consensus_state,omitempty" yaml:"consensus_state"` + ConsensusState *types.Any `protobuf:"bytes,2,opt,name=consensus_state,json=consensusState,proto3" json:"consensus_state,omitempty" yaml:"consensus_state"` // signer address - Signer string `protobuf:"bytes,4,opt,name=signer,proto3" json:"signer,omitempty"` + Signer string `protobuf:"bytes,3,opt,name=signer,proto3" json:"signer,omitempty"` } func (m *MsgCreateClient) Reset() { *m = MsgCreateClient{} } @@ -374,45 +372,45 @@ func init() { func init() { proto.RegisterFile("ibc/core/client/v1/tx.proto", fileDescriptor_cb5dc4651eb49a04) } var fileDescriptor_cb5dc4651eb49a04 = []byte{ - // 598 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x55, 0x3f, 0x6f, 0xd3, 0x4e, - 0x18, 0x8e, 0x9b, 0xdf, 0x2f, 0x6a, 0xaf, 0x81, 0x56, 0x26, 0xb4, 0xa9, 0xab, 0xda, 0x95, 0xe9, - 0x10, 0x44, 0xeb, 0x23, 0x61, 0x00, 0x75, 0x23, 0x9d, 0x18, 0x22, 0x51, 0x57, 0x0c, 0xb0, 0x04, - 0xff, 0xb9, 0x5e, 0xac, 0x26, 0xbe, 0xc8, 0x67, 0x47, 0xcd, 0x37, 0x60, 0x44, 0x82, 0x0f, 0x50, - 0x31, 0xf0, 0x59, 0x18, 0x3b, 0x30, 0x30, 0x45, 0x28, 0x59, 0x98, 0xf3, 0x09, 0x90, 0xef, 0x1c, - 0xcb, 0x76, 0xed, 0x28, 0x82, 0x91, 0xc9, 0x7e, 0xef, 0x7d, 0xee, 0x79, 0x9e, 0x7b, 0xdf, 0xf7, - 0x6c, 0xb0, 0xef, 0x98, 0x16, 0xb4, 0x88, 0x87, 0xa0, 0xd5, 0x77, 0x90, 0xeb, 0xc3, 0x51, 0x13, - 0xfa, 0xd7, 0xda, 0xd0, 0x23, 0x3e, 0x11, 0x45, 0xc7, 0xb4, 0xb4, 0x30, 0xa9, 0xf1, 0xa4, 0x36, - 0x6a, 0x4a, 0x35, 0x4c, 0x30, 0x61, 0x69, 0x18, 0xbe, 0x71, 0xa4, 0xb4, 0x87, 0x09, 0xc1, 0x7d, - 0x04, 0x59, 0x64, 0x06, 0x97, 0xd0, 0x70, 0xc7, 0x51, 0x4a, 0xc9, 0x51, 0x88, 0xe8, 0x18, 0x40, - 0xfd, 0xb4, 0x06, 0xb6, 0x3a, 0x14, 0x9f, 0x79, 0xc8, 0xf0, 0xd1, 0x19, 0xcb, 0x88, 0x4d, 0xb0, - 0xc1, 0x31, 0x5d, 0xc7, 0xae, 0x0b, 0x87, 0x42, 0x63, 0xa3, 0x5d, 0x9b, 0x4f, 0x94, 0xed, 0xb1, - 0x31, 0xe8, 0x9f, 0xaa, 0x71, 0x4a, 0xd5, 0xd7, 0xf9, 0xfb, 0x2b, 0x5b, 0x7c, 0x0d, 0xaa, 0xd1, - 0x3a, 0xf5, 0x0d, 0x1f, 0xd5, 0xd7, 
0x0e, 0x85, 0xc6, 0x66, 0xab, 0xa6, 0x71, 0x67, 0xda, 0xc2, - 0x99, 0xf6, 0xd2, 0x1d, 0xb7, 0x77, 0xe7, 0x13, 0xe5, 0x41, 0x8a, 0x8b, 0xed, 0x51, 0xf5, 0x4d, - 0x1e, 0x5e, 0x84, 0x91, 0xf8, 0x16, 0x6c, 0x59, 0xc4, 0xa5, 0xc8, 0xa5, 0x01, 0x8d, 0x48, 0xcb, - 0x4b, 0x48, 0xa5, 0xf9, 0x44, 0xd9, 0x89, 0x48, 0xd3, 0xdb, 0x54, 0xfd, 0x7e, 0xbc, 0xc2, 0xa9, - 0x77, 0x40, 0x85, 0x3a, 0xd8, 0x45, 0x5e, 0xfd, 0xbf, 0xf0, 0x70, 0x7a, 0x14, 0x9d, 0xae, 0x7f, - 0xb8, 0x51, 0x4a, 0xbf, 0x6e, 0x94, 0x92, 0xba, 0x07, 0x76, 0x33, 0x45, 0xd1, 0x11, 0x1d, 0x86, - 0x2c, 0xea, 0x67, 0x81, 0x15, 0xec, 0xcd, 0xd0, 0xfe, 0xab, 0x82, 0x1d, 0x83, 0x4a, 0x0f, 0x19, - 0x36, 0xf2, 0x96, 0x95, 0x4a, 0x8f, 0x30, 0x09, 0xc7, 0xe5, 0xa5, 0x8e, 0x93, 0xae, 0x62, 0xc7, - 0xdf, 0xcb, 0x60, 0x9b, 0xe5, 0xb0, 0x67, 0xd8, 0xff, 0x4a, 0x8f, 0xcf, 0x41, 0x6d, 0xe8, 0x11, - 0x72, 0xd9, 0x0d, 0xf8, 0xb1, 0xbb, 0x5c, 0x97, 0x75, 0xbc, 0xda, 0x56, 0xe6, 0x13, 0x65, 0x9f, - 0x33, 0xe5, 0xa1, 0x54, 0x5d, 0x64, 0xcb, 0xe9, 0x92, 0x5d, 0x81, 0x83, 0x0c, 0x38, 0xe3, 0xfd, - 0x7f, 0xc6, 0xdd, 0x98, 0x4f, 0x94, 0xa3, 0x5c, 0xee, 0xac, 0x67, 0x29, 0x25, 0x52, 0x34, 0xa3, - 0x95, 0x82, 0x8e, 0x4b, 0xa0, 0x9e, 0xed, 0x6a, 0xdc, 0xf2, 0xaf, 0x02, 0x78, 0xd8, 0xa1, 0xf8, - 0x22, 0x30, 0x07, 0x8e, 0xdf, 0x71, 0xa8, 0x89, 0x7a, 0xc6, 0xc8, 0x21, 0x81, 0xf7, 0x27, 0x7d, - 0x7f, 0x01, 0xaa, 0x83, 0x04, 0xc5, 0xd2, 0x81, 0x4d, 0x21, 0x57, 0x18, 0x5b, 0x05, 0x1c, 0xe4, - 0xfa, 0x5c, 0x9c, 0xa4, 0xf5, 0xa5, 0x0c, 0xca, 0x1d, 0x8a, 0xc5, 0xf7, 0xa0, 0x9a, 0xfa, 0x46, - 0x3d, 0xd2, 0xee, 0x7e, 0x1e, 0xb5, 0xcc, 0x9d, 0x95, 0x9e, 0xac, 0x00, 0x5a, 0x28, 0x85, 0x0a, - 0xa9, 0x4b, 0x5d, 0xa4, 0x90, 0x04, 0x15, 0x2a, 0xe4, 0x5d, 0x44, 0xd1, 0x02, 0xf7, 0xd2, 0x13, - 0x75, 0x54, 0xb8, 0x3b, 0x81, 0x92, 0x8e, 0x57, 0x41, 0xc5, 0x22, 0x1e, 0x10, 0x73, 0xda, 0xfe, - 0xb8, 0x80, 0xe3, 0x2e, 0x54, 0x6a, 0xae, 0x0c, 0x5d, 0x68, 0xb6, 0xcf, 0xbf, 0x4d, 0x65, 0xe1, - 0x76, 0x2a, 0x0b, 0x3f, 0xa7, 0xb2, 0xf0, 0x71, 0x26, 0x97, 0x6e, 0x67, 0x72, 0xe9, 0xc7, 0x4c, - 0x2e, 0xbd, 0x7b, 0x8e, 0x1d, 0xbf, 0x17, 0x98, 0x9a, 0x45, 0x06, 0xd0, 0x22, 0x74, 0x40, 0x68, - 0xf4, 0x38, 0xa1, 0xf6, 0x15, 0xbc, 0x86, 0xf1, 0xef, 0xe9, 0x69, 0xeb, 0x24, 0xfa, 0x43, 0xf9, - 0xe3, 0x21, 0xa2, 0x66, 0x85, 0x8d, 0xd5, 0xb3, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xd2, 0x19, - 0x59, 0x52, 0x23, 0x07, 0x00, 0x00, + // 601 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0x3f, 0x6f, 0xd3, 0x40, + 0x1c, 0x8d, 0x1b, 0x88, 0x9a, 0x6b, 0xa0, 0x95, 0x09, 0x6d, 0xea, 0xaa, 0x76, 0x64, 0x3a, 0x04, + 0xd1, 0xfa, 0x48, 0x18, 0x40, 0xdd, 0x48, 0x27, 0x86, 0x48, 0xd4, 0x15, 0x03, 0x2c, 0xc1, 0x7f, + 0xae, 0x97, 0x53, 0x13, 0x5f, 0xe4, 0xb3, 0xa3, 0xe6, 0x1b, 0x30, 0x32, 0xf0, 0x01, 0x2a, 0x06, + 0x3e, 0x0b, 0x63, 0x07, 0x06, 0xa6, 0xa8, 0x4a, 0x16, 0xe6, 0x7c, 0x02, 0x14, 0x9f, 0x13, 0x62, + 0xd7, 0x8e, 0x2c, 0xa0, 0x53, 0x7c, 0xfe, 0xbd, 0x7b, 0xef, 0xf7, 0xf2, 0x7e, 0xe7, 0x03, 0x7b, + 0xc4, 0xb4, 0xa0, 0x45, 0x5d, 0x04, 0xad, 0x2e, 0x41, 0x8e, 0x07, 0x07, 0x75, 0xe8, 0x5d, 0x6a, + 0x7d, 0x97, 0x7a, 0x54, 0x14, 0x89, 0x69, 0x69, 0xb3, 0xa2, 0xc6, 0x8b, 0xda, 0xa0, 0x2e, 0x95, + 0x31, 0xc5, 0x34, 0x28, 0xc3, 0xd9, 0x13, 0x47, 0x4a, 0xbb, 0x98, 0x52, 0xdc, 0x45, 0x30, 0x58, + 0x99, 0xfe, 0x39, 0x34, 0x9c, 0x61, 0x58, 0x52, 0x12, 0x14, 0x42, 0xba, 0x00, 0xa0, 0xde, 0x08, + 0x60, 0xb3, 0xc5, 0xf0, 0x89, 0x8b, 0x0c, 0x0f, 0x9d, 0x04, 0x15, 0xf1, 0x2d, 0x28, 0x71, 0x4c, + 0x9b, 0x79, 0x86, 0x87, 0x2a, 0x42, 0x55, 0xa8, 0x6d, 0x34, 0xca, 0x1a, 
0x97, 0xd1, 0xe6, 0x32, + 0xda, 0x6b, 0x67, 0xd8, 0xdc, 0x99, 0x8e, 0x94, 0x47, 0x43, 0xa3, 0xd7, 0x3d, 0x56, 0x97, 0xf7, + 0xa8, 0xfa, 0x06, 0x5f, 0x9e, 0xcd, 0x56, 0xe2, 0x7b, 0xb0, 0x69, 0x51, 0x87, 0x21, 0x87, 0xf9, + 0x2c, 0x24, 0x5d, 0x5b, 0x41, 0x2a, 0x4d, 0x47, 0xca, 0x76, 0x48, 0x1a, 0xdd, 0xa6, 0xea, 0x0f, + 0x17, 0x6f, 0x38, 0xf5, 0x36, 0x28, 0x30, 0x82, 0x1d, 0xe4, 0x56, 0xf2, 0x55, 0xa1, 0x56, 0xd4, + 0xc3, 0xd5, 0xf1, 0xfa, 0xa7, 0x2b, 0x25, 0xf7, 0xeb, 0x4a, 0xc9, 0xa9, 0xbb, 0x60, 0x27, 0xe6, + 0x50, 0x47, 0xac, 0x3f, 0x63, 0x51, 0xbf, 0x70, 0xf7, 0xef, 0xfa, 0xf6, 0x1f, 0xf7, 0x75, 0x50, + 0x0c, 0x9d, 0x10, 0x3b, 0xb0, 0x5e, 0x6c, 0x96, 0xa7, 0x23, 0x65, 0x2b, 0x62, 0x92, 0xd8, 0xaa, + 0xbe, 0xce, 0x9f, 0xdf, 0xd8, 0xe2, 0x21, 0x28, 0x74, 0x90, 0x61, 0x23, 0x77, 0x95, 0x2b, 0x3d, + 0xc4, 0x64, 0xee, 0x78, 0xb9, 0xab, 0x45, 0xc7, 0x3f, 0xf2, 0x60, 0x2b, 0xa8, 0x61, 0xd7, 0xb0, + 0xff, 0xa1, 0xe5, 0x78, 0xc6, 0x6b, 0x77, 0x91, 0x71, 0xfe, 0x3f, 0x65, 0x7c, 0x0a, 0xca, 0x7d, + 0x97, 0xd2, 0xf3, 0xb6, 0xcf, 0x6d, 0xb7, 0xb9, 0x6e, 0xe5, 0x5e, 0x55, 0xa8, 0x95, 0x9a, 0xca, + 0x74, 0xa4, 0xec, 0x71, 0xa6, 0x24, 0x94, 0xaa, 0x8b, 0xc1, 0xeb, 0xe8, 0x5f, 0x76, 0x01, 0xf6, + 0x63, 0xe0, 0x58, 0xef, 0xf7, 0x03, 0xee, 0xda, 0x74, 0xa4, 0x1c, 0x24, 0x72, 0xc7, 0x7b, 0x96, + 0x22, 0x22, 0x69, 0x33, 0x5a, 0x48, 0x49, 0x5c, 0x02, 0x95, 0x78, 0xaa, 0x8b, 0xc8, 0xbf, 0x09, + 0xe0, 0x71, 0x8b, 0xe1, 0x33, 0xdf, 0xec, 0x11, 0xaf, 0x45, 0x98, 0x89, 0x3a, 0xc6, 0x80, 0x50, + 0xdf, 0xfd, 0x9b, 0xdc, 0x5f, 0x81, 0x52, 0x6f, 0x89, 0x62, 0xe5, 0xc0, 0x46, 0x90, 0x19, 0xc6, + 0x56, 0x01, 0xfb, 0x89, 0x7d, 0xce, 0x9d, 0x34, 0xbe, 0xe6, 0x41, 0xbe, 0xc5, 0xb0, 0xf8, 0x11, + 0x94, 0x22, 0x1f, 0x9c, 0x27, 0xda, 0xed, 0x6f, 0x9d, 0x16, 0x3b, 0xb3, 0xd2, 0xb3, 0x0c, 0xa0, + 0xb9, 0xd2, 0x4c, 0x21, 0x72, 0xa8, 0xd3, 0x14, 0x96, 0x41, 0xa9, 0x0a, 0x49, 0x07, 0x51, 0xb4, + 0xc0, 0x83, 0xe8, 0x44, 0x1d, 0xa4, 0xee, 0x5e, 0x42, 0x49, 0x87, 0x59, 0x50, 0x0b, 0x11, 0x17, + 0x88, 0x09, 0xb1, 0x3f, 0x4d, 0xe1, 0xb8, 0x0d, 0x95, 0xea, 0x99, 0xa1, 0x73, 0xcd, 0xe6, 0xe9, + 0xf7, 0xb1, 0x2c, 0x5c, 0x8f, 0x65, 0xe1, 0x66, 0x2c, 0x0b, 0x9f, 0x27, 0x72, 0xee, 0x7a, 0x22, + 0xe7, 0x7e, 0x4e, 0xe4, 0xdc, 0x87, 0x97, 0x98, 0x78, 0x1d, 0xdf, 0xd4, 0x2c, 0xda, 0x83, 0x16, + 0x65, 0x3d, 0xca, 0xc2, 0x9f, 0x23, 0x66, 0x5f, 0xc0, 0x4b, 0xb8, 0xb8, 0x6b, 0x9e, 0x37, 0x8e, + 0xc2, 0xeb, 0xc6, 0x1b, 0xf6, 0x11, 0x33, 0x0b, 0xc1, 0x58, 0xbd, 0xf8, 0x1d, 0x00, 0x00, 0xff, + 0xff, 0xf4, 0xf1, 0xa7, 0x9a, 0xf0, 0x06, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
@@ -636,7 +634,7 @@ func (m *MsgCreateClient) MarshalToSizedBuffer(dAtA []byte) (int, error) { copy(dAtA[i:], m.Signer) i = encodeVarintTx(dAtA, i, uint64(len(m.Signer))) i-- - dAtA[i] = 0x22 + dAtA[i] = 0x1a } if m.ConsensusState != nil { { @@ -648,7 +646,7 @@ func (m *MsgCreateClient) MarshalToSizedBuffer(dAtA []byte) (int, error) { i = encodeVarintTx(dAtA, i, uint64(size)) } i-- - dAtA[i] = 0x1a + dAtA[i] = 0x12 } if m.ClientState != nil { { @@ -660,13 +658,6 @@ func (m *MsgCreateClient) MarshalToSizedBuffer(dAtA []byte) (int, error) { i = encodeVarintTx(dAtA, i, uint64(size)) } i-- - dAtA[i] = 0x12 - } - if len(m.ClientId) > 0 { - i -= len(m.ClientId) - copy(dAtA[i:], m.ClientId) - i = encodeVarintTx(dAtA, i, uint64(len(m.ClientId))) - i-- dAtA[i] = 0xa } return len(dAtA) - i, nil @@ -954,10 +945,6 @@ func (m *MsgCreateClient) Size() (n int) { } var l int _ = l - l = len(m.ClientId) - if l > 0 { - n += 1 + l + sovTx(uint64(l)) - } if m.ClientState != nil { l = m.ClientState.Size() n += 1 + l + sovTx(uint64(l)) @@ -1120,38 +1107,6 @@ func (m *MsgCreateClient) Unmarshal(dAtA []byte) error { } switch fieldNum { case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ClientId", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTx - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthTx - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthTx - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.ClientId = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ClientState", wireType) } @@ -1187,7 +1142,7 @@ func (m *MsgCreateClient) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex - case 3: + case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ConsensusState", wireType) } @@ -1223,7 +1178,7 @@ func (m *MsgCreateClient) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex - case 4: + case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Signer", wireType) } diff --git a/x/ibc/core/03-connection/keeper/handshake_test.go b/x/ibc/core/03-connection/keeper/handshake_test.go index eb9ca5c401b..d70fd013dcc 100644 --- a/x/ibc/core/03-connection/keeper/handshake_test.go +++ b/x/ibc/core/03-connection/keeper/handshake_test.go @@ -41,8 +41,9 @@ func (suite *KeeperTestSuite) TestConnOpenInit() { version = &types.Version{} }, false}, {"couldn't add connection to client", func() { - // swap client identifiers to result in client that does not exist - clientB, clientA = suite.coordinator.SetupClients(suite.chainA, suite.chainB, exported.Tendermint) + clientA, clientB = suite.coordinator.SetupClients(suite.chainA, suite.chainB, exported.Tendermint) + // set clientA to invalid client identifier + clientA = "clientidentifier" }, false}, } diff --git a/x/ibc/core/03-connection/keeper/keeper.go b/x/ibc/core/03-connection/keeper/keeper.go index 05d110fec80..6637268687c 100644 --- a/x/ibc/core/03-connection/keeper/keeper.go +++ b/x/ibc/core/03-connection/keeper/keeper.go @@ -1,8 +1,6 @@ package keeper import ( - "fmt" - "github.com/tendermint/tendermint/libs/log" "github.com/cosmos/cosmos-sdk/codec" @@ -36,7 +34,7 @@ func NewKeeper(cdc 
codec.BinaryMarshaler, key sdk.StoreKey, ck types.ClientKeepe // Logger returns a module-specific logger. func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s/%s", host.ModuleName, types.SubModuleName)) + return ctx.Logger().With("module", "x/"+host.ModuleName+"/"+types.SubModuleName) } // GetCommitmentPrefix returns the IBC connection store prefix as a commitment diff --git a/x/ibc/core/03-connection/types/keys.go b/x/ibc/core/03-connection/types/keys.go index c44602203e5..65af565c2ab 100644 --- a/x/ibc/core/03-connection/types/keys.go +++ b/x/ibc/core/03-connection/types/keys.go @@ -2,10 +2,10 @@ package types import ( "fmt" - "strconv" - "strings" + "regexp" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + host "github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" ) const ( @@ -30,11 +30,17 @@ const ( ) // FormatConnectionIdentifier returns the connection identifier with the sequence appended. +// This is a SDK specific format not enforced by IBC protocol. func FormatConnectionIdentifier(sequence uint64) string { return fmt.Sprintf("%s%d", ConnectionPrefix, sequence) } -// IsValidConnectionID return true if the connection identifier is valid. +// IsConnectionIDFormat checks if a connectionID is in the format required on the SDK for +// parsing connection identifiers. The connection identifier must be in the form: `connection-{N} +var IsConnectionIDFormat = regexp.MustCompile(`^connection-[0-9]{1,20}$`).MatchString + +// IsValidConnectionID checks if the connection identifier is valid and can be parsed to +// the connection identifier format. func IsValidConnectionID(connectionID string) bool { _, err := ParseConnectionSequence(connectionID) return err == nil @@ -42,18 +48,14 @@ func IsValidConnectionID(connectionID string) bool { // ParseConnectionSequence parses the connection sequence from the connection identifier. 
func ParseConnectionSequence(connectionID string) (uint64, error) { - if !strings.HasPrefix(connectionID, ConnectionPrefix) { - return 0, sdkerrors.Wrapf(ErrInvalidConnectionIdentifier, "doesn't contain prefix `%s`", ConnectionPrefix) - } - - splitStr := strings.Split(connectionID, ConnectionPrefix) - if len(splitStr) != 2 { - return 0, sdkerrors.Wrap(ErrInvalidConnectionIdentifier, "connection identifier must be in format: `connection-{N}`") + if !IsConnectionIDFormat(connectionID) { + return 0, sdkerrors.Wrap(host.ErrInvalidID, "connection identifier is not in the format: `connection-{N}`") } - sequence, err := strconv.ParseUint(splitStr[1], 10, 64) + sequence, err := host.ParseIdentifier(connectionID, ConnectionPrefix) if err != nil { - return 0, sdkerrors.Wrap(err, "failed to parse connection identifier sequence") + return 0, sdkerrors.Wrap(err, "invalid connection identifier") } + return sequence, nil } diff --git a/x/ibc/core/03-connection/types/keys_test.go b/x/ibc/core/03-connection/types/keys_test.go index 261bd2895fb..c899dc3c53a 100644 --- a/x/ibc/core/03-connection/types/keys_test.go +++ b/x/ibc/core/03-connection/types/keys_test.go @@ -1,6 +1,7 @@ package types_test import ( + "math" "testing" "github.com/cosmos/cosmos-sdk/x/ibc/core/03-connection/types" @@ -17,10 +18,13 @@ func TestParseConnectionSequence(t *testing.T) { }{ {"valid 0", "connection-0", 0, true}, {"valid 1", "connection-1", 1, true}, - {"valid large sequence", "connection-234568219356718293", 234568219356718293, true}, + {"valid large sequence", types.FormatConnectionIdentifier(math.MaxUint64), math.MaxUint64, true}, + // one above uint64 max + {"invalid uint64", "connection-18446744073709551616", 0, false}, // uint64 == 20 characters {"invalid large sequence", "connection-2345682193567182931243", 0, false}, {"capital prefix", "Connection-0", 0, false}, + {"double prefix", "connection-connection-0", 0, false}, {"missing dash", "connection0", 0, false}, {"blank id", " ", 0, false}, {"empty id", "", 0, false}, diff --git a/x/ibc/core/04-channel/keeper/keeper.go b/x/ibc/core/04-channel/keeper/keeper.go index 2390fb41cff..1651eec5ec8 100644 --- a/x/ibc/core/04-channel/keeper/keeper.go +++ b/x/ibc/core/04-channel/keeper/keeper.go @@ -1,7 +1,6 @@ package keeper import ( - "fmt" "strconv" "strings" @@ -52,7 +51,7 @@ func NewKeeper( // Logger returns a module-specific logger. func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s/%s", host.ModuleName, types.SubModuleName)) + return ctx.Logger().With("module", "x/"+host.ModuleName+"/"+types.SubModuleName) } // GenerateChannelIdentifier returns the next channel identifier. diff --git a/x/ibc/core/04-channel/types/keys.go b/x/ibc/core/04-channel/types/keys.go index b6c5745dda1..d3a6cde24d5 100644 --- a/x/ibc/core/04-channel/types/keys.go +++ b/x/ibc/core/04-channel/types/keys.go @@ -2,10 +2,10 @@ package types import ( "fmt" - "strconv" - "strings" + "regexp" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + host "github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" ) const ( @@ -30,11 +30,17 @@ const ( ) // FormatChannelIdentifier returns the channel identifier with the sequence appended. +// This is a SDK specific format not enforced by IBC protocol. func FormatChannelIdentifier(sequence uint64) string { return fmt.Sprintf("%s%d", ChannelPrefix, sequence) } -// IsValidChannelID return true if the channel identifier is valid. 
+// IsChannelIDFormat checks if a channelID is in the format required on the SDK for +// parsing channel identifiers. The channel identifier must be in the form: `channel-{N} +var IsChannelIDFormat = regexp.MustCompile(`^channel-[0-9]{1,20}$`).MatchString + +// IsValidChannelID checks if a channelID is valid and can be parsed to the channel +// identifier format. func IsValidChannelID(channelID string) bool { _, err := ParseChannelSequence(channelID) return err == nil @@ -42,18 +48,14 @@ func IsValidChannelID(channelID string) bool { // ParseChannelSequence parses the channel sequence from the channel identifier. func ParseChannelSequence(channelID string) (uint64, error) { - if !strings.HasPrefix(channelID, ChannelPrefix) { - return 0, sdkerrors.Wrapf(ErrInvalidChannelIdentifier, "doesn't contain prefix `%s`", ChannelPrefix) - } - - splitStr := strings.Split(channelID, ChannelPrefix) - if len(splitStr) != 2 { - return 0, sdkerrors.Wrap(ErrInvalidChannelIdentifier, "channel identifier must be in format: `channel-{N}`") + if !IsChannelIDFormat(channelID) { + return 0, sdkerrors.Wrap(host.ErrInvalidID, "channel identifier is not in the format: `channel-{N}`") } - sequence, err := strconv.ParseUint(splitStr[1], 10, 64) + sequence, err := host.ParseIdentifier(channelID, ChannelPrefix) if err != nil { - return 0, sdkerrors.Wrap(err, "failed to parse channel identifier sequence") + return 0, sdkerrors.Wrap(err, "invalid channel identifier") } + return sequence, nil } diff --git a/x/ibc/core/04-channel/types/keys_test.go b/x/ibc/core/04-channel/types/keys_test.go index 418174cb040..86e4e61aa2c 100644 --- a/x/ibc/core/04-channel/types/keys_test.go +++ b/x/ibc/core/04-channel/types/keys_test.go @@ -18,6 +18,8 @@ func TestParseChannelSequence(t *testing.T) { {"valid 0", "channel-0", 0, true}, {"valid 1", "channel-1", 1, true}, {"valid large sequence", "channel-234568219356718293", 234568219356718293, true}, + // one above uint64 max + {"invalid uint64", "channel-18446744073709551616", 0, false}, // uint64 == 20 characters {"invalid large sequence", "channel-2345682193567182931243", 0, false}, {"capital prefix", "Channel-0", 0, false}, diff --git a/x/ibc/core/05-port/keeper/keeper.go b/x/ibc/core/05-port/keeper/keeper.go index 5321aeb66a5..8a4b2300a4e 100644 --- a/x/ibc/core/05-port/keeper/keeper.go +++ b/x/ibc/core/05-port/keeper/keeper.go @@ -26,7 +26,7 @@ func NewKeeper(sck capabilitykeeper.ScopedKeeper) Keeper { // Logger returns a module-specific logger. func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s/%s", host.ModuleName, types.SubModuleName)) + return ctx.Logger().With("module", "x/"+host.ModuleName+"/"+types.SubModuleName) } // isBounded checks a given port ID is already bounded. diff --git a/x/ibc/core/24-host/parse.go b/x/ibc/core/24-host/parse.go index 7e6301a391f..8c3459500d9 100644 --- a/x/ibc/core/24-host/parse.go +++ b/x/ibc/core/24-host/parse.go @@ -1,11 +1,37 @@ package host import ( + "strconv" "strings" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" ) +// ParseIdentifier parses the sequence from the identifier using the provided prefix. This function +// does not need to be used by counterparty chains. SDK generated connection and channel identifiers +// are required to use this format. 
+func ParseIdentifier(identifier, prefix string) (uint64, error) { + if !strings.HasPrefix(identifier, prefix) { + return 0, sdkerrors.Wrapf(ErrInvalidID, "identifier doesn't contain prefix `%s`", prefix) + } + + splitStr := strings.Split(identifier, prefix) + if len(splitStr) != 2 { + return 0, sdkerrors.Wrapf(ErrInvalidID, "identifier must be in format: `%s{N}`", prefix) + } + + // sanity check + if splitStr[0] != "" { + return 0, sdkerrors.Wrapf(ErrInvalidID, "identifier must begin with prefix %s", prefix) + } + + sequence, err := strconv.ParseUint(splitStr[1], 10, 64) + if err != nil { + return 0, sdkerrors.Wrap(err, "failed to parse identifier sequence") + } + return sequence, nil +} + // ParseConnectionPath returns the connection ID from a full path. It returns // an error if the provided path is invalid. func ParseConnectionPath(path string) (string, error) { diff --git a/x/ibc/core/24-host/parse_test.go b/x/ibc/core/24-host/parse_test.go new file mode 100644 index 00000000000..cbee37ddbcb --- /dev/null +++ b/x/ibc/core/24-host/parse_test.go @@ -0,0 +1,47 @@ +package host_test + +import ( + "math" + "testing" + + connectiontypes "github.com/cosmos/cosmos-sdk/x/ibc/core/03-connection/types" + host "github.com/cosmos/cosmos-sdk/x/ibc/core/24-host" + "github.com/stretchr/testify/require" +) + +func TestParseIdentifier(t *testing.T) { + testCases := []struct { + name string + identifier string + prefix string + expSeq uint64 + expPass bool + }{ + {"valid 0", "connection-0", "connection-", 0, true}, + {"valid 1", "connection-1", "connection-", 1, true}, + {"valid large sequence", connectiontypes.FormatConnectionIdentifier(math.MaxUint64), "connection-", math.MaxUint64, true}, + // one above uint64 max + {"invalid uint64", "connection-18446744073709551616", "connection-", 0, false}, + // uint64 == 20 characters + {"invalid large sequence", "connection-2345682193567182931243", "conenction-", 0, false}, + {"capital prefix", "Connection-0", "connection-", 0, false}, + {"double prefix", "connection-connection-0", "connection-", 0, false}, + {"doesn't have prefix", "connection-0", "prefix", 0, false}, + {"missing dash", "connection0", "connection-", 0, false}, + {"blank id", " ", "connection-", 0, false}, + {"empty id", "", "connection-", 0, false}, + {"negative sequence", "connection--1", "connection-", 0, false}, + } + + for _, tc := range testCases { + + seq, err := host.ParseIdentifier(tc.identifier, tc.prefix) + require.Equal(t, tc.expSeq, seq) + + if tc.expPass { + require.NoError(t, err, tc.name) + } else { + require.Error(t, err, tc.name) + } + } +} diff --git a/x/ibc/core/genesis_test.go b/x/ibc/core/genesis_test.go index 29a26aad740..bc4c5834d3e 100644 --- a/x/ibc/core/genesis_test.go +++ b/x/ibc/core/genesis_test.go @@ -97,6 +97,7 @@ func (suite *IBCTestSuite) TestValidateGenesis() { }, clienttypes.NewParams(exported.Tendermint, exported.Localhost), true, + 0, ), ConnectionGenesis: connectiontypes.NewGenesisState( []connectiontypes.IdentifiedConnection{ @@ -154,6 +155,7 @@ func (suite *IBCTestSuite) TestValidateGenesis() { nil, clienttypes.NewParams(exported.Tendermint), false, + 0, ), ConnectionGenesis: connectiontypes.DefaultGenesisState(), }, @@ -239,6 +241,7 @@ func (suite *IBCTestSuite) TestInitGenesis() { }, clienttypes.NewParams(exported.Tendermint, exported.Localhost), true, + 0, ), ConnectionGenesis: connectiontypes.NewGenesisState( []connectiontypes.IdentifiedConnection{ diff --git a/x/ibc/core/keeper/msg_server.go b/x/ibc/core/keeper/msg_server.go index 
2b655ff9a2f..78f822e4d5a 100644 --- a/x/ibc/core/keeper/msg_server.go +++ b/x/ibc/core/keeper/msg_server.go @@ -32,14 +32,15 @@ func (k Keeper) CreateClient(goCtx context.Context, msg *clienttypes.MsgCreateCl return nil, err } - if err = k.ClientKeeper.CreateClient(ctx, msg.ClientId, clientState, consensusState); err != nil { + clientID, err := k.ClientKeeper.CreateClient(ctx, clientState, consensusState) + if err != nil { return nil, err } ctx.EventManager().EmitEvents(sdk.Events{ sdk.NewEvent( clienttypes.EventTypeCreateClient, - sdk.NewAttribute(clienttypes.AttributeKeyClientID, msg.ClientId), + sdk.NewAttribute(clienttypes.AttributeKeyClientID, clientID), sdk.NewAttribute(clienttypes.AttributeKeyClientType, clientState.ClientType()), sdk.NewAttribute(clienttypes.AttributeKeyConsensusHeight, clientState.GetLatestHeight().String()), ), diff --git a/x/ibc/core/spec/01_concepts.md b/x/ibc/core/spec/01_concepts.md index c3059e5f587..045508999d3 100644 --- a/x/ibc/core/spec/01_concepts.md +++ b/x/ibc/core/spec/01_concepts.md @@ -10,11 +10,11 @@ this [document](https://github.com/cosmos/ics/blob/master/ibc/1_IBC_TERMINOLOGY. ## Client Creation, Updates, and Upgrades IBC clients are on chain light clients. The light client is responsible for verifying -counterparty state. A light client can be created by any user submitting a client -identifier and a valid initial `ClientState` and `ConsensusState`. The client identifier -must not already be used. Clients are given a client identifier prefixed store to -store their associated client state and consensus states. Consensus states are -stored using their associated height. +counterparty state. A light client can be created by any user submitting a valid initial +`ClientState` and `ConsensusState`. The client identifier is auto generated using the +client type and the global client counter appended in the format: `{client-type}-{N}`. +Clients are given a client identifier prefixed store to store their associated client +state and consensus states. Consensus states are stored using their associated height. Clients can be updated by any user submitting a valid `Header`. The client state callback to `CheckHeaderAndUpdateState` is responsible for verifying the header against previously diff --git a/x/ibc/core/spec/02_state.md b/x/ibc/core/spec/02_state.md index 54eb41bc4bc..2c85a525a95 100644 --- a/x/ibc/core/spec/02_state.md +++ b/x/ibc/core/spec/02_state.md @@ -13,9 +13,12 @@ The client type is not stored since it can be obtained through the client state. 
| "0/" | "clients/{identifier}/clientState" | ClientState | | "0/" | "clients/{identifier}/consensusStates/{height}" | ConsensusState | | "0/" | "clients/{identifier}/connections" | []string | +| "0/" | "nextClientSequence | uint64 | | "0/" | "connections/{identifier}" | ConnectionEnd | +| "0/" | "nextConnectionSequence" | uint64 | | "0/" | "ports/{identifier}" | CapabilityKey | | "0/" | "channelEnds/ports/{identifier}/channels/{identifier}" | ChannelEnd | +| "0/" | "nextChannelSequence" | uint64 | | "0/" | "capabilities/ports/{identifier}/channels/{identifier}" | CapabilityKey | | "0/" | "nextSequenceSend/ports/{identifier}/channels/{identifier}" | uint64 | | "0/" | "nextSequenceRecv/ports/{identifier}/channels/{identifier}" | uint64 | diff --git a/x/ibc/core/spec/03_state_transitions.md b/x/ibc/core/spec/03_state_transitions.md index de31957d66e..be3b508b795 100644 --- a/x/ibc/core/spec/03_state_transitions.md +++ b/x/ibc/core/spec/03_state_transitions.md @@ -9,7 +9,7 @@ The described state transitions assume successful message exection. ## Create Client `MsgCreateClient` will initialize and store a `ClientState` and `ConsensusState` in the sub-store -created using the given client identifier. +created using a generated client identifier. ## Update Client diff --git a/x/ibc/core/spec/04_messages.md b/x/ibc/core/spec/04_messages.md index 34d68200b28..3728e6d6f32 100644 --- a/x/ibc/core/spec/04_messages.md +++ b/x/ibc/core/spec/04_messages.md @@ -14,7 +14,6 @@ A light client is created using the `MsgCreateClient`. ```go type MsgCreateClient struct { - ClientId string ClientState *types.Any // proto-packed client state ConsensusState *types.Any // proto-packed consensus state Signer sdk.AccAddress @@ -23,13 +22,11 @@ type MsgCreateClient struct { This message is expected to fail if: -- `ClientId` is invalid (see naming requirements) - `ClientState` is empty or invalid - `ConsensusState` is empty or invalid - `Signer` is empty -- A light client with the provided id and type already exist -The message creates and stores a light client with an initial consensus state for the given client +The message creates and stores a light client with an initial consensus state using a generated client identifier. ### MsgUpdateClient @@ -112,7 +109,6 @@ A connection is initialized on a light client using the `MsgConnectionOpenInit`. ```go type MsgConnectionOpenInit struct { ClientId string - ConnectionId string Counterparty Counterparty Version string Signer sdk.AccAddress @@ -121,7 +117,6 @@ type MsgConnectionOpenInit struct { This message is expected to fail if: - `ClientId` is invalid (see naming requirements) -- `ConnectionId` is invalid (see naming requirements) - `Counterparty` is empty - 'Version' is not empty and invalid - `Signer` is empty @@ -138,8 +133,7 @@ using the `MsgConnectionOpenTry`. 
```go type MsgConnectionOpenTry struct { ClientId string - DesiredConnectionId string - CounterpartyChosenConnectionId string + PreviousConnectionId string ClientState *types.Any // proto-packed counterparty client Counterparty Counterparty CounterpartyVersions []string @@ -155,8 +149,7 @@ type MsgConnectionOpenTry struct { This message is expected to fail if: - `ClientId` is invalid (see naming requirements) -- `DesiredConnectionId` is invalid (see naming requirements) -- `CounterpartyChosenConnectionId` is not empty and doesn't match `DesiredConnectionId` +- `PreviousConnectionId` is not empty and is invalid (see naming requirements) - `ClientState` is not a valid client of the executing chain - `Counterparty` is empty - `CounterpartyVersions` is empty @@ -167,15 +160,13 @@ This message is expected to fail if: - `ConsensusHeight` is zero - `Signer` is empty - A Client hasn't been created for the given ID -- A Connection for the given ID already exists +- A previous connection exists but does not match the supplied parameters. - `ProofInit` does not prove that the counterparty connection is in state INIT - `ProofClient` does not prove that the counterparty has stored the `ClientState` provided in message - `ProofConsensus` does not prove that the counterparty has the correct consensus state for this chain -The message creates a connection for the given ID with an TRYOPEN State. The `CounterpartyChosenConnectionID` -represents the connection ID the counterparty set under `connection.Counterparty.ConnectionId` -to represent the connection ID this chain should use. An empty string indicates the connection -identifier is flexible and gives this chain an opportunity to choose its own identifier. +The message creates a connection for a generated connection ID with a TRYOPEN state. If a previous +connection already exists, it updates the connection state from INIT to TRYOPEN. ### MsgConnectionOpenAck @@ -251,7 +242,6 @@ message. ```go type MsgChannelOpenInit struct { PortId string - ChannelId string Channel Channel Signer sdk.AccAddress } @@ -260,12 +250,11 @@ type MsgChannelOpenInit struct { This message is expected to fail if: - `PortId` is invalid (see naming requirements) -- `ChannelId` is invalid (see naming requirements) - `Channel` is empty - `Signer` is empty - A Channel End exists for the given Channel ID and Port ID -The message creates a channel on chain A with an INIT state for the given Channel ID +The message creates a channel on chain A with an INIT state for a generated Channel ID and Port ID. ### MsgChannelOpenTry @@ -276,8 +265,7 @@ the `MsgChannelOpenTry` message. ```go type MsgChannelOpenTry struct { PortId string - DesiredChannelId string - CounterpartyChosenChannelId string + PreviousChannelId string Channel Channel CounterpartyVersion string ProofInit []byte @@ -289,21 +277,18 @@ type MsgChannelOpenTry struct { This message is expected to fail if: - `PortId` is invalid (see naming requirements) -- `DesiredChannelId` is invalid (see naming requirements) -- `CounterpartyChosenChannelId` is not empty and not equal to `ChannelId` +- `PreviousChannelId` is not empty and is invalid (see naming requirements) - `Channel` is empty - `CounterpartyVersion` is empty - `ProofInit` is empty - `ProofHeight` is zero - `Signer` is empty -- A Channel End exists for the given Channel and Port ID +- A previous channel exists and does not match the provided parameters.
- `ProofInit` does not prove that the counterparty's Channel state is in INIT -The message creates a channel on chain B with an TRYOPEN state for the given Channel ID -and Port ID. The `CounterpartyChosenChannelId` represents the channel ID the counterparty set under -`connection.Counterparty.ChannelId` to represent the channel ID this chain should use. -An empty string indicates the channel identifier is flexible and gives this chain an -opportunity to choose its own identifier. +The message creates a channel on chain B with a TRYOPEN state using a generated Channel ID +and the given Port ID if the previous channel does not already exist. Otherwise it updates the +previous channel state from INIT to TRYOPEN. ### MsgChannelOpenAck diff --git a/x/ibc/light-clients/06-solomachine/client/cli/tx.go b/x/ibc/light-clients/06-solomachine/client/cli/tx.go index 37db768a302..8d1709fc279 100644 --- a/x/ibc/light-clients/06-solomachine/client/cli/tx.go +++ b/x/ibc/light-clients/06-solomachine/client/cli/tx.go @@ -24,12 +24,12 @@ const ( // NewCreateClientCmd defines the command to create a new solo machine client. func NewCreateClientCmd() *cobra.Command { cmd := &cobra.Command{ - Use: "create [client-id] [sequence] [path/to/consensus_state.json]", + Use: "create [sequence] [path/to/consensus_state.json]", Short: "create new solo machine client", Long: `create a new solo machine client with the specified identifier and public key - ConsensusState json example: {"public_key":{"@type":"/cosmos.crypto.secp256k1.PubKey","key":"A/3SXL2ONYaOkxpdR5P8tHTlSlPv1AwQwSFxKRee5JQW"},"diversifier":"diversifier","timestamp":"10"}`, - Example: fmt.Sprintf("%s tx ibc %s create [client-id] [sequence] [path/to/consensus_state] --from node0 --home ../node0/cli --chain-id $CID", version.AppName, types.SubModuleName), - Args: cobra.ExactArgs(3), + Example: fmt.Sprintf("%s tx ibc %s create [sequence] [path/to/consensus_state] --from node0 --home ../node0/cli --chain-id $CID", version.AppName, types.SubModuleName), + Args: cobra.ExactArgs(2), RunE: func(cmd *cobra.Command, args []string) error { clientCtx := client.GetClientContextFromCmd(cmd) clientCtx, err := client.ReadTxCommandFlags(clientCtx, cmd.Flags()) @@ -37,9 +37,7 @@ func NewCreateClientCmd() *cobra.Command { return err } - clientID := args[0] - - sequence, err := strconv.ParseUint(args[1], 10, 64) + sequence, err := strconv.ParseUint(args[0], 10, 64) if err != nil { return err } @@ -48,10 +46,10 @@ func NewCreateClientCmd() *cobra.Command { // attempt to unmarshal consensus state argument consensusState := &types.ConsensusState{} - if err := cdc.UnmarshalJSON([]byte(args[2]), consensusState); err != nil { + if err := cdc.UnmarshalJSON([]byte(args[1]), consensusState); err != nil { // check for file path if JSON input is not provided - contents, err := ioutil.ReadFile(args[2]) + contents, err := ioutil.ReadFile(args[1]) if err != nil { return errors.Wrap(err, "neither JSON input nor path to .json file for consensus state were provided") } @@ -64,7 +62,7 @@ func NewCreateClientCmd() *cobra.Command { allowUpdateAfterProposal, _ := cmd.Flags().GetBool(flagAllowUpdateAfterProposal) clientState := types.NewClientState(sequence, consensusState, allowUpdateAfterProposal) - msg, err := clienttypes.NewMsgCreateClient(clientID, clientState, consensusState, clientCtx.GetFromAddress()) + msg, err := clienttypes.NewMsgCreateClient(clientState, consensusState, clientCtx.GetFromAddress()) if err != nil { return err } diff --git
a/x/ibc/light-clients/07-tendermint/client/cli/tx.go b/x/ibc/light-clients/07-tendermint/client/cli/tx.go index 3bd2f0c0b73..04a274aceca 100644 --- a/x/ibc/light-clients/07-tendermint/client/cli/tx.go +++ b/x/ibc/light-clients/07-tendermint/client/cli/tx.go @@ -34,15 +34,15 @@ const ( // in https://github.com/cosmos/ics/tree/master/spec/ics-002-client-semantics#create func NewCreateClientCmd() *cobra.Command { cmd := &cobra.Command{ - Use: "create [client-id] [path/to/consensus_state.json] [trusting_period] [unbonding_period] [max_clock_drift]", + Use: "create [path/to/consensus_state.json] [trusting_period] [unbonding_period] [max_clock_drift]", Short: "create new tendermint client", Long: `Create a new tendermint IBC client. - 'trust-level' flag can be a fraction (eg: '1/3') or 'default' - 'proof-specs' flag can be JSON input, a path to a .json file or 'default' - 'upgrade-path' flag is a string specifying the upgrade path for this chain where a future upgraded client will be stored. The path is a comma-separated list representing the keys in order of the keyPath to the committed upgraded client. e.g. 'upgrade/upgradedClient'`, - Example: fmt.Sprintf("%s tx ibc %s create [client-id] [path/to/consensus_state.json] [trusting_period] [unbonding_period] [max_clock_drift] --trust-level default --consensus-params [path/to/consensus-params.json] --proof-specs [path/to/proof-specs.json] --upgrade-path upgrade/upgradedClient --from node0 --home ../node0/cli --chain-id $CID", version.AppName, types.SubModuleName), - Args: cobra.ExactArgs(5), + Example: fmt.Sprintf("%s tx ibc %s create [path/to/consensus_state.json] [trusting_period] [unbonding_period] [max_clock_drift] --trust-level default --consensus-params [path/to/consensus-params.json] --proof-specs [path/to/proof-specs.json] --upgrade-path upgrade/upgradedClient --from node0 --home ../node0/cli --chain-id $CID", version.AppName, types.SubModuleName), + Args: cobra.ExactArgs(4), RunE: func(cmd *cobra.Command, args []string) error { clientCtx := client.GetClientContextFromCmd(cmd) clientCtx, err := client.ReadTxCommandFlags(clientCtx, cmd.Flags()) @@ -50,15 +50,13 @@ func NewCreateClientCmd() *cobra.Command { return err } - clientID := args[0] - cdc := codec.NewProtoCodec(clientCtx.InterfaceRegistry) legacyAmino := codec.NewLegacyAmino() var header *types.Header - if err := cdc.UnmarshalJSON([]byte(args[1]), header); err != nil { + if err := cdc.UnmarshalJSON([]byte(args[0]), header); err != nil { // check for file path if JSON input is not provided - contents, err := ioutil.ReadFile(args[1]) + contents, err := ioutil.ReadFile(args[0]) if err != nil { return errors.New("neither JSON input nor path to .json file were provided for consensus header") } @@ -83,17 +81,17 @@ func NewCreateClientCmd() *cobra.Command { } } - trustingPeriod, err := time.ParseDuration(args[2]) + trustingPeriod, err := time.ParseDuration(args[1]) if err != nil { return err } - ubdPeriod, err := time.ParseDuration(args[3]) + ubdPeriod, err := time.ParseDuration(args[2]) if err != nil { return err } - maxClockDrift, err := time.ParseDuration(args[4]) + maxClockDrift, err := time.ParseDuration(args[3]) if err != nil { return err } @@ -137,7 +135,7 @@ func NewCreateClientCmd() *cobra.Command { consensusState := header.ConsensusState() msg, err := clienttypes.NewMsgCreateClient( - clientID, clientState, consensusState, clientCtx.GetFromAddress(), + clientState, consensusState, clientCtx.GetFromAddress(), ) if err != nil { return err diff --git a/x/ibc/testing/chain.go 
b/x/ibc/testing/chain.go index b63e72c3e9d..4dc8ec4f25e 100644 --- a/x/ibc/testing/chain.go +++ b/x/ibc/testing/chain.go @@ -369,8 +369,8 @@ func (chain *TestChain) GetPrefix() commitmenttypes.MerklePrefix { // NewClientID appends a new clientID string in the format: // ClientFor -func (chain *TestChain) NewClientID(counterpartyChainID string) string { - clientID := "client" + strconv.Itoa(len(chain.ClientIDs)) + "For" + counterpartyChainID +func (chain *TestChain) NewClientID(clientType string) string { + clientID := fmt.Sprintf("%s-%s", clientType, strconv.Itoa(len(chain.ClientIDs))) chain.ClientIDs = append(chain.ClientIDs, clientID) return clientID } @@ -460,7 +460,7 @@ func (chain *TestChain) ConstructMsgCreateClient(counterparty *TestChain, client } msg, err := clienttypes.NewMsgCreateClient( - clientID, clientState, consensusState, chain.SenderAccount.GetAddress(), + clientState, consensusState, chain.SenderAccount.GetAddress(), ) require.NoError(chain.t, err) return msg diff --git a/x/ibc/testing/chain_test.go b/x/ibc/testing/chain_test.go index bf5af6de636..361a9c4c15a 100644 --- a/x/ibc/testing/chain_test.go +++ b/x/ibc/testing/chain_test.go @@ -33,6 +33,7 @@ func TestCreateSortedSignerArray(t *testing.T) { // smaller address validator1.Address = []byte{1} + validator2.Address = []byte{2} validator2.VotingPower = 1 expected = []tmtypes.PrivValidator{privVal1, privVal2} diff --git a/x/ibc/testing/coordinator.go b/x/ibc/testing/coordinator.go index aecdcd4b704..95b59a1db9b 100644 --- a/x/ibc/testing/coordinator.go +++ b/x/ibc/testing/coordinator.go @@ -95,7 +95,7 @@ func (coord *Coordinator) CreateClient( ) (clientID string, err error) { coord.CommitBlock(source, counterparty) - clientID = source.NewClientID(counterparty.ChainID) + clientID = source.NewClientID(clientType) switch clientType { case exported.Tendermint: diff --git a/x/mint/keeper/keeper.go b/x/mint/keeper/keeper.go index 485ac961b1f..de0a3122284 100644 --- a/x/mint/keeper/keeper.go +++ b/x/mint/keeper/keeper.go @@ -1,8 +1,6 @@ package keeper import ( - "fmt" - "github.com/tendermint/tendermint/libs/log" "github.com/cosmos/cosmos-sdk/codec" @@ -51,7 +49,7 @@ func NewKeeper( // Logger returns a module-specific logger. func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName)) + return ctx.Logger().With("module", "x/"+types.ModuleName) } // get the minter diff --git a/x/params/keeper/keeper.go b/x/params/keeper/keeper.go index a2fdeaa5c0f..b3d649a2ec4 100644 --- a/x/params/keeper/keeper.go +++ b/x/params/keeper/keeper.go @@ -1,8 +1,6 @@ package keeper import ( - "fmt" - "github.com/tendermint/tendermint/libs/log" "github.com/cosmos/cosmos-sdk/codec" @@ -33,7 +31,7 @@ func NewKeeper(cdc codec.BinaryMarshaler, legacyAmino *codec.LegacyAmino, key, t // Logger returns a module-specific logger. 
func (k Keeper) Logger(ctx sdk.Context) log.Logger { - return ctx.Logger().With("module", fmt.Sprintf("x/%s", proposal.ModuleName)) + return ctx.Logger().With("module", "x/"+proposal.ModuleName) } // Allocate subspace used for keepers diff --git a/x/slashing/keeper/infractions.go b/x/slashing/keeper/infractions.go index 168b97b235b..0c93717d13e 100644 --- a/x/slashing/keeper/infractions.go +++ b/x/slashing/keeper/infractions.go @@ -48,6 +48,8 @@ func (k Keeper) HandleValidatorSignature(ctx sdk.Context, addr cryptotypes.Addre // Array value at this index has not changed, no need to update counter } + minSignedPerWindow := k.MinSignedPerWindow(ctx) + if missed { ctx.EventManager().EmitEvent( sdk.NewEvent( @@ -59,21 +61,22 @@ func (k Keeper) HandleValidatorSignature(ctx sdk.Context, addr cryptotypes.Addre ) logger.Info( - fmt.Sprintf("Absent validator %s at height %d, %d missed, threshold %d", consAddr, height, signInfo.MissedBlocksCounter, k.MinSignedPerWindow(ctx))) + "absent validator", + "height", height, + "validator", consAddr.String(), + "missed", signInfo.MissedBlocksCounter, + "threshold", minSignedPerWindow, + ) } minHeight := signInfo.StartHeight + k.SignedBlocksWindow(ctx) - maxMissed := k.SignedBlocksWindow(ctx) - k.MinSignedPerWindow(ctx) + maxMissed := k.SignedBlocksWindow(ctx) - minSignedPerWindow // if we are past the minimum height and the validator has missed too many blocks, punish them if height > minHeight && signInfo.MissedBlocksCounter > maxMissed { validator := k.sk.ValidatorByConsAddr(ctx, consAddr) if validator != nil && !validator.IsJailed() { - // Downtime confirmed: slash and jail the validator - logger.Info(fmt.Sprintf("Validator %s past min height of %d and below signed blocks threshold of %d", - consAddr, minHeight, k.MinSignedPerWindow(ctx))) - // We need to retrieve the stake distribution which signed the block, so we subtract ValidatorUpdateDelay from the evidence height, // and subtract an additional 1 since this is the LastCommit. // Note that this *can* result in a negative "distributionHeight" up to -ValidatorUpdateDelay-1, @@ -99,10 +102,21 @@ func (k Keeper) HandleValidatorSignature(ctx sdk.Context, addr cryptotypes.Addre signInfo.MissedBlocksCounter = 0 signInfo.IndexOffset = 0 k.clearValidatorMissedBlockBitArray(ctx, consAddr) + + logger.Info( + "slashing and jailing validator due to liveness fault", + "height", height, + "validator", consAddr.String(), + "min_height", minHeight, + "threshold", minSignedPerWindow, + "slashed", k.SlashFractionDowntime(ctx).String(), + "jailed_until", signInfo.JailedUntil, + ) } else { - // Validator was (a) not found or (b) already jailed, don't slash + // validator was (a) not found or (b) already jailed so we do not slash logger.Info( - fmt.Sprintf("Validator %s would have been slashed for downtime, but was either not found in store or already jailed", consAddr), + "validator would have been slashed for downtime, but was either not found in store or already jailed", + "validator", consAddr.String(), ) } } diff --git a/x/slashing/keeper/keeper.go b/x/slashing/keeper/keeper.go index b5304a6c5e2..cbed27e9a19 100644 --- a/x/slashing/keeper/keeper.go +++ b/x/slashing/keeper/keeper.go @@ -36,7 +36,7 @@ func NewKeeper(cdc codec.BinaryMarshaler, key sdk.StoreKey, sk types.StakingKeep // Logger returns a module-specific logger. 
diff --git a/x/slashing/keeper/keeper.go b/x/slashing/keeper/keeper.go
index b5304a6c5e2..cbed27e9a19 100644
--- a/x/slashing/keeper/keeper.go
+++ b/x/slashing/keeper/keeper.go
@@ -36,7 +36,7 @@ func NewKeeper(cdc codec.BinaryMarshaler, key sdk.StoreKey, sk types.StakingKeep
 
 // Logger returns a module-specific logger.
 func (k Keeper) Logger(ctx sdk.Context) log.Logger {
-    return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName))
+    return ctx.Logger().With("module", "x/"+types.ModuleName)
 }
 
 // AddPubkey sets a address-pubkey relation
diff --git a/x/staking/client/rest/query.go b/x/staking/client/rest/query.go
index cbe0813c32e..d515d5dfed9 100644
--- a/x/staking/client/rest/query.go
+++ b/x/staking/client/rest/query.go
@@ -9,6 +9,7 @@ import (
     "github.com/gorilla/mux"
 
     "github.com/cosmos/cosmos-sdk/client"
+    clientrest "github.com/cosmos/cosmos-sdk/client/rest"
     sdk "github.com/cosmos/cosmos-sdk/types"
     "github.com/cosmos/cosmos-sdk/types/rest"
     "github.com/cosmos/cosmos-sdk/x/staking/types"
@@ -278,6 +279,19 @@ func validatorsHandlerFn(clientCtx client.Context) http.HandlerFunc {
         }
 
         status := r.FormValue("status")
+        // These are query params that were available in <= v0.39. We show a nice
+        // error message for this breaking change.
+        if status == "bonded" || status == "unbonding" || status == "unbonded" {
+            err := fmt.Errorf("cosmos sdk v0.40 introduces a breaking change on this endpoint:"+
+                " instead of querying using `?status=%s`, please use `status=BOND_STATUS_%s`. For more"+
+                " info, please see our REST endpoint migration guide at %s", status, strings.ToUpper(status), clientrest.DeprecationURL)
+
+            if rest.CheckBadRequestError(w, err) {
+                return
+            }
+
+        }
+
         if status == "" {
             status = types.BondStatusBonded
         }
diff --git a/x/staking/client/rest/rest_test.go b/x/staking/client/rest/rest_test.go
new file mode 100644
index 00000000000..43b9afdb6dd
--- /dev/null
+++ b/x/staking/client/rest/rest_test.go
@@ -0,0 +1,62 @@
+// +build norace
+
+package rest_test
+
+import (
+    "fmt"
+
+    "github.com/cosmos/cosmos-sdk/types/rest"
+    "github.com/cosmos/cosmos-sdk/x/staking/types"
+)
+
+func (s *IntegrationTestSuite) TestLegacyGetValidators() {
+    val := s.network.Validators[0]
+    baseURL := val.APIAddress
+
+    testCases := []struct {
+        name      string
+        url       string
+        expErr    bool
+        expErrMsg string
+    }{
+        {
+            "old status should show error message",
+            fmt.Sprintf("%s/staking/validators?status=bonded", baseURL),
+            true, "cosmos sdk v0.40 introduces a breaking change on this endpoint: instead of" +
+                " querying using `?status=bonded`, please use `status=BOND_STATUS_BONDED`. For more" +
+                " info, please see our REST endpoint migration guide at https://docs.cosmos.network/master/migrations/rest.html",
+        },
+        {
+            "new status should work",
+            fmt.Sprintf("%s/staking/validators?status=BOND_STATUS_BONDED", baseURL),
+            false, "",
+        },
+    }
+
+    for _, tc := range testCases {
+        tc := tc
+        s.Run(tc.name, func() {
+            respJSON, err := rest.GetRequest(tc.url)
+            s.Require().NoError(err)
+
+            if tc.expErr {
+                var errResp rest.ErrorResponse
+                s.Require().NoError(val.ClientCtx.LegacyAmino.UnmarshalJSON(respJSON, &errResp))
+
+                s.Require().Equal(errResp.Error, tc.expErrMsg)
+            } else {
+                var resp = rest.ResponseWithHeight{}
+                err = val.ClientCtx.LegacyAmino.UnmarshalJSON(respJSON, &resp)
+                s.Require().NoError(err)
+
+                // Check result is not empty.
+                var validators []types.Validator
+                s.Require().NoError(val.ClientCtx.LegacyAmino.UnmarshalJSON(resp.Result, &validators))
+                s.Require().Greater(len(validators), 0)
+                // While we're at it, also check that the consensus_pubkey is
+                // an Any, and not bech32 anymore.
+                s.Require().Contains(string(resp.Result), "\"consensus_pubkey\": {\n \"type\": \"tendermint/PubKeyEd25519\",")
+            }
+        })
+    }
+}
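To make the behaviour exercised by the test above concrete, a hedged usage sketch (assuming a running node whose legacy REST API is reachable at the hypothetical apiAddr below): the pre-v0.40 status values are now rejected with the migration message, while the BOND_STATUS_* values succeed.

package main

import (
	"fmt"

	"github.com/cosmos/cosmos-sdk/types/rest"
)

func main() {
	apiAddr := "http://localhost:1317" // hypothetical node REST address

	// Old-style query: answered with a 400 and the deprecation message added above.
	oldResp, err := rest.GetRequest(fmt.Sprintf("%s/staking/validators?status=bonded", apiAddr))
	fmt.Println(string(oldResp), err)

	// New-style query: returns the bonded validator set.
	newResp, err := rest.GetRequest(fmt.Sprintf("%s/staking/validators?status=BOND_STATUS_BONDED", apiAddr))
	fmt.Println(string(newResp), err)
}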
diff --git a/x/staking/keeper/keeper.go b/x/staking/keeper/keeper.go
index 86e32665d11..74d85a645bf 100644
--- a/x/staking/keeper/keeper.go
+++ b/x/staking/keeper/keeper.go
@@ -65,7 +65,7 @@ func NewKeeper(
 
 // Logger returns a module-specific logger.
 func (k Keeper) Logger(ctx sdk.Context) log.Logger {
-    return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName))
+    return ctx.Logger().With("module", "x/"+types.ModuleName)
 }
 
 // Set the validator hooks
diff --git a/x/staking/keeper/slash.go b/x/staking/keeper/slash.go
index 7bef3220f73..a36f6f2e6e2 100644
--- a/x/staking/keeper/slash.go
+++ b/x/staking/keeper/slash.go
@@ -41,10 +41,10 @@ func (k Keeper) Slash(ctx sdk.Context, consAddr sdk.ConsAddress, infractionHeigh
         // NOTE: Correctness dependent on invariant that unbonding delegations / redelegations must also have been completely
         // slashed in this case - which we don't explicitly check, but should be true.
         // Log the slash attempt for future reference (maybe we should tag it too)
-        logger.Error(fmt.Sprintf(
-            "WARNING: Ignored attempt to slash a nonexistent validator with address %s, we recommend you investigate immediately",
-            consAddr))
-
+        logger.Error(
+            "WARNING: ignored attempt to slash a nonexistent validator; we recommend you investigate immediately",
+            "validator", consAddr.String(),
+        )
         return
     }
 
@@ -71,10 +71,12 @@ func (k Keeper) Slash(ctx sdk.Context, consAddr sdk.ConsAddress, infractionHeigh
             infractionHeight, ctx.BlockHeight()))
 
     case infractionHeight == ctx.BlockHeight():
-        // Special-case slash at current height for efficiency - we don't need to look through unbonding delegations or redelegations
-        logger.Info(fmt.Sprintf(
-            "slashing at current height %d, not scanning unbonding delegations & redelegations",
-            infractionHeight))
+        // Special-case slash at current height for efficiency - we don't need to
+        // look through unbonding delegations or redelegations.
+        logger.Info(
+            "slashing at current height; not scanning unbonding delegations & redelegations",
+            "height", infractionHeight,
+        )
 
     case infractionHeight < ctx.BlockHeight():
         // Iterate through unbonding delegations from slashed validator
@@ -132,10 +134,12 @@ func (k Keeper) Slash(ctx sdk.Context, consAddr sdk.ConsAddress, infractionHeigh
         panic("invalid validator status")
     }
 
-    // Log that a slash occurred!
-    logger.Info(fmt.Sprintf(
-        "validator %s slashed by slash factor of %s; burned %v tokens",
-        validator.GetOperator(), slashFactor.String(), tokensToBurn))
+    logger.Info(
+        "validator slashed by slash factor",
+        "validator", validator.GetOperator().String(),
+        "slash_factor", slashFactor.String(),
+        "burned", tokensToBurn,
+    )
 }
 
 // jail a validator
@@ -143,7 +147,7 @@ func (k Keeper) Jail(ctx sdk.Context, consAddr sdk.ConsAddress) {
     validator := k.mustGetValidatorByConsAddr(ctx, consAddr)
     k.jailValidator(ctx, validator)
     logger := k.Logger(ctx)
-    logger.Info(fmt.Sprintf("validator %s jailed", consAddr))
+    logger.Info("validator jailed", "validator", consAddr)
 }
 
 // unjail a validator
@@ -151,7 +155,7 @@ func (k Keeper) Unjail(ctx sdk.Context, consAddr sdk.ConsAddress) {
     validator := k.mustGetValidatorByConsAddr(ctx, consAddr)
     k.unjailValidator(ctx, validator)
     logger := k.Logger(ctx)
-    logger.Info(fmt.Sprintf("validator %s unjailed", consAddr))
+    logger.Info("validator un-jailed", "validator", consAddr)
 }
 
 // slash an unbonding delegation and update the pool
diff --git a/x/upgrade/keeper/keeper.go b/x/upgrade/keeper/keeper.go
index 861fd0393a3..05fe6bd0f72 100644
--- a/x/upgrade/keeper/keeper.go
+++ b/x/upgrade/keeper/keeper.go
@@ -3,7 +3,6 @@ package keeper
 import (
     "encoding/binary"
     "encoding/json"
-    "fmt"
     "io/ioutil"
     "os"
     "path"
@@ -183,7 +182,7 @@ func (k Keeper) ClearUpgradePlan(ctx sdk.Context) {
 
 // Logger returns a module-specific logger.
 func (k Keeper) Logger(ctx sdk.Context) log.Logger {
-    return ctx.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName))
+    return ctx.Logger().With("module", "x/"+types.ModuleName)
 }
 
 // GetUpgradePlan returns the currently scheduled Plan if any, setting havePlan to true if there is a scheduled