From 77e76d837779e2448cf6b2230230a5f71d37bb78 Mon Sep 17 00:00:00 2001
From: ksaric
Date: Sun, 14 Feb 2021 23:58:12 +0100
Subject: [PATCH] [CAD-2581] Use the general db-sync plugins system to
 implement SMASH.

---
 cabal.project                               |  58 +-
 nix/pkgs.nix                                |   2 +-
 nix/sources.json                            |  12 +-
 schema/force-resync.sql                     |   1 +
 schema/migration-2-0008-20210215.sql        |  21 +
 .../src/Cardano/SMASH/DBSync/Db/Error.hs    |   2 -
 .../src/Cardano/SMASH/Types.hs              |   5 +
 smash-sync/LICENSE                          | 202 ------
 smash-sync/Setup.hs                         |   2 -
 smash-sync/smash-sync.cabal                 | 107 ---
 smash-sync/src/Cardano/Sync/SmashDbSync.hs  | 682 ------------------
 smash/app/Main.hs                           | 286 +++++---
 smash/smash.cabal                           |  27 +-
 smash/src/Cardano/SMASH/DBSync/Db/Query.hs  |  14 +-
 smash/src/Cardano/SMASH/DBSync/Db/Schema.hs |  14 +-
 smash/src/Cardano/SMASH/DBSyncPlugin.hs     | 100 ++-
 smash/src/Cardano/SMASH/Offline.hs          |   2 +-
 stack.yaml                                  |  11 +-
 18 files changed, 373 insertions(+), 1175 deletions(-)
 create mode 100644 schema/migration-2-0008-20210215.sql
 delete mode 100644 smash-sync/LICENSE
 delete mode 100644 smash-sync/Setup.hs
 delete mode 100644 smash-sync/smash-sync.cabal
 delete mode 100644 smash-sync/src/Cardano/Sync/SmashDbSync.hs

diff --git a/cabal.project b/cabal.project
index ca65251..8774db1 100644
--- a/cabal.project
+++ b/cabal.project
@@ -1,8 +1,7 @@
-index-state: 2020-11-15T00:00:00Z
+index-state: 2021-01-08T00:00:00Z

 packages:
   ./smash
-  ./smash-sync
   ./smash-servant-types

 constraints:
@@ -11,10 +10,12 @@ constraints:
   -- systemd-2.3.0 requires at least network 3.1.1.0 but it doesn't declare
   -- that dependency
   , network >= 3.1.1.0
-  , prometheus >= 2.1.2
+  , persistent-postgresql >= 2.11.0.1

--- -----------------------------------------------------------------------------
--- Disable all tests by defauly and yhen enable specific tests in this repo
+package smash
+  ghc-options: -Wall -Werror -Wredundant-constraints -Wincomplete-uni-patterns -Wincomplete-record-updates -Wpartial-fields -Wunused-imports
+
+------------------------------------------------------------------------------
 tests: False
@@ -24,44 +25,53 @@ package smash
 package smash-servant-types
   tests: True

+-- These are needed because Nix is doing something crazy.
+package cardano-api + tests: False + +package cardano-node + tests: False + +package ouroboros-consensus-cardano + tests: False + test-show-details: direct --- ----------------------------------------------------------------------------- +------------------------------------------------------------------------------ source-repository-package type: git location: https://github.com/input-output-hk/cardano-db-sync - tag: 3cf868ec8c06265e4b670ac737af640d716e5ef7 - --sha256: 152yhay3riak41saz5s5zdhlkb5c5iqpy1gw1rvwjp6rwg1mc77n + tag: d5aa846e0751227aa6461084d6ea0567535f752e + --sha256: 0xhm53yycjp6zgqzx6l3hvmgl3046g276qq7vx2wbhljwwqvvk15 subdir: + cardano-sync cardano-db cardano-db-sync - cardano-db-sync-extended source-repository-package type: git location: https://github.com/input-output-hk/cardano-base - tag: 2574600da11065937c1f07e4b234ecb451016a2e - --sha256: 0nq8bpzsr3fd2i59a6s6qb6slpymjh47zv57wlifjfvhh0xlgmpx + tag: b364d925e0a72689ecba40dd1f4899f76170b894 + --sha256: 0igb4gnzlwxy1h40vy5s1aysmaa04wypxn7sn67qy6din7ysmad3 subdir: binary binary/test cardano-crypto-class - cardano-crypto-tests cardano-crypto-praos slotting source-repository-package type: git location: https://github.com/input-output-hk/cardano-crypto - tag: 2547ad1e80aeabca2899951601079408becbc92c - --sha256: 1p2kg2w02q5w1cvqzhfhqmxviy4xrzada3mmb096j2n6hfr20kri + tag: f73079303f663e028288f9f4a9e08bcca39a923e + --sha256: 1n87i15x54s0cjkh3nsxs4r1x016cdw1fypwmr68936n3xxsjn6q source-repository-package type: git location: https://github.com/input-output-hk/cardano-ledger-specs - tag: 581767d1329f3f702e332af08355e81a0f85333e - --sha256: 198p4v2bi36y6x512w35qycvjm7nds7jf8qh7r84pj1qsy43vf7w + tag: 097890495cbb0e8b62106bcd090a5721c3f4b36f + --sha256: 0i3y9n0rsyarvhfqzzzjccqnjgwb9fbmbs6b7vj40afjhimf5hcj subdir: byron/crypto byron/crypto/test @@ -79,22 +89,19 @@ source-repository-package source-repository-package type: git location: https://github.com/input-output-hk/cardano-node - tag: 400d18092ce604352cf36fe5f105b0d7c78be074 - --sha256: 19r4mamm9bxc1hz32qgsrfnrfxwp4pgnb4d28fzai3izznil03vi + tag: 9a7331cce5e8bc0ea9c6bfa1c28773f4c5a7000f + --sha256: 1scffi7igv4kj93bpjf8yibcaq0sskfikmm00f7r6q031l53y50c subdir: cardano-api - cardano-api/test - cardano-cli cardano-config cardano-node - cardano-node-chairman hedgehog-extras source-repository-package type: git location: https://github.com/input-output-hk/cardano-prelude - tag: 742e8525b96bf4b66fb61a00c8298d75d7931d5e - --sha256: 1132r58bjgdcf7yz3n77nlrkanqcmpn5b5km4nw151yar2dgifsv + tag: ee4e7b547a991876e6b05ba542f4e62909f4a571 + --sha256: 0dg6ihgrn5mgqp95c4f11l6kh9k3y75lwfqf47hdp554w7wyvaw6 subdir: cardano-prelude cardano-prelude-test @@ -123,8 +130,8 @@ source-repository-package source-repository-package type: git location: https://github.com/input-output-hk/ouroboros-network - tag: c2bd6814e231bfd48059f306ef486b830e524aa8 - --sha256: 0sjp5i4szp5nf1dkwang5w8pydjx5p22by8wisihs1410rxgwd7n + tag: 6cb9052bde39472a0555d19ade8a42da63d3e904 + --sha256: 0rz4acz15wda6yfc7nls6g94gcwg2an5zibv0irkxk297n76gkmg subdir: cardano-client io-sim @@ -140,3 +147,4 @@ source-repository-package typed-protocols-examples network-mux Win32-network + diff --git a/nix/pkgs.nix b/nix/pkgs.nix index 21c87e4..ace007e 100644 --- a/nix/pkgs.nix +++ b/nix/pkgs.nix @@ -1,7 +1,7 @@ # our packages overlay pkgs: _: with pkgs; let - compiler = config.haskellNix.compiler or "ghc865"; + compiler = config.haskellNix.compiler or "ghc8102"; src = haskell-nix.haskellLib.cleanGit { name = "smash-src"; src = ../.; diff --git 
a/nix/sources.json b/nix/sources.json index a2e44e4..316b226 100644 --- a/nix/sources.json +++ b/nix/sources.json @@ -17,10 +17,10 @@ "homepage": "https://input-output-hk.github.io/haskell.nix", "owner": "input-output-hk", "repo": "haskell.nix", - "rev": "4c42100df06a0a1d976e5e883a3eedf053d8b91f", - "sha256": "0rkwldvw0qcwssjm72iqw4z3l1xyh0n0fs5jfm35jr62s84z6085", + "rev": "42b10678ff45cd1bd668093c3a9bcb093d5c1760", + "sha256": "0bianzf3y17qikx3cbzcr587hnljg3fgnax7y93nn4s9q2b8w283", "type": "tarball", - "url": "https://github.com/input-output-hk/haskell.nix/archive/4c42100df06a0a1d976e5e883a3eedf053d8b91f.tar.gz", + "url": "https://github.com/input-output-hk/haskell.nix/archive/42b10678ff45cd1bd668093c3a9bcb093d5c1760.tar.gz", "url_template": "https://github.com///archive/.tar.gz" }, "iohk-nix": { @@ -29,10 +29,10 @@ "homepage": null, "owner": "input-output-hk", "repo": "iohk-nix", - "rev": "f0542228f98a2590e0f39c507b364321995ad802", - "sha256": "0yhgjg4bj7cv1lmrb9ig2iapv1mslhn6m750zn2x0n1a3ll267yh", + "rev": "4efc38924c64c23a582c84950c8c25f72ff049cc", + "sha256": "0nhwyrd0xc72yj5q3jqa2wl4khp4g7n72i45cxy2rgn9nrp8wqh0", "type": "tarball", - "url": "https://github.com/input-output-hk/iohk-nix/archive/f0542228f98a2590e0f39c507b364321995ad802.tar.gz", + "url": "https://github.com/input-output-hk/iohk-nix/archive/4efc38924c64c23a582c84950c8c25f72ff049cc.tar.gz", "url_template": "https://github.com///archive/.tar.gz" } } diff --git a/schema/force-resync.sql b/schema/force-resync.sql index 40f69e0..af49399 100644 --- a/schema/force-resync.sql +++ b/schema/force-resync.sql @@ -5,3 +5,4 @@ TRUNCATE pool; TRUNCATE retired_pool; TRUNCATE pool_metadata_fetch_error CASCADE; TRUNCATE block; +TRUNCATE meta; diff --git a/schema/migration-2-0008-20210215.sql b/schema/migration-2-0008-20210215.sql new file mode 100644 index 0000000..908ac96 --- /dev/null +++ b/schema/migration-2-0008-20210215.sql @@ -0,0 +1,21 @@ +-- Persistent generated migration. + +CREATE FUNCTION migrate() RETURNS void AS $$ +DECLARE + next_version int ; +BEGIN + SELECT stage_two + 1 INTO next_version FROM schema_version ; + IF next_version = 8 THEN + ALTER TABLE "meta" DROP COLUMN "protocol_const"; + ALTER TABLE "meta" DROP COLUMN "slot_duration"; + ALTER TABLE "meta" DROP COLUMN "slots_per_epoch"; + -- Hand written SQL statements can be added here. 
+ UPDATE schema_version SET stage_two = 8 ; + RAISE NOTICE 'DB has been migrated to stage_two version %', next_version ; + END IF ; +END ; +$$ LANGUAGE plpgsql ; + +SELECT migrate() ; + +DROP FUNCTION migrate() ; diff --git a/smash-servant-types/src/Cardano/SMASH/DBSync/Db/Error.hs b/smash-servant-types/src/Cardano/SMASH/DBSync/Db/Error.hs index f77514c..01bbc90 100644 --- a/smash-servant-types/src/Cardano/SMASH/DBSync/Db/Error.hs +++ b/smash-servant-types/src/Cardano/SMASH/DBSync/Db/Error.hs @@ -10,8 +10,6 @@ import Cardano.Prelude import Data.Aeson (ToJSON (..), (.=), object, Value (..)) -import Data.ByteString.Char8 (ByteString) - import Cardano.SMASH.DBSync.Db.Types diff --git a/smash-servant-types/src/Cardano/SMASH/Types.hs b/smash-servant-types/src/Cardano/SMASH/Types.hs index e10c800..7246ea6 100644 --- a/smash-servant-types/src/Cardano/SMASH/Types.hs +++ b/smash-servant-types/src/Cardano/SMASH/Types.hs @@ -1,5 +1,6 @@ {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} module Cardano.SMASH.Types @@ -355,6 +356,10 @@ instance FromHttpApiData TimeStringFormat where parsedTime = parseTimeM False defaultTimeLocale timeFormat $ toS queryParam in TimeStringFormat <$> parsedTime +-- Required for the above, error with newer GHC versions +instance MonadFail (Either Text) where + fail = Left . toS + instance ToParamSchema TimeStringFormat -- |The data for returning the health check for SMASH. diff --git a/smash-sync/LICENSE b/smash-sync/LICENSE deleted file mode 100644 index d645695..0000000 --- a/smash-sync/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/smash-sync/Setup.hs b/smash-sync/Setup.hs deleted file mode 100644 index 9a994af..0000000 --- a/smash-sync/Setup.hs +++ /dev/null @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff --git a/smash-sync/smash-sync.cabal b/smash-sync/smash-sync.cabal deleted file mode 100644 index 7d2dda4..0000000 --- a/smash-sync/smash-sync.cabal +++ /dev/null @@ -1,107 +0,0 @@ -cabal-version: 1.12 -name: smash-sync -version: 1.4.0 -description: - Please see the README on GitHub at - -homepage: https://github.com/input-output-hk/smash#readme -bug-reports: https://github.com/input-output-hk/smash/issues -author: IOHK -maintainer: operations@iohk.io -license: Apache-2.0 -license-file: LICENSE -build-type: Simple - -source-repository head - type: git - location: https://github.com/input-output-hk/smash - -flag disable-basic-auth - description: Disable basic authentication scheme for other authentication mechanisms. 
- default: False - -flag testing-mode - description: A flag for allowing operations that promote easy testing. - default: False - -library - if flag(disable-basic-auth) - cpp-options: -DDISABLE_BASIC_AUTH - - if flag(testing-mode) - cpp-options: -DTESTING_MODE - - exposed-modules: - Cardano.Sync.SmashDbSync - - hs-source-dirs: src - build-depends: - aeson - , base >=4.7 && <5 - , base16-bytestring - , bytestring - , cardano-binary - , cardano-client - , cardano-config - , cardano-crypto - , cardano-crypto-class - , cardano-crypto-wrapper - , cardano-db-sync - , cardano-ledger - , cardano-prelude - , cardano-slotting - , cborg - , conduit-extra - , containers - , contra-tracer - , directory - , esqueleto - , extra - , fast-logger - , filepath - , http-client - , http-client-tls - , http-types - , io-sim-classes - , iohk-monitoring - , monad-logger - , network - , network-mux - , ouroboros-consensus - , ouroboros-consensus-byron - , ouroboros-consensus-cardano - , ouroboros-consensus-shelley - , ouroboros-network - , ouroboros-network-framework - , persistent - , persistent-postgresql - , persistent-template >=2.7.0 - , postgresql-simple - , prometheus - , quiet - , resourcet - , servant - , servant-server - , servant-swagger - , shelley-spec-ledger - , smash-servant-types - , swagger2 - , template-haskell - , text - , time - , transformers - , transformers-except - , typed-protocols - , unix - , wai - , warp - - default-language: Haskell2010 - default-extensions: - NoImplicitPrelude - OverloadedStrings - - ghc-options: - -Wall -Wcompat -Wincomplete-record-updates - -Wincomplete-uni-patterns -Wredundant-constraints -Wpartial-fields - diff --git a/smash-sync/src/Cardano/Sync/SmashDbSync.hs b/smash-sync/src/Cardano/Sync/SmashDbSync.hs deleted file mode 100644 index 4ef62b9..0000000 --- a/smash-sync/src/Cardano/Sync/SmashDbSync.hs +++ /dev/null @@ -1,682 +0,0 @@ -{-# LANGUAGE AllowAmbiguousTypes #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE NamedFieldPuns #-} -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TypeApplications #-} -{-# LANGUAGE TypeFamilies #-} - -module Cardano.Sync.SmashDbSync - ( ConfigFile (..) - --, SmashDbSyncNodeParams (..) - , DbSyncNodePlugin (..) - , NetworkName (..) - , SocketPath (..) - , runDbSyncNode - , MetricsLayer (..) - , CardanoSyncDataLayer (..) - , CardanoSyncError (..) - -- * Types - , LogFileDir (..) - , MigrationDir (..) - - , BlockId (..) - , MetaId (..) - - , Meta (..) - , Block (..) 
- ) where - -import Cardano.Prelude hiding - (Meta, - Nat, - option, - (%)) - -import Control.Monad.Trans.Except.Extra (hoistEither, - left, - newExceptT) -import Control.Tracer (Tracer, contramap) - -import Cardano.BM.Data.Tracer (ToLogObject (..)) -import qualified Cardano.BM.Setup as Logging -import Cardano.BM.Trace (Trace, appendName, - logInfo) -import qualified Cardano.BM.Trace as Logging -import qualified Cardano.Crypto as Crypto - -import Cardano.Client.Subscription (subscribe) - -import Cardano.DbSync.Config -import Cardano.DbSync.Config.Types hiding (adjustGenesisFilePath) -import Cardano.DbSync.Error -import Cardano.DbSync.Plugin (DbSyncNodePlugin (..)) -import Cardano.DbSync.Tracing.ToObjectOrphans () - -import Cardano.DbSync.Util - -import Cardano.Slotting.Slot (SlotNo (..), - WithOrigin (..), - unEpochSize) - -import qualified Codec.CBOR.Term as CBOR -import Control.Monad.IO.Class (liftIO) -import Control.Monad.Trans.Except.Exit (orDie) - -import qualified Data.ByteString.Char8 as BS -import qualified Data.ByteString.Lazy as BSL -import Data.Text (Text) -import qualified Data.Text as Text -import Data.Time (UTCTime (..)) -import qualified Data.Time as Time -import Data.Void (Void) - -import Network.Mux (MuxTrace, - WithMuxBearer) -import Network.Mux.Types (MuxMode (..)) -import Network.Socket (SockAddr (..)) - -import Network.TypedProtocol.Pipelined (Nat (Succ, Zero)) - -import Ouroboros.Network.Driver.Simple (runPipelinedPeer) -import Ouroboros.Network.Protocol.LocalStateQuery.Client (localStateQueryClientPeer) - -import Ouroboros.Consensus.Block.Abstract (ConvertRawHash (..)) -import Ouroboros.Consensus.BlockchainTime.WallClock.Types (mkSlotLength, - slotLengthToMillisec) -import Ouroboros.Consensus.Byron.Ledger (CodecConfig, - mkByronCodecConfig) -import Ouroboros.Consensus.Cardano.Block (CardanoEras, - CodecConfig (CardanoCodecConfig), - StandardCrypto, - StandardShelley) -import Ouroboros.Consensus.Network.NodeToClient (ClientCodecs, - cChainSyncCodec, - cStateQueryCodec, - cTxSubmissionCodec) -import Ouroboros.Consensus.Node.ErrorPolicy (consensusErrorPolicy) -import Ouroboros.Consensus.Shelley.Ledger.Config (CodecConfig (ShelleyCodecConfig)) -import Ouroboros.Consensus.Shelley.Node (ShelleyGenesis (..)) - -import qualified Ouroboros.Network.NodeToClient.Version as Network - -import Ouroboros.Network.Block (BlockNo (..), - HeaderHash, - Point (..), - Tip, - blockNo, - genesisPoint, - getTipBlockNo, - getTipPoint) -import Ouroboros.Network.Mux (MuxPeer (..), - RunMiniProtocol (..)) -import Ouroboros.Network.NodeToClient (ClientSubscriptionParams (..), - ConnectionId, - ErrorPolicyTrace (..), - Handshake, - IOManager, - LocalAddress, - NetworkSubscriptionTracers (..), - NodeToClientProtocols (..), - TraceSendRecv, - WithAddr (..), - localSnocket, - localTxSubmissionPeerNull, - networkErrorPolicies, - withIOManager) - -import Ouroboros.Network.Point (withOrigin) -import qualified Ouroboros.Network.Point as Point - -import Ouroboros.Network.Protocol.ChainSync.ClientPipelined (ChainSyncClientPipelined (..), - ClientPipelinedStIdle (..), - ClientPipelinedStIntersect (..), - ClientStNext (..), - chainSyncClientPeerPipelined, - recvMsgIntersectFound, - recvMsgIntersectNotFound, - recvMsgRollBackward, - recvMsgRollForward) -import Ouroboros.Network.Protocol.ChainSync.PipelineDecision (MkPipelineDecision, - PipelineDecision (..), - pipelineDecisionLowHighMark, - runPipelineDecision) -import Ouroboros.Network.Protocol.ChainSync.Type (ChainSync) -import qualified 
Ouroboros.Network.Snocket as Snocket -import Ouroboros.Network.Subscription (SubscriptionTrace) - - -import qualified Shelley.Spec.Ledger.Genesis as Shelley - -import System.Directory (createDirectoryIfMissing) - -import Ouroboros.Consensus.HardFork.History.Qry (Interpreter) - -import Cardano.DbSync (MigrationDir (..)) -import Cardano.DbSync.DbAction -import Cardano.DbSync.LedgerState -import Cardano.DbSync.StateQuery - -import qualified Cardano.Chain.Genesis as Byron - --- The hash must be unique! -data Block = Block - { bHash :: !ByteString - , bEpochNo :: !(Maybe Word64) - , bSlotNo :: !(Maybe Word64) - , bBlockNo :: !(Maybe Word64) - } deriving (Eq, Show) - --- The startTime must be unique! -data Meta = Meta - { mProtocolConst :: !Word64 - -- ^ The block security parameter. - , mSlotDuration :: !Word64 - -- ^ Slot duration in milliseconds. - , mStartTime :: !UTCTime - , mSlotsPerEpoch :: !Word64 - , mNetworkName :: !(Maybe Text) - } deriving (Eq, Show) - -data CardanoSyncError = CardanoSyncError Text - deriving (Eq, Show) - -renderCardanoSyncError :: CardanoSyncError -> Text -renderCardanoSyncError (CardanoSyncError cardanoSyncError') = cardanoSyncError' - -cardanoSyncError :: Monad m => Text -> ExceptT CardanoSyncError m a -cardanoSyncError = left . CardanoSyncError - --- @Word64@ is valid as well. -newtype BlockId = BlockId Int - deriving (Eq, Show) - --- @Word64@ is valid as well. -newtype MetaId = MetaId Int - deriving (Eq, Show) - --- The base @DataLayer@ that contains the functions required for syncing to work. -data CardanoSyncDataLayer = CardanoSyncDataLayer - { csdlGetBlockId :: ByteString -> IO (Either CardanoSyncError BlockId) - -- ^ TODO(KS): Wrap @ByteString@. - , csdlGetMeta :: IO (Either CardanoSyncError Meta) - , csdlGetSlotHash :: SlotNo -> IO (Maybe (SlotNo, ByteString)) - -- ^ TODO(KS): Wrap @ByteString@. - , csdlGetLatestBlock :: IO (Maybe Block) - , csdlAddGenesisMetaBlock :: Meta -> Block -> IO (Either CardanoSyncError (MetaId, BlockId)) - } - --- The metrics we use. -data MetricsLayer = MetricsLayer - { gmSetNodeHeight :: Double -> IO () - , gmSetQueuePostWrite :: Double -> IO () - } - -data Peer = Peer SockAddr SockAddr deriving Show - --- The function -type RunDBThreadFunction - = Trace IO Text - -> DbSyncEnv - -> DbSyncNodePlugin - -> MetricsLayer - -> DbActionQueue - -> LedgerStateVar - -> IO () - -runDbSyncNode - :: CardanoSyncDataLayer - -> MetricsLayer - -> (Trace IO Text -> IO ()) - -> DbSyncNodePlugin - -> DbSyncNodeParams - -> RunDBThreadFunction - -> IO () -runDbSyncNode dataLayer metricsLayer runDbStartup plugin enp runDBThreadFunction = - withIOManager $ \iomgr -> do - - let configFile = enpConfigFile enp - enc <- readDbSyncNodeConfig configFile - - createDirectoryIfMissing True (unLedgerStateDir $ enpLedgerStateDir enp) - - trce <- if not (dncEnableLogging enc) - then pure Logging.nullTracer - else liftIO $ Logging.setupTrace (Right $ dncLoggingConfig enc) "smash-node" - - logInfo trce $ "Using byron genesis file from: " <> (show . unGenesisFile $ dncByronGenesisFile enc) - logInfo trce $ "Using shelley genesis file from: " <> (show . unGenesisFile $ dncShelleyGenesisFile enc) - - orDie renderDbSyncNodeError $ do - liftIO . logInfo trce $ "Reading genesis config." - - genCfg <- readCardanoGenesisConfig enc - genesisEnv <- hoistEither $ genesisConfigToEnv enp genCfg - - logProtocolMagicId trce $ genesisProtocolMagicId genCfg - - liftIO . logInfo trce $ "Starting DB." 
- - liftIO $ do - -- Must run plugin startup after the genesis distribution has been inserted/validate. - logInfo trce $ "Run DB startup." - runDbStartup trce - logInfo trce $ "DB startup complete." - case genCfg of - GenesisCardano _ bCfg sCfg -> do - orDie renderCardanoSyncError $ insertValidateGenesisDistSmash dataLayer trce (dncNetworkName enc) (scConfig sCfg) - - ledgerVar <- initLedgerStateVar genCfg - runDbSyncNodeNodeClient dataLayer metricsLayer genesisEnv ledgerVar - iomgr trce plugin runDBThreadFunction (cardanoCodecConfig bCfg) (enpSocketPath enp) - - where - cardanoCodecConfig :: Byron.Config -> CodecConfig CardanoBlock - cardanoCodecConfig cfg = - CardanoCodecConfig - (mkByronCodecConfig cfg) - ShelleyCodecConfig - ShelleyCodecConfig -- Allegra - ShelleyCodecConfig -- Mary - - logProtocolMagicId :: Trace IO Text -> Crypto.ProtocolMagicId -> ExceptT DbSyncNodeError IO () - logProtocolMagicId tracer pm = - liftIO . logInfo tracer $ mconcat - [ "NetworkMagic: ", textShow (Crypto.unProtocolMagicId pm) - ] - --- | Idempotent insert the initial Genesis distribution transactions into the DB. --- If these transactions are already in the DB, they are validated. -insertValidateGenesisDistSmash - :: CardanoSyncDataLayer - -> Trace IO Text - -> NetworkName - -> ShelleyGenesis StandardShelley - -> ExceptT CardanoSyncError IO () -insertValidateGenesisDistSmash dataLayer tracer (NetworkName networkName) cfg = do - newExceptT $ insertAtomicAction - where - insertAtomicAction :: IO (Either CardanoSyncError ()) - insertAtomicAction = do - let getBlockId = csdlGetBlockId dataLayer - ebid <- getBlockId (configGenesisHash cfg) - case ebid of - -- TODO(KS): This needs to be moved into DataLayer. - Right _bid -> runExceptT $ do - let getMeta = csdlGetMeta dataLayer - meta <- newExceptT getMeta - - newExceptT $ validateGenesisDistribution tracer meta networkName cfg - - Left _ -> do - liftIO $ logInfo tracer "Inserting Genesis distribution" - - let meta = Meta - { mProtocolConst = protocolConstant cfg - , mSlotDuration = configSlotDuration cfg - , mStartTime = configStartTime cfg - , mSlotsPerEpoch = configSlotsPerEpoch cfg - , mNetworkName = Just networkName - } - - let block = Block - { bHash = configGenesisHash cfg - , bEpochNo = Nothing - , bSlotNo = Nothing - , bBlockNo = Nothing - } - - let addGenesisMetaBlock = csdlAddGenesisMetaBlock dataLayer - metaIdBlockIdE <- addGenesisMetaBlock meta block - - case metaIdBlockIdE of - Right (_metaId, _blockId) -> pure $ Right () - Left err -> pure . Left . CardanoSyncError $ show err - --- | Validate that the initial Genesis distribution in the DB matches the Genesis data. -validateGenesisDistribution - :: (MonadIO m) - => Trace IO Text - -> Meta - -> Text - -> ShelleyGenesis StandardShelley - -> m (Either CardanoSyncError ()) -validateGenesisDistribution tracer meta networkName cfg = - runExceptT $ do - liftIO $ logInfo tracer "Validating Genesis distribution" - - -- Show configuration we are validating - print cfg - - when (mProtocolConst meta /= protocolConstant cfg) $ - cardanoSyncError $ Text.concat - [ "Shelley: Mismatch protocol constant. Config value " - , textShow (protocolConstant cfg) - , " does not match DB value of ", textShow (mProtocolConst meta) - ] - - when (mSlotDuration meta /= configSlotDuration cfg) $ - cardanoSyncError $ Text.concat - [ "Shelley: Mismatch slot duration time. 
Config value " - , textShow (configSlotDuration cfg) - , " does not match DB value of ", textShow (mSlotDuration meta) - ] - - when (mStartTime meta /= configStartTime cfg) $ - cardanoSyncError $ Text.concat - [ "Shelley: Mismatch chain start time. Config value " - , textShow (configStartTime cfg) - , " does not match DB value of ", textShow (mStartTime meta) - ] - - when (mSlotsPerEpoch meta /= configSlotsPerEpoch cfg) $ - cardanoSyncError $ Text.concat - [ "Shelley: Mismatch in slots per epoch. Config value " - , textShow (configSlotsPerEpoch cfg) - , " does not match DB value of ", textShow (mSlotsPerEpoch meta) - ] - - case mNetworkName meta of - Nothing -> - cardanoSyncError $ "Shelley.validateGenesisDistribution: Missing network name" - Just name -> - when (name /= networkName) $ - cardanoSyncError $ Text.concat - [ "Shelley.validateGenesisDistribution: Provided network name " - , networkName - , " does not match DB value " - , name - ] - ---------------------------------------------------------------------------------------------------- - -configGenesisHash :: ShelleyGenesis StandardShelley -> ByteString -configGenesisHash _ = BS.take 32 ("GenesisHash " <> BS.replicate 32 '\0') - -protocolConstant :: ShelleyGenesis StandardShelley -> Word64 -protocolConstant = Shelley.sgSecurityParam - --- | The genesis data is a NominalDiffTime (in picoseconds) and we need --- it as milliseconds. -configSlotDuration :: ShelleyGenesis StandardShelley -> Word64 -configSlotDuration = - fromIntegral . slotLengthToMillisec . mkSlotLength . sgSlotLength - -configSlotsPerEpoch :: ShelleyGenesis StandardShelley -> Word64 -configSlotsPerEpoch sg = unEpochSize (Shelley.sgEpochLength sg) - -configStartTime :: ShelleyGenesis StandardShelley -> UTCTime -configStartTime = roundToMillseconds . 
Shelley.sgSystemStart - -roundToMillseconds :: UTCTime -> UTCTime -roundToMillseconds (UTCTime day picoSecs) = - UTCTime day (Time.picosecondsToDiffTime $ 1000000 * (picoSeconds `div` 1000000)) - where - picoSeconds :: Integer - picoSeconds = Time.diffTimeToPicoseconds picoSecs - ---------------------------------------------------------------------------------------------------- - -runDbSyncNodeNodeClient - :: CardanoSyncDataLayer - -> MetricsLayer - -> DbSyncEnv - -> LedgerStateVar - -> IOManager - -> Trace IO Text - -> DbSyncNodePlugin - -> RunDBThreadFunction - -> CodecConfig CardanoBlock - -> SocketPath - -> IO () -runDbSyncNodeNodeClient dataLayer metricsLayer env ledgerVar iomgr trce plugin runDBThreadFunction codecConfig (SocketPath socketPath) = do - queryVar <- newStateQueryTMVar - logInfo trce $ "localInitiatorNetworkApplication: connecting to node via " <> textShow socketPath - - void $ subscribe - (localSnocket iomgr socketPath) - codecConfig - (envNetworkMagic env) - networkSubscriptionTracers - clientSubscriptionParams - (dbSyncProtocols dataLayer metricsLayer trce env plugin queryVar ledgerVar runDBThreadFunction) - where - clientSubscriptionParams = ClientSubscriptionParams { - cspAddress = Snocket.localAddressFromPath socketPath, - cspConnectionAttemptDelay = Nothing, - cspErrorPolicies = networkErrorPolicies <> consensusErrorPolicy (Proxy @CardanoBlock) - } - - networkSubscriptionTracers = NetworkSubscriptionTracers { - nsMuxTracer = muxTracer, - nsHandshakeTracer = handshakeTracer, - nsErrorPolicyTracer = errorPolicyTracer, - nsSubscriptionTracer = subscriptionTracer - } - - errorPolicyTracer :: Tracer IO (WithAddr LocalAddress ErrorPolicyTrace) - errorPolicyTracer = toLogObject $ appendName "ErrorPolicy" trce - - muxTracer :: Show peer => Tracer IO (WithMuxBearer peer MuxTrace) - muxTracer = toLogObject $ appendName "Mux" trce - - subscriptionTracer :: Tracer IO (Identity (SubscriptionTrace LocalAddress)) - subscriptionTracer = toLogObject $ appendName "Subscription" trce - - handshakeTracer :: Tracer IO (WithMuxBearer - (ConnectionId LocalAddress) - (TraceSendRecv (Handshake Network.NodeToClientVersion CBOR.Term))) - handshakeTracer = toLogObject $ appendName "Handshake" trce - --- Db sync protocols. -dbSyncProtocols - :: CardanoSyncDataLayer - -> MetricsLayer - -> Trace IO Text - -> DbSyncEnv - -> DbSyncNodePlugin - -> StateQueryTMVar CardanoBlock (Interpreter (CardanoEras StandardCrypto)) - -> LedgerStateVar - -> RunDBThreadFunction - -> Network.NodeToClientVersion - -> ClientCodecs CardanoBlock IO - -> ConnectionId LocalAddress - -> NodeToClientProtocols 'InitiatorMode BSL.ByteString IO () Void -dbSyncProtocols dataLayer metricsLayer trce env plugin queryVar ledgerVar runDBThreadFunction _version codecs _connectionId = - NodeToClientProtocols { - localChainSyncProtocol = localChainSyncProtocol - , localTxSubmissionProtocol = dummylocalTxSubmit - , localStateQueryProtocol = localStateQuery - } - where - localChainSyncTracer :: Tracer IO (TraceSendRecv (ChainSync CardanoBlock(Point CardanoBlock) (Tip CardanoBlock))) - localChainSyncTracer = toLogObject $ appendName "ChainSync" trce - - localChainSyncProtocol :: RunMiniProtocol 'InitiatorMode BSL.ByteString IO () Void - localChainSyncProtocol = InitiatorProtocolOnly $ MuxPeerRaw $ \channel -> - liftIO . logException trce "ChainSyncWithBlocksPtcl: " $ do - logInfo trce "Starting localChainSyncProtocol." 
- - latestPoints <- getLatestPoints dataLayer (envLedgerStateDir env) - currentTip <- getCurrentTipBlockNo dataLayer trce - logDbState dataLayer trce - actionQueue <- newDbActionQueue - - logInfo trce "Starting threads for client, db and offline fetch thread." - - -- This is something we need to inject - race_ - (runDBThreadFunction trce env plugin metricsLayer actionQueue ledgerVar) - (runPipelinedPeer - localChainSyncTracer - (cChainSyncCodec codecs) - channel - (chainSyncClientPeerPipelined $ chainSyncClient - dataLayer - metricsLayer - trce - env - queryVar - latestPoints - currentTip - actionQueue) - ) - - atomically $ writeDbActionQueue actionQueue DbFinish - - -- We should return leftover bytes returned by 'runPipelinedPeer', but - -- client application do not care about them (it's only important if one - -- would like to restart a protocol on the same mux and thus bearer). - pure ((), Nothing) - - dummylocalTxSubmit :: RunMiniProtocol 'InitiatorMode BSL.ByteString IO () Void - dummylocalTxSubmit = InitiatorProtocolOnly $ MuxPeer - Logging.nullTracer - (cTxSubmissionCodec codecs) - localTxSubmissionPeerNull - - localStateQuery :: RunMiniProtocol 'InitiatorMode BSL.ByteString IO () Void - localStateQuery = - InitiatorProtocolOnly $ MuxPeer - (contramap (Text.pack . show) . toLogObject $ appendName "local-state-query" trce) - (cStateQueryCodec codecs) - (localStateQueryClientPeer (localStateQueryHandler queryVar)) - -logDbState :: CardanoSyncDataLayer -> Trace IO Text -> IO () -logDbState dataLayer trce = do - let getLatestBlock = csdlGetLatestBlock dataLayer - mblk <- getLatestBlock - case mblk of - Nothing -> logInfo trce "Cardano.Db is empty" - Just block -> - logInfo trce $ Text.concat - [ "Cardano.Db tip is at " - , showTip block - ] - where - showTip :: Block -> Text - showTip blk = - case (bSlotNo blk, bBlockNo blk) of - (Just slotNo, Just blkNo) -> toS $ "slot " ++ show slotNo ++ ", block " ++ show blkNo - (Just slotNo, Nothing) -> toS $ "slot " ++ show slotNo - (Nothing, Just blkNo) -> toS $ "block " ++ show blkNo - (Nothing, Nothing) -> "empty (genesis)" - - -getLatestPoints :: CardanoSyncDataLayer -> LedgerStateDir -> IO [Point CardanoBlock] -getLatestPoints dataLayer ledgerStateDir = do - xs <- listLedgerStateSlotNos ledgerStateDir - - let getSlotHash = csdlGetSlotHash dataLayer - ys <- catMaybes <$> mapM getSlotHash xs - - pure $ mapMaybe convert ys - where - convert :: (SlotNo, ByteString) -> Maybe (Point CardanoBlock) - convert (slot, hashBlob) = - fmap (Point . Point.block slot) (convertHashBlob hashBlob) - - convertHashBlob :: ByteString -> Maybe (HeaderHash CardanoBlock) - convertHashBlob = Just . fromRawHash (Proxy @CardanoBlock) - -getCurrentTipBlockNo :: CardanoSyncDataLayer -> Trace IO Text -> IO (WithOrigin BlockNo) -getCurrentTipBlockNo dataLayer trce = do - let getLatestBlock = csdlGetLatestBlock dataLayer - maybeTip <- getLatestBlock - case maybeTip of - Just tip -> pure $ convert tip - Nothing -> do - logInfo trce "Current tip block, Nothing." - pure Origin - where - convert :: Block -> WithOrigin BlockNo - convert blk = - case bBlockNo blk of - Just blockno -> At (BlockNo blockno) - Nothing -> Origin - --- | 'ChainSyncClient' which traces received blocks and ignores when it --- receives a request to rollbackwar. A real wallet client should: --- --- * at startup send the list of points of the chain to help synchronise with --- the node; --- * update its state when the client receives next block or is requested to --- rollback, see 'clientStNext' below. 
-chainSyncClient - :: CardanoSyncDataLayer - -> MetricsLayer - -> Trace IO Text - -> DbSyncEnv - -> StateQueryTMVar CardanoBlock (Interpreter (CardanoEras StandardCrypto)) - -> [Point CardanoBlock] - -> WithOrigin BlockNo - -> DbActionQueue - -> ChainSyncClientPipelined CardanoBlock (Point CardanoBlock) (Tip CardanoBlock) IO () -chainSyncClient dataLayer metricsLayer trce env queryVar latestPoints currentTip actionQueue = do - ChainSyncClientPipelined $ pure $ - -- Notify the core node about the our latest points at which we are - -- synchronised. This client is not persistent and thus it just - -- synchronises from the genesis block. A real implementation should send - -- a list of points up to a point which is k blocks deep. - SendMsgFindIntersect - (if null latestPoints then [genesisPoint] else latestPoints) - ClientPipelinedStIntersect - { recvMsgIntersectFound = \_hdr tip -> pure $ go policy Zero currentTip (getTipBlockNo tip) - , recvMsgIntersectNotFound = \ tip -> pure $ go policy Zero currentTip (getTipBlockNo tip) - } - where - policy = pipelineDecisionLowHighMark 1000 10000 - - go :: MkPipelineDecision -> Nat n -> WithOrigin BlockNo -> WithOrigin BlockNo - -> ClientPipelinedStIdle n CardanoBlock (Point CardanoBlock) (Tip CardanoBlock) IO () - go mkPipelineDecision n clientTip serverTip = - case (n, runPipelineDecision mkPipelineDecision n clientTip serverTip) of - (_Zero, (Request, mkPipelineDecision')) -> do - SendMsgRequestNext clientStNext (pure clientStNext) - where - clientStNext = mkClientStNext $ \clientBlockNo newServerTip -> go mkPipelineDecision' n clientBlockNo (getTipBlockNo newServerTip) - (_, (Pipeline, mkPipelineDecision')) -> - SendMsgRequestNextPipelined - (go mkPipelineDecision' (Succ n) clientTip serverTip) - (Succ n', (CollectOrPipeline, mkPipelineDecision')) -> - CollectResponse - (Just $ SendMsgRequestNextPipelined $ go mkPipelineDecision' (Succ n) clientTip serverTip) - (mkClientStNext $ \clientBlockNo newServerTip -> go mkPipelineDecision' n' clientBlockNo (getTipBlockNo newServerTip)) - (Succ n', (Collect, mkPipelineDecision')) -> - CollectResponse - Nothing - (mkClientStNext $ \clientBlockNo newServerTip -> go mkPipelineDecision' n' clientBlockNo (getTipBlockNo newServerTip)) - - mkClientStNext :: (WithOrigin BlockNo -> Tip CardanoBlock - -> ClientPipelinedStIdle n CardanoBlock (Point CardanoBlock) (Tip CardanoBlock) IO a) - -> ClientStNext n CardanoBlock (Point CardanoBlock) (Tip CardanoBlock) IO a - mkClientStNext finish = - ClientStNext - { recvMsgRollForward = \blk tip -> do - logException trce "recvMsgRollForward: " $ do - let setNodeHeight = gmSetNodeHeight metricsLayer - setNodeHeight (withOrigin 0 (fromIntegral . unBlockNo) (getTipBlockNo tip)) - - details <- getSlotDetails trce env queryVar (getTipPoint tip) (cardanoBlockSlotNo blk) - newSize <- atomically $ do - writeDbActionQueue actionQueue $ mkDbApply blk details - lengthDbActionQueue actionQueue - - let setQueuePostWrite = gmSetQueuePostWrite metricsLayer - setQueuePostWrite (fromIntegral newSize) - - pure $ finish (At (blockNo blk)) tip - , recvMsgRollBackward = \point tip -> do - logException trce "recvMsgRollBackward: " $ do - -- This will get the current tip rather than what we roll back to - -- but will only be incorrect for a short time span. 
- let slot = toRollbackSlot point - atomically $ writeDbActionQueue actionQueue (mkDbRollback slot) - newTip <- getCurrentTipBlockNo dataLayer trce - pure $ finish newTip tip - } - diff --git a/smash/app/Main.hs b/smash/app/Main.hs index 239eb8a..8202f72 100644 --- a/smash/app/Main.hs +++ b/smash/app/Main.hs @@ -8,46 +8,52 @@ module Main where -import Cardano.Prelude hiding (Meta) +import Cardano.Prelude hiding (Meta) ---import Cardano.SMASH.DB -import qualified Cardano.SMASH.DB as DB -import Cardano.SMASH.DBSync.Db.Database (runDbStartup, - runDbThread) +import qualified Data.ByteString.Char8 as BS -import Cardano.SMASH.Offline (runOfflineFetchThread) +import Control.Monad.Trans.Except.Extra (firstExceptT, left, + newExceptT) + +import qualified Cardano.SMASH.DB as DB +import Cardano.SMASH.Offline (runOfflineFetchThread) import Cardano.SMASH.Lib import Cardano.SMASH.Types --- For reading configuration files. -import Cardano.DbSync.Config -import Cardano.Sync.SmashDbSync +import Cardano.Sync.Config +import Cardano.Sync.Config.Types +import Cardano.Sync.Error + +import Cardano.Sync (Block (..), ConfigFile (..), + Meta (..), SocketPath (..), + SyncDataLayer (..), + runSyncNode) +import Cardano.Sync.Database (runDbThread) -import Control.Applicative (optional) +import Control.Applicative (optional) import Control.Monad.Trans.Maybe -import Data.Monoid ((<>)) +import Data.Monoid ((<>)) import Database.Esqueleto -import Options.Applicative (Parser, ParserInfo, - ParserPrefs) -import qualified Options.Applicative as Opt +import Options.Applicative (Parser, ParserInfo, + ParserPrefs) +import qualified Options.Applicative as Opt + +import System.FilePath (()) -import System.FilePath (()) -import qualified System.Metrics.Prometheus.Metric.Gauge as Gauge +import qualified Cardano.BM.Setup as Logging +import Cardano.BM.Trace (Trace, logInfo, modifyName) +import Cardano.Slotting.Slot (SlotNo (..)) +import Cardano.SMASH.DBSyncPlugin (poolMetadataDbSyncNodePlugin) -import qualified Cardano.BM.Setup as Logging -import Cardano.BM.Trace (Trace, logInfo, - modifyName) -import Cardano.Slotting.Slot (SlotNo (..)) -import Cardano.SMASH.DBSync.Metrics (Metrics (..), - registerMetricsServer) -import Cardano.SMASH.DBSyncPlugin (poolMetadataDbSyncNodePlugin) -import Cardano.Sync.SmashDbSync (ConfigFile (..), - MetricsLayer (..), - SocketPath (..), - runDbSyncNode) +import Ouroboros.Consensus.Cardano.Block (StandardShelley) +import Ouroboros.Consensus.Shelley.Node (ShelleyGenesis (..)) +import qualified Shelley.Spec.Ledger.Genesis as Shelley + +import Data.Time.Clock (UTCTime (..)) +import qualified Data.Time.Clock as Time main :: IO () @@ -75,11 +81,11 @@ data Command #ifdef TESTING_MODE | RunStubApplication #endif - | RunApplicationWithDbSync DbSyncNodeParams + | RunApplicationWithDbSync SyncNodeParams setupTraceFromConfig :: ConfigFile -> IO (Trace IO Text) setupTraceFromConfig configFile = do - enc <- readDbSyncNodeConfig configFile + enc <- readSyncNodeConfig configFile trce <- Logging.setupTrace (Right $ dncLoggingConfig enc) "smash-node" return trce @@ -136,8 +142,22 @@ runCommand cmd = #endif RunApplicationWithDbSync dbSyncNodeParams -> runCardanoSyncWithSmash dbSyncNodeParams +renderCardanoSyncError :: CardanoSyncError -> Text +renderCardanoSyncError (CardanoSyncError cardanoSyncError') = cardanoSyncError' + +cardanoSyncError :: Monad m => Text -> ExceptT CardanoSyncError m a +cardanoSyncError = left . CardanoSyncError + +-- @Word64@ is valid as well. 
+newtype BlockId = BlockId Int + deriving (Eq, Show) + +-- @Word64@ is valid as well. +newtype MetaId = MetaId Int + deriving (Eq, Show) + -- Running SMASH with cardano-sync. -runCardanoSyncWithSmash :: DbSyncNodeParams -> IO () +runCardanoSyncWithSmash :: SyncNodeParams -> IO () runCardanoSyncWithSmash dbSyncNodeParams = do -- Setup trace @@ -152,17 +172,7 @@ runCardanoSyncWithSmash dbSyncNodeParams = do logInfo tracer $ "Migrations complete." -- Run metrics server - (metrics, server) <- registerMetricsServer 8080 - - -- Metrics layer. - let metricsLayer = - MetricsLayer - { gmSetNodeHeight = \nodeHeight -> - Gauge.set nodeHeight $ mNodeHeight metrics - - , gmSetQueuePostWrite = \queuePostWrite -> - Gauge.set queuePostWrite $ mQueuePostWrite metrics - } + --(metrics, server) <- registerMetricsServer 8080 let dataLayer :: DB.DataLayer dataLayer = DB.postgresqlDataLayer (Just tracer) @@ -170,59 +180,45 @@ runCardanoSyncWithSmash dbSyncNodeParams = do -- The plugin requires the @DataLayer@. let smashDbSyncNodePlugin = poolMetadataDbSyncNodePlugin dataLayer - let runDbStartupCall = runDbStartup smashDbSyncNodePlugin + let getLatestBlock = + runMaybeT $ do + block <- MaybeT $ DB.runDbIohkLogging tracer DB.queryLatestBlock + return $ convertFromDB block -- The base @DataLayer@. - let cardanoSyncDataLayer = - CardanoSyncDataLayer - { csdlGetBlockId = \genesisHash -> runExceptT $ do - blockId <- ExceptT $ dbFailToCardanoSyncError <$> (DB.runDbIohkLogging tracer $ DB.queryBlockId genesisHash) - return . BlockId . fromIntegral . fromSqlKey $ blockId - - , csdlGetMeta = runExceptT $ do - meta <- ExceptT $ dbFailToCardanoSyncError <$> (DB.runDbIohkLogging tracer DB.queryMeta) - return $ Meta - { mProtocolConst = DB.metaProtocolConst meta - , mSlotDuration = DB.metaSlotDuration meta - , mStartTime = DB.metaStartTime meta - , mSlotsPerEpoch = DB.metaSlotsPerEpoch meta - , mNetworkName = DB.metaNetworkName meta - } + let syncDataLayer = + SyncDataLayer + { sdlGetSlotHash = \slotNo -> do + slotHash <- DB.runDbIohkLogging tracer $ DB.querySlotHash slotNo + case slotHash of + Nothing -> return [] + Just slotHashPair -> return [slotHashPair] - , csdlGetSlotHash = \slotNo -> - DB.runDbIohkLogging tracer $ DB.querySlotHash slotNo + , sdlGetLatestBlock = getLatestBlock - , csdlGetLatestBlock = runMaybeT $ do - block <- MaybeT $ DB.runDbIohkLogging tracer DB.queryLatestBlock - return $ convertFromDB block + , sdlGetLatestSlotNo = SlotNo <$> DB.runDbNoLogging DB.queryLatestSlotNo - -- This is how all the calls should work, implemented on the base @DataLayer@. - , csdlAddGenesisMetaBlock = \meta block -> runExceptT $ do - let addGenesisMetaBlock = DB.dlAddGenesisMetaBlock dataLayer - - let metaDB = convertToDB meta - let blockDB = convertToDB block - - (metaId, blockId) <- ExceptT $ dbFailToCardanoSyncError <$> addGenesisMetaBlock metaDB blockDB - - return (MetaId . fromIntegral . fromSqlKey $ metaId, BlockId . fromIntegral . fromSqlKey $ blockId) } -- The actual DB Thread. 
let runDBThreadFunction = - \tracer' env plugin _metricsLayer actionQueue ledgerVar -> + \tracer' env plugin metrics' actionQueue -> race_ - (runDbThread tracer' env plugin actionQueue ledgerVar) + (runDbThread tracer' env plugin metrics' actionQueue) (runOfflineFetchThread $ modifyName (const "fetch") tracer') + let runInsertValidateGenesisSmashFunction = insertValidateGenesisSmashFunction syncDataLayer + race_ - (runDbSyncNode cardanoSyncDataLayer metricsLayer runDbStartupCall smashDbSyncNodePlugin dbSyncNodeParams runDBThreadFunction) + (runSyncNode syncDataLayer tracer smashDbSyncNodePlugin dbSyncNodeParams runInsertValidateGenesisSmashFunction runDBThreadFunction) (runApp dataLayer defaultConfiguration) - -- Finish and close the metrics server - -- TODO(KS): Bracket! - cancel server +data CardanoSyncError = CardanoSyncError Text + deriving (Eq, Show) + +cardanoSyncErrorToNodeError :: CardanoSyncError -> SyncNodeError +cardanoSyncErrorToNodeError (CardanoSyncError err) = NEError err dbFailToCardanoSyncError :: Either DBFail a -> Either CardanoSyncError a dbFailToCardanoSyncError (Left dbFail) = Left . CardanoSyncError . DB.renderLookupFail $ dbFail @@ -257,28 +253,140 @@ instance DBConversion DB.Block Block where instance DBConversion DB.Meta Meta where convertFromDB meta = Meta - { mProtocolConst = DB.metaProtocolConst meta - , mSlotDuration = DB.metaSlotDuration meta - , mStartTime = DB.metaStartTime meta - , mSlotsPerEpoch = DB.metaSlotsPerEpoch meta + { mStartTime = DB.metaStartTime meta , mNetworkName = DB.metaNetworkName meta } convertToDB meta = DB.Meta - { DB.metaProtocolConst = mProtocolConst meta - , DB.metaSlotDuration = mSlotDuration meta - , DB.metaStartTime = mStartTime meta - , DB.metaSlotsPerEpoch = mSlotsPerEpoch meta + { DB.metaStartTime = mStartTime meta , DB.metaNetworkName = mNetworkName meta } ------------------------------------------------------------------------------- ---pCommandLine :: Parser SmashDbSyncNodeParams -pCommandLine :: Parser DbSyncNodeParams +insertValidateGenesisSmashFunction + :: SyncDataLayer + -> Trace IO Text + -> NetworkName + -> GenesisConfig + -> ExceptT SyncNodeError IO () +insertValidateGenesisSmashFunction dataLayer tracer networkName genCfg = + firstExceptT cardanoSyncErrorToNodeError $ case genCfg of + GenesisCardano _ _bCfg sCfg -> + insertValidateGenesisDistSmash dataLayer tracer networkName (scConfig sCfg) + +-- | Idempotent insert the initial Genesis distribution transactions into the DB. +-- If these transactions are already in the DB, they are validated. +insertValidateGenesisDistSmash + :: SyncDataLayer + -> Trace IO Text + -> NetworkName + -> ShelleyGenesis StandardShelley + -> ExceptT CardanoSyncError IO () +insertValidateGenesisDistSmash _dataLayer tracer (NetworkName networkName) cfg = do + newExceptT $ insertAtomicAction + where + insertAtomicAction :: IO (Either CardanoSyncError ()) + insertAtomicAction = do + let getBlockId = \genesisHash -> runExceptT $ do + blockId <- ExceptT $ dbFailToCardanoSyncError <$> (DB.runDbIohkLogging tracer $ DB.queryBlockId genesisHash) + return . BlockId . fromIntegral . fromSqlKey $ blockId + ebid <- getBlockId (configGenesisHash cfg) + + case ebid of + -- TODO(KS): This needs to be moved into DataLayer. 
+ Right _bid -> runExceptT $ do + let getMeta = runExceptT $ do + meta <- ExceptT $ dbFailToCardanoSyncError <$> (DB.runDbIohkLogging tracer DB.queryMeta) + return $ Meta + { mStartTime = DB.metaStartTime meta + , mNetworkName = DB.metaNetworkName meta + } + + meta <- newExceptT getMeta + + newExceptT $ validateGenesisDistribution tracer meta networkName cfg + + Left _ -> do + liftIO $ logInfo tracer "Inserting Genesis distribution" + + let meta = Meta + { mStartTime = configStartTime cfg + , mNetworkName = Just networkName + } + + let block = Block + { bHash = configGenesisHash cfg + , bEpochNo = Nothing + , bSlotNo = Nothing + , bBlockNo = Nothing + } + + let addGenesisMetaBlock = DB.dlAddGenesisMetaBlock (DB.postgresqlDataLayer $ Just tracer) + metaIdBlockIdE <- addGenesisMetaBlock (convertToDB meta) (convertToDB block) + + case metaIdBlockIdE of + Right (_metaId, _blockId) -> pure $ Right () + Left err -> pure . Left . CardanoSyncError $ show err + +-- | Validate that the initial Genesis distribution in the DB matches the Genesis data. +validateGenesisDistribution + :: (MonadIO m) + => Trace IO Text + -> Meta + -> Text + -> ShelleyGenesis StandardShelley + -> m (Either CardanoSyncError ()) +validateGenesisDistribution tracer meta networkName cfg = + runExceptT $ do + liftIO $ logInfo tracer "Validating Genesis distribution" + + -- Show configuration we are validating + print cfg + + when (mStartTime meta /= configStartTime cfg) $ + cardanoSyncError $ mconcat + [ "Shelley: Mismatch chain start time. Config value " + , textShow (configStartTime cfg) + , " does not match DB value of ", textShow (mStartTime meta) + ] + + case mNetworkName meta of + Nothing -> + cardanoSyncError $ "Shelley.validateGenesisDistribution: Missing network name" + Just name -> + when (name /= networkName) $ + cardanoSyncError $ mconcat + [ "Shelley.validateGenesisDistribution: Provided network name " + , networkName + , " does not match DB value " + , name + ] + +--------------------------------------------------------------------------------------------------- + +textShow :: Show a => a -> Text +textShow = show + +configStartTime :: ShelleyGenesis StandardShelley -> UTCTime +configStartTime = roundToMillseconds . 
+---------------------------------------------------------------------------------------------------
+
+textShow :: Show a => a -> Text
+textShow = show
+
+configStartTime :: ShelleyGenesis StandardShelley -> UTCTime
+configStartTime = roundToMillseconds . Shelley.sgSystemStart
+
+roundToMillseconds :: UTCTime -> UTCTime
+roundToMillseconds (UTCTime day picoSecs) =
+    UTCTime day (Time.picosecondsToDiffTime $ 1000000 * (picoSeconds `div` 1000000))
+  where
+    picoSeconds :: Integer
+    picoSeconds = Time.diffTimeToPicoseconds picoSecs
+
+configGenesisHash :: ShelleyGenesis StandardShelley -> ByteString
+configGenesisHash _ = BS.take 32 ("GenesisHash " <> BS.replicate 32 '\0')
+
+---------------------------------------------------------------------------------------------------
+
+pCommandLine :: Parser SyncNodeParams
 pCommandLine =
-    DbSyncNodeParams
+    SyncNodeParams
       <$> pConfigFile
       <*> pSocketPath
       <*> pLedgerStateDir
diff --git a/smash/smash.cabal b/smash/smash.cabal
index 9d53469..7dd9a5f 100644
--- a/smash/smash.cabal
+++ b/smash/smash.cabal
@@ -33,16 +33,17 @@ library
   exposed-modules:
       Cardano.SMASH.DB
-      Cardano.SMASH.DBSync.Db.Database
       Cardano.SMASH.DBSync.Db.Delete
       Cardano.SMASH.DBSync.Db.Insert
+      Cardano.SMASH.DBSync.Db.Query
+      Cardano.SMASH.DBSync.Db.Schema
+      Cardano.SMASH.DBSync.Db.Migration
       Cardano.SMASH.DBSync.Db.Migration.Haskell
       Cardano.SMASH.DBSync.Db.Migration.Version
       Cardano.SMASH.DBSync.Db.PGConfig
-      Cardano.SMASH.DBSync.Db.Query
       Cardano.SMASH.DBSync.Db.Run
-      Cardano.SMASH.DBSync.Db.Schema
+      Cardano.SMASH.DBSync.Metrics
       Cardano.SMASH.DBSyncPlugin
       Cardano.SMASH.FetchQueue
@@ -57,10 +58,11 @@ library
     , base16-bytestring
     , bytestring
     , cardano-crypto-class
-    , cardano-db-sync
+    , cardano-sync
     , cardano-ledger
     , cardano-prelude
     , cardano-slotting
+    , cardano-db-sync
     , cborg
     , conduit-extra
     , containers
@@ -97,7 +99,6 @@ library
     , transformers
     , transformers-except
     , typed-protocols
-    , smash-sync
     , unix
     , wai
     , warp
@@ -117,23 +118,33 @@ executable smash-exe
   cpp-options: -DTESTING_MODE
   main-is: Main.hs
-  other-modules: Paths_smash
+  other-modules:
+      Paths_smash
   hs-source-dirs: app
   ghc-options: -threaded -rtsopts -with-rtsopts=-N
 
   build-depends: base >=4.7 && <5
     , cardano-prelude
-    , cardano-db-sync
+    , cardano-sync
     , cardano-slotting
     , optparse-applicative
     , iohk-monitoring
     , smash
     , smash-servant-types
-    , smash-sync
     , esqueleto
     , transformers
     , prometheus
     , filepath
+    , bytestring
+    , transformers-except
+    , cardano-api
+    , cardano-db
+    , time
+    , persistent-postgresql
+    , persistent
+    , ouroboros-consensus-cardano
+    , ouroboros-consensus-shelley
+    , shelley-spec-ledger
 
   default-language: Haskell2010
   default-extensions:
diff --git a/smash/src/Cardano/SMASH/DBSync/Db/Query.hs b/smash/src/Cardano/SMASH/DBSync/Db/Query.hs
index 0b2fc6e..649c554 100644
--- a/smash/src/Cardano/SMASH/DBSync/Db/Query.hs
+++ b/smash/src/Cardano/SMASH/DBSync/Db/Query.hs
@@ -7,6 +7,7 @@ module Cardano.SMASH.DBSync.Db.Query
   ( DBFail (..)
   , querySchemaVersion
   , querySlotHash
+  , queryLatestSlotNo
   , queryAllPools
   , queryPoolByPoolId
   , queryPoolMetadata
@@ -71,6 +72,16 @@ querySlotHash slotNo = do
             pure (blk ^. BlockHash)
   pure $ (\vh -> (slotNo, unValue vh)) <$> listToMaybe res
 
+-- | Get the latest slot number
+queryLatestSlotNo :: MonadIO m => ReaderT SqlBackend m Word64
+queryLatestSlotNo = do
+  res <- select . from $ \ blk -> do
+            where_ (isJust $ blk ^. BlockSlotNo)
+            orderBy [desc (blk ^. BlockSlotNo)]
+            limit 1
+            pure $ blk ^. BlockSlotNo
+  pure $ fromMaybe 0 (listToMaybe $ mapMaybe unValue res)
+
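A minimal usage sketch for the new query. The runner and the module prefixes are assumptions (the patch runs its own DB actions through DB.runDbAction (Just tracer) in insertDefaultBlocks further below, and how queryLatestSlotNo is qualified depends on the caller's imports):

    -- Sketch only: fetch the newest slot number recorded in the block table.
    -- queryLatestSlotNo returns 0 when no block row has a slot number yet.
    latestSlotNoExample :: Trace IO Text -> IO Word64
    latestSlotNoExample tracer =
        DB.runDbAction (Just tracer) DB.queryLatestSlotNo
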
 -- |Return all pools.
 queryAllPools :: MonadIO m => ReaderT SqlBackend m [Pool]
 queryAllPools = do
@@ -160,9 +171,10 @@ queryMeta = do
 queryLatestBlock :: MonadIO m => ReaderT SqlBackend m (Maybe Block)
 queryLatestBlock = do
   res <- select $ from $ \ blk -> do
+                where_ (isJust $ blk ^. BlockSlotNo)
                 orderBy [desc (blk ^. BlockSlotNo)]
                 limit 1
-                pure $ blk
+                pure blk
   pure $ fmap entityVal (listToMaybe res)
 
 -- | Get the 'BlockNo' of the latest block.
diff --git a/smash/src/Cardano/SMASH/DBSync/Db/Schema.hs b/smash/src/Cardano/SMASH/DBSync/Db/Schema.hs
index 190e68f..595a2fc 100644
--- a/smash/src/Cardano/SMASH/DBSync/Db/Schema.hs
+++ b/smash/src/Cardano/SMASH/DBSync/Db/Schema.hs
@@ -108,25 +108,21 @@ share
     slotNo              Word64          Maybe   sqltype=uinteger
     blockNo             Word64          Maybe   sqltype=uinteger
     UniqueBlock         hash
-
-    --------------------------------------------------------------------------
-    -- Tables below should be preserved when migration occurs!
-    --------------------------------------------------------------------------
+    deriving Show
 
   -- A table containing metadata about the chain. There will probably only ever be one
   -- row in this table.
   -- TODO(KS): This can be left alone when migration occurs since it should be the same!
   Meta
-    protocolConst       Word64          -- The block security parameter.
-    slotDuration        Word64          -- Slot duration in milliseconds.
-    -- System start time used to calculate slot time stamps.
-    -- Use 'sqltype' here to force timestamp without time zone.
     startTime           UTCTime         sqltype=timestamp
-    slotsPerEpoch       Word64          -- Number of slots per epoch.
     networkName         Text            Maybe
     UniqueMeta          startTime
     deriving Show
 
+  --------------------------------------------------------------------------
+  -- Tables below should be preserved when migration occurs!
+  --------------------------------------------------------------------------
+
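For orientation, after this change persistent's mkPersist derives roughly the following record for Meta; the metaStartTime/metaNetworkName field names are the ones Main.hs already uses via convertToDB above, while strictness and extra instances depend on the shared TH settings, so treat this as a sketch:

    -- Approximate shape of the generated Meta entity after the column removals.
    data Meta = Meta
        { metaStartTime   :: UTCTime
        , metaNetworkName :: Maybe Text
        }

    -- e.g. inside any ReaderT SqlBackend action:
    --   _metaId <- insert $ Meta genesisStartTime (Just "mainnet")
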
   -- A table containing a list of delisted pools.
   DelistedPool
     poolId              Types.PoolId    sqltype=text
diff --git a/smash/src/Cardano/SMASH/DBSyncPlugin.hs b/smash/src/Cardano/SMASH/DBSyncPlugin.hs
index 4779070..e3d8c57 100644
--- a/smash/src/Cardano/SMASH/DBSyncPlugin.hs
+++ b/smash/src/Cardano/SMASH/DBSyncPlugin.hs
@@ -9,7 +9,8 @@ module Cardano.SMASH.DBSyncPlugin
 
 import Cardano.Prelude
 
-import Cardano.BM.Trace (Trace, logError, logInfo)
+import Cardano.BM.Trace (Trace, logDebug, logError,
+                         logInfo)
 
 import Control.Monad.Logger (LoggingT)
 import Control.Monad.Trans.Except.Extra (firstExceptT, newExceptT,
@@ -23,7 +24,7 @@ import Cardano.SMASH.Types (PoolId (..),
                             PoolMetadataHash (..),
                             PoolUrl (..))
 
-import qualified Cardano.Chain.Block as Byron
+import Cardano.Chain.Block (ABlockOrBoundary (..))
 
 import qualified Data.ByteString.Base16 as B16
 
@@ -32,20 +33,23 @@ import Database.Persist.Sql (IsolationLevel (..),
                              transactionSaveWithIsolation)
 
 import qualified Cardano.SMASH.DBSync.Db.Insert as DB
+import qualified Cardano.SMASH.DBSync.Db.Run as DB
 import qualified Cardano.SMASH.DBSync.Db.Schema as DB
 
-import Cardano.DbSync.Config.Types
-import Cardano.DbSync.Error
-import Cardano.DbSync.Types as DbSync
-import Cardano.DbSync.LedgerState
+import Cardano.Sync.Error
+import Cardano.Sync.Types as DbSync
 
-import Cardano.DbSync (DbSyncNodePlugin (..))
-import Cardano.DbSync.Util
+import Cardano.Sync.LedgerState
+import Cardano.Sync (SyncEnv (..),
+                     SyncNodePlugin (..))
+import Cardano.Sync.Util
 
-import qualified Cardano.DbSync.Era.Byron.Util as Byron
+
+import qualified Cardano.DbSync.Era.Shelley.Generic as Generic
 import qualified Cardano.DbSync.Era.Shelley.Generic as Shelley
+import qualified Cardano.Sync.Era.Byron.Util as Byron
 
 import Cardano.Slotting.Block (BlockNo (..))
 import Cardano.Slotting.Slot (EpochNo (..),
@@ -59,16 +63,14 @@ import qualified Shelley.Spec.Ledger.TxBody as Shelley
 
 import Ouroboros.Consensus.Byron.Ledger (ByronBlock (..))
 import Ouroboros.Consensus.Cardano.Block (HardForkBlock (..),
-                                          StandardShelley)
-
-import qualified Cardano.DbSync.Era.Shelley.Generic as Generic
+                                          StandardCrypto)
 
 -- |Pass in the @DataLayer@.
-poolMetadataDbSyncNodePlugin :: DataLayer -> DbSyncNodePlugin
+poolMetadataDbSyncNodePlugin :: DataLayer -> SyncNodePlugin
 poolMetadataDbSyncNodePlugin dataLayer =
-  DbSyncNodePlugin
+  SyncNodePlugin
     { plugOnStartup = []
-    , plugInsertBlock = [insertDefaultBlock dataLayer]
+    , plugInsertBlock = [insertDefaultBlocks dataLayer]
     , plugRollbackBlock = []
     }
 
@@ -79,19 +81,28 @@ data BlockName
   | Mary
   deriving (Eq, Show)
 
+insertDefaultBlocks
+    :: DataLayer
+    -> Trace IO Text
+    -> SyncEnv
+    -> [BlockDetails]
+    -> IO (Either SyncNodeError ())
+insertDefaultBlocks dataLayer tracer env blockDetails =
+    DB.runDbAction (Just tracer) $
+        traverseMEither (\blockDetail -> insertDefaultBlock dataLayer tracer env blockDetail) blockDetails
+
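traverseMEither comes from Cardano.Sync.Util; the behaviour insertDefaultBlocks relies on is "process the block list in order and stop at the first error". An equivalent stand-alone definition, as a reading aid only (the real implementation may differ in details):

    -- Sketch of the assumed semantics: apply the action to each element,
    -- short-circuiting as soon as one returns Left.
    traverseMEitherSketch :: Monad m => (a -> m (Either e ())) -> [a] -> m (Either e ())
    traverseMEitherSketch _ [] = pure (Right ())
    traverseMEitherSketch f (x:xs) =
        f x >>= either (pure . Left) (const $ traverseMEitherSketch f xs)
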
 -- |TODO(KS): We need to abstract over these blocks so we can test this functionality
 -- separately from the actual blockchain, using tests only.
 insertDefaultBlock
     :: DataLayer
     -> Trace IO Text
-    -> DbSyncEnv
-    -> LedgerStateVar
+    -> SyncEnv
     -> BlockDetails
-    -> ReaderT SqlBackend (LoggingT IO) (Either DbSyncNodeError ())
-insertDefaultBlock dataLayer tracer env ledgerStateVar (BlockDetails cblk details) = do
+    -> ReaderT SqlBackend (LoggingT IO) (Either SyncNodeError ())
+insertDefaultBlock dataLayer tracer env (BlockDetails cblk details) = do
     -- Calculate the new ledger state to pass to the DB insert functions but do not yet
     -- update ledgerStateVar.
-    lStateSnap <- liftIO $ applyBlock env ledgerStateVar cblk
+    lStateSnap <- liftIO $ applyBlock (envLedger env) cblk
     res <- case cblk of
             BlockByron blk -> do
               insertByronBlock tracer blk details
@@ -103,8 +114,8 @@ insertDefaultBlock dataLayer tracer env ledgerStateVar (BlockDetails cblk detail
               insertShelleyBlock Mary dataLayer tracer env (Generic.fromMaryBlock blk) lStateSnap details
 
     -- Now we update it in ledgerStateVar and (possibly) store it to disk.
-    liftIO $ saveLedgerState (envLedgerStateDir env) ledgerStateVar
-        (lssState lStateSnap) (isSyncedWithinSeconds details 60)
+    liftIO $ saveLedgerStateMaybe (envLedger env)
+        lStateSnap (isSyncedWithinSeconds details 60)
 
     pure res
 
@@ -114,15 +125,30 @@ insertByronBlock
     :: Trace IO Text
     -> ByronBlock
     -> DbSync.SlotDetails
-    -> ReaderT SqlBackend (LoggingT IO) (Either DbSyncNodeError ())
+    -> ReaderT SqlBackend (LoggingT IO) (Either SyncNodeError ())
 insertByronBlock tracer blk _details = do
     case byronBlockRaw blk of
-      Byron.ABOBBlock byronBlock -> do
+      ABOBBlock byronBlock -> do
+        let blockHash = Byron.blockHash byronBlock
         let slotNum = Byron.slotNumber byronBlock
+        let blockNumber = Byron.blockNumber byronBlock
+
         -- Log only at intervals so we don't add too much noise to the output.
         when (slotNum `mod` 5000 == 0) $
           liftIO . logInfo tracer $ "Byron block, slot: " <> show slotNum
-      Byron.ABOBBoundary {} -> pure ()
+
+        -- TODO(KS): Move to DataLayer.
+        _blkId <- DB.insertBlock $
+          DB.Block
+            { DB.blockHash = blockHash
+            , DB.blockEpochNo = Nothing
+            , DB.blockSlotNo = Just $ slotNum
+            , DB.blockBlockNo = Just $ blockNumber
+            }
+
+        return ()
+
+      ABOBBoundary {} -> pure ()
 
     return $ Right ()
 
@@ -131,11 +157,11 @@ insertShelleyBlock
     :: BlockName
     -> DataLayer
     -> Trace IO Text
-    -> DbSyncEnv
+    -> SyncEnv
     -> Generic.Block
     -> LedgerStateSnapshot
     -> SlotDetails
-    -> ReaderT SqlBackend (LoggingT IO) (Either DbSyncNodeError ())
+    -> ReaderT SqlBackend (LoggingT IO) (Either SyncNodeError ())
 insertShelleyBlock blockName dataLayer tracer env blk _lStateSnap details = do
 
   runExceptT $ do
@@ -174,10 +200,10 @@ insertTx
   => DataLayer
   -> BlockNo
   -> Trace IO Text
-  -> DbSyncEnv
+  -> SyncEnv
   -> Word64
   -> Generic.Tx
-  -> ExceptT DbSyncNodeError m ()
+  -> ExceptT SyncNodeError m ()
 insertTx dataLayer blockNumber tracer env _blockIndex tx =
     mapM_ (insertCertificate dataLayer blockNumber tracer env) $ Generic.txCertificates tx
 
@@ -186,13 +212,15 @@ insertCertificate
   => DataLayer
   -> BlockNo
   -> Trace IO Text
-  -> DbSyncEnv
+  -> SyncEnv
   -> Generic.TxCertificate
-  -> ExceptT DbSyncNodeError m ()
+  -> ExceptT SyncNodeError m ()
 insertCertificate dataLayer blockNumber tracer _env (Generic.TxCertificate _idx cert) =
     case cert of
       Shelley.DCertDeleg _deleg ->
-        liftIO $ logInfo tracer "insertCertificate: DCertDeleg"
+        -- At some point we start to see a large number of delegation
+        -- certificates, so log this only at debug level.
+        liftIO $ logDebug tracer "insertCertificate: DCertDeleg"
       Shelley.DCertPool pool ->
         insertPoolCert dataLayer blockNumber tracer pool
       Shelley.DCertMir _mir ->
@@ -205,8 +233,8 @@ insertPoolCert
   => DataLayer
   -> BlockNo
   -> Trace IO Text
-  -> Shelley.PoolCert StandardShelley
-  -> ExceptT DbSyncNodeError m ()
+  -> Shelley.PoolCert StandardCrypto
+  -> ExceptT SyncNodeError m ()
 insertPoolCert dataLayer blockNumber tracer pCert =
     case pCert of
       Shelley.RegPool pParams -> do
@@ -264,8 +292,8 @@ insertPoolRegister :: forall m.
      (MonadIO m)
   => DataLayer
   -> Trace IO Text
-  -> Shelley.PoolParams StandardShelley
-  -> ExceptT DbSyncNodeError m ()
+  -> Shelley.PoolParams StandardCrypto
+  -> ExceptT SyncNodeError m ()
 insertPoolRegister dataLayer tracer params = do
   let poolIdHash = B16.encode . Generic.unKeyHashRaw $ Shelley._poolId params
   let poolId = PoolId . decodeUtf8 $ poolIdHash
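The pool id derivation in insertPoolRegister (key hash, then hex encoding, then Text) is worth a stand-alone illustration. poolIdFromRawHash below is a hypothetical name, and the real code obtains the raw bytes via Generic.unKeyHashRaw . Shelley._poolId rather than taking a ByteString directly:

    import qualified Data.ByteString as BS
    import qualified Data.ByteString.Base16 as B16
    import           Data.Text (Text)
    import           Data.Text.Encoding (decodeUtf8)

    -- Raw pool key-hash bytes -> lowercase hex -> Text, as wrapped in PoolId above.
    poolIdFromRawHash :: BS.ByteString -> Text
    poolIdFromRawHash = decodeUtf8 . B16.encode
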
diff --git a/smash/src/Cardano/SMASH/Offline.hs b/smash/src/Cardano/SMASH/Offline.hs
index eb5dca7..bfd1f52 100644
--- a/smash/src/Cardano/SMASH/Offline.hs
+++ b/smash/src/Cardano/SMASH/Offline.hs
@@ -75,7 +75,7 @@ fetchInsertNewPoolMetadata
   -> Trace IO Text
   -> DB.PoolMetadataReferenceId
   -> PoolId
-  -> Shelley.PoolMetaData
+  -> Shelley.PoolMetadata
   -> IO ()
 fetchInsertNewPoolMetadata dataLayer tracer refId poolId md = do
   now <- Time.getPOSIXTime
diff --git a/stack.yaml b/stack.yaml
index 31b32b6..3bc505b 100644
--- a/stack.yaml
+++ b/stack.yaml
@@ -1,10 +1,9 @@
-resolver: https://raw.githubusercontent.com/input-output-hk/cardano-haskell/5ed4af4df2a609361260f159cd0e47e1c4073e2c/snapshots/cardano-1.24.2.yaml
+resolver: https://raw.githubusercontent.com/input-output-hk/cardano-haskell/master/snapshots/cardano-1.25.1.yaml
 
 compiler: ghc-8.6.5
 
 #allow-newer: true
 
 packages:
-- smash-sync
 - smash
 - smash-servant-types
@@ -18,6 +17,10 @@ ghc-options:
   smash: -Wall -Werror -fno-warn-redundant-constraints
   #smash-servant-types: -Wall -Werror -fno-warn-redundant-constraints
 
+# Generate files required by Weeder.
+# See https://github.com/ndmitchell/weeder/issues/53
+ghc-options: {"$locals": -ddump-to-file -ddump-hi}
+
 extra-deps:
 - persistent-2.11.0.1
 - persistent-postgresql-2.11.0.0
@@ -29,11 +32,11 @@ extra-deps:
 - prometheus-2.2.2
 
 - git: https://github.com/input-output-hk/cardano-db-sync
-  commit: 3cf868ec8c06265e4b670ac737af640d716e5ef7
+  commit: d5aa846e0751227aa6461084d6ea0567535f752e
   subdirs:
+  - cardano-sync
   - cardano-db
   - cardano-db-sync
-  - cardano-db-sync-extended
 
 nix:
   shell-file: nix/stack-shell.nix