diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml
index 5099340d1e8..3e58e17499a 100644
--- a/.buildkite/pipeline.yml
+++ b/.buildkite/pipeline.yml
@@ -1,4 +1,9 @@
 steps:
+  - label: 'Windows'
+    command: 'PowerShell.exe -executionpolicy remotesigned -File .\.buildkite\windows.ps1'
+    agents:
+      system: x86_64-win2016
+
   - label: 'stack2nix'
     command: 'scripts/check-stack2nix.sh'
     agents:
diff --git a/.buildkite/windows.ps1 b/.buildkite/windows.ps1
new file mode 100644
index 00000000000..48557d8e768
--- /dev/null
+++ b/.buildkite/windows.ps1
@@ -0,0 +1,155 @@
+############################################################################
+# Windows build script for Buildkite
+#
+# Requires the devops buildkite-windows AMI
+############################################################################
+
+#Set-PSDebug -Trace 1
+
+Write-Output "--- Prepare work directory"
+
+# Avoid long paths on Windows
+$env:STACK_ROOT="Z:\s"
+$env:STACK_WORK=".w"
+$env:WORK_DIR="Z:\w"
+# Override the temp directory to avoid sed escaping issues
+# See https://github.com/haskell/cabal/issues/5386
+$env:TMP="Z:\\tmp"
+
+# Store the original checkout directory
+$env:CHECKOUT_PATH=(Get-Item -Path ".\").FullName
+
+# Temp directory needs to exist
+rd -r -fo $env:TMP
+New-Item -ItemType Directory -Force -Path $env:TMP
+
+New-Item -ItemType Directory -Force -Path z:\ghc
+
+New-Item -ItemType Directory -Force -Path z:\Downloads
+New-Item -ItemType Directory -Force -Path $env:STACK_ROOT
+
+$StackConfig = @"
+templates:
+  params: null
+system-ghc: true
+local-programs-path: "z:\\s\\programs"
+local-bin-path: "z:\\s\\bin"
+"@
+
+$StackConfig | Out-File -FilePath $env:STACK_ROOT\config.yaml -Encoding ASCII
+
+#
+if (!([System.IO.File]::Exists("Z:\ghc\ghc-8.2.2.tar.xz"))) {
+    echo "Downloading and extracting GHC"
+    curl.exe https://s3.eu-central-1.amazonaws.com/ci-static/ghc-8.2.2-x86_64-unknown-mingw32.tar.xz -o Z:\ghc\ghc-8.2.2.tar.xz -L
+    7z x Z:\ghc\ghc-8.2.2.tar.xz -oZ:\ghc
+    7z x Z:\ghc\ghc-8.2.2.tar -oZ:\ghc
+}
+
+# OpenSSL
+#
+#$env:USERPROFILE
+if (!([System.IO.File]::Exists("Z:\Downloads\Win64OpenSSL-1_0_2q.exe"))) {
+    echo "Downloading and installing OpenSSL"
+    rd -r -fo z:\OpenSSL-Win64-v102
+    curl.exe https://s3.eu-central-1.amazonaws.com/ci-static/Win64OpenSSL-1_0_2q.exe -o Z:\Downloads\Win64OpenSSL-1_0_2q.exe -L
+    cmd /c start /wait "Z:\Downloads\Win64OpenSSL-1_0_2q.exe" /silent /verysilent /sp- /suppressmsgboxes /DIR=Z:\OpenSSL-Win64-v102
+}
+
+## Install stack
+if (!([System.IO.File]::Exists("Z:\Downloads\stack-1.9.1-windows-x86_64.zip"))) {
+    echo "Downloading and extracting stack"
+    rd -r -fo z:\stack
+    curl.exe https://s3.eu-central-1.amazonaws.com/ci-static/stack-1.9.1-windows-x86_64.zip -o z:\Downloads\stack-1.9.1-windows-x86_64.zip -L
+    7z -oZ:\stack x Z:\Downloads\stack-1.9.1-windows-x86_64.zip
+}
+
+$env:PATH="$env:PATH;Z:\ghc\ghc-8.2.2\bin;Z:\stack;Z:\w"
+
+# Install liblzma/xz
+if (!([System.IO.File]::Exists("Z:\Downloads\xz-5.2.3-windows.zip"))) {
+    echo "Downloading and extracting xz"
+    rd -r -fo z:\xz_extracted
+    curl.exe -L https://s3.eu-central-1.amazonaws.com/ci-static/xz-5.2.3-windows.zip -o Z:\Downloads\xz-5.2.3-windows.zip
+    7z -oZ:\xz_extracted x Z:\Downloads\xz-5.2.3-windows.zip
+}
+
+# Clear out work directory, copy source tree there, then cd into it.
+rd -r -fo $env:WORK_DIR
+mkdir $env:WORK_DIR
+copy-item $env:CHECKOUT_PATH\* $env:WORK_DIR -force -recurse
+cd $env:WORK_DIR
+
+# Install pre-built patched version of rocksdb. The git repo provides
+# headers and the zip provides rocksdb.dll. This is needed to build
+# rocksdb-haskell.
+git.exe clone https://github.com/facebook/rocksdb.git --branch v4.13.5
+if (!([System.IO.File]::Exists("Z:\Downloads\rocksdb-325427fc709183c8fdf777ad5ea09f8d92bf8585.zip"))) {
+    echo "Downloading rocksdb"
+    curl.exe -L 'https://s3.eu-central-1.amazonaws.com/ci-static/serokell-rocksdb-haskell-325427fc709183c8fdf777ad5ea09f8d92bf8585.zip' -o Z:\Downloads\rocksdb-325427fc709183c8fdf777ad5ea09f8d92bf8585.zip
+}
+7z x Z:\Downloads\rocksdb-325427fc709183c8fdf777ad5ea09f8d92bf8585.zip
+
+# CSL-1509: After moving the 'cardano-sl' project itself into a separate folder ('lib/'), the 'cardano-text.exe' executable fails on AppVeyor CI.
+# After some investigation, it was discovered that this was because 'rocksdb.dll' has to be located in this folder as well, or else the test executable doesn't work.
+copy rocksdb.dll node
+copy rocksdb.dll lib
+copy rocksdb.dll wallet-new
+
+# Terminate on all errors
+$ErrorActionPreference = "Stop"
+
+############################################################################
+# Prepare stack and build tools
+#
+
+Write-Output "--- Stack setup and install tools"
+
+# Make stack use a patched system-ghc. This will allow us to work around the command
+# line argument limit on Windows by having GHC copy all libraries into a common folder
+# prior to linking, which reduces the number of library search path flags passed on the
+# command line.
+#
+# WARNING: if you bump the LTS, make sure you have the right (patched) GHC version in path!
+stack.exe config --system-ghc set system-ghc --global true
+
+stack.exe path
+stack.exe exec -- ghc-pkg recache
+stack.exe --verbosity warn setup --no-reinstall
+
+# Install happy separately: https://github.com/commercialhaskell/stack/issues/3151#issuecomment-310642487.
+# Also install cpphs because it's a build-tool and Stack can't figure out by itself that it should be installed.
+stack.exe --verbosity warn install happy cpphs -j 2 --no-terminal --local-bin-path $env:SYSTEMROOT\system32 --extra-include-dirs="Z:\OpenSSL-Win64-v102\include" --extra-lib-dirs="Z:\OpenSSL-Win64-v102" --extra-include-dirs="Z:\xz_extracted\include" --extra-lib-dirs="Z:\xz_extracted\bin_x86-64" --extra-include-dirs="$env:WORK_DIR\rocksdb\include" --extra-lib-dirs="$env:WORK_DIR"
+
+############################################################################
+# Build and test.
+#
+# We intentionally don't build auxx here, because this build is for the installer.
+#
+
+Write-Output "+++ Stack build and test"
+
+stack.exe --dump-logs install cardano-sl cardano-sl-tools cardano-sl-wallet-new -j 3 --no-terminal --local-bin-path $env:WORK_DIR --no-haddock-deps --flag cardano-sl-core:-asserts --flag cardano-sl-tools:for-installer --extra-include-dirs="Z:\OpenSSL-Win64-v102\include" --extra-lib-dirs="Z:\OpenSSL-Win64-v102" --extra-include-dirs="Z:\xz_extracted\include" --extra-lib-dirs="Z:\xz_extracted\bin_x86-64" --extra-include-dirs="$env:WORK_DIR\rocksdb\include" --extra-lib-dirs="$env:WORK_DIR"
+
+############################################################################
+# Assemble artifact zip file for use by the Daedalus installers build
+#
+
+Write-Output "--- Create zip file"
+
+# Cardano pieces, modulo the frontend
+mkdir daedalus
+## log config is called `log-config-prod.yaml` just in case, it's the old name
+copy log-configs\daedalus.yaml daedalus\log-config-prod.yaml
+copy lib\configuration.yaml daedalus\
+copy lib\*genesis*.json daedalus\
+copy cardano-launcher.exe daedalus\
+copy cardano-node.exe daedalus\
+copy cardano-x509-certificates.exe daedalus\
+cd daedalus
+$env:BUILDKITE_BUILD_NUMBER | Out-File -Encoding ASCII build-id
+$env:BUILDKITE_COMMIT | Out-File -Encoding ASCII commit-id
+$env:BUILDKITE_BUILD_URL | Out-File -Encoding ASCII ci-url
+cd ..
+
+$daedaluszip = "$env:BUILDKITE_COMMIT.zip"
+7z.exe a $daedaluszip .\daedalus\*
+buildkite-agent artifact upload $daedaluszip
diff --git a/appveyor.yml b/appveyor.yml
index 7225a8cc8b7..9a08430b05e 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -3,172 +3,5 @@
 image: Visual Studio 2015
 
 build: off
-
-environment:
-  global:
-    # Avoid long paths on Windows
-    STACK_ROOT: "c:\\s"
-    STACK_WORK: ".w"
-    WORK_DIR: "c:\\w"
-    # Override the temp directory to avoid sed escaping issues
-    # See https://github.com/haskell/cabal/issues/5386
-    TMP: "c:\\tmp"
-    CACHE_S3_VERSION: v0.1.4
-    CACHE_S3_MAX_SIZE: 1600MB # AppVeyor limits the amount uploaded to approx 2GB
-    AWS_REGION: us-west-1
-    S3_BUCKET: appveyor-ci-cache
-    AWS_ACCESS_KEY_ID:
-      secure: sQWt5CpaN0H+jwUVoTsrET46pADUDEcrJ5D9MHmKX0M=
-    AWS_SECRET_ACCESS_KEY:
-      secure: m5sQYd16K8HA0zoZaD0gOl4EEWUso1D51L5rp+kT3hLaIE3tt4iT+b+iW8F4F0FU
-
-init:
-- ps: $env:CACHE_S3_READY = (("$env:CACHE_S3_VERSION" -ne "") -and ("$env:S3_BUCKET" -ne "") -and ("$env:AWS_ACCESS_KEY_ID" -ne "") -and ("$env:AWS_SECRET_ACCESS_KEY" -ne ""))
-
-before_test:
-# Avoid long paths not to each MAX_PATH of 260 chars
-- xcopy /q /s /e /r /k /i /v /h /y "%APPVEYOR_BUILD_FOLDER%" "%WORK_DIR%"
-- cd "%WORK_DIR%"
-# Restore cache
-- Echo %APPVEYOR_BUILD_VERSION% > build-id
-- ps: >-
-    Write-Host "in pagefile script" ;
-    $c = Get-WmiObject Win32_computersystem -EnableAllPrivileges ;
-    if($c.AutomaticManagedPagefile){
-      Write-Host "disabling managed page file settings"
-      $c.AutomaticManagedPagefile = $false
-      $c.Put() | Out-Null
-    } ;
-    $new_page_size=25000 ;
-    $CurrentPageFile = Get-WmiObject -Class Win32_PageFileSetting ;
-    if ($CurrentPageFile.InitialSize -ne $new_page_size) {
-      Write-Host "setting new page file size to $new_page_size"
-      $CurrentPageFile.InitialSize=$new_page_size
-      $CurrentPageFile.MaximumSize=$new_page_size
-      $CurrentPageFile.Put() | Out-Null
-    } ;
-    if ( $env:CACHE_S3_READY -eq $true ) {
-      Start-FileDownload https://github.com/fpco/cache-s3/releases/download/$env:CACHE_S3_VERSION/cache-s3-$env:CACHE_S3_VERSION-windows-x86_64.zip -FileName cache-s3.zip
-      7z x cache-s3.zip cache-s3.exe
-      .\cache-s3 --max-size=$env:CACHE_S3_MAX_SIZE --prefix=$env:APPVEYOR_PROJECT_NAME --git-branch=$env:APPVEYOR_REPO_BRANCH --suffix=windows -v info -c restore stack --base-branch=develop
-      .\cache-s3 --max-size=$env:CACHE_S3_MAX_SIZE --prefix=$env:APPVEYOR_PROJECT_NAME --git-branch=$env:APPVEYOR_REPO_BRANCH --suffix=windows -v info -c restore stack work --base-branch=develop
-    }
-
-# Get custom GHC
-- ps: >-
-    mkdir C:\ghc
-
-    Invoke-WebRequest "https://s3.eu-central-1.amazonaws.com/ci-static/ghc-8.4.4-x86_64-unknown-mingw32-20181113-b907eb0f9b.tar.xz" -OutFile "C:\ghc\ghc.tar.xz" -UserAgent "Curl"
-
-    7z x C:\ghc\ghc.tar.xz -oC:\ghc
-
-    7z x C:\ghc\ghc.tar -oC:\ghc
-
-    $env:PATH="$env:PATH;C:\ghc\ghc-8.4.4\bin"
-
-# Install OpenSSL 1.0.2 (see https://github.com/appveyor/ci/issues/1665)
-- ps: (New-Object Net.WebClient).DownloadFile('https://slproweb.com/download/Win64OpenSSL-1_0_2q.exe', "$($env:USERPROFILE)\Win64OpenSSL.exe")
-- ps: cmd /c start /wait "$($env:USERPROFILE)\Win64OpenSSL.exe" /silent /verysilent /sp- /suppressmsgboxes /DIR=C:\OpenSSL-Win64-v102
-- ps: Install-Product node 6
-# Install stack
-- ps: Start-FileDownload http://www.stackage.org/stack/windows-x86_64 -FileName stack.zip
-- 7z x stack.zip stack.exe
-
-
-# Install rocksdb
-- git clone https://github.com/facebook/rocksdb.git --branch v4.13.5
-- ps: Start-FileDownload 'https://s3.eu-central-1.amazonaws.com/ci-static/serokell-rocksdb-haskell-325427fc709183c8fdf777ad5ea09f8d92bf8585.zip' -FileName rocksdb.zip
-- 7z x rocksdb.zip
-
-# CSL-1509: After moving the 'cardano-sl' project itself into a separate folder ('lib/'), the 'cardano-text.exe' executable fails on AppVeyor CI.
-# After some investigation, it was discovered that this was because 'rocksdb.dll' has to be located in this folder as well, or else the test executable doesn't work.
-- copy rocksdb.dll node
-- copy rocksdb.dll lib
-- copy rocksdb.dll wallet
-
-# Install liblzma/xz
-- ps: Start-FileDownload https://tukaani.org/xz/xz-5.2.3-windows.zip -Filename xz-5.2.3-windows.zip
-- 7z -oC:\xz_extracted x xz-5.2.3-windows.zip
-
 test_script:
-  - cd "%WORK_DIR%"
-  - stack config --system-ghc set system-ghc --global true
-  - stack exec -- ghc-pkg recache
-  - stack --verbosity warn setup --no-reinstall > nul
-  # Install happy separately: https://github.com/commercialhaskell/stack/issues/3151#issuecomment-310642487. Also install cpphs because it's a build-tool and Stack can't figure out by itself that it should be installed
-  - scripts\ci\appveyor-retry call stack --verbosity warn install happy cpphs
-    -j 2
-    --no-terminal
-    --local-bin-path %SYSTEMROOT%\system32
-    --extra-include-dirs="C:\OpenSSL-Win64-v102\include"
-    --extra-lib-dirs="C:\OpenSSL-Win64-v102"
-    --extra-include-dirs="C:\xz_extracted\include"
-    --extra-lib-dirs="C:\xz_extracted\bin_x86-64"
-    --extra-include-dirs="%WORK_DIR%\rocksdb\include"
-    --extra-lib-dirs="%WORK_DIR%"
-    --ghc-options="-copy-libs-when-linking"
-# TODO: CSL-1133. To be reenabled.
-# - stack test --coverage
-# - stack hpc report cardano-sl cardano-sl-txp cardano-sl-core cardano-sl-db cardano-sl-update cardano-sl-infra cardano-sl-lrc cardano-sl-ssc
-# Retry transient failures due to https://github.com/haskell/cabal/issues/4005
-  # We intentionally don't build auxx here, because this build is for installer.
-  - scripts\ci\appveyor-retry call stack --dump-logs install cardano-sl cardano-sl-tools cardano-wallet
-    -j 3
-    --no-terminal
-    --local-bin-path %WORK_DIR%
-    --no-haddock-deps
-    --flag cardano-sl-core:-asserts
-    --flag cardano-sl-tools:for-installer
-    --extra-include-dirs="C:\OpenSSL-Win64-v102\include"
-    --extra-lib-dirs="C:\OpenSSL-Win64-v102"
-    --extra-include-dirs="C:\xz_extracted\include"
-    --extra-lib-dirs="C:\xz_extracted\bin_x86-64"
-    --extra-include-dirs="%WORK_DIR%\rocksdb\include"
-    --extra-lib-dirs="%WORK_DIR%"
-    --ghc-options="-copy-libs-when-linking"
-  # Cardano pieces, modulo the frontend
-  - mkdir daedalus
-  # log config is called `log-config-prod.yaml` just in case, it's the old name
-  - copy log-configs\daedalus.yaml daedalus\log-config-prod.yaml
-  - copy lib\configuration.yaml daedalus\
-  - copy lib\*genesis*.json daedalus\
-  - copy cardano-launcher.exe daedalus\
-  - copy cardano-node.exe daedalus\
-  - copy cardano-x509-certificates.exe daedalus\
-  - cd daedalus
-  - Echo %APPVEYOR_BUILD_VERSION% > build-id
-  - Echo %APPVEYOR_REPO_COMMIT% > commit-id
-  - Echo https://ci.appveyor.com/project/%APPVEYOR_ACCOUNT_NAME%/%APPVEYOR_PROJECT_SLUG%/build/%APPVEYOR_BUILD_VERSION% > ci-url
-
-after_test:
-  - xcopy /q /s /e /r /k /i /v /h /y "%WORK_DIR%\daedalus" "%APPVEYOR_BUILD_FOLDER%\daedalus"
-  - cd "%WORK_DIR%/daedalus"
-  - 7z a "%APPVEYOR_REPO_COMMIT%.zip" *
-  - appveyor PushArtifact "%APPVEYOR_REPO_COMMIT%.zip"
-  - cd "%WORK_DIR%" # Get back to where cache-s3.exe is located
-  - ps: >-
-      if ( ($env:CACHE_S3_READY -eq $true) -and (-not $env:APPVEYOR_PULL_REQUEST_NUMBER) ) {
-        if ($env:APPVEYOR_REPO_BRANCH -eq "master" -Or $env:APPVEYOR_REPO_BRANCH -eq "develop" -Or $env:APPVEYOR_REPO_BRANCH -like "release*") {
-          Write-Host "saving stack"
-          .\cache-s3 --max-size=$env:CACHE_S3_MAX_SIZE --prefix=$env:APPVEYOR_PROJECT_NAME --git-branch=$env:APPVEYOR_REPO_BRANCH --suffix=windows -c -v info save stack
-          Write-Host "done stack"
-        }
-        Write-Host "saving stack work"
-        .\cache-s3 --max-size=$env:CACHE_S3_MAX_SIZE --prefix=$env:APPVEYOR_PROJECT_NAME --git-branch=$env:APPVEYOR_REPO_BRANCH --suffix=windows -c -v info save stack work
-        Write-Host "done stack work"
-      }
-
-artifacts:
-  - path: daedalus/
-    name: CardanoSL
-    type: zip
-
-deploy:
-  provider: S3
-  access_key_id:
-    secure: IEky6PsMzHaKHNBMxR8tQaQI8X7qWRB9+HuEroTVRBk=
-  secret_access_key:
-    secure: cqjzG96hWB1x3JDbVSbF9E+aJ5jKvIGacJRUDWATHaTOYfSt6Rvive/NrF4lKBIm
-  bucket: appveyor-ci-deploy
-  region: ap-northeast-1
-  set_public: true
-  folder: cardano-sl
-  artifact: $(APPVEYOR_REPO_COMMIT).zip
+- ps: echo "No Longer Used, check buildkite windows job"
diff --git a/db/cardano-sl-db.cabal b/db/cardano-sl-db.cabal
index d6cf118087b..c01bac31cf7 100644
--- a/db/cardano-sl-db.cabal
+++ b/db/cardano-sl-db.cabal
@@ -141,6 +141,7 @@ library
                      , ekg-core
                      , ether >= 0.5
                      , exceptions
+                     , extra
                      , filepath
                      , formatting
                      , lens
diff --git a/db/src/Pos/DB/Block/Epoch.hs b/db/src/Pos/DB/Block/Epoch.hs
index f02aa1dd376..46f78c0102f 100644
--- a/db/src/Pos/DB/Block/Epoch.hs
+++ b/db/src/Pos/DB/Block/Epoch.hs
@@ -47,6 +47,7 @@ import           Data.Binary (decode, encode)
 import qualified Data.ByteString.Char8 as BS
 import qualified Data.ByteString.Lazy as LBS
 import           Data.Either (partitionEithers)
+import           Data.List.Extra (chunksOf)
 import           Formatting (build, int, sformat, shown, (%))
 import           System.Directory (removeFile)
 import           System.FilePath ((</>))
@@ -351,7 +352,12 @@ consolidateOneEpoch ccp epochSlots = do
 
     -- After the check point is written, delete old blunds for the epoch we have just
    -- consolidated.
-    lift $ mapM_ deleteOldBlund sihs
+    lift $ mapM_ deleter $ chunksOf 1000 sihs
+  where
+    deleter :: ConsolidateM ctx m => [SlotIndexHash] -> m ()
+    deleter xs = do
+        mapM_ deleteOldBlund xs
+        sleepSeconds 2
 
 deleteOldBlund :: ConsolidateM ctx m => SlotIndexHash -> m ()
 deleteOldBlund (SlotIndexHash _ hh) = do
@@ -377,7 +383,7 @@ consolidateEpochBlocks fpath xs = ExceptT $ do
             (liftIO . hClose)
             (\hdl -> do
                 liftIO $ BS.hPutStr hdl epochFileHeader
-                mapM (consolidate hdl) xs
+                mapM (consolidate hdl) $ zip [0 .. ] xs
                 )
     pure $ case partitionEithers ys of
         ([], zs) -> Right $ epochIndexToOffset zs
@@ -385,8 +391,10 @@ consolidateEpochBlocks fpath xs = ExceptT $ do
   where
     consolidate
        :: ConsolidateM ctx m
-        => Handle -> SlotIndexHash -> m (Either ConsolidateError SlotIndexLength)
-    consolidate hdl (SlotIndexHash lsi hh) = do
+        => Handle -> (Int, SlotIndexHash) -> m (Either ConsolidateError SlotIndexLength)
+    consolidate hdl (indx, SlotIndexHash lsi hh) = do
+        when (indx `mod` 1000 == 0) $
+            sleepSeconds 2
         mblund <- getSerializedBlund hh
         case mblund of
             Nothing ->
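The Epoch.hs change above throttles the cleanup: consolidated blunds are deleted in chunks of 1000 with a two-second pause after each chunk, and the consolidation loop itself sleeps every 1000 blocks, so the background job runs slowly instead of flat out. A minimal sketch of the same chunk-and-pause pattern in plain IO, using threadDelay where the patch uses cardano-sl's sleepSeconds inside ConsolidateM; the helper name below is illustrative and not part of the patch:

import Control.Concurrent (threadDelay)
import Control.Monad (forM_)
import Data.List.Extra (chunksOf)  -- from the 'extra' dependency added in this diff

-- Apply an action to every element of a long list, pausing between
-- fixed-size chunks so other users of the disk get a chance to run.
throttledMapM_ :: Int -> Int -> (a -> IO ()) -> [a] -> IO ()
throttledMapM_ chunkSize pauseSecs action xs =
    forM_ (chunksOf chunkSize xs) $ \chunk -> do
        mapM_ action chunk
        threadDelay (pauseSecs * 1000000)  -- threadDelay takes microseconds

With chunkSize = 1000 and pauseSecs = 2 this mirrors the values hard-coded in deleter above.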
diff --git a/db/src/Pos/DB/Rocks/Functions.hs b/db/src/Pos/DB/Rocks/Functions.hs
index a9b87465952..e2235704339 100644
--- a/db/src/Pos/DB/Rocks/Functions.hs
+++ b/db/src/Pos/DB/Rocks/Functions.hs
@@ -41,6 +41,7 @@ import qualified Database.RocksDB as Rocks
 import           System.Directory (createDirectoryIfMissing, doesDirectoryExist,
                                    removeDirectoryRecursive)
 import           System.FilePath (takeDirectory, (</>))
+import qualified System.Info (os)
 
 import           Pos.Binary.Class (Bi, serialize')
 import           Pos.DB.BatchOp (rocksWriteBatch)
@@ -82,6 +83,7 @@ openNodeDBs recreate fp = do
     let gStatePath = fp </> "gState"
     let lrcPath = fp </> "lrc"
     let miscPath = fp </> "misc"
+
     mapM_ ensureDirectoryExists
         [ blocksDir
         , _blockDataDir
@@ -91,6 +93,11 @@ openNodeDBs recreate fp = do
         , lrcPath
         , miscPath
         ]
+
+    when (System.Info.os == "darwin") $
+        -- Prevent indexing of blocks on OSX
+        ensureEmptyFileExists (fp </> ".metadata_never_index")
+
     _blockIndexDB <- openRocksDB blocksIndexPath
     _gStateDB <- openRocksDB gStatePath
     _lrcDB <- openRocksDB lrcPath
@@ -101,6 +108,9 @@ openNodeDBs recreate fp = do
     ensureDirectoryExists :: MonadIO m => FilePath -> m ()
     ensureDirectoryExists = liftIO . createDirectoryIfMissing True
 
+    ensureEmptyFileExists :: MonadIO m => FilePath -> m ()
+    ensureEmptyFileExists file = liftIO $ withFile file AppendMode (\_ -> return ())
+
 -- | Safely close all databases from 'NodeDBs'.
 closeNodeDBs :: MonadIO m => NodeDBs -> m ()
 closeNodeDBs NodeDBs {..} =
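The Rocks/Functions.hs change above drops an empty .metadata_never_index marker file into the node's database directory when running on macOS, which is the conventional way to ask Spotlight not to index the block store; opening the file in AppendMode touches it without truncating one that already exists. A standalone sketch of that check in plain IO rather than the node's MonadIO setting (the function name is illustrative):

import Control.Monad (when)
import System.FilePath ((</>))
import System.IO (IOMode (AppendMode), withFile)
import qualified System.Info

-- Create an empty marker file that stops Spotlight from indexing the
-- given directory; a no-op on platforms other than macOS ("darwin").
preventSpotlightIndexing :: FilePath -> IO ()
preventSpotlightIndexing dbPath =
    when (System.Info.os == "darwin") $
        withFile (dbPath </> ".metadata_never_index") AppendMode (\_ -> return ())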
diff --git a/lib/src/Pos/Diffusion/Full/Block.hs b/lib/src/Pos/Diffusion/Full/Block.hs
index 660fe12d13e..babf9815060 100644
--- a/lib/src/Pos/Diffusion/Full/Block.hs
+++ b/lib/src/Pos/Diffusion/Full/Block.hs
@@ -724,6 +724,7 @@ handleStreamStart logTrace logic oq = listenerConv logTrace oq $ \__ourVerInfo n
                 lift $ traceWith logTrace (Debug, sformat ("handleStreamStart:loop MsgStart, expected MsgStreamUpdate from "%build) nodeId)
                 return ()
             MsgUpdate u -> do
+                lift $ OQ.clearFailureOf oq nodeId
                 lift $ traceWith logTrace (Debug, sformat ("handleStreamStart:loop new window "%shown%" from "%build) u nodeId)
                 loop nodeId conv (msuWindow u)
     loop nodeId conv window =
diff --git a/log-configs/connect-to-cluster.yaml b/log-configs/connect-to-cluster.yaml
index f7ab8ee09bc..89079c0712f 100644
--- a/log-configs/connect-to-cluster.yaml
+++ b/log-configs/connect-to-cluster.yaml
@@ -4,8 +4,19 @@ rotation:
     logLimit: 104857600 # 100MB
     keepFiles: 100
+
 loggerTree:
-    severity: Debug+
-    files:
-      - node.pub
-      - node
+    severity: Info+
+
+    handlers:
+      - { name: "Public"
+        , filepath: "pub/node.log"
+        , logsafety: PublicLogLevel
+        , severity: Debug+
+        , backend: FileTextBE }
+      - { name: "Secret"
+        , filepath: "node"
+        , logsafety: SecretLogLevel
+        , severity: Info+
+        , backend: FileTextBE }
+
 
diff --git a/nix/.stack.nix/cardano-sl-db.nix b/nix/.stack.nix/cardano-sl-db.nix
index deedaeb995f..b77694dcc61 100644
--- a/nix/.stack.nix/cardano-sl-db.nix
+++ b/nix/.stack.nix/cardano-sl-db.nix
@@ -44,6 +44,7 @@
           (hsPkgs.ekg-core)
           (hsPkgs.ether)
           (hsPkgs.exceptions)
+          (hsPkgs.extra)
           (hsPkgs.filepath)
           (hsPkgs.formatting)
           (hsPkgs.lens)
diff --git a/nix/.stack.nix/cardano-sl-tools.nix b/nix/.stack.nix/cardano-sl-tools.nix
index a7e89b9bd1b..4be8821560e 100644
--- a/nix/.stack.nix/cardano-sl-tools.nix
+++ b/nix/.stack.nix/cardano-sl-tools.nix
@@ -115,7 +115,9 @@
           (hsPkgs.universum)
           (hsPkgs.unordered-containers)
           (hsPkgs.yaml)
-        ] ++ pkgs.lib.optional (!system.isWindows) (hsPkgs.unix);
+        ] ++ (if !system.isWindows
+          then [ (hsPkgs.unix) ]
+          else [ (hsPkgs.Win32) ]);
       };
       "cardano-addr-convert" = {
         depends = [
diff --git a/pkgs/default.nix b/pkgs/default.nix
index 6bf60a5407f..ee9475c22ea 100644
--- a/pkgs/default.nix
+++ b/pkgs/default.nix
@@ -15006,6 +15006,7 @@ license = stdenv.lib.licenses.mit;
 , ekg-core
 , ether
 , exceptions
+, extra
 , filepath
 , formatting
 , hedgehog
@@ -15059,6 +15060,7 @@ directory
 ekg-core
 ether
 exceptions
+extra
 filepath
 formatting
 lens