From b0ecdffcf2d68f485f69a4d668ba30d47c780e58 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Thu, 31 Jan 2019 09:33:41 +0300 Subject: [PATCH 01/80] Use - instead of @ as the latter is a bad path character for Nix --- subs/curator/src/Curator/Unpack.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/subs/curator/src/Curator/Unpack.hs b/subs/curator/src/Curator/Unpack.hs index 95a22b8a9b..e48fb33fbc 100644 --- a/subs/curator/src/Curator/Unpack.hs +++ b/subs/curator/src/Curator/Unpack.hs @@ -40,7 +40,7 @@ unpackSnapshot cons snap root = do fromString (packageNameString name) <> "-" <> fromString (versionString version) <> - "@" <> + "-" <> display sha suffixTmp <- parseRelDir $ T.unpack $ utf8BuilderToText $ suffixBuilder <> ".tmp" let destTmp = root unpacked suffixTmp From a2f00a5fea78cc2775d913fd4d9cad063b7c3f32 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Tue, 25 Dec 2018 16:19:10 +0300 Subject: [PATCH 02/80] Using source hashmap with haddocks for doc roots --- src/Stack/Path.hs | 156 ++++++++++++++++++++++++---------------------- src/main/Main.hs | 9 ++- 2 files changed, 90 insertions(+), 75 deletions(-) diff --git a/src/Stack/Path.hs b/src/Stack/Path.hs index c70ad554d9..49de0d7c3a 100644 --- a/src/Stack/Path.hs +++ b/src/Stack/Path.hs @@ -30,64 +30,70 @@ import RIO.Process (HasProcessContext (..), exeSearchPathL) -- | Print out useful path information in a human-readable format (and -- support others later). -path - :: HasEnvConfig env - => [Text] - -> RIO env () -path keys = - do -- We must use a BuildConfig from an EnvConfig to ensure that it contains the - -- full environment info including GHC paths etc. - bc <- view $ envConfigL.buildConfigL - -- This is the modified 'bin-path', - -- including the local GHC or MSYS if not configured to operate on - -- global GHC. - -- It was set up in 'withBuildConfigAndLock -> withBuildConfigExt -> setupEnv'. - -- So it's not the *minimal* override path. - snap <- packageDatabaseDeps - plocal <- packageDatabaseLocal - extra <- packageDatabaseExtra - whichCompiler <- view $ actualCompilerVersionL.whichCompilerL - global <- GhcPkg.getGlobalDB whichCompiler - snaproot <- installationRootDeps - localroot <- installationRootLocal - toolsDir <- bindirCompilerTools - distDir <- distRelativeDir - hpcDir <- hpcReportDir - compiler <- getCompilerPath whichCompiler - let deprecated = filter ((`elem` keys) . fst) deprecatedPathKeys +path :: + (HasEnvConfig envHaddocks, HasEnvConfig envNoHaddocks) + => (RIO envNoHaddocks () -> IO ()) + -> (RIO envHaddocks () -> IO ()) + -> [Text] + -> IO () +path runNoHaddocks runHaddocks keys = + do let deprecated = filter ((`elem` keys) . fst) deprecatedPathKeys liftIO $ forM_ deprecated $ \(oldOption, newOption) -> T.hPutStrLn stderr $ T.unlines [ "" , "'--" <> oldOption <> "' will be removed in a future release." , "Please use '--" <> newOption <> "' instead." , "" ] - forM_ - -- filter the chosen paths in flags (keys), + let -- filter the chosen paths in flags (keys), -- or show all of them if no specific paths chosen. - (filter + goodPaths = filter (\(_,key,_) -> (null keys && key /= T.pack deprecatedStackRootOptionName) || elem key keys) - paths) - (\(_,key,path') -> - liftIO $ T.putStrLn - -- If a single path type is requested, output it directly. - -- Otherwise, name all the paths. 
- ((if length keys == 1 - then "" - else key <> ": ") <> - path' - (PathInfo - bc - snap - plocal - global - snaproot - localroot - toolsDir - distDir - hpcDir - extra - compiler))) + paths + singlePath = length goodPaths == 1 + toEither (_, k, UseHaddocks p) = Left (k, p) + toEither (_, k, WithoutHaddocks p) = Right (k, p) + (with, without) = partitionEithers $ map toEither goodPaths + printKeys runEnv extractors single = runEnv $ do + pathInfo <- fillPathInfo + liftIO $ forM_ extractors $ \(key, extractPath) -> do + let prefix = if single then "" else key <> ": " + T.putStrLn $ prefix <> extractPath pathInfo + printKeys runHaddocks with singlePath + printKeys runNoHaddocks without singlePath + +fillPathInfo :: HasEnvConfig env => RIO env PathInfo +fillPathInfo = do + -- We must use a BuildConfig from an EnvConfig to ensure that it contains the + -- full environment info including GHC paths etc. + bc <- view $ envConfigL.buildConfigL + -- This is the modified 'bin-path', + -- including the local GHC or MSYS if not configured to operate on + -- global GHC. + -- It was set up in 'withBuildConfigAndLock -> withBuildConfigExt -> setupEnv'. + -- So it's not the *minimal* override path. + snap <- packageDatabaseDeps + plocal <- packageDatabaseLocal + extra <- packageDatabaseExtra + whichCompiler <- view $ actualCompilerVersionL.whichCompilerL + global <- GhcPkg.getGlobalDB whichCompiler + snaproot <- installationRootDeps + localroot <- installationRootLocal + toolsDir <- bindirCompilerTools + distDir <- distRelativeDir + hpcDir <- hpcReportDir + compiler <- getCompilerPath whichCompiler + return $ PathInfo bc + snap + plocal + global + snaproot + localroot + toolsDir + distDir + hpcDir + extra + compiler pathParser :: OA.Parser [Text] pathParser = @@ -133,6 +139,8 @@ instance HasBuildConfig PathInfo where buildConfigL = lens piBuildConfig (\x y -> x { piBuildConfig = y }) . buildConfigL +data UseHaddocks a = UseHaddocks a | WithoutHaddocks a + -- | The paths of interest to a user. The first tuple string is used -- for a description that the optparse flag uses, and the second -- string as a machine-readable key and also for @--foo@ flags. The user @@ -142,80 +150,80 @@ instance HasBuildConfig PathInfo where -- When printing output we generate @PathInfo@ and pass it to the -- function to generate an appropriate string. Trailing slashes are -- removed, see #506 -paths :: [(String, Text, PathInfo -> Text)] +paths :: [(String, Text, UseHaddocks (PathInfo -> Text))] paths = [ ( "Global stack root directory" , T.pack stackRootOptionName - , view $ stackRootL.to toFilePathNoTrailingSep.to T.pack) + , WithoutHaddocks $ view (stackRootL.to toFilePathNoTrailingSep.to T.pack)) , ( "Project root (derived from stack.yaml file)" , "project-root" - , view $ projectRootL.to toFilePathNoTrailingSep.to T.pack) + , WithoutHaddocks $ view (projectRootL.to toFilePathNoTrailingSep.to T.pack)) , ( "Configuration location (where the stack.yaml file is)" , "config-location" - , view $ stackYamlL.to toFilePath.to T.pack) + , WithoutHaddocks $ view (stackYamlL.to toFilePath.to T.pack)) , ( "PATH environment variable" , "bin-path" - , T.pack . intercalate [FP.searchPathSeparator] . view exeSearchPathL) + , WithoutHaddocks $ T.pack . intercalate [FP.searchPathSeparator] . 
view exeSearchPathL) , ( "Install location for GHC and other core tools" , "programs" - , view $ configL.to configLocalPrograms.to toFilePathNoTrailingSep.to T.pack) + , WithoutHaddocks $ view (configL.to configLocalPrograms.to toFilePathNoTrailingSep.to T.pack)) , ( "Compiler binary (e.g. ghc)" , "compiler-exe" - , T.pack . toFilePath . piCompiler ) + , WithoutHaddocks $ T.pack . toFilePath . piCompiler ) , ( "Directory containing the compiler binary (e.g. ghc)" , "compiler-bin" - , T.pack . toFilePathNoTrailingSep . parent . piCompiler ) + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . parent . piCompiler ) , ( "Directory containing binaries specific to a particular compiler (e.g. intero)" , "compiler-tools-bin" - , T.pack . toFilePathNoTrailingSep . piToolsDir ) + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . piToolsDir ) , ( "Local bin dir where stack installs executables (e.g. ~/.local/bin)" , "local-bin" - , view $ configL.to configLocalBin.to toFilePathNoTrailingSep.to T.pack) + , WithoutHaddocks $ view $ configL.to configLocalBin.to toFilePathNoTrailingSep.to T.pack) , ( "Extra include directories" , "extra-include-dirs" - , T.intercalate ", " . map T.pack . Set.elems . configExtraIncludeDirs . view configL ) + , WithoutHaddocks $ T.intercalate ", " . map T.pack . Set.elems . configExtraIncludeDirs . view configL ) , ( "Extra library directories" , "extra-library-dirs" - , T.intercalate ", " . map T.pack . Set.elems . configExtraLibDirs . view configL ) + , WithoutHaddocks $ T.intercalate ", " . map T.pack . Set.elems . configExtraLibDirs . view configL ) , ( "Snapshot package database" , "snapshot-pkg-db" - , T.pack . toFilePathNoTrailingSep . piSnapDb ) + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . piSnapDb ) , ( "Local project package database" , "local-pkg-db" - , T.pack . toFilePathNoTrailingSep . piLocalDb ) + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . piLocalDb ) , ( "Global package database" , "global-pkg-db" - , T.pack . toFilePathNoTrailingSep . piGlobalDb ) + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . piGlobalDb ) , ( "GHC_PACKAGE_PATH environment variable" , "ghc-package-path" - , \pi' -> mkGhcPackagePath True (piLocalDb pi') (piSnapDb pi') (piExtraDbs pi') (piGlobalDb pi')) + , WithoutHaddocks $ \pi' -> mkGhcPackagePath True (piLocalDb pi') (piSnapDb pi') (piExtraDbs pi') (piGlobalDb pi')) , ( "Snapshot installation root" , "snapshot-install-root" - , T.pack . toFilePathNoTrailingSep . piSnapRoot ) + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . piSnapRoot ) , ( "Local project installation root" , "local-install-root" - , T.pack . toFilePathNoTrailingSep . piLocalRoot ) + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . piLocalRoot ) , ( "Snapshot documentation root" , "snapshot-doc-root" - , \pi' -> T.pack (toFilePathNoTrailingSep (piSnapRoot pi' docDirSuffix))) + , UseHaddocks $ \pi' -> T.pack (toFilePathNoTrailingSep (piSnapRoot pi' docDirSuffix))) , ( "Local project documentation root" , "local-doc-root" - , \pi' -> T.pack (toFilePathNoTrailingSep (piLocalRoot pi' docDirSuffix))) + , UseHaddocks $ \pi' -> T.pack (toFilePathNoTrailingSep (piLocalRoot pi' docDirSuffix))) , ( "Dist work directory, relative to package directory" , "dist-dir" - , T.pack . toFilePathNoTrailingSep . piDistDir ) + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . piDistDir ) , ( "Where HPC reports and tix files are stored" , "local-hpc-root" - , T.pack . toFilePathNoTrailingSep . 
piHpcDir ) + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . piHpcDir ) , ( "DEPRECATED: Use '--local-bin' instead" , "local-bin-path" - , T.pack . toFilePathNoTrailingSep . configLocalBin . view configL ) + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . configLocalBin . view configL ) , ( "DEPRECATED: Use '--programs' instead" , "ghc-paths" - , T.pack . toFilePathNoTrailingSep . configLocalPrograms . view configL ) + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . configLocalPrograms . view configL ) , ( "DEPRECATED: Use '--" <> stackRootOptionName <> "' instead" , T.pack deprecatedStackRootOptionName - , T.pack . toFilePathNoTrailingSep . view stackRootL ) + , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . view stackRootL ) ] deprecatedPathKeys :: [(Text, Text)] diff --git a/src/main/Main.hs b/src/main/Main.hs index 32983637a9..beb3a0417f 100644 --- a/src/main/Main.hs +++ b/src/main/Main.hs @@ -38,6 +38,7 @@ import Distribution.System (buildArch) import qualified Distribution.Text as Cabal (display) import Distribution.Version (mkVersion') import GHC.IO.Encoding (mkTextEncoding, textEncodingName) +import Lens.Micro ((?~)) import Options.Applicative import Options.Applicative.Help (errorHelp, stringChunk, vcatChunks) import Options.Applicative.Builder.Extra @@ -613,7 +614,13 @@ interpreterHandler currentDir args f = do return (a,(b,mempty)) pathCmd :: [Text] -> GlobalOpts -> IO () -pathCmd keys go = withDefaultBuildConfig go (Stack.Path.path keys) +pathCmd keys go = Stack.Path.path withoutHaddocks withHaddocks keys + where + withoutHaddocks = withDefaultBuildConfig goWithout + goWithout = go & globalOptsBuildOptsMonoidL . buildOptsMonoidHaddockL ?~ False + withHaddocks = withDefaultBuildConfig goWith + goWith = go & globalOptsBuildOptsMonoidL . buildOptsMonoidHaddockL ?~ True + setupCmd :: SetupCmdOpts -> GlobalOpts -> IO () setupCmd sco@SetupCmdOpts{..} go@GlobalOpts{..} = loadConfigWithOpts go $ \lc -> do From 353dd322fe746da6f1d488e7a91cc47de56d1db9 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Wed, 26 Dec 2018 11:43:46 +0300 Subject: [PATCH 03/80] hoogle command with proper options and enabled hadddocks for doc paths --- src/Stack/Hoogle.hs | 24 ++++++++++-------------- src/Stack/Path.hs | 6 ++++++ src/main/Main.hs | 7 +++++-- 3 files changed, 21 insertions(+), 16 deletions(-) diff --git a/src/Stack/Hoogle.hs b/src/Stack/Hoogle.hs index 8ccc92de68..1800006185 100644 --- a/src/Stack/Hoogle.hs +++ b/src/Stack/Hoogle.hs @@ -13,6 +13,7 @@ import Data.Char (isSpace) import qualified Data.Text as T import Distribution.Types.PackageName (mkPackageName) import Distribution.Version (mkVersion) +import Lens.Micro ((?~)) import Path (parseAbsFile) import Path.IO hiding (findExecutable) import qualified Stack.Build @@ -24,11 +25,12 @@ import RIO.Process -- | Hoogle command. hoogleCmd :: ([String],Bool,Bool,Bool) -> GlobalOpts -> IO () -hoogleCmd (args,setup,rebuild,startServer) go = withDefaultBuildConfig go $ do +hoogleCmd (args,setup,rebuild,startServer) go = withDefaultBuildConfig haddocksGo $ do hooglePath <- ensureHoogleInPath generateDbIfNeeded hooglePath runHoogle hooglePath args' where + haddocksGo = go & globalOptsBuildOptsMonoidL . 
buildOptsMonoidHaddockL ?~ True args' :: [String] args' = if startServer then ["server", "--local", "--port", "8080"] @@ -60,16 +62,9 @@ hoogleCmd (args,setup,rebuild,startServer) go = withDefaultBuildConfig go $ do runHoogle hooglePath ["generate", "--local"] buildHaddocks :: RIO EnvConfig () buildHaddocks = - liftIO - (catch - (withDefaultBuildConfigAndLock - (set - (globalOptsBuildOptsMonoidL . buildOptsMonoidHaddockL) - (Just True) - go) - (Stack.Build.build Nothing)) - (\(_ :: ExitCode) -> - return ())) + liftIO $ + catch (withDefaultBuildConfigAndLock haddocksGo $ Stack.Build.build Nothing) + (\(_ :: ExitCode) -> return ()) hooglePackageName = mkPackageName "hoogle" hoogleMinVersion = mkVersion [5, 0] hoogleMinIdent = @@ -104,15 +99,16 @@ hoogleCmd (args,setup,rebuild,startServer) go = withDefaultBuildConfig go $ do let boptsCLI = defaultBuildOptsCLI { boptsCLITargets = pure $ + T.pack . packageIdentifierString $ either - (T.pack . packageIdentifierString) - (utf8BuilderToText . display) + id + (\(PackageIdentifierRevision n v _) -> PackageIdentifier n v) hooglePackageIdentifier } liftIO (catch (withBuildConfigAndLock - go + haddocksGo NeedTargets boptsCLI $ Stack.Build.build Nothing diff --git a/src/Stack/Path.hs b/src/Stack/Path.hs index 49de0d7c3a..47e6dc680f 100644 --- a/src/Stack/Path.hs +++ b/src/Stack/Path.hs @@ -80,6 +80,7 @@ fillPathInfo = do snaproot <- installationRootDeps localroot <- installationRootLocal toolsDir <- bindirCompilerTools + hoogle <- hoogleRoot distDir <- distRelativeDir hpcDir <- hpcReportDir compiler <- getCompilerPath whichCompiler @@ -90,6 +91,7 @@ fillPathInfo = do snaproot localroot toolsDir + hoogle distDir hpcDir extra @@ -114,6 +116,7 @@ data PathInfo = PathInfo , piSnapRoot :: Path Abs Dir , piLocalRoot :: Path Abs Dir , piToolsDir :: Path Abs Dir + , piHoogleRoot :: Path Abs Dir , piDistDir :: Path Rel Dir , piHpcDir :: Path Abs Dir , piExtraDbs :: [Path Abs Dir] @@ -209,6 +212,9 @@ paths = , ( "Local project documentation root" , "local-doc-root" , UseHaddocks $ \pi' -> T.pack (toFilePathNoTrailingSep (piLocalRoot pi' docDirSuffix))) + , ( "Local project documentation root" + , "local-hoogle-root" + , UseHaddocks $ T.pack . toFilePathNoTrailingSep . piHoogleRoot) , ( "Dist work directory, relative to package directory" , "dist-dir" , WithoutHaddocks $ T.pack . toFilePathNoTrailingSep . piDistDir ) diff --git a/src/main/Main.hs b/src/main/Main.hs index beb3a0417f..a2b0605ae1 100644 --- a/src/main/Main.hs +++ b/src/main/Main.hs @@ -616,9 +616,12 @@ interpreterHandler currentDir args f = do pathCmd :: [Text] -> GlobalOpts -> IO () pathCmd keys go = Stack.Path.path withoutHaddocks withHaddocks keys where - withoutHaddocks = withDefaultBuildConfig goWithout + continueOnSuccess f = catch f ignoreSuccess + ignoreSuccess ExitSuccess = return () + ignoreSuccess ex = throwIO ex + withoutHaddocks = continueOnSuccess . withDefaultBuildConfig goWithout goWithout = go & globalOptsBuildOptsMonoidL . buildOptsMonoidHaddockL ?~ False - withHaddocks = withDefaultBuildConfig goWith + withHaddocks = continueOnSuccess . withDefaultBuildConfig goWith goWith = go & globalOptsBuildOptsMonoidL . 
buildOptsMonoidHaddockL ?~ True From b7bad04254ac4a8cc7d61cdade79b8713fe10e2e Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Thu, 3 Jan 2019 12:09:27 +0300 Subject: [PATCH 04/80] Curator unpack and proper handling of expected test/haddock failures --- src/Stack/Build/Execute.hs | 54 +++++++++++++++++++++++++----- src/Stack/Types/Config.hs | 6 ++++ subs/curator/src/Curator/Unpack.hs | 42 ++++++++++++++--------- 3 files changed, 79 insertions(+), 23 deletions(-) diff --git a/src/Stack/Build/Execute.hs b/src/Stack/Build/Execute.hs index c342df45f1..3e2a35db58 100644 --- a/src/Stack/Build/Execute.hs +++ b/src/Stack/Build/Execute.hs @@ -1240,7 +1240,9 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap minstalled <- case mprecompiled of Just precompiled -> copyPreCompiled precompiled - Nothing -> realConfigAndBuild cache allDepsMap + Nothing -> do + mcurator <- view $ buildConfigL.to bcCurator + realConfigAndBuild cache mcurator allDepsMap case minstalled of Nothing -> return () Just installed -> do @@ -1256,6 +1258,15 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap packageHasExposedModules package && -- Special help for the curator tool to avoid haddocks that are known to fail maybe True (Set.notMember pname . curatorSkipHaddock) mcurator + expectHaddockFailure mcurator = + maybe False (Set.member pname . curatorExpectHaddockFailure) mcurator + fulfillHaddockExpectations mcurator action | expectHaddockFailure mcurator = do + eres <- tryAny action + case eres of + Right () -> logWarn $ fromString (packageNameString pname) <> ": unexpected Haddock success" + Left _ -> return () + fulfillHaddockExpectations _ action = do + action buildingFinals = isFinalBuild || taskAllInOne enableTests = buildingFinals && any isCTest (taskComponents task) @@ -1378,7 +1389,7 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap where bindir = toFilePath $ bcoSnapInstallRoot eeBaseConfigOpts bindirSuffix - realConfigAndBuild cache allDepsMap = withSingleContext ac ee task (Just allDepsMap) Nothing + realConfigAndBuild cache mcurator allDepsMap = withSingleContext ac ee task (Just allDepsMap) Nothing $ \package cabalfp pkgDir cabal announce _outputType -> do executableBuildStatuses <- getExecutableBuildStatuses package pkgDir when (not (cabalIsSatisfied executableBuildStatuses) && taskIsTarget task) @@ -1405,7 +1416,8 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap (_, True) | null acDownstream || installedMapHasThisPkg -> do initialBuildSteps executableBuildStatuses cabal announce return Nothing - _ -> liftM Just $ realBuild cache package pkgDir cabal announce executableBuildStatuses + _ -> fulfillTestExpectations pname mcurator Nothing $ + fmap Just $ realBuild cache package pkgDir cabal announce executableBuildStatuses initialBuildSteps executableBuildStatuses cabal announce = do () <- announce ("initial-build-steps" <> annSuffix executableBuildStatuses) @@ -1509,7 +1521,7 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap | ghcVer >= mkVersion [8, 4] -> ["--haddock-option=--quickjump"] _ -> [] - cabal KeepTHLoading $ concat + fulfillHaddockExpectations mcurator $ cabal KeepTHLoading $ concat [ ["haddock", "--html", "--hoogle", "--html-location=../$pkg-$version/"] , sourceFlag , ["--internal" | boptsHaddockInternal eeBuildOpts] @@ -1706,6 +1718,9 @@ singleTest topts testsToRun ac ee task installedMap = do -- FIXME: Since this doesn't use 
cabal, we should be able to avoid using a -- fullblown 'withSingleContext'. (allDepsMap, _cache) <- getConfigCache ee task installedMap True False + mcurator <- view $ buildConfigL.to bcCurator + let pname = pkgName $ taskProvides task + expectFailure = expectTestFailure pname mcurator withSingleContext ac ee task (Just allDepsMap) (Just "test") $ \package _cabalfp pkgDir _cabal announce outputType -> do config <- view configL let needHpc = toCoverage topts @@ -1774,8 +1789,9 @@ singleTest topts testsToRun ac ee task installedMap = do , esLocaleUtf8 = False , esKeepGhcRts = False } + let emptyResult = Map.singleton testName Nothing withProcessContext menv $ if exists - then do + then fulfillTestExpectations pname mcurator emptyResult $ do -- We clear out the .tix files before doing a run. when needHpc $ do tixexists <- doesFileExist tixPath @@ -1837,12 +1853,12 @@ singleTest topts testsToRun ac ee task installedMap = do announceResult "failed" return $ Map.singleton testName (Just ec) else do - logError $ displayShow $ TestSuiteExeMissing + unless expectFailure $ logError $ displayShow $ TestSuiteExeMissing (packageBuildType package == C.Simple) exeName (packageNameString (packageName package)) (T.unpack testName) - return $ Map.singleton testName Nothing + return emptyResult when needHpc $ do let testsToRun' = map f testsToRun @@ -1859,7 +1875,7 @@ singleTest topts testsToRun ac ee task installedMap = do hClose h S.readFile $ toFilePath logFile - unless (Map.null errs) $ throwM $ TestSuiteFailure + unless (Map.null errs || expectFailure) $ throwM $ TestSuiteFailure (taskProvides task) errs (case outputType of @@ -2128,3 +2144,25 @@ addGlobalPackages deps globals0 = -- None of the packages we checked can be added, therefore drop them all -- and return our results loop _ [] gids = gids + + +expectTestFailure :: PackageName -> Maybe Curator -> Bool +expectTestFailure pname mcurator = + maybe False (Set.member pname . curatorExpectTestFailure) mcurator + +fulfillTestExpectations :: + (HasLogFunc env) + => PackageName + -> Maybe Curator + -> b + -> RIO env b + -> RIO env b +fulfillTestExpectations pname mcurator defValue action | expectTestFailure pname mcurator = do + eres <- tryAny action + case eres of + Right res -> do + logWarn $ fromString (packageNameString pname) <> ": unexpected test success" + return res + Left _ -> return defValue +fulfillTestExpectations _ _ _ action = do + action diff --git a/src/Stack/Types/Config.hs b/src/Stack/Types/Config.hs index fb4da044d6..b68e3fcc2a 100644 --- a/src/Stack/Types/Config.hs +++ b/src/Stack/Types/Config.hs @@ -610,21 +610,27 @@ instance ToJSON Project where -- documented and exposed Stack API. SUBJECT TO CHANGE. data Curator = Curator { curatorSkipTest :: !(Set PackageName) + , curatorExpectTestFailure :: !(Set PackageName) , curatorSkipBenchmark :: !(Set PackageName) , curatorSkipHaddock :: !(Set PackageName) + , curatorExpectHaddockFailure :: !(Set PackageName) } deriving Show instance ToJSON Curator where toJSON c = object [ "skip-test" .= Set.map CabalString (curatorSkipTest c) + , "expect-test-failure" .= Set.map CabalString (curatorExpectTestFailure c) , "skip-bench" .= Set.map CabalString (curatorSkipBenchmark c) , "skip-haddock" .= Set.map CabalString (curatorSkipHaddock c) + , "expect-test-failure" .= Set.map CabalString (curatorExpectHaddockFailure c) ] instance FromJSON (WithJSONWarnings Curator) where parseJSON = withObjectWarnings "Curator" $ \o -> Curator <$> fmap (Set.map unCabalString) (o ..:? 
"skip-test" ..!= mempty) + <*> fmap (Set.map unCabalString) (o ..:? "expect-test-failure" ..!= mempty) <*> fmap (Set.map unCabalString) (o ..:? "skip-bench" ..!= mempty) <*> fmap (Set.map unCabalString) (o ..:? "skip-haddock" ..!= mempty) + <*> fmap (Set.map unCabalString) (o ..:? "expect-haddock-failure" ..!= mempty) -- An uninterpreted representation of configuration options. -- Configurations may be "cascaded" using mappend (left-biased). diff --git a/subs/curator/src/Curator/Unpack.hs b/subs/curator/src/Curator/Unpack.hs index e48fb33fbc..6e5abf2801 100644 --- a/subs/curator/src/Curator/Unpack.hs +++ b/subs/curator/src/Curator/Unpack.hs @@ -24,16 +24,23 @@ unpackSnapshot -> RIO env () unpackSnapshot cons snap root = do unpacked <- parseRelDir "unpacked" - (suffixes, flags, skipTest, skipBench, skipHaddock) <- fmap fold $ for (rsPackages snap) $ \sp -> do + (suffixes, flags, (skipTest, expectTestFailure), skipBench, + (skipHaddock, expectHaddockFailure)) <- fmap fold $ for (rsPackages snap) $ \sp -> do let pl = rspLocation sp TreeKey (BlobKey sha _size) <- getRawPackageLocationTreeKey pl PackageIdentifier name version <- getRawPackageLocationIdent pl - pc <- - case Map.lookup name $ consPackages cons of - Nothing -> error $ "Package not found in constraints: " ++ packageNameString name - Just pc -> pure pc - unless (pcFlags pc == rspFlags sp) $ error "mismatched flags!" - if pcSkipBuild pc + let (flags, skipBuild, test, bench, haddock) = + case Map.lookup name $ consPackages cons of + Nothing -> + (mempty, False, CAExpectSuccess, CAExpectSuccess, CAExpectSuccess) + Just pc -> + (pcFlags pc, pcSkipBuild pc, pcTests pc, pcBenchmarks pc, pcHaddock pc) + unless (flags == rspFlags sp) $ error $ unlines + [ "mismatched flags for " ++ show pl + , " snapshot: " ++ show (rspFlags sp) + , " constraints: " ++ show flags + ] + if skipBuild then pure mempty else do let suffixBuilder = @@ -55,16 +62,19 @@ unpackSnapshot cons snap root = do renameDir destTmp dest pure ( Set.singleton suffix - , if Map.null (pcFlags pc) then Map.empty else Map.singleton name (pcFlags pc) - , case pcTests pc of + , if Map.null flags then Map.empty else Map.singleton name flags + , case test of CAExpectSuccess -> mempty - _ -> Set.singleton name -- FIXME this and others, want to differentiate skip and expect failure - , case pcBenchmarks pc of + CAExpectFailure -> (mempty, Set.singleton name) + CASkip -> (Set.singleton name, mempty) + , case bench of + CASkip -> Set.singleton name + _ -> mempty -- FIXME maybe we want to differentiate skip and expect failure but + -- we don't run benchmarks, only compile them + , case haddock of CAExpectSuccess -> mempty - _ -> Set.singleton name - , case pcHaddock pc of - CAExpectSuccess -> mempty - _ -> Set.singleton name + CAExpectFailure -> (mempty, Set.singleton name) + CASkip -> (Set.singleton name, mempty) ) stackYaml <- parseRelFile "stack.yaml" let stackYamlFP = toFilePath $ root stackYaml @@ -74,7 +84,9 @@ unpackSnapshot cons snap root = do , "flags" .= fmap toCabalStringMap (toCabalStringMap flags) , "curator" .= object [ "skip-test" .= Set.map CabalString skipTest + , "expect-test-failure" .= Set.map CabalString expectTestFailure , "skip-bench" .= Set.map CabalString skipBench , "skip-haddock" .= Set.map CabalString skipHaddock + , "expect-haddock-failure" .= Set.map CabalString expectHaddockFailure ] ] From ed5c0a92799e592f3fd477125f4c44cff084a33e Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Wed, 9 Jan 2019 17:30:25 +0300 Subject: [PATCH 05/80] Introduce 
test-suite-timeout parameter --- snapshot.yaml | 1 + src/Stack/Build/Execute.hs | 14 +++++++++----- src/Stack/Config/Build.hs | 1 + src/Stack/Options/TestParser.hs | 5 +++++ src/Stack/Types/Config/Build.hs | 7 +++++++ 5 files changed, 23 insertions(+), 5 deletions(-) diff --git a/snapshot.yaml b/snapshot.yaml index 611ac9e5ce..53d0778ec2 100644 --- a/snapshot.yaml +++ b/snapshot.yaml @@ -18,6 +18,7 @@ packages: - unliftio-0.2.8.0@sha256:5a47f12ffcee837215c67b05abf35dffb792096564a6f81652d75a54668224cd,2250 - happy-1.19.9@sha256:f8c774230735a390c287b2980cfcd2703d24d8dde85a01ea721b7b4b4c82944f,4667 - fsnotify-0.3.0.1@rev:1 +- process-1.6.3.0@sha256:fc77cfe75a9653b8c54ae455ead8c06cb8adc4d7a340984d84d8ca880b579919,2370 #because of https://github.com/haskell/process/pull/101 flags: cabal-install: diff --git a/src/Stack/Build/Execute.hs b/src/Stack/Build/Execute.hs index 3e2a35db58..76069ea94f 100644 --- a/src/Stack/Build/Execute.hs +++ b/src/Stack/Build/Execute.hs @@ -35,10 +35,7 @@ import Conduit import qualified Data.Conduit.Binary as CB import qualified Data.Conduit.Filesystem as CF import qualified Data.Conduit.List as CL -import Data.Conduit.Process.Typed - (ExitCodeException (..), waitExitCode, - useHandleOpen, setStdin, setStdout, setStderr, - runProcess_, getStdout, getStderr, createSource) +import Data.Conduit.Process.Typed (createSource) import qualified Data.Conduit.Text as CT import Data.List hiding (any) import qualified Data.Map.Strict as M @@ -1818,9 +1815,16 @@ singleTest topts testsToRun ac ee task installedMap = do case outputType of OTConsole _ -> id OTLogFile _ h -> setter (useHandleOpen h) + optionalTimeout action + | Just maxSecs <- toMaximumTimeSeconds topts, maxSecs > 0 = do + mres <- timeout (maxSecs * 1000000) action + case mres of + Nothing -> throwString "test suite timed out" + Just res -> return res + | otherwise = action ec <- withWorkingDir (toFilePath pkgDir) $ - proc (toFilePath exePath) args $ \pc0 -> do + optionalTimeout $ proc (toFilePath exePath) args $ \pc0 -> do stdinBS <- if isTestTypeLib then do diff --git a/src/Stack/Config/Build.hs b/src/Stack/Config/Build.hs index 8c744d614f..6d29d27b7b 100644 --- a/src/Stack/Config/Build.hs +++ b/src/Stack/Config/Build.hs @@ -109,6 +109,7 @@ testOptsFromMonoid TestOptsMonoid{..} madditional = , toAdditionalArgs = fromMaybe [] madditional <> toMonoidAdditionalArgs , toCoverage = fromFirst (toCoverage defaultTestOpts) toMonoidCoverage , toDisableRun = fromFirst (toDisableRun defaultTestOpts) toMonoidDisableRun + , toMaximumTimeSeconds = fromFirst (toMaximumTimeSeconds defaultTestOpts) toMonoidMaximumTimeSeconds } benchmarkOptsFromMonoid :: BenchmarkOptsMonoid -> Maybe [String] -> BenchmarkOpts diff --git a/src/Stack/Options/TestParser.hs b/src/Stack/Options/TestParser.hs index e5c735edd1..6380eb406e 100644 --- a/src/Stack/Options/TestParser.hs +++ b/src/Stack/Options/TestParser.hs @@ -36,4 +36,9 @@ testOptsParser hide0 = (long "no-run-tests" <> help "Disable running of tests. (Tests will still be built.)" <> hide)) + <*> optionalFirst + (option (fmap Just $ auto) + (long "test-suite-timeout" <> + help "Maximum test suite run time in seconds." 
<> + hide)) where hide = hideMods hide0 diff --git a/src/Stack/Types/Config/Build.hs b/src/Stack/Types/Config/Build.hs index 1b920764e8..6e0d412194 100644 --- a/src/Stack/Types/Config/Build.hs +++ b/src/Stack/Types/Config/Build.hs @@ -347,6 +347,7 @@ data TestOpts = ,toAdditionalArgs :: ![String] -- ^ Arguments passed to the test program ,toCoverage :: !Bool -- ^ Generate a code coverage report ,toDisableRun :: !Bool -- ^ Disable running of tests + ,toMaximumTimeSeconds :: !(Maybe Int) -- ^ test suite timeout in seconds } deriving (Eq,Show) defaultTestOpts :: TestOpts @@ -355,6 +356,7 @@ defaultTestOpts = TestOpts , toAdditionalArgs = [] , toCoverage = False , toDisableRun = False + , toMaximumTimeSeconds = Nothing } data TestOptsMonoid = @@ -363,6 +365,7 @@ data TestOptsMonoid = , toMonoidAdditionalArgs :: ![String] , toMonoidCoverage :: !(First Bool) , toMonoidDisableRun :: !(First Bool) + , toMonoidMaximumTimeSeconds :: !(First (Maybe Int)) } deriving (Show, Generic) instance FromJSON (WithJSONWarnings TestOptsMonoid) where @@ -371,6 +374,7 @@ instance FromJSON (WithJSONWarnings TestOptsMonoid) where toMonoidAdditionalArgs <- o ..:? toMonoidAdditionalArgsName ..!= [] toMonoidCoverage <- First <$> o ..:? toMonoidCoverageArgName toMonoidDisableRun <- First <$> o ..:? toMonoidDisableRunArgName + toMonoidMaximumTimeSeconds <- First <$> o ..:? toMonoidMaximumTimeSecondsArgName return TestOptsMonoid{..}) toMonoidRerunTestsArgName :: Text @@ -385,6 +389,9 @@ toMonoidCoverageArgName = "coverage" toMonoidDisableRunArgName :: Text toMonoidDisableRunArgName = "no-run-tests" +toMonoidMaximumTimeSecondsArgName :: Text +toMonoidMaximumTimeSecondsArgName = "test-suite-timeout" + instance Semigroup TestOptsMonoid where (<>) = mappenddefault From 960538fbdf0ddda69a7ba96caa3068cd5d680e6d Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Wed, 9 Jan 2019 17:35:29 +0300 Subject: [PATCH 06/80] Handle expected benchmark compilation failures --- src/Stack/Build/Execute.hs | 25 ++++++++++++++++++++----- src/Stack/Types/Config.hs | 3 +++ subs/curator/src/Curator/Unpack.hs | 9 +++++---- 3 files changed, 28 insertions(+), 9 deletions(-) diff --git a/src/Stack/Build/Execute.hs b/src/Stack/Build/Execute.hs index 76069ea94f..33f0509e7c 100644 --- a/src/Stack/Build/Execute.hs +++ b/src/Stack/Build/Execute.hs @@ -1413,7 +1413,7 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap (_, True) | null acDownstream || installedMapHasThisPkg -> do initialBuildSteps executableBuildStatuses cabal announce return Nothing - _ -> fulfillTestExpectations pname mcurator Nothing $ + _ -> fulfillCuratorExpectations pname mcurator enableTests enableBenchmarks Nothing $ fmap Just $ realBuild cache package pkgDir cabal announce executableBuildStatuses initialBuildSteps executableBuildStatuses cabal announce = do @@ -1788,7 +1788,7 @@ singleTest topts testsToRun ac ee task installedMap = do } let emptyResult = Map.singleton testName Nothing withProcessContext menv $ if exists - then fulfillTestExpectations pname mcurator emptyResult $ do + then fulfillCuratorExpectations pname mcurator True False emptyResult $ do -- We clear out the .tix files before doing a run. when needHpc $ do tixexists <- doesFileExist tixPath @@ -2154,19 +2154,34 @@ expectTestFailure :: PackageName -> Maybe Curator -> Bool expectTestFailure pname mcurator = maybe False (Set.member pname . 
curatorExpectTestFailure) mcurator -fulfillTestExpectations :: +expectBenchmarkFailure :: PackageName -> Maybe Curator -> Bool +expectBenchmarkFailure pname mcurator = + maybe False (Set.member pname . curatorExpectBenchmarkFailure) mcurator + +fulfillCuratorExpectations :: (HasLogFunc env) => PackageName -> Maybe Curator + -> Bool + -> Bool -> b -> RIO env b -> RIO env b -fulfillTestExpectations pname mcurator defValue action | expectTestFailure pname mcurator = do +fulfillCuratorExpectations pname mcurator enableTests _ defValue action | enableTests && + expectTestFailure pname mcurator = do eres <- tryAny action case eres of Right res -> do logWarn $ fromString (packageNameString pname) <> ": unexpected test success" return res Left _ -> return defValue -fulfillTestExpectations _ _ _ action = do +fulfillCuratorExpectations pname mcurator _ enableBench defValue action | enableBench && + expectBenchmarkFailure pname mcurator = do + eres <- tryAny action + case eres of + Right res -> do + logWarn $ fromString (packageNameString pname) <> ": unexpected benchmark success" + return res + Left _ -> return defValue +fulfillCuratorExpectations _ _ _ _ _ action = do action diff --git a/src/Stack/Types/Config.hs b/src/Stack/Types/Config.hs index b68e3fcc2a..fee917948b 100644 --- a/src/Stack/Types/Config.hs +++ b/src/Stack/Types/Config.hs @@ -612,6 +612,7 @@ data Curator = Curator { curatorSkipTest :: !(Set PackageName) , curatorExpectTestFailure :: !(Set PackageName) , curatorSkipBenchmark :: !(Set PackageName) + , curatorExpectBenchmarkFailure :: !(Set PackageName) , curatorSkipHaddock :: !(Set PackageName) , curatorExpectHaddockFailure :: !(Set PackageName) } @@ -621,6 +622,7 @@ instance ToJSON Curator where [ "skip-test" .= Set.map CabalString (curatorSkipTest c) , "expect-test-failure" .= Set.map CabalString (curatorExpectTestFailure c) , "skip-bench" .= Set.map CabalString (curatorSkipBenchmark c) + , "expect-benchmark-failure" .= Set.map CabalString (curatorExpectTestFailure c) , "skip-haddock" .= Set.map CabalString (curatorSkipHaddock c) , "expect-test-failure" .= Set.map CabalString (curatorExpectHaddockFailure c) ] @@ -629,6 +631,7 @@ instance FromJSON (WithJSONWarnings Curator) where <$> fmap (Set.map unCabalString) (o ..:? "skip-test" ..!= mempty) <*> fmap (Set.map unCabalString) (o ..:? "expect-test-failure" ..!= mempty) <*> fmap (Set.map unCabalString) (o ..:? "skip-bench" ..!= mempty) + <*> fmap (Set.map unCabalString) (o ..:? "expect-benchmark-failure" ..!= mempty) <*> fmap (Set.map unCabalString) (o ..:? "skip-haddock" ..!= mempty) <*> fmap (Set.map unCabalString) (o ..:? 
"expect-haddock-failure" ..!= mempty) diff --git a/subs/curator/src/Curator/Unpack.hs b/subs/curator/src/Curator/Unpack.hs index 6e5abf2801..13c6198237 100644 --- a/subs/curator/src/Curator/Unpack.hs +++ b/subs/curator/src/Curator/Unpack.hs @@ -24,7 +24,7 @@ unpackSnapshot -> RIO env () unpackSnapshot cons snap root = do unpacked <- parseRelDir "unpacked" - (suffixes, flags, (skipTest, expectTestFailure), skipBench, + (suffixes, flags, (skipTest, expectTestFailure), (skipBench, expectBenchmarkFailure), (skipHaddock, expectHaddockFailure)) <- fmap fold $ for (rsPackages snap) $ \sp -> do let pl = rspLocation sp TreeKey (BlobKey sha _size) <- getRawPackageLocationTreeKey pl @@ -68,9 +68,9 @@ unpackSnapshot cons snap root = do CAExpectFailure -> (mempty, Set.singleton name) CASkip -> (Set.singleton name, mempty) , case bench of - CASkip -> Set.singleton name - _ -> mempty -- FIXME maybe we want to differentiate skip and expect failure but - -- we don't run benchmarks, only compile them + CAExpectSuccess -> mempty + CAExpectFailure -> (mempty, Set.singleton name) + CASkip -> (Set.singleton name, mempty) , case haddock of CAExpectSuccess -> mempty CAExpectFailure -> (mempty, Set.singleton name) @@ -86,6 +86,7 @@ unpackSnapshot cons snap root = do [ "skip-test" .= Set.map CabalString skipTest , "expect-test-failure" .= Set.map CabalString expectTestFailure , "skip-bench" .= Set.map CabalString skipBench + , "expect-benchmark-failure" .= Set.map CabalString expectBenchmarkFailure , "skip-haddock" .= Set.map CabalString skipHaddock , "expect-haddock-failure" .= Set.map CabalString expectHaddockFailure ] From 59acc7d127dec8603d1e0bedad9c61b2ca0e0b73 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Thu, 10 Jan 2019 10:21:13 +0300 Subject: [PATCH 07/80] Forward hidden packages from constraints to Stackage stack.yaml --- subs/curator/src/Curator/Unpack.hs | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/subs/curator/src/Curator/Unpack.hs b/subs/curator/src/Curator/Unpack.hs index 13c6198237..f4d90aca2f 100644 --- a/subs/curator/src/Curator/Unpack.hs +++ b/subs/curator/src/Curator/Unpack.hs @@ -24,17 +24,17 @@ unpackSnapshot -> RIO env () unpackSnapshot cons snap root = do unpacked <- parseRelDir "unpacked" - (suffixes, flags, (skipTest, expectTestFailure), (skipBench, expectBenchmarkFailure), + (suffixes, (flags, hidden), (skipTest, expectTestFailure), (skipBench, expectBenchmarkFailure), (skipHaddock, expectHaddockFailure)) <- fmap fold $ for (rsPackages snap) $ \sp -> do let pl = rspLocation sp TreeKey (BlobKey sha _size) <- getRawPackageLocationTreeKey pl PackageIdentifier name version <- getRawPackageLocationIdent pl - let (flags, skipBuild, test, bench, haddock) = + let (flags, hide, skipBuild, test, bench, haddock) = case Map.lookup name $ consPackages cons of Nothing -> - (mempty, False, CAExpectSuccess, CAExpectSuccess, CAExpectSuccess) + (mempty, False, False, CAExpectSuccess, CAExpectSuccess, CAExpectSuccess) Just pc -> - (pcFlags pc, pcSkipBuild pc, pcTests pc, pcBenchmarks pc, pcHaddock pc) + (pcFlags pc, pcHide pc, pcSkipBuild pc, pcTests pc, pcBenchmarks pc, pcHaddock pc) unless (flags == rspFlags sp) $ error $ unlines [ "mismatched flags for " ++ show pl , " snapshot: " ++ show (rspFlags sp) @@ -62,7 +62,9 @@ unpackSnapshot cons snap root = do renameDir destTmp dest pure ( Set.singleton suffix - , if Map.null flags then Map.empty else Map.singleton name flags + , ( if Map.null flags then Map.empty else Map.singleton name flags + , if hide then 
Map.singleton name True else Map.empty + ) , case test of CAExpectSuccess -> mempty CAExpectFailure -> (mempty, Set.singleton name) @@ -82,6 +84,7 @@ unpackSnapshot cons snap root = do [ "resolver" .= ("ghc-" ++ versionString (consGhcVersion cons)) , "packages" .= Set.map (\suffix -> toFilePath (unpacked suffix)) suffixes , "flags" .= fmap toCabalStringMap (toCabalStringMap flags) + , "hidden" .= toCabalStringMap hidden , "curator" .= object [ "skip-test" .= Set.map CabalString skipTest , "expect-test-failure" .= Set.map CabalString expectTestFailure From e1002a274369434a6422c42dd081c10d4338e7c1 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Tue, 5 Feb 2019 13:40:52 +0300 Subject: [PATCH 08/80] Fixes from hlint suggestions --- src/Stack/Build/Execute.hs | 2 +- src/Stack/Options/TestParser.hs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Stack/Build/Execute.hs b/src/Stack/Build/Execute.hs index 33f0509e7c..3300e89b41 100644 --- a/src/Stack/Build/Execute.hs +++ b/src/Stack/Build/Execute.hs @@ -1414,7 +1414,7 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap initialBuildSteps executableBuildStatuses cabal announce return Nothing _ -> fulfillCuratorExpectations pname mcurator enableTests enableBenchmarks Nothing $ - fmap Just $ realBuild cache package pkgDir cabal announce executableBuildStatuses + Just <$> realBuild cache package pkgDir cabal announce executableBuildStatuses initialBuildSteps executableBuildStatuses cabal announce = do () <- announce ("initial-build-steps" <> annSuffix executableBuildStatuses) diff --git a/src/Stack/Options/TestParser.hs b/src/Stack/Options/TestParser.hs index 6380eb406e..a852190231 100644 --- a/src/Stack/Options/TestParser.hs +++ b/src/Stack/Options/TestParser.hs @@ -37,7 +37,7 @@ testOptsParser hide0 = help "Disable running of tests. (Tests will still be built.)" <> hide)) <*> optionalFirst - (option (fmap Just $ auto) + (option (fmap Just auto) (long "test-suite-timeout" <> help "Maximum test suite run time in seconds." 
<> hide)) From 5cf535b8511664b1ee248a898eef5e80a84412e5 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Tue, 5 Feb 2019 16:02:04 +0300 Subject: [PATCH 09/80] Use 10 minute timeout for test suites in Stackage builds --- subs/curator/app/Main.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/subs/curator/app/Main.hs b/subs/curator/app/Main.hs index be9e1c4d3b..d48673bc45 100644 --- a/subs/curator/app/Main.hs +++ b/subs/curator/app/Main.hs @@ -124,7 +124,7 @@ build = do logInfo "Building" withWorkingDir "unpack-dir" $ proc "stack" - (words "build --test --bench --no-rerun-tests --no-run-benchmarks --haddock") + (words "build --test --bench --test-suite-timeout=600 --no-rerun-tests --no-run-benchmarks --haddock") runProcess_ loadPantrySnapshotLayerFile :: FilePath -> RIO PantryApp RawSnapshotLayer From 6a6b3c8c7cc8010d703ac47d61accd280e953681 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Wed, 6 Feb 2019 10:01:30 +0300 Subject: [PATCH 10/80] Don't error out on missing latest revision while constructing a build plan --- src/Stack/Build/ConstructPlan.hs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/Stack/Build/ConstructPlan.hs b/src/Stack/Build/ConstructPlan.hs index b453b0ca04..5a27fb0c12 100644 --- a/src/Stack/Build/ConstructPlan.hs +++ b/src/Stack/Build/ConstructPlan.hs @@ -425,7 +425,11 @@ addDep treatAsDep' name = do let version = installedVersion installed mrev <- liftRIO $ getLatestHackageRevision name version case mrev of - Nothing -> error $ "No package revision found for: " <> show name + Nothing -> + -- this could happen for GHC boot libraries missing from Hackage + logWarn $ "No latest package revision found for: " <> + fromString (packageNameString name) <> ", dependency callstack: " <> + displayShow (map packageNameString $ callStack ctx) Just (_rev, cfKey, treeKey) -> tellExecutablesUpstream name From b13486bf5c8083dbedfc7d3280da97ef86533139 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Fri, 8 Feb 2019 15:31:52 +0300 Subject: [PATCH 11/80] Disable all-in-one builds for curator packages with expected failures --- src/Stack/Build/ConstructPlan.hs | 38 +++++++++++++++++++++++--------- src/Stack/Build/Execute.hs | 2 +- 2 files changed, 28 insertions(+), 12 deletions(-) diff --git a/src/Stack/Build/ConstructPlan.hs b/src/Stack/Build/ConstructPlan.hs index 5a27fb0c12..b196b04764 100644 --- a/src/Stack/Build/ConstructPlan.hs +++ b/src/Stack/Build/ConstructPlan.hs @@ -129,6 +129,7 @@ data Ctx = Ctx , callStack :: ![PackageName] , wanted :: !(Set PackageName) , localNames :: !(Set PackageName) + , mcurator :: !(Maybe Curator) } instance HasPlatform Ctx @@ -183,10 +184,11 @@ constructPlan baseConfigOpts0 localDumpPkgs loadPackage0 sourceMap installedMap econfig <- view envConfigL sources <- getSources + mcur <- view $ buildConfigL.to bcCurator let onTarget = void . addDep False let inner = mapM_ onTarget $ Map.keys (smtTargets $ smTargets sourceMap) - let ctx = mkCtx econfig sources + let ctx = mkCtx econfig sources mcur ((), m, W efinals installExes dirtyReason deps warnings parents) <- liftIO $ runRWST inner ctx M.empty mapM_ (logWarn . 
RIO.display) (warnings []) @@ -225,7 +227,7 @@ constructPlan baseConfigOpts0 localDumpPkgs loadPackage0 sourceMap installedMap where hasBaseInDeps = Map.member (mkPackageName "base") (smDeps sourceMap) - mkCtx econfig sources = Ctx + mkCtx econfig sources mcur = Ctx { baseConfigOpts = baseConfigOpts0 , loadPackage = \x y z -> runRIO econfig $ loadPackage0 x y z , combinedMap = combineMap sources installedMap @@ -233,6 +235,7 @@ constructPlan baseConfigOpts0 localDumpPkgs loadPackage0 sourceMap installedMap , callStack = [] , wanted = Map.keysSet (smtTargets $ smTargets sourceMap) , localNames = Map.keysSet (smProject sourceMap) + , mcurator = mcur } prunedGlobalDeps = flip Map.mapMaybe (smGlobal sourceMap) $ \gp -> @@ -499,12 +502,17 @@ installPackage treatAsDep name ps minstalled = do planDebug $ "installPackage: Doing all-in-one build for upstream package " ++ show name package <- loadPackage ctx pkgLoc (cpFlags cp) (cpGhcOptions cp) resolveDepsAndInstall True treatAsDep (cpHaddocks cp) ps package minstalled - PSFilePath lp -> + PSFilePath lp -> do + -- in curator builds we can't do all-in-one build as test/benchmark failure + -- could prevent library from being available to its dependencies + splitRequired <- expectedTestOrBenchFailures <$> asks mcurator case lpTestBench lp of Nothing -> do planDebug $ "installPackage: No test / bench component for " ++ show name ++ " so doing an all-in-one build." resolveDepsAndInstall True treatAsDep (lpBuildHaddocks lp) ps (lpPackage lp) minstalled - Just tb -> do + Just tb | splitRequired -> + splitInstallSteps lp tb + Just tb | otherwise -> do -- Attempt to find a plan which performs an all-in-one -- build. Ignore the writer action + reset the state if -- it fails. @@ -531,13 +539,21 @@ installPackage treatAsDep name ps minstalled = do put s -- Otherwise, fall back on building the -- tests / benchmarks in a separate step. - res' <- resolveDepsAndInstall False treatAsDep (lpBuildHaddocks lp) ps (lpPackage lp) minstalled - when (isRight res') $ do - -- Insert it into the map so that it's - -- available for addFinal. - updateLibMap name res' - addFinal lp tb False False - return res' + splitInstallSteps lp tb + where + expectedTestOrBenchFailures maybeCurator = fromMaybe False $ do + curator <- maybeCurator + pure $ Set.member name (curatorExpectTestFailure curator) || + Set.member name (curatorExpectBenchmarkFailure curator) + + splitInstallSteps lp tb = do + res' <- resolveDepsAndInstall False treatAsDep (lpBuildHaddocks lp) ps (lpPackage lp) minstalled + when (isRight res') $ do + -- Insert it into the map so that it's + -- available for addFinal. 
+ updateLibMap name res' + addFinal lp tb False False + return res' resolveDepsAndInstall :: Bool -> Bool diff --git a/src/Stack/Build/Execute.hs b/src/Stack/Build/Execute.hs index 3300e89b41..120d9d7f55 100644 --- a/src/Stack/Build/Execute.hs +++ b/src/Stack/Build/Execute.hs @@ -763,7 +763,7 @@ getConfigCache ExecuteEnv {..} task@Task {..} installedMap enableTest enableBenc Just (_, installed) <- Map.lookup (pkgName ident) installedMap -> installedToGhcPkgId ident installed Just installed -> installedToGhcPkgId ident installed - _ -> error "singleBuild: invariant violated, missing package ID missing" + _ -> error $ "singleBuild: invariant violated, missing package ID missing: " ++ show ident installedToGhcPkgId ident (Library ident' x _) = assert (ident == ident') $ Just (ident, x) installedToGhcPkgId _ (Executable _) = Nothing missing' = Map.fromList $ mapMaybe getMissing $ Set.toList missing From abf530f98b80273147f236a4bbcda15e5809dec3 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Fri, 8 Feb 2019 16:34:53 +0300 Subject: [PATCH 12/80] Test fixes --- src/test/Stack/ConfigSpec.hs | 3 ++- subs/http-download/package.yaml | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/test/Stack/ConfigSpec.hs b/src/test/Stack/ConfigSpec.hs index abcafb2209..3889e790be 100644 --- a/src/test/Stack/ConfigSpec.hs +++ b/src/test/Stack/ConfigSpec.hs @@ -173,7 +173,8 @@ spec = beforeAll setup $ do boptsTestOpts `shouldBe` TestOpts {toRerunTests = True ,toAdditionalArgs = ["-fprof"] ,toCoverage = True - ,toDisableRun = True} + ,toDisableRun = True + ,toMaximumTimeSeconds = Nothing} boptsBenchmarks `shouldBe` True boptsBenchmarkOpts `shouldBe` BenchmarkOpts {beoAdditionalArgs = Just "-O2" ,beoDisableRun = True} diff --git a/subs/http-download/package.yaml b/subs/http-download/package.yaml index e104374b3e..bc3dfc7285 100644 --- a/subs/http-download/package.yaml +++ b/subs/http-download/package.yaml @@ -40,3 +40,4 @@ tests: dependencies: - http-download - hspec + - hspec-discover From cb1f824d91b5fc5675620543d68ca65f92085927 Mon Sep 17 00:00:00 2001 From: Vance Palacio Date: Wed, 31 Oct 2018 11:26:27 -0700 Subject: [PATCH 13/80] Implemented stack purge --- src/Stack/Clean.hs | 6 ++++++ src/Stack/Options/CleanParser.hs | 8 +++++--- src/main/Main.hs | 8 ++++++-- 3 files changed, 17 insertions(+), 5 deletions(-) diff --git a/src/Stack/Clean.hs b/src/Stack/Clean.hs index c14f583b6c..168d8c4dc7 100644 --- a/src/Stack/Clean.hs +++ b/src/Stack/Clean.hs @@ -8,6 +8,7 @@ module Stack.Clean (clean ,CleanOpts(..) + ,CleanCommand(..) ,StackCleanException(..) ) where @@ -61,6 +62,11 @@ data CleanOpts | CleanFull -- ^ Delete all work directories in the project. +-- | Clean commands +data CleanCommand + = Clean + | Purge + -- | Exceptions during cleanup. newtype StackCleanException = NonLocalPackages [PackageName] diff --git a/src/Stack/Options/CleanParser.hs b/src/Stack/Options/CleanParser.hs index de566e9638..b80cf68687 100644 --- a/src/Stack/Options/CleanParser.hs +++ b/src/Stack/Options/CleanParser.hs @@ -2,13 +2,13 @@ module Stack.Options.CleanParser where import Options.Applicative -import Stack.Clean (CleanOpts (..)) +import Stack.Clean (CleanCommand(..), CleanOpts (..)) import Stack.Prelude import Stack.Types.PackageName -- | Command-line parser for the clean command. 
-cleanOptsParser :: Parser CleanOpts -cleanOptsParser = CleanShallow <$> packages <|> doFullClean +cleanOptsParser :: CleanCommand -> Parser CleanOpts +cleanOptsParser Clean = CleanShallow <$> packages <|> doFullClean where packages = many @@ -20,3 +20,5 @@ cleanOptsParser = CleanShallow <$> packages <|> doFullClean CleanFull (long "full" <> help "Delete all work directories (.stack-work by default) in the project") + +cleanOptsParser Purge = pure CleanFull diff --git a/src/main/Main.hs b/src/main/Main.hs index 32983637a9..981df02ad3 100644 --- a/src/main/Main.hs +++ b/src/main/Main.hs @@ -53,7 +53,7 @@ import RIO.PrettyPrint import qualified RIO.PrettyPrint as PP (style) import Stack.Build import Stack.Build.Target (NeedTargets(..)) -import Stack.Clean (CleanOpts(..), clean) +import Stack.Clean (CleanCommand(..), CleanOpts(..), clean) import Stack.Config import Stack.ConfigCmd as ConfigCmd import Stack.Constants @@ -405,7 +405,11 @@ commandLineHandler currentDir progName isInterpreter = complicatedOptions addCommand' "clean" "Clean the local packages" cleanCmd - cleanOptsParser + (cleanOptsParser Clean) + addCommand' "purge" + "Delete all work directories (.stack-work by default) in the project. Shortcut for 'stack clean --full'" + cleanCmd + (cleanOptsParser Purge) addCommand' "list-dependencies" "List the dependencies" (listDependenciesCmd True) From d4d73529cd9c92963a8af613badf21c24cd79551 Mon Sep 17 00:00:00 2001 From: Vance Palacio Date: Wed, 31 Oct 2018 11:57:35 -0700 Subject: [PATCH 14/80] Updated comment for clean shallow. (No fix was needed) --- src/Stack/Clean.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Stack/Clean.hs b/src/Stack/Clean.hs index 168d8c4dc7..05b87e1a07 100644 --- a/src/Stack/Clean.hs +++ b/src/Stack/Clean.hs @@ -56,7 +56,7 @@ dirsToDelete cleanOpts = do -- | Options for @stack clean@. data CleanOpts = CleanShallow [PackageName] - -- ^ Delete the "dist directories" as defined in 'Stack.Constants.distRelativeDir' + -- ^ Delete the "dist directories" as defined in 'Stack.Constants.Config.distRelativeDir' -- for the given local packages. If no packages are given, all project packages -- should be cleaned. | CleanFull From bac2fcfdcd9caa3dc8351c641a9ad8d3958fc965 Mon Sep 17 00:00:00 2001 From: Vance Palacio Date: Fri, 2 Nov 2018 12:02:42 -0700 Subject: [PATCH 15/80] Updated docs for stack clean --- doc/GUIDE.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/doc/GUIDE.md b/doc/GUIDE.md index 850cf64fbd..708f24e67f 100644 --- a/doc/GUIDE.md +++ b/doc/GUIDE.md @@ -280,6 +280,23 @@ As you can see from that path (and as emphasized earlier), the installation is placed to not interfere with any other GHC installation, whether system-wide or even different GHC versions installed by stack. +## Cleaning your project +You can clean up build artifacts for your project using the `stack clean` and `stack purge`. + +### `stack clean` +`stack clean` Deletes the local working directories containing compiler output. +(typically '.stack-work/dist' by default) + +Use `stack clean ` to delete the output for a package specific-package only. + +### `stack purge` +`stack purge` Deletes the local stack working directory, including extra-deps, git dependencies and the compiler output. +It does not delete any snapshot packages, compilers or installed programs. -- This essentially +reverts your project to a completely fresh state, as if it had never been built. 
+`stack purge` is just a shortcut for `stack clean --full` + +- Note: `stack purge` is not available when used in docker + ### The build command The build command is the heart and soul of stack. It is the engine that powers From caa2b9baee6d525f41c41eec38e3a775eea408b4 Mon Sep 17 00:00:00 2001 From: Vance Palacio Date: Fri, 2 Nov 2018 12:52:09 -0700 Subject: [PATCH 16/80] Updated help messages for stack clean and purge --- src/Stack/Options/CleanParser.hs | 2 +- src/main/Main.hs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Stack/Options/CleanParser.hs b/src/Stack/Options/CleanParser.hs index b80cf68687..edd124bcf5 100644 --- a/src/Stack/Options/CleanParser.hs +++ b/src/Stack/Options/CleanParser.hs @@ -19,6 +19,6 @@ cleanOptsParser Clean = CleanShallow <$> packages <|> doFullClean flag' CleanFull (long "full" <> - help "Delete all work directories (.stack-work by default) in the project") + help "Delete the local stack working directory (.stack-work by default).") cleanOptsParser Purge = pure CleanFull diff --git a/src/main/Main.hs b/src/main/Main.hs index 981df02ad3..6804591870 100644 --- a/src/main/Main.hs +++ b/src/main/Main.hs @@ -407,7 +407,7 @@ commandLineHandler currentDir progName isInterpreter = complicatedOptions cleanCmd (cleanOptsParser Clean) addCommand' "purge" - "Delete all work directories (.stack-work by default) in the project. Shortcut for 'stack clean --full'" + "Delete the local stack working directory (.stack-work by default). Shortcut for 'stack clean --full'" cleanCmd (cleanOptsParser Purge) addCommand' "list-dependencies" From 23d534d98449a9d36f9b4707282b29e32226ea81 Mon Sep 17 00:00:00 2001 From: dbaynard Date: Wed, 7 Nov 2018 11:11:38 -0800 Subject: [PATCH 17/80] fix docs wording Co-Authored-By: vanceism7 --- doc/GUIDE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/GUIDE.md b/doc/GUIDE.md index 708f24e67f..988391896f 100644 --- a/doc/GUIDE.md +++ b/doc/GUIDE.md @@ -287,7 +287,7 @@ You can clean up build artifacts for your project using the `stack clean` and `s `stack clean` Deletes the local working directories containing compiler output. (typically '.stack-work/dist' by default) -Use `stack clean ` to delete the output for a package specific-package only. +Use `stack clean ` to delete the output for a package _specific-package_ only. ### `stack purge` `stack purge` Deletes the local stack working directory, including extra-deps, git dependencies and the compiler output. From 854593bc407bcfc653c5ccc96fe68245a7fcf8c1 Mon Sep 17 00:00:00 2001 From: Matt Audesse Date: Thu, 8 Nov 2018 20:54:00 -0800 Subject: [PATCH 18/80] fixed wording Co-Authored-By: vanceism7 --- doc/GUIDE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/GUIDE.md b/doc/GUIDE.md index 988391896f..f8740b9e54 100644 --- a/doc/GUIDE.md +++ b/doc/GUIDE.md @@ -281,7 +281,7 @@ placed to not interfere with any other GHC installation, whether system-wide or even different GHC versions installed by stack. ## Cleaning your project -You can clean up build artifacts for your project using the `stack clean` and `stack purge`. +You can clean up build artifacts for your project using the `stack clean` and `stack purge` commands. ### `stack clean` `stack clean` Deletes the local working directories containing compiler output. 
From dad8f922607cee9b8aacec8806a2944d2d03b032 Mon Sep 17 00:00:00 2001 From: dbaynard Date: Sat, 17 Nov 2018 20:52:10 +0100 Subject: [PATCH 19/80] Clarify differences between clean and purge --- doc/GUIDE.md | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/doc/GUIDE.md b/doc/GUIDE.md index f8740b9e54..a558176323 100644 --- a/doc/GUIDE.md +++ b/doc/GUIDE.md @@ -281,18 +281,21 @@ placed to not interfere with any other GHC installation, whether system-wide or even different GHC versions installed by stack. ## Cleaning your project + You can clean up build artifacts for your project using the `stack clean` and `stack purge` commands. ### `stack clean` -`stack clean` Deletes the local working directories containing compiler output. -(typically '.stack-work/dist' by default) -Use `stack clean ` to delete the output for a package _specific-package_ only. +`stack clean` deletes the local working directories containing compiler output. +By default, that means the contents of directories in `.stack-work/dist`, for all the `.stack-work` directories within a project. + +Use `stack clean ` to delete the output for the package _specific-package_ only. ### `stack purge` -`stack purge` Deletes the local stack working directory, including extra-deps, git dependencies and the compiler output. -It does not delete any snapshot packages, compilers or installed programs. -- This essentially -reverts your project to a completely fresh state, as if it had never been built. + +`stack purge` deletes the local stack working directories, including extra-deps, git dependencies and the compiler output (including logs). +It does not delete any snapshot packages, compilers or programs installed using `stack install`. This essentially +reverts the project to a completely fresh state, as if it had never been built. `stack purge` is just a shortcut for `stack clean --full` - Note: `stack purge` is not available when used in docker From ccba7f2e2730d1960d0b81532b5698066aee6685 Mon Sep 17 00:00:00 2001 From: dbaynard Date: Sat, 17 Nov 2018 20:53:38 +0100 Subject: [PATCH 20/80] Improve description of --full --- src/Stack/Options/CleanParser.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Stack/Options/CleanParser.hs b/src/Stack/Options/CleanParser.hs index edd124bcf5..b90845ff05 100644 --- a/src/Stack/Options/CleanParser.hs +++ b/src/Stack/Options/CleanParser.hs @@ -14,11 +14,11 @@ cleanOptsParser Clean = CleanShallow <$> packages <|> doFullClean many (packageNameArgument (metavar "PACKAGE" <> - help "If none specified, clean all local packages")) + help "If none specified, clean all project packages")) doFullClean = flag' CleanFull (long "full" <> - help "Delete the local stack working directory (.stack-work by default).") + help "Delete the project’s stack working directories (.stack-work by default).") cleanOptsParser Purge = pure CleanFull From 9ccfdabcacf0fa40c65cd5c7070362644c6ecd79 Mon Sep 17 00:00:00 2001 From: dbaynard Date: Sat, 17 Nov 2018 21:05:28 +0100 Subject: [PATCH 21/80] Clarify clean and purge help text --- src/main/Main.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/Main.hs b/src/main/Main.hs index 6804591870..09e04174ac 100644 --- a/src/main/Main.hs +++ b/src/main/Main.hs @@ -403,11 +403,11 @@ commandLineHandler currentDir progName isInterpreter = complicatedOptions evalCmd (evalOptsParser "CODE") addCommand' "clean" - "Clean the local packages" + "Delete build artefacts for the project packages." 
cleanCmd (cleanOptsParser Clean) addCommand' "purge" - "Delete the local stack working directory (.stack-work by default). Shortcut for 'stack clean --full'" + "Delete the project stack working directories (.stack-work by default). Shortcut for 'stack clean --full'" cleanCmd (cleanOptsParser Purge) addCommand' "list-dependencies" From 97f280b618b81700da0d58d841f8c0a6493f0c85 Mon Sep 17 00:00:00 2001 From: Vance Palacio Date: Wed, 28 Nov 2018 20:21:38 -0800 Subject: [PATCH 22/80] Added stack purge integration test --- test/integration/tests/3863-purge-command/Main.hs | 7 +++++++ .../tests/3863-purge-command/files/new-template.cabal | 11 +++++++++++ .../tests/3863-purge-command/files/src/Lib.hs | 4 ++++ .../tests/3863-purge-command/files/stack.yaml | 5 +++++ 4 files changed, 27 insertions(+) create mode 100644 test/integration/tests/3863-purge-command/Main.hs create mode 100644 test/integration/tests/3863-purge-command/files/new-template.cabal create mode 100644 test/integration/tests/3863-purge-command/files/src/Lib.hs create mode 100644 test/integration/tests/3863-purge-command/files/stack.yaml diff --git a/test/integration/tests/3863-purge-command/Main.hs b/test/integration/tests/3863-purge-command/Main.hs new file mode 100644 index 0000000000..ec56caa206 --- /dev/null +++ b/test/integration/tests/3863-purge-command/Main.hs @@ -0,0 +1,7 @@ +import StackTest +import System.Directory + +main :: IO () +main = do + stack ["build"] + stack ["purge"] diff --git a/test/integration/tests/3863-purge-command/files/new-template.cabal b/test/integration/tests/3863-purge-command/files/new-template.cabal new file mode 100644 index 0000000000..192e0b2dfb --- /dev/null +++ b/test/integration/tests/3863-purge-command/files/new-template.cabal @@ -0,0 +1,11 @@ +name: new-template +version: 0.1.0.0 +build-type: Simple +cabal-version: >=1.10 + + +library + hs-source-dirs: src + exposed-modules: Lib + build-depends: base >= 4.7 && < 5 + default-language: Haskell2010 diff --git a/test/integration/tests/3863-purge-command/files/src/Lib.hs b/test/integration/tests/3863-purge-command/files/src/Lib.hs new file mode 100644 index 0000000000..1c88a82644 --- /dev/null +++ b/test/integration/tests/3863-purge-command/files/src/Lib.hs @@ -0,0 +1,4 @@ +module Lib where + +someFunc :: () +someFunc = () diff --git a/test/integration/tests/3863-purge-command/files/stack.yaml b/test/integration/tests/3863-purge-command/files/stack.yaml new file mode 100644 index 0000000000..227c646ed3 --- /dev/null +++ b/test/integration/tests/3863-purge-command/files/stack.yaml @@ -0,0 +1,5 @@ +flags: {} +packages: +- '.' +extra-deps: [] +resolver: lts-11.22 From 893a8b7faa4bed83de6fcf1ee96d1cfd00e73e18 Mon Sep 17 00:00:00 2001 From: David Baynard Date: Sun, 10 Feb 2019 22:41:02 +0000 Subject: [PATCH 23/80] Check correct directory deletion behaviour for simple project - The integration test ensures the correct directories are present, then deleted, for a project with only one package, not in a subdirectory. 
--- .../tests/3863-purge-command/Main.hs | 38 +++++++++++++++++-- 1 file changed, 35 insertions(+), 3 deletions(-) diff --git a/test/integration/tests/3863-purge-command/Main.hs b/test/integration/tests/3863-purge-command/Main.hs index ec56caa206..4c54e19faa 100644 --- a/test/integration/tests/3863-purge-command/Main.hs +++ b/test/integration/tests/3863-purge-command/Main.hs @@ -1,7 +1,39 @@ import StackTest +import Data.Maybe (listToMaybe, fromMaybe) import System.Directory +import System.FilePath main :: IO () -main = do - stack ["build"] - stack ["purge"] +main = + -- For these commands, we'll need to know the `dist` directory. + -- This is usually `.stack-work/dist/$compiler-variant/Cabal-xxxx` + stackCheckStdout [defaultResolverArg, "path", "--dist-dir"] $ \distDir -> + + stackCheckStdout [defaultResolverArg, "path", "--local-install-root"] $ \localInstallRoot -> do + + -- Usually `.stack-work` + let stackWork = fromMaybe (error "There must be a stack working directory.") $ + listToMaybe (splitDirectories distDir) + + -- First, clean the .stack-work directory. + -- This is only necessary when running individual tests. + stack [defaultResolverArg, "purge"] + doesNotExist stackWork + + -- The dist directory should exist after a build + stack [defaultResolverArg, "build"] + doesExist distDir + doesExist localInstallRoot + doesExist stackWork + + -- The dist directory should not exist after a clean, whereas the + -- .stack-work directory should + stack [defaultResolverArg, "clean"] + run "exa" ["-T", ".stack-work"] + doesNotExist distDir + doesExist localInstallRoot + doesExist stackWork + + -- The .stack-work directory should not exist after a purge + stack [defaultResolverArg, "purge"] + doesNotExist stackWork From 1b1c2af8d0c0c173d7b2357d5a79a862f2ac8da1 Mon Sep 17 00:00:00 2001 From: Vance Palacio Date: Thu, 10 Jan 2019 14:34:12 -0800 Subject: [PATCH 24/80] Fixes #4324 - global package deps not listed in stack dot graph --- src/Stack/Dot.hs | 33 +++++++++++++++++++++++---------- 1 file changed, 23 insertions(+), 10 deletions(-) diff --git a/src/Stack/Dot.hs b/src/Stack/Dot.hs index 08a7c687d8..e4b0454862 100644 --- a/src/Stack/Dot.hs +++ b/src/Stack/Dot.hs @@ -275,17 +275,16 @@ createDepLoader sourceMap installed globalDumpMap globalIdMap loadPackageDeps pk ghcOptions = cpGhcOptions common assert (pkgName == name) (loadPackageDeps pkgName version loc flags ghcOptions) Nothing -> - pure (Set.empty, payloadFromInstalled (Map.lookup pkgName installed)) + -- If package is in global dump map. load deps from there (#4324) + case Map.lookup pkgName globalDumpMap of + Nothing -> + pure (Set.empty, payloadFromInstalled (Map.lookup pkgName installed)) + dp -> + getDepsFromDump dp -- For wired-in-packages, use information from ghc-pkg (see #3084) - else case Map.lookup pkgName globalDumpMap of - Nothing -> error ("Invariant violated: Expected to find wired-in-package " ++ packageNameString pkgName ++ " in global DB") - Just dp -> pure (Set.fromList deps, payloadFromDump dp) - where - deps = map (\depId -> maybe (error ("Invariant violated: Expected to find " ++ ghcPkgIdString depId ++ " in global DB")) - Stack.Prelude.pkgName - (Map.lookup depId globalIdMap)) - (dpDepends dp) - where + else + getDepsFromDump $ Map.lookup pkgName globalDumpMap + where payloadFromLocal pkg = DotPayload (Just $ packageVersion pkg) (Just $ packageLicense pkg) payloadFromInstalled maybePkg = DotPayload (fmap (installedVersion . 
snd) maybePkg) $ case maybePkg of @@ -293,6 +292,20 @@ createDepLoader sourceMap installed globalDumpMap globalIdMap loadPackageDeps pk _ -> Nothing payloadFromDump dp = DotPayload (Just $ pkgVersion $ dpPackageIdent dp) (Right <$> dpLicense dp) + getDepsFromDump dump = + case dump of + Nothing -> + let errText = "Invariant violated: Expected to find wired-in-package " + in error (errText ++ packageNameString pkgName ++ " in global DB") + Just dp -> pure (Set.fromList deps, payloadFromDump dp) + where + deps = map ghcIdToPackageName (dpDepends dp) + ghcIdToPackageName depId = + let errText = "Invariant violated: Expected to find " + in maybe (error (errText ++ ghcPkgIdString depId ++ " in global DB")) + Stack.Prelude.pkgName + (Map.lookup depId globalIdMap) + -- | Resolve the direct (depth 0) external dependencies of the given local packages (assumed to come from project packages) projectPackageDependencies :: DotOpts -> [LocalPackage] -> [(PackageName, (Set PackageName, DotPayload))] projectPackageDependencies dotOpts locals = From 097fa8d74371c917befa5b2724088821a77cb339 Mon Sep 17 00:00:00 2001 From: Vance Palacio Date: Mon, 14 Jan 2019 18:19:14 -0800 Subject: [PATCH 25/80] Refactored dep loader into alternatives --- src/Stack/Dot.hs | 93 ++++++++++++++++++++++++++---------------------- 1 file changed, 50 insertions(+), 43 deletions(-) diff --git a/src/Stack/Dot.hs b/src/Stack/Dot.hs index e4b0454862..364f6a9ffd 100644 --- a/src/Stack/Dot.hs +++ b/src/Stack/Dot.hs @@ -255,36 +255,57 @@ createDepLoader :: HasEnvConfig env Map FlagName Bool -> [Text] -> RIO env (Set PackageName, DotPayload)) -> PackageName -> RIO env (Set PackageName, DotPayload) -createDepLoader sourceMap installed globalDumpMap globalIdMap loadPackageDeps pkgName = - if not (pkgName `Set.member` wiredInPackages) - then case Map.lookup pkgName (smProject sourceMap) of - Just pp -> do - pkg <- loadCommonPackage (ppCommon pp) - pure (packageAllDeps pkg, payloadFromLocal pkg) - Nothing -> - case Map.lookup pkgName (smDeps sourceMap) of - Just DepPackage{dpLocation=PLMutable dir} -> do - pp <- mkProjectPackage YesPrintWarnings dir False - pkg <- loadCommonPackage (ppCommon pp) - pure (packageAllDeps pkg, payloadFromLocal pkg) - Just dp@DepPackage{dpLocation=PLImmutable loc} -> do - let common = dpCommon dp - gpd <- liftIO $ cpGPD common - let PackageIdentifier name version = PD.package $ PD.packageDescription gpd - flags = cpFlags common - ghcOptions = cpGhcOptions common - assert (pkgName == name) (loadPackageDeps pkgName version loc flags ghcOptions) - Nothing -> - -- If package is in global dump map. 
load deps from there (#4324) - case Map.lookup pkgName globalDumpMap of - Nothing -> - pure (Set.empty, payloadFromInstalled (Map.lookup pkgName installed)) - dp -> - getDepsFromDump dp - -- For wired-in-packages, use information from ghc-pkg (see #3084) +createDepLoader sourceMap installed globalDumpMap globalIdMap loadPackageDeps pkgName = do + case projectPackageDeps <|> dependencyDeps <|> globalDeps of + Just x -> x + Nothing -> + if isWiredInPackage then + let errText = "Invariant violated: Expected to find wired-in-package " + in error (errText ++ packageNameString pkgName ++ " in global DB") else - getDepsFromDump $ Map.lookup pkgName globalDumpMap - where + pure noDeps + where + isWiredInPackage = pkgName `Set.member` wiredInPackages + projectPackageDeps = + fmap loadDeps $ Map.lookup pkgName (smProject sourceMap) + where + loadDeps pp = do + pkg <- loadCommonPackage (ppCommon pp) + pure (packageAllDeps pkg, payloadFromLocal pkg) + + dependencyDeps = + fmap loadDeps $ Map.lookup pkgName (smDeps sourceMap) + where + loadDeps dp = + case dp of + DepPackage{dpLocation=PLMutable dir} -> do + pp <- mkProjectPackage YesPrintWarnings dir False + pkg <- loadCommonPackage (ppCommon pp) + pure (packageAllDeps pkg, payloadFromLocal pkg) + DepPackage{dpLocation=PLImmutable loc} -> do + let common = dpCommon dp + gpd <- liftIO $ cpGPD common + let PackageIdentifier name version = PD.package $ PD.packageDescription gpd + flags = cpFlags common + ghcOptions = cpGhcOptions common + assert (pkgName == name) (loadPackageDeps pkgName version loc flags ghcOptions) + + -- If package is a wired-in-package or a global package, use info from ghc-pkg (#4324, #3084) + globalDeps = + fmap (pure . getDepsFromDump) $ Map.lookup pkgName globalDumpMap + where + getDepsFromDump dump = + (Set.fromList deps, payloadFromDump dump) + where + deps = map ghcIdToPackageName (dpDepends dump) + ghcIdToPackageName depId = + let errText = "Invariant violated: Expected to find " + in maybe (error (errText ++ ghcPkgIdString depId ++ " in global DB")) + Stack.Prelude.pkgName + (Map.lookup depId globalIdMap) + + noDeps = (Set.empty, payloadFromInstalled (Map.lookup pkgName installed)) + payloadFromLocal pkg = DotPayload (Just $ packageVersion pkg) (Just $ packageLicense pkg) payloadFromInstalled maybePkg = DotPayload (fmap (installedVersion . 
snd) maybePkg) $ case maybePkg of @@ -292,20 +313,6 @@ createDepLoader sourceMap installed globalDumpMap globalIdMap loadPackageDeps pk _ -> Nothing payloadFromDump dp = DotPayload (Just $ pkgVersion $ dpPackageIdent dp) (Right <$> dpLicense dp) - getDepsFromDump dump = - case dump of - Nothing -> - let errText = "Invariant violated: Expected to find wired-in-package " - in error (errText ++ packageNameString pkgName ++ " in global DB") - Just dp -> pure (Set.fromList deps, payloadFromDump dp) - where - deps = map ghcIdToPackageName (dpDepends dp) - ghcIdToPackageName depId = - let errText = "Invariant violated: Expected to find " - in maybe (error (errText ++ ghcPkgIdString depId ++ " in global DB")) - Stack.Prelude.pkgName - (Map.lookup depId globalIdMap) - -- | Resolve the direct (depth 0) external dependencies of the given local packages (assumed to come from project packages) projectPackageDependencies :: DotOpts -> [LocalPackage] -> [(PackageName, (Set PackageName, DotPayload))] projectPackageDependencies dotOpts locals = From 811307ba252069b3772e44ab589f39b32f5f7992 Mon Sep 17 00:00:00 2001 From: Vance Palacio Date: Wed, 16 Jan 2019 17:39:11 -0800 Subject: [PATCH 26/80] Changed to throw error for any global packages missing from dump --- src/Stack/Dot.hs | 50 ++++++++++++++++++------------------------------ 1 file changed, 19 insertions(+), 31 deletions(-) diff --git a/src/Stack/Dot.hs b/src/Stack/Dot.hs index 364f6a9ffd..a0532008f9 100644 --- a/src/Stack/Dot.hs +++ b/src/Stack/Dot.hs @@ -115,13 +115,13 @@ createDependencyGraph dotOpts = do locals <- projectLocalPackages let graph = Map.fromList $ projectPackageDependencies dotOpts (filter lpWanted locals) installMap <- toInstallMap sourceMap - (installedMap, globalDump, _, _) <- getInstalled (GetInstalledOpts False False False) - installMap + (_, globalDump, _, _) <- getInstalled (GetInstalledOpts False False False) + installMap -- TODO: Can there be multiple entries for wired-in-packages? If so, -- this will choose one arbitrarily.. 
let globalDumpMap = Map.fromList $ map (\dp -> (Stack.Prelude.pkgName (dpPackageIdent dp), dp)) globalDump globalIdMap = Map.fromList $ map (\dp -> (dpGhcPkgId dp, dpPackageIdent dp)) globalDump - let depLoader = createDepLoader sourceMap installedMap globalDumpMap globalIdMap loadPackageDeps + let depLoader = createDepLoader sourceMap globalDumpMap globalIdMap loadPackageDeps loadPackageDeps name version loc flags ghcOptions -- Skip packages that can't be loaded - see -- https://github.com/commercialhaskell/stack/issues/2967 @@ -248,24 +248,16 @@ resolveDependencies limit graph loadPackageDeps = do -- | Given a SourceMap and a dependency loader, load the set of dependencies for a package createDepLoader :: HasEnvConfig env => SourceMap - -> Map PackageName (InstallLocation, Installed) -> Map PackageName (DumpPackage () () ()) -> Map GhcPkgId PackageIdentifier -> (PackageName -> Version -> PackageLocationImmutable -> Map FlagName Bool -> [Text] -> RIO env (Set PackageName, DotPayload)) -> PackageName -> RIO env (Set PackageName, DotPayload) -createDepLoader sourceMap installed globalDumpMap globalIdMap loadPackageDeps pkgName = do - case projectPackageDeps <|> dependencyDeps <|> globalDeps of - Just x -> x - Nothing -> - if isWiredInPackage then - let errText = "Invariant violated: Expected to find wired-in-package " - in error (errText ++ packageNameString pkgName ++ " in global DB") - else - pure noDeps +createDepLoader sourceMap globalDumpMap globalIdMap loadPackageDeps pkgName = do + fromMaybe noDepsErr + (projectPackageDeps <|> dependencyDeps <|> globalDeps) where - isWiredInPackage = pkgName `Set.member` wiredInPackages projectPackageDeps = fmap loadDeps $ Map.lookup pkgName (smProject sourceMap) where @@ -276,21 +268,20 @@ createDepLoader sourceMap installed globalDumpMap globalIdMap loadPackageDeps pk dependencyDeps = fmap loadDeps $ Map.lookup pkgName (smDeps sourceMap) where - loadDeps dp = - case dp of - DepPackage{dpLocation=PLMutable dir} -> do + loadDeps DepPackage{dpLocation=PLMutable dir} = do pp <- mkProjectPackage YesPrintWarnings dir False pkg <- loadCommonPackage (ppCommon pp) pure (packageAllDeps pkg, payloadFromLocal pkg) - DepPackage{dpLocation=PLImmutable loc} -> do - let common = dpCommon dp - gpd <- liftIO $ cpGPD common - let PackageIdentifier name version = PD.package $ PD.packageDescription gpd - flags = cpFlags common - ghcOptions = cpGhcOptions common - assert (pkgName == name) (loadPackageDeps pkgName version loc flags ghcOptions) - - -- If package is a wired-in-package or a global package, use info from ghc-pkg (#4324, #3084) + + loadDeps dp@DepPackage{dpLocation=PLImmutable loc} = do + let common = dpCommon dp + gpd <- liftIO $ cpGPD common + let PackageIdentifier name version = PD.package $ PD.packageDescription gpd + flags = cpFlags common + ghcOptions = cpGhcOptions common + assert (pkgName == name) (loadPackageDeps pkgName version loc flags ghcOptions) + + -- If package is a global package, use info from ghc-pkg (#4324, #3084) globalDeps = fmap (pure . 
getDepsFromDump) $ Map.lookup pkgName globalDumpMap where @@ -304,13 +295,10 @@ createDepLoader sourceMap installed globalDumpMap globalIdMap loadPackageDeps pk Stack.Prelude.pkgName (Map.lookup depId globalIdMap) - noDeps = (Set.empty, payloadFromInstalled (Map.lookup pkgName installed)) + noDepsErr = error ("Invariant violated: Expected to find " + ++ packageNameString pkgName ++ " in global DB") payloadFromLocal pkg = DotPayload (Just $ packageVersion pkg) (Just $ packageLicense pkg) - payloadFromInstalled maybePkg = DotPayload (fmap (installedVersion . snd) maybePkg) $ - case maybePkg of - Just (_, Library _ _ mlicense) -> mlicense - _ -> Nothing payloadFromDump dp = DotPayload (Just $ pkgVersion $ dpPackageIdent dp) (Right <$> dpLicense dp) -- | Resolve the direct (depth 0) external dependencies of the given local packages (assumed to come from project packages) From 5e4253cdb3f685658b9146f2f02061fffe8d61b5 Mon Sep 17 00:00:00 2001 From: Vance Palacio Date: Thu, 24 Jan 2019 10:09:56 -0800 Subject: [PATCH 27/80] Fixed noDepsErr message --- src/Stack/Dot.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Stack/Dot.hs b/src/Stack/Dot.hs index a0532008f9..a407bd8898 100644 --- a/src/Stack/Dot.hs +++ b/src/Stack/Dot.hs @@ -295,8 +295,8 @@ createDepLoader sourceMap globalDumpMap globalIdMap loadPackageDeps pkgName = do Stack.Prelude.pkgName (Map.lookup depId globalIdMap) - noDepsErr = error ("Invariant violated: Expected to find " - ++ packageNameString pkgName ++ " in global DB") + noDepsErr = error ("Invariant violated: The '" ++ packageNameString pkgName + ++ "' package was not found in any of the dependency sources") payloadFromLocal pkg = DotPayload (Just $ packageVersion pkg) (Just $ packageLicense pkg) payloadFromDump dp = DotPayload (Just $ pkgVersion $ dpPackageIdent dp) (Right <$> dpLicense dp) From dfeecdf5189c681b2b3883d82db6f30f90896ed5 Mon Sep 17 00:00:00 2001 From: David Baynard Date: Mon, 11 Feb 2019 14:23:35 +0000 Subject: [PATCH 28/80] Fix hlint issues --- src/Stack/Dot.hs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/Stack/Dot.hs b/src/Stack/Dot.hs index a407bd8898..03722726cb 100644 --- a/src/Stack/Dot.hs +++ b/src/Stack/Dot.hs @@ -38,7 +38,6 @@ import Stack.SourceMap import Stack.Types.Build import Stack.Types.Config import Stack.Types.GhcPkgId -import Stack.Types.Package import Stack.Types.SourceMap -- | Options record for @stack dot@ @@ -259,14 +258,14 @@ createDepLoader sourceMap globalDumpMap globalIdMap loadPackageDeps pkgName = do (projectPackageDeps <|> dependencyDeps <|> globalDeps) where projectPackageDeps = - fmap loadDeps $ Map.lookup pkgName (smProject sourceMap) + loadDeps <$> Map.lookup pkgName (smProject sourceMap) where loadDeps pp = do pkg <- loadCommonPackage (ppCommon pp) pure (packageAllDeps pkg, payloadFromLocal pkg) dependencyDeps = - fmap loadDeps $ Map.lookup pkgName (smDeps sourceMap) + loadDeps <$> Map.lookup pkgName (smDeps sourceMap) where loadDeps DepPackage{dpLocation=PLMutable dir} = do pp <- mkProjectPackage YesPrintWarnings dir False @@ -283,7 +282,7 @@ createDepLoader sourceMap globalDumpMap globalIdMap loadPackageDeps pkgName = do -- If package is a global package, use info from ghc-pkg (#4324, #3084) globalDeps = - fmap (pure . getDepsFromDump) $ Map.lookup pkgName globalDumpMap + pure . 
getDepsFromDump <$> Map.lookup pkgName globalDumpMap where getDepsFromDump dump = (Set.fromList deps, payloadFromDump dump) From 286a5f5d219a48c9c856292a301fd2f3953be92c Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Tue, 12 Feb 2019 10:25:06 +0300 Subject: [PATCH 29/80] Give more details about timed out test suite --- src/Stack/Build/Execute.hs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/Stack/Build/Execute.hs b/src/Stack/Build/Execute.hs index ecec8cb96e..454dbc8b77 100644 --- a/src/Stack/Build/Execute.hs +++ b/src/Stack/Build/Execute.hs @@ -1858,7 +1858,9 @@ singleTest topts testsToRun ac ee task installedMap = do | Just maxSecs <- toMaximumTimeSeconds topts, maxSecs > 0 = do mres <- timeout (maxSecs * 1000000) action case mres of - Nothing -> throwString "test suite timed out" + Nothing -> throwString $ "test suite timed out, package " <> + packageNameString pname <> ", suite: " <> + T.unpack testName <> T.unpack argsDisplay Just res -> return res | otherwise = action From 96d84e8d90c9794ea9f68c3ba536be55a336e11b Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Tue, 12 Feb 2019 10:51:14 +0300 Subject: [PATCH 30/80] Retrieve (and cache) archive once when doing location completion --- subs/pantry/src/Pantry.hs | 42 +++++++++++++------------------ subs/pantry/src/Pantry/Archive.hs | 42 ++++++++++++++++++++----------- subs/pantry/src/Pantry/Hackage.hs | 2 +- subs/pantry/src/Pantry/Repo.hs | 2 +- 4 files changed, 47 insertions(+), 41 deletions(-) diff --git a/subs/pantry/src/Pantry.hs b/subs/pantry/src/Pantry.hs index d6d08fcf7d..a27bda97d4 100644 --- a/subs/pantry/src/Pantry.hs +++ b/subs/pantry/src/Pantry.hs @@ -694,7 +694,7 @@ loadPackage => PackageLocationImmutable -> RIO env Package loadPackage (PLIHackage ident cfHash tree) = getHackageTarball (pirForHash ident cfHash) (Just tree) -loadPackage pli@(PLIArchive archive pm) = getArchive (toRawPLI pli) (toRawArchive archive) (toRawPM pm) +loadPackage pli@(PLIArchive archive pm) = getArchivePackage (toRawPLI pli) (toRawArchive archive) (toRawPM pm) loadPackage (PLIRepo repo pm) = getRepo repo (toRawPM pm) -- | Load a 'Package' from a 'RawPackageLocationImmutable'. @@ -705,7 +705,7 @@ loadPackageRaw => RawPackageLocationImmutable -> RIO env Package loadPackageRaw (RPLIHackage pir mtree) = getHackageTarball pir mtree -loadPackageRaw rpli@(RPLIArchive archive pm) = getArchive rpli archive pm +loadPackageRaw rpli@(RPLIArchive archive pm) = getArchivePackage rpli archive pm loadPackageRaw (RPLIRepo repo rpm) = getRepo repo rpm -- | Fill in optional fields in a 'PackageLocationImmutable' for more reproducible builds. 
@@ -732,24 +732,17 @@ completePackageLocation (RPLIHackage pir0@(PackageIdentifierRevision name versio pure (pir, BlobKey sha size) treeKey <- getHackageTarballKey pir pure $ PLIHackage (PackageIdentifier name version) cfKey treeKey -completePackageLocation pl@(RPLIArchive archive pm) = - PLIArchive <$> completeArchive archive <*> completePM pl pm +completePackageLocation pl@(RPLIArchive archive rpm) = do + -- getArchive checks archive and package metadata + (sha, size, package) <- getArchive pl archive rpm + let RawArchive loc _ _ subdir = archive + pure $ PLIArchive (Archive loc sha size subdir) (packagePM package) completePackageLocation pl@(RPLIRepo repo rpm) = do unless (isSHA1 (repoCommit repo)) $ throwIO $ CannotCompleteRepoNonSHA1 repo PLIRepo repo <$> completePM pl rpm where isSHA1 t = T.length t == 40 && T.all isHexDigit t -completeArchive - :: (HasPantryConfig env, HasLogFunc env) - => RawArchive - -> RIO env Archive -completeArchive (RawArchive loc (Just sha) (Just size) subdir) = - pure $ Archive loc sha size subdir -completeArchive a@(RawArchive loc _ _ subdir) = - withArchiveLoc a $ \_fp sha size -> - pure $ Archive loc sha size subdir - completePM :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env) => RawPackageLocationImmutable @@ -759,16 +752,8 @@ completePM plOrig rpm@(RawPackageMetadata mn mv mtk mc) | Just n <- mn, Just v <- mv, Just tk <- mtk, Just c <- mc = pure $ PackageMetadata (PackageIdentifier n v) tk c | otherwise = do - package <- loadPackageRaw plOrig - let pm = PackageMetadata - { pmIdent = packageIdent package - , pmTreeKey = packageTreeKey package - , pmCabal = teBlob $ case packageCabalEntry package of - PCCabalFile cfile -> cfile - PCHpack hfile -> phGenerated hfile - } - - isSame x (Just y) = x == y + pm <- packagePM <$> loadPackageRaw plOrig + let isSame x (Just y) = x == y isSame _ _ = True allSame = @@ -780,6 +765,15 @@ completePM plOrig rpm@(RawPackageMetadata mn mv mtk mc) then pure pm else throwIO $ CompletePackageMetadataMismatch plOrig pm +packagePM :: Package -> PackageMetadata +packagePM package = PackageMetadata + { pmIdent = packageIdent package + , pmTreeKey = packageTreeKey package + , pmCabal = teBlob $ case packageCabalEntry package of + PCCabalFile cfile -> cfile + PCHpack hfile -> phGenerated hfile + } + -- | Add in hashes to make a 'SnapshotLocation' reproducible. -- -- @since 0.1.0.0 diff --git a/subs/pantry/src/Pantry/Archive.hs b/subs/pantry/src/Pantry/Archive.hs index d341210887..f847c75ce2 100644 --- a/subs/pantry/src/Pantry/Archive.hs +++ b/subs/pantry/src/Pantry/Archive.hs @@ -4,11 +4,11 @@ {-# LANGUAGE ScopedTypeVariables #-} -- | Logic for loading up trees from HTTPS archives. module Pantry.Archive - ( getArchive + ( getArchivePackage + , getArchive , getArchiveKey , fetchArchivesRaw , fetchArchives - , withArchiveLoc ) where import RIO @@ -64,20 +64,32 @@ getArchiveKey -> RawArchive -> RawPackageMetadata -> RIO env TreeKey -getArchiveKey rpli archive rpm = packageTreeKey <$> getArchive rpli archive rpm -- potential optimization +getArchiveKey rpli archive rpm = + packageTreeKey <$> getArchivePackage rpli archive rpm -- potential optimization -getArchive - :: forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) +thd3 :: (a, b, c) -> c +thd3 (_, _, z) = z + +getArchivePackage + :: forall env. 
(HasPantryConfig env, HasLogFunc env, HasProcessContext env, HasCallStack) => RawPackageLocationImmutable -- ^ for exceptions -> RawArchive -> RawPackageMetadata -> RIO env Package +getArchivePackage rpli archive rpm = thd3 <$> getArchive rpli archive rpm + +getArchive + :: forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env, HasCallStack) + => RawPackageLocationImmutable -- ^ for exceptions + -> RawArchive + -> RawPackageMetadata + -> RIO env (SHA256, FileSize, Package) getArchive rpli archive rpm = do -- Check if the value is in the archive, and use it if possible - mpa <- loadCache rpli archive - pa <- - case mpa of - Just pa -> pure pa + mcached <- loadCache rpli archive + cached@(_, _, pa) <- + case mcached of + Just stored -> pure stored -- Not in the archive. Load the archive. Completely ignore the -- PackageMetadata for now, we'll check that the Package -- info matches next. @@ -86,9 +98,9 @@ getArchive rpli archive rpm = do -- Storing in the cache exclusively uses information we have -- about the archive itself, not metadata from the user. storeCache archive sha size pa - pure pa + pure (sha, size, pa) - either throwIO pure $ checkPackageMetadata rpli rpm pa + either throwIO (\_ -> pure cached) $ checkPackageMetadata rpli rpm pa storeCache :: forall env. (HasPantryConfig env, HasLogFunc env) @@ -106,7 +118,7 @@ loadCache :: forall env. (HasPantryConfig env, HasLogFunc env, HasProcessContext env) => RawPackageLocationImmutable -> RawArchive - -> RIO env (Maybe Package) + -> RIO env (Maybe (SHA256, FileSize, Package)) loadCache rpli archive = case loc of ALFilePath _ -> pure Nothing -- TODO can we do something intelligent here? @@ -132,7 +144,7 @@ loadCache rpli archive = logWarn $ "Cached hash is " <> display sha <> ", file size " <> display size logWarn "For security and reproducibility, please add a hash and file size to your configuration" ALFilePath _ -> pure () - loadFromCache tid + fmap (sha, size,) <$> loadFromCache tid Just sha' | sha == sha' -> case msize of @@ -142,9 +154,9 @@ loadCache rpli archive = logWarn $ "Archive from " <> display url <> " does not specify a size" logWarn $ "To avoid an overflow attack, please add the file size to your configuration: " <> display size ALFilePath _ -> pure () - loadFromCache tid + fmap (sha, size,) <$> loadFromCache tid Just size' - | size == size' -> loadFromCache tid + | size == size' -> fmap (sha, size,) <$> loadFromCache tid | otherwise -> do logWarn $ "Archive from " <> display loc <> " has a matching hash but mismatched size" diff --git a/subs/pantry/src/Pantry/Hackage.hs b/subs/pantry/src/Pantry/Hackage.hs index 1c6c218f60..4286c7da75 100644 --- a/subs/pantry/src/Pantry/Hackage.hs +++ b/subs/pantry/src/Pantry/Hackage.hs @@ -482,7 +482,7 @@ getHackageTarball pir@(PackageIdentifierRevision name ver _cfi) mtreeKey = do , T.pack $ Distribution.Text.display ver , ".tar.gz" ] - package <- getArchive + package <- getArchivePackage rpli RawArchive { raLocation = ALUrl url diff --git a/subs/pantry/src/Pantry/Repo.hs b/subs/pantry/src/Pantry/Repo.hs index d036fe2a6c..95e989288e 100644 --- a/subs/pantry/src/Pantry/Repo.hs +++ b/subs/pantry/src/Pantry/Repo.hs @@ -115,7 +115,7 @@ getRepo' repo@(Repo url commit repoType' subdir) rpm = when osIsWindows $ void $ liftIO $ hSupportsANSIWithoutEmulation stdout runCommand archiveArgs abs' <- resolveFile' tarball - getArchive + getArchivePackage (RPLIRepo repo rpm) RawArchive { raLocation = ALFilePath $ ResolvedPath From 90bdd2cd33c9e1856abfd46b338aaac1e0fa3d86 Mon Sep 17 
00:00:00 2001 From: Kirill Zaborsky Date: Wed, 13 Feb 2019 10:23:54 +0300 Subject: [PATCH 31/80] Use proper input for detailed test suite type --- src/Stack/Build/Execute.hs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/Stack/Build/Execute.hs b/src/Stack/Build/Execute.hs index 4320813e6c..319b38726b 100644 --- a/src/Stack/Build/Execute.hs +++ b/src/Stack/Build/Execute.hs @@ -56,6 +56,7 @@ import Distribution.System (OS (Windows), Platform (Platform)) import qualified Distribution.Text as C import Distribution.Types.PackageName (mkPackageName) +import Distribution.Types.UnqualComponentName (mkUnqualComponentName) import Distribution.Version (mkVersion, nullVersion) import Foreign.C.Types (CTime) import Path @@ -1850,7 +1851,8 @@ singleTest topts testsToRun ac ee task installedMap = do logPath <- buildLogPath package (Just stestName) ensureDir (parent logPath) pure $ BL.fromStrict - $ encodeUtf8 $ fromString $ show (logPath, testName) + $ encodeUtf8 $ fromString $ + show (logPath, mkUnqualComponentName (T.unpack testName)) else pure mempty let pc = setStdin (byteStringInput stdinBS) $ output setStdout From 7446085552fb6c6413499710c28cf29e8eeb8f8b Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Wed, 13 Feb 2019 10:24:15 +0300 Subject: [PATCH 32/80] Can't use failing tests in a proper integration test --- test/integration/tests/4453-detailed/files/test/Spec.hs | 1 - 1 file changed, 1 deletion(-) diff --git a/test/integration/tests/4453-detailed/files/test/Spec.hs b/test/integration/tests/4453-detailed/files/test/Spec.hs index 78acaf4517..fdce306520 100644 --- a/test/integration/tests/4453-detailed/files/test/Spec.hs +++ b/test/integration/tests/4453-detailed/files/test/Spec.hs @@ -6,7 +6,6 @@ tests :: IO [Test] tests = do return [ test "foo" Pass - , test "bar" (Fail "It did not work out!") ] test :: String -> Result -> Test From 5dd9aa3643730191dc3ccb525f15a0c04ab73fc5 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Wed, 13 Feb 2019 10:53:28 +0300 Subject: [PATCH 33/80] Proper filter for tasks required by dependencies --- src/Stack/Build/ConstructPlan.hs | 63 ++++++++++++++++---------------- 1 file changed, 32 insertions(+), 31 deletions(-) diff --git a/src/Stack/Build/ConstructPlan.hs b/src/Stack/Build/ConstructPlan.hs index 9cc512ba94..95e507b327 100644 --- a/src/Stack/Build/ConstructPlan.hs +++ b/src/Stack/Build/ConstructPlan.hs @@ -102,8 +102,6 @@ data W = W -- ^ executable to be installed, and location where the binary is placed , wDirty :: !(Map PackageName Text) -- ^ why a local package is considered dirty - , wDeps :: !(Set PackageName) - -- ^ Packages which count as dependencies , wWarnings :: !([Text] -> [Text]) -- ^ Warnings , wParents :: !ParentMap @@ -184,10 +182,10 @@ constructPlan baseConfigOpts0 localDumpPkgs loadPackage0 sourceMap installedMap econfig <- view envConfigL sources <- getSources - let onTarget = void . addDep False + let onTarget = void . addDep let inner = mapM_ onTarget $ Map.keys (smtTargets $ smTargets sourceMap) let ctx = mkCtx econfig sources - ((), m, W efinals installExes dirtyReason deps warnings parents) <- + ((), m, W efinals installExes dirtyReason warnings parents) <- liftIO $ runRWST inner ctx M.empty mapM_ (logWarn . 
RIO.display) (warnings []) let toEither (_, Left e) = Left e @@ -204,7 +202,7 @@ constructPlan baseConfigOpts0 localDumpPkgs loadPackage0 sourceMap installedMap case boptsCLIBuildSubset $ bcoBuildOptsCLI baseConfigOpts0 of BSAll -> id BSOnlySnapshot -> stripLocals - BSOnlyDependencies -> stripNonDeps deps + BSOnlyDependencies -> stripNonDeps (M.keysSet $ smDeps sourceMap) return $ takeSubset Plan { planTasks = tasks , planFinals = M.fromList finals @@ -356,7 +354,7 @@ mkUnregisterLocal tasks dirtyReason localDumpPkgs sourceMap initialBuildSteps = -- step. addFinal :: LocalPackage -> Package -> Bool -> Bool -> M () addFinal lp package isAllInOne buildHaddocks = do - depsRes <- addPackageDeps False package + depsRes <- addPackageDeps {-False-} package res <- case depsRes of Left e -> return $ Left e Right (missing, present, _minLoc) -> do @@ -394,13 +392,10 @@ addFinal lp package isAllInOne buildHaddocks = do -- forcing this package to be marked as a dependency, even if it is -- directly wanted. This makes sense - if we left out packages that are -- deps, it would break the --only-dependencies build plan. -addDep :: Bool -- ^ is this being used by a dependency? - -> PackageName +addDep :: PackageName -> M (Either ConstructPlanException AddDepRes) -addDep treatAsDep' name = do +addDep name = do ctx <- ask - let treatAsDep = treatAsDep' || name `Set.notMember` wanted ctx - when treatAsDep $ markAsDep name m <- get case Map.lookup name m of Just res -> do @@ -439,10 +434,10 @@ addDep treatAsDep' name = do return $ Right $ ADRFound loc installed Just (PIOnlySource ps) -> do tellExecutables name ps - installPackage treatAsDep name ps Nothing + installPackage name ps Nothing Just (PIBoth ps installed) -> do tellExecutables name ps - installPackage treatAsDep name ps (Just installed) + installPackage name ps (Just installed) updateLibMap name res return res @@ -494,30 +489,29 @@ tellExecutablesPackage loc p = do -- | Given a 'PackageSource' and perhaps an 'Installed' value, adds -- build 'Task's for the package and its dependencies. -installPackage :: Bool -- ^ is this being used by a dependency? - -> PackageName +installPackage :: PackageName -> PackageSource -> Maybe Installed -> M (Either ConstructPlanException AddDepRes) -installPackage treatAsDep name ps minstalled = do +installPackage name ps minstalled = do ctx <- ask case ps of PSRemote pkgLoc _version _fromSnaphot cp -> do planDebug $ "installPackage: Doing all-in-one build for upstream package " ++ show name package <- loadPackage ctx pkgLoc (cpFlags cp) (cpGhcOptions cp) - resolveDepsAndInstall True treatAsDep (cpHaddocks cp) ps package minstalled + resolveDepsAndInstall True (cpHaddocks cp) ps package minstalled PSFilePath lp -> case lpTestBench lp of Nothing -> do planDebug $ "installPackage: No test / bench component for " ++ show name ++ " so doing an all-in-one build." - resolveDepsAndInstall True treatAsDep (lpBuildHaddocks lp) ps (lpPackage lp) minstalled + resolveDepsAndInstall True (lpBuildHaddocks lp) ps (lpPackage lp) minstalled Just tb -> do -- Attempt to find a plan which performs an all-in-one -- build. Ignore the writer action + reset the state if -- it fails. s <- get res <- pass $ do - res <- addPackageDeps treatAsDep tb + res <- addPackageDeps tb let writerFunc w = case res of Left _ -> mempty _ -> w @@ -538,7 +532,7 @@ installPackage treatAsDep name ps minstalled = do put s -- Otherwise, fall back on building the -- tests / benchmarks in a separate step. 
- res' <- resolveDepsAndInstall False treatAsDep (lpBuildHaddocks lp) ps (lpPackage lp) minstalled + res' <- resolveDepsAndInstall False (lpBuildHaddocks lp) ps (lpPackage lp) minstalled when (isRight res') $ do -- Insert it into the map so that it's -- available for addFinal. @@ -547,14 +541,13 @@ installPackage treatAsDep name ps minstalled = do return res' resolveDepsAndInstall :: Bool - -> Bool -> Bool -> PackageSource -> Package -> Maybe Installed -> M (Either ConstructPlanException AddDepRes) -resolveDepsAndInstall isAllInOne treatAsDep buildHaddocks ps package minstalled = do - res <- addPackageDeps treatAsDep package +resolveDepsAndInstall isAllInOne buildHaddocks ps package minstalled = do + res <- addPackageDeps package case res of Left err -> return $ Left err Right deps -> liftM Right $ installPackageGivenDeps isAllInOne buildHaddocks ps package minstalled deps @@ -641,13 +634,12 @@ addEllipsis t -- then the parent package must be installed locally. Otherwise, if it -- is 'Snap', then it can either be installed locally or in the -- snapshot. -addPackageDeps :: Bool -- ^ is this being used by a dependency? - -> Package -> M (Either ConstructPlanException (Set PackageIdentifier, Map PackageIdentifier GhcPkgId, IsMutable)) -addPackageDeps treatAsDep package = do +addPackageDeps :: Package -> M (Either ConstructPlanException (Set PackageIdentifier, Map PackageIdentifier GhcPkgId, IsMutable)) +addPackageDeps package = do ctx <- ask deps' <- packageDepsWithTools package deps <- forM (Map.toList deps') $ \(depname, DepValue range depType) -> do - eres <- addDep treatAsDep depname + eres <- addDep depname let getLatestApplicableVersionAndRev :: M (Maybe (Version, BlobKey)) getLatestApplicableVersionAndRev = do vsAndRevs <- runRIO ctx $ getHackagePackageVersions UsePreferredVersions depname @@ -929,10 +921,19 @@ stripNonDeps deps plan = plan , planInstallExes = Map.empty -- TODO maybe don't disable this? } where - checkTask task = pkgName (taskProvides task) `Set.member` deps - -markAsDep :: PackageName -> M () -markAsDep name = tell mempty { wDeps = Set.singleton name } + checkTask task = taskProvides task `Set.member` missingForDeps + providesDep task = pkgName (taskProvides task) `Set.member` deps + missing = Map.fromList $ map (taskProvides &&& tcoMissing . taskConfigOpts) $ + Map.elems (planTasks plan) + missingForDeps = flip execState mempty $ do + for_ (Map.elems $ planTasks plan) $ \task -> + when (providesDep task) $ collectMissing mempty (taskProvides task) + + collectMissing dependents pid = do + when (pid `elem` dependents) $ error $ + "Unexpected: task cycle for " <> packageNameString (pkgName pid) + modify' $ (<> Set.singleton pid) + mapM_ (collectMissing (pid:dependents)) (fromMaybe mempty $ M.lookup pid missing) -- | Is the given package/version combo defined in the snapshot? 
inSnapshot :: PackageName -> Version -> M Bool From 39353ec60b7f372155f3be3bd60b05c0b27c85c7 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Wed, 13 Feb 2019 12:10:35 +0300 Subject: [PATCH 34/80] Remove unnecessary $ --- src/Stack/Build/ConstructPlan.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Stack/Build/ConstructPlan.hs b/src/Stack/Build/ConstructPlan.hs index 95e507b327..16e63f3d76 100644 --- a/src/Stack/Build/ConstructPlan.hs +++ b/src/Stack/Build/ConstructPlan.hs @@ -932,7 +932,7 @@ stripNonDeps deps plan = plan collectMissing dependents pid = do when (pid `elem` dependents) $ error $ "Unexpected: task cycle for " <> packageNameString (pkgName pid) - modify' $ (<> Set.singleton pid) + modify'(<> Set.singleton pid) mapM_ (collectMissing (pid:dependents)) (fromMaybe mempty $ M.lookup pid missing) -- | Is the given package/version combo defined in the snapshot? From 6125471743ff9fd503e3efd27c47a4a063a6b095 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Thu, 14 Feb 2019 10:09:32 +0300 Subject: [PATCH 35/80] Allow local installs of mutable snapshot packages --- src/Stack/Build/ConstructPlan.hs | 5 ----- src/Stack/Build/Installed.hs | 2 +- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/src/Stack/Build/ConstructPlan.hs b/src/Stack/Build/ConstructPlan.hs index 9cc512ba94..8cb8f248c0 100644 --- a/src/Stack/Build/ConstructPlan.hs +++ b/src/Stack/Build/ConstructPlan.hs @@ -75,7 +75,6 @@ combineSourceInstalled :: PackageSource -> PackageInfo combineSourceInstalled ps (location, installed) = assert (psVersion ps == installedVersion installed) $ - assert (psLocation ps == location) $ case location of -- Always trust something in the snapshot Snap -> PIOnlyInstalled location installed @@ -330,10 +329,6 @@ mkUnregisterLocal tasks dirtyReason localDumpPkgs sourceMap initialBuildSteps = = if initialBuildSteps && taskIsTarget task && taskProvides task == ident then Nothing else Just $ fromMaybe "" $ Map.lookup name dirtyReason - -- Check if we're no longer using the local version - | Just (dpLocation -> PLImmutable _) <- Map.lookup name (smDeps sourceMap) - -- FIXME:qrilka do git/archive count as snapshot installed? 
- = Just "Switching to snapshot installed package" -- Check if a dependency is going to be unregistered | (dep, _):_ <- mapMaybe (`Map.lookup` toUnregister) deps = Just $ "Dependency being unregistered: " <> T.pack (packageIdentifierString dep) diff --git a/src/Stack/Build/Installed.hs b/src/Stack/Build/Installed.hs index ef729cfee7..51e3f7e7a3 100644 --- a/src/Stack/Build/Installed.hs +++ b/src/Stack/Build/Installed.hs @@ -261,7 +261,7 @@ isAllowed opts mcache installMap mloc dp PackageIdentifier name version = dpPackageIdent dp -- Ensure that the installed location matches where the sourceMap says it -- should be installed - checkLocation Snap = mloc /= Just (InstalledTo Local) -- we can allow either global or snap + checkLocation Snap = True -- snapshot deps could become mutable after getting any mutable dependency checkLocation Local = mloc == Just (InstalledTo Local) || mloc == Just ExtraGlobal -- 'locally' installed snapshot packages can come from extra dbs -- Check if a package is allowed if it is found in the sourceMap checkFound (installLoc, installVer) From 91e37ada6eb7fb09ded514c847549862c2cbb505 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Thu, 14 Feb 2019 16:35:54 +0300 Subject: [PATCH 36/80] Remove unused argument --- src/Stack/Build/ConstructPlan.hs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/Stack/Build/ConstructPlan.hs b/src/Stack/Build/ConstructPlan.hs index 8cb8f248c0..eedc5e4599 100644 --- a/src/Stack/Build/ConstructPlan.hs +++ b/src/Stack/Build/ConstructPlan.hs @@ -207,7 +207,7 @@ constructPlan baseConfigOpts0 localDumpPkgs loadPackage0 sourceMap installedMap return $ takeSubset Plan { planTasks = tasks , planFinals = M.fromList finals - , planUnregisterLocal = mkUnregisterLocal tasks dirtyReason localDumpPkgs sourceMap initialBuildSteps + , planUnregisterLocal = mkUnregisterLocal tasks dirtyReason localDumpPkgs initialBuildSteps , planInstallExes = if boptsInstallExes (bcoBuildOpts baseConfigOpts0) || boptsInstallCompilerTool (bcoBuildOpts baseConfigOpts0) @@ -275,12 +275,11 @@ mkUnregisterLocal :: Map PackageName Task -- ^ Reasons why packages are dirty and must be rebuilt -> [DumpPackage () () ()] -- ^ Local package database dump - -> SourceMap -> Bool -- ^ If true, we're doing a special initialBuildSteps -- build - don't unregister target packages. -> Map GhcPkgId (PackageIdentifier, Text) -mkUnregisterLocal tasks dirtyReason localDumpPkgs sourceMap initialBuildSteps = +mkUnregisterLocal tasks dirtyReason localDumpPkgs initialBuildSteps = -- We'll take multiple passes through the local packages. 
This -- will allow us to detect that a package should be unregistered, -- as well as all packages directly or transitively depending on From 99afabca4a4ca2e305d2923863d9d469cc4dc77a Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Thu, 14 Feb 2019 18:01:32 +0300 Subject: [PATCH 37/80] Allow local exe installs from mutable snapshot deps --- src/Stack/Build/Installed.hs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/Stack/Build/Installed.hs b/src/Stack/Build/Installed.hs index 51e3f7e7a3..36c82e55cd 100644 --- a/src/Stack/Build/Installed.hs +++ b/src/Stack/Build/Installed.hs @@ -105,11 +105,15 @@ getInstalled opts installMap = do Nothing -> m Just (iLoc, iVersion) -- Not the version we want, ignore it - | version /= iVersion || loc /= iLoc -> Map.empty + | version /= iVersion || mismatchingLoc loc iLoc -> Map.empty | otherwise -> m where m = Map.singleton name (loc, Executable $ PackageIdentifier name version) + mismatchingLoc installed target | target == installed = False + | installed == Local = False -- snapshot dependency could end up + -- in a local install as being mutable + | otherwise = True exesSnap <- getInstalledExes Snap exesLocal <- getInstalledExes Local let installedMap = Map.unions From 35b5a12eb5eb3bbc3b61f67b4836f0a2e9d8b2c4 Mon Sep 17 00:00:00 2001 From: Michael Snoyman Date: Mon, 18 Feb 2019 08:17:45 +0200 Subject: [PATCH 38/80] Proper relative paths for script resolvers Previously, any relative paths in a script's --resolver flag would be considered relative to the current working directory. It's more logical and consistent to have it relative to the directory the script lives in. This makes the change. Possible argument: it's a bit weird to put the resolver root into the GlobalOptsMonoid value. Overall it seems like the best option to me, but I'm open to other approaches. --- ChangeLog.md | 3 +++ src/Options/Applicative/Complicated.hs | 5 +++-- src/Stack/Options/GlobalParser.hs | 10 ++++++--- src/Stack/Types/Config.hs | 1 + src/main/Main.hs | 22 ++++++++++++------- .../tests/relative-script-snapshots/Main.hs | 4 ++++ .../files/subdir/script.hs | 6 +++++ .../files/subdir/snapshot.yaml | 4 ++++ 8 files changed, 42 insertions(+), 13 deletions(-) create mode 100644 test/integration/tests/relative-script-snapshots/Main.hs create mode 100644 test/integration/tests/relative-script-snapshots/files/subdir/script.hs create mode 100644 test/integration/tests/relative-script-snapshots/files/subdir/snapshot.yaml diff --git a/ChangeLog.md b/ChangeLog.md index 7033542d89..cf24149d96 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -56,6 +56,9 @@ Behavior changes: directory could affect interpretation of the script. See [#4538](https://github.com/commercialhaskell/stack/pull/4538) +* When using `stack script`, custom snapshot files will be resolved + relative to the directory containing the script. + Other enhancements: * Defer loading up of files for local packages. 
This allows us to get diff --git a/src/Options/Applicative/Complicated.hs b/src/Options/Applicative/Complicated.hs index ecf94aa97a..a11bf51a67 100644 --- a/src/Options/Applicative/Complicated.hs +++ b/src/Options/Applicative/Complicated.hs @@ -79,11 +79,12 @@ addCommand :: String -- ^ command string -> String -- ^ title of command -> String -- ^ footer of command help -> (a -> b) -- ^ constructor to wrap up command in common data type + -> (a -> c -> c) -- ^ extend common settings from local settings -> Parser c -- ^ common parser -> Parser a -- ^ command parser -> ExceptT b (Writer (Mod CommandFields (b,c))) () -addCommand cmd title footerStr constr = - addCommand' cmd title footerStr (\a c -> (constr a,c)) +addCommand cmd title footerStr constr extendCommon = + addCommand' cmd title footerStr (\a c -> (constr a,extendCommon a c)) -- | Add a command that takes sub-commands to the options dispatcher. addSubCommands diff --git a/src/Stack/Options/GlobalParser.hs b/src/Stack/Options/GlobalParser.hs index 19c8ed7c4d..fd415aa250 100644 --- a/src/Stack/Options/GlobalParser.hs +++ b/src/Stack/Options/GlobalParser.hs @@ -5,7 +5,7 @@ module Stack.Options.GlobalParser where import Options.Applicative import Options.Applicative.Builder.Extra -import Path.IO (getCurrentDir) +import Path.IO (getCurrentDir, resolveDir') import qualified Stack.Docker as Docker import Stack.Init import Stack.Prelude @@ -29,6 +29,7 @@ globalOptsParser currentDir kind defLogLevel = hide <*> configOptsParser currentDir kind <*> optionalFirst (abstractResolverOptsParser hide0) <*> + pure (First Nothing) <*> -- resolver root is only set via the script command optionalFirst (compilerOptsParser hide0) <*> firstBoolFlags "terminal" @@ -68,8 +69,11 @@ globalOptsParser currentDir kind defLogLevel = globalOptsFromMonoid :: MonadIO m => Bool -> GlobalOptsMonoid -> m GlobalOpts globalOptsFromMonoid defaultTerminal GlobalOptsMonoid{..} = do resolver <- for (getFirst globalMonoidResolver) $ \ur -> do - cwd <- getCurrentDir - resolvePaths (Just cwd) ur + root <- + case globalMonoidResolverRoot of + First Nothing -> getCurrentDir + First (Just dir) -> resolveDir' dir + resolvePaths (Just root) ur pure GlobalOpts { globalReExecVersion = getFirst globalMonoidReExecVersion , globalDockerEntrypoint = getFirst globalMonoidDockerEntrypoint diff --git a/src/Stack/Types/Config.hs b/src/Stack/Types/Config.hs index fb4da044d6..fd43b2a505 100644 --- a/src/Stack/Types/Config.hs +++ b/src/Stack/Types/Config.hs @@ -444,6 +444,7 @@ data GlobalOptsMonoid = GlobalOptsMonoid , globalMonoidTimeInLog :: !(First Bool) -- ^ Whether to include timings in logs. , globalMonoidConfigMonoid :: !ConfigMonoid -- ^ Config monoid, for passing into 'loadConfig' , globalMonoidResolver :: !(First (Unresolved AbstractResolver)) -- ^ Resolver override + , globalMonoidResolverRoot :: !(First FilePath) -- ^ root directory for resolver relative path , globalMonoidCompiler :: !(First WantedCompiler) -- ^ Compiler override , globalMonoidTerminal :: !(First Bool) -- ^ We're in a terminal? 
, globalMonoidStyles :: !StylesUpdate -- ^ Stack's output styles diff --git a/src/main/Main.hs b/src/main/Main.hs index 32983637a9..20581cf78d 100644 --- a/src/main/Main.hs +++ b/src/main/Main.hs @@ -109,7 +109,7 @@ import qualified Stack.Upload as Upload import qualified System.Directory as D import System.Environment (getProgName, getArgs, withArgs) import System.Exit -import System.FilePath (isValid, pathSeparator) +import System.FilePath (isValid, pathSeparator, takeDirectory) import qualified System.FilePath as FP import System.IO (stderr, stdin, stdout, BufferMode(..), hPutStrLn, hPrint, hGetEncoding, hSetEncoding) import System.Terminal (hIsTerminalDeviceOrMinTTY) @@ -388,10 +388,16 @@ commandLineHandler currentDir progName isInterpreter = complicatedOptions "Run runghc (alias for 'runghc')" execCmd (execOptsParser $ Just ExecRunGhc) - addCommand' "script" - "Run a Stack Script" - scriptCmd - scriptOptsParser + addCommand "script" + "Run a Stack Script" + globalFooter + scriptCmd + (\so gom -> + gom + { globalMonoidResolverRoot = First $ Just $ takeDirectory $ soFile so + }) + (globalOpts OtherCmdGlobalOpts) + scriptOptsParser addCommand' "freeze" "Show project or snapshot with pinned dependencies if there are any such" freezeCmd @@ -491,7 +497,7 @@ commandLineHandler currentDir progName isInterpreter = complicatedOptions addCommand' :: String -> String -> (a -> GlobalOpts -> IO ()) -> Parser a -> AddCommand addCommand' cmd title constr = - addCommand cmd title globalFooter constr (globalOpts OtherCmdGlobalOpts) + addCommand cmd title globalFooter constr (\_ gom -> gom) (globalOpts OtherCmdGlobalOpts) addSubCommands' :: String -> String -> AddCommand -> AddCommand @@ -502,13 +508,13 @@ commandLineHandler currentDir progName isInterpreter = complicatedOptions addBuildCommand' :: String -> String -> (a -> GlobalOpts -> IO ()) -> Parser a -> AddCommand addBuildCommand' cmd title constr = - addCommand cmd title globalFooter constr (globalOpts BuildCmdGlobalOpts) + addCommand cmd title globalFooter constr (\_ gom -> gom) (globalOpts BuildCmdGlobalOpts) -- Additional helper that hides global options and shows some ghci options addGhciCommand' :: String -> String -> (a -> GlobalOpts -> IO ()) -> Parser a -> AddCommand addGhciCommand' cmd title constr = - addCommand cmd title globalFooter constr (globalOpts GhciCmdGlobalOpts) + addCommand cmd title globalFooter constr (\_ gom -> gom) (globalOpts GhciCmdGlobalOpts) globalOpts :: GlobalOptsContext -> Parser GlobalOptsMonoid globalOpts kind = diff --git a/test/integration/tests/relative-script-snapshots/Main.hs b/test/integration/tests/relative-script-snapshots/Main.hs new file mode 100644 index 0000000000..0a4044c6af --- /dev/null +++ b/test/integration/tests/relative-script-snapshots/Main.hs @@ -0,0 +1,4 @@ +import StackTest + +main :: IO () +main = stack ["subdir/script.hs"] diff --git a/test/integration/tests/relative-script-snapshots/files/subdir/script.hs b/test/integration/tests/relative-script-snapshots/files/subdir/script.hs new file mode 100644 index 0000000000..2858fcedcf --- /dev/null +++ b/test/integration/tests/relative-script-snapshots/files/subdir/script.hs @@ -0,0 +1,6 @@ +#!/usr/bin/env stack +-- stack --resolver snapshot.yaml script +import Acme.Missiles + +main :: IO () +main = launchMissiles diff --git a/test/integration/tests/relative-script-snapshots/files/subdir/snapshot.yaml b/test/integration/tests/relative-script-snapshots/files/subdir/snapshot.yaml new file mode 100644 index 0000000000..6182f40ef9 --- /dev/null +++ 
b/test/integration/tests/relative-script-snapshots/files/subdir/snapshot.yaml @@ -0,0 +1,4 @@ +resolver: ghc-8.2.2 +name: snapshot +packages: +- acme-missiles-0.3@rev:0 From d8103207ddae290b05d1946a0e4cc2e4173ea8fe Mon Sep 17 00:00:00 2001 From: Michael Snoyman Date: Mon, 18 Feb 2019 11:54:21 +0200 Subject: [PATCH 39/80] Remove unused field on SYLNoConfig --- src/Stack/Config.hs | 16 ++++++++-------- src/Stack/ConfigCmd.hs | 2 +- src/Stack/Script.hs | 4 ++-- src/Stack/Types/Config.hs | 4 +--- 4 files changed, 12 insertions(+), 14 deletions(-) diff --git a/src/Stack/Config.hs b/src/Stack/Config.hs index 9bd6699130..3364a59946 100644 --- a/src/Stack/Config.hs +++ b/src/Stack/Config.hs @@ -473,7 +473,7 @@ loadConfigMaybeProject configArgs mresolver mproject inner = do inner2 let withConfig = case mproject of - LCSNoConfig _ -> configNoLocalConfig stackRoot mresolver configArgs + LCSNoConfig -> configNoLocalConfig stackRoot mresolver configArgs LCSProject project -> loadHelper $ Just project LCSNoProject -> loadHelper Nothing @@ -495,7 +495,7 @@ loadConfigMaybeProject configArgs mresolver mproject inner = do case mprojectRoot of LCSProject fp -> Just fp LCSNoProject -> Nothing - LCSNoConfig _ -> Nothing + LCSNoConfig -> Nothing } -- | Load the configuration, using current directory, environment variables, @@ -538,7 +538,7 @@ loadBuildConfig mproject maresolver mcompiler = do LCSProject (project, fp, _) -> do forM_ (projectUserMsg project) (logWarn . fromString) return (project, fp) - LCSNoConfig _ -> do + LCSNoConfig -> do p <- assert (isJust mresolver) (getEmptyProject mresolver) return (p, configUserConfigPath config) LCSNoProject -> do @@ -658,7 +658,7 @@ loadBuildConfig mproject maresolver mcompiler = do case mproject of LCSNoProject -> True LCSProject _ -> False - LCSNoConfig _ -> False + LCSNoConfig -> False , bcCurator = projectCurator project , bcDownloadCompiler = WithDownloadCompiler } @@ -849,12 +849,12 @@ getProjectConfig SYLDefault = do if exists then return $ Just fp else return Nothing -getProjectConfig (SYLNoConfig parentDir) = return (LCSNoConfig parentDir) +getProjectConfig SYLNoConfig = return LCSNoConfig data LocalConfigStatus a = LCSNoProject | LCSProject a - | LCSNoConfig !(Path Abs Dir) + | LCSNoConfig -- ^ parent directory for making a concrete resolving deriving (Show,Functor,Foldable,Traversable) @@ -876,9 +876,9 @@ loadProjectConfig mstackYaml = do LCSNoProject -> do logDebug "No project config file found, using defaults." 
return LCSNoProject - LCSNoConfig mparentDir -> do + LCSNoConfig -> do logDebug "Ignoring config files" - return (LCSNoConfig mparentDir) + return LCSNoConfig where load fp = do iopc <- loadConfigYaml (parseProjectAndConfigMonoid (parent fp)) fp diff --git a/src/Stack/ConfigCmd.hs b/src/Stack/ConfigCmd.hs index d57a19f550..026c16f33f 100644 --- a/src/Stack/ConfigCmd.hs +++ b/src/Stack/ConfigCmd.hs @@ -60,7 +60,7 @@ cfgCmdSet go cmd = do case mstackYaml of LCSProject stackYaml -> return stackYaml LCSNoProject -> liftM ( stackDotYaml) (getImplicitGlobalProjectDir conf) - LCSNoConfig _ -> throwString "config command used when no local configuration available" + LCSNoConfig -> throwString "config command used when no local configuration available" CommandScopeGlobal -> return (configUserConfigPath conf) -- We don't need to worry about checking for a valid yaml here (config :: Yaml.Object) <- diff --git a/src/Stack/Script.hs b/src/Stack/Script.hs index 72b0ea06b6..bf176e0d71 100644 --- a/src/Stack/Script.hs +++ b/src/Stack/Script.hs @@ -40,7 +40,7 @@ scriptCmd opts go' = do { globalConfigMonoid = (globalConfigMonoid go') { configMonoidInstallGHC = First $ Just True } - , globalStackYaml = SYLNoConfig scriptDir + , globalStackYaml = SYLNoConfig } withDefaultBuildConfigAndLock go $ \lk -> do -- Some warnings in case the user somehow tries to set a @@ -53,7 +53,7 @@ scriptCmd opts go' = do "Ignoring override stack.yaml file for script command: " <> fromString fp SYLDefault -> return () - SYLNoConfig _ -> assert False (return ()) + SYLNoConfig -> assert False (return ()) config <- view configL menv <- liftIO $ configProcessContextSettings config defaultEnvSettings diff --git a/src/Stack/Types/Config.hs b/src/Stack/Types/Config.hs index fd43b2a505..cc027a4981 100644 --- a/src/Stack/Types/Config.hs +++ b/src/Stack/Types/Config.hs @@ -430,9 +430,7 @@ data GlobalOpts = GlobalOpts data StackYamlLoc filepath = SYLDefault | SYLOverride !filepath - | SYLNoConfig !(Path Abs Dir) - -- ^ FilePath is the directory containing the script file, used - -- for resolving custom snapshot files. + | SYLNoConfig deriving (Show,Functor,Foldable,Traversable) -- | Parsed global command-line options monoid. From d6891c5bd212b7b0e3b7dd26fa2b329b5cbe7a96 Mon Sep 17 00:00:00 2001 From: Michael Snoyman Date: Mon, 18 Feb 2019 12:05:41 +0200 Subject: [PATCH 40/80] Allow --extra-dep for script command --- ChangeLog.md | 3 ++ src/Stack/Config.hs | 28 +++++++++---------- src/Stack/ConfigCmd.hs | 2 +- src/Stack/Options/ScriptParser.hs | 7 +++++ src/Stack/Script.hs | 4 +-- src/Stack/Types/Config.hs | 3 +- .../tests/script-extra-dep/Main.hs | 4 +++ .../tests/script-extra-dep/files/script.hs | 6 ++++ 8 files changed, 39 insertions(+), 18 deletions(-) create mode 100644 test/integration/tests/script-extra-dep/Main.hs create mode 100644 test/integration/tests/script-extra-dep/files/script.hs diff --git a/ChangeLog.md b/ChangeLog.md index cf24149d96..015600b74a 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -101,6 +101,9 @@ Other enhancements: [#4535](https://github.com/commercialhaskell/stack/issues/4535)/ * Show snapshot being used when `stack ghci` is invoked outside of a project directory. See [#3651](https://github.com/commercialhaskell/stack/issues/3651) +* The script interpreter now accepts a `--extra-dep` flag for adding + packages not present in the snapshot. Currently, this only works + with packages from Hackage, not Git repos or archives. 
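For illustration of the ChangeLog entry above (this simply mirrors the integration test added later in this patch, so the resolver and package shown are only examples), a script can declare the extra dependency directly in its interpreter line:

#!/usr/bin/env stack
-- stack --resolver ghc-8.2.2 script --extra-dep acme-missiles-0.3@rev:0
import Acme.Missiles

main :: IO ()
main = launchMissiles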
Bug fixes: diff --git a/src/Stack/Config.hs b/src/Stack/Config.hs index 3364a59946..f12807c2cb 100644 --- a/src/Stack/Config.hs +++ b/src/Stack/Config.hs @@ -473,7 +473,7 @@ loadConfigMaybeProject configArgs mresolver mproject inner = do inner2 let withConfig = case mproject of - LCSNoConfig -> configNoLocalConfig stackRoot mresolver configArgs + LCSNoConfig _extraDeps -> configNoLocalConfig stackRoot mresolver configArgs LCSProject project -> loadHelper $ Just project LCSNoProject -> loadHelper Nothing @@ -495,7 +495,7 @@ loadConfigMaybeProject configArgs mresolver mproject inner = do case mprojectRoot of LCSProject fp -> Just fp LCSNoProject -> Nothing - LCSNoConfig -> Nothing + LCSNoConfig _extraDeps -> Nothing } -- | Load the configuration, using current directory, environment variables, @@ -538,8 +538,8 @@ loadBuildConfig mproject maresolver mcompiler = do LCSProject (project, fp, _) -> do forM_ (projectUserMsg project) (logWarn . fromString) return (project, fp) - LCSNoConfig -> do - p <- assert (isJust mresolver) (getEmptyProject mresolver) + LCSNoConfig extraDeps -> do + p <- assert (isJust mresolver) (getEmptyProject mresolver extraDeps) return (p, configUserConfigPath config) LCSNoProject -> do logDebug "Run from outside a project, using implicit global project config" @@ -567,7 +567,7 @@ loadBuildConfig mproject maresolver mcompiler = do else do logInfo ("Writing implicit global project config file to: " <> fromString dest') logInfo "Note: You can change the snapshot via the resolver field there." - p <- getEmptyProject mresolver + p <- getEmptyProject mresolver [] liftIO $ do S.writeFile dest' $ S.concat [ "# This is the implicit global project's config file, which is only used when\n" @@ -658,13 +658,13 @@ loadBuildConfig mproject maresolver mcompiler = do case mproject of LCSNoProject -> True LCSProject _ -> False - LCSNoConfig -> False + LCSNoConfig _extraDeps -> False , bcCurator = projectCurator project , bcDownloadCompiler = WithDownloadCompiler } where - getEmptyProject :: Maybe RawSnapshotLocation -> RIO Config Project - getEmptyProject mresolver = do + getEmptyProject :: Maybe RawSnapshotLocation -> [PackageIdentifierRevision] -> RIO Config Project + getEmptyProject mresolver extraDeps = do r <- case mresolver of Just resolver -> do logInfo ("Using resolver: " <> display resolver <> " specified on command line") @@ -676,7 +676,7 @@ loadBuildConfig mproject maresolver mcompiler = do return Project { projectUserMsg = Nothing , projectPackages = [] - , projectDependencies = [] + , projectDependencies = map (RPLImmutable . flip RPLIHackage Nothing) extraDeps , projectFlags = mempty , projectResolver = r , projectCompiler = Nothing @@ -849,13 +849,13 @@ getProjectConfig SYLDefault = do if exists then return $ Just fp else return Nothing -getProjectConfig SYLNoConfig = return LCSNoConfig +getProjectConfig (SYLNoConfig extraDeps) = return $ LCSNoConfig extraDeps data LocalConfigStatus a = LCSNoProject | LCSProject a - | LCSNoConfig - -- ^ parent directory for making a concrete resolving + | LCSNoConfig ![PackageIdentifierRevision] + -- ^ Extra dependencies deriving (Show,Functor,Foldable,Traversable) -- | Find the project config file location, respecting environment variables @@ -876,9 +876,9 @@ loadProjectConfig mstackYaml = do LCSNoProject -> do logDebug "No project config file found, using defaults." 
return LCSNoProject - LCSNoConfig -> do + LCSNoConfig extraDeps -> do logDebug "Ignoring config files" - return LCSNoConfig + return $ LCSNoConfig extraDeps where load fp = do iopc <- loadConfigYaml (parseProjectAndConfigMonoid (parent fp)) fp diff --git a/src/Stack/ConfigCmd.hs b/src/Stack/ConfigCmd.hs index 026c16f33f..6982c12b4b 100644 --- a/src/Stack/ConfigCmd.hs +++ b/src/Stack/ConfigCmd.hs @@ -60,7 +60,7 @@ cfgCmdSet go cmd = do case mstackYaml of LCSProject stackYaml -> return stackYaml LCSNoProject -> liftM ( stackDotYaml) (getImplicitGlobalProjectDir conf) - LCSNoConfig -> throwString "config command used when no local configuration available" + LCSNoConfig _extraDeps -> throwString "config command used when no local configuration available" CommandScopeGlobal -> return (configUserConfigPath conf) -- We don't need to worry about checking for a valid yaml here (config :: Yaml.Object) <- diff --git a/src/Stack/Options/ScriptParser.hs b/src/Stack/Options/ScriptParser.hs index ade44325ef..ee84b37e47 100644 --- a/src/Stack/Options/ScriptParser.hs +++ b/src/Stack/Options/ScriptParser.hs @@ -12,6 +12,7 @@ data ScriptOpts = ScriptOpts , soArgs :: ![String] , soCompile :: !ScriptExecute , soGhcOptions :: ![String] + , soScriptExtraDeps :: ![PackageIdentifierRevision] } deriving Show @@ -40,3 +41,9 @@ scriptOptsParser = ScriptOpts metavar "OPTIONS" <> completer ghcOptsCompleter <> help "Additional options passed to GHC")) + <*> many (option extraDepRead + (long "extra-dep" <> + metavar "PACKAGE-VERSION" <> + help "Extra dependencies to be added to the snapshot")) + where + extraDepRead = eitherReader $ mapLeft show . parsePackageIdentifierRevision . fromString diff --git a/src/Stack/Script.hs b/src/Stack/Script.hs index bf176e0d71..21d905df7f 100644 --- a/src/Stack/Script.hs +++ b/src/Stack/Script.hs @@ -40,7 +40,7 @@ scriptCmd opts go' = do { globalConfigMonoid = (globalConfigMonoid go') { configMonoidInstallGHC = First $ Just True } - , globalStackYaml = SYLNoConfig + , globalStackYaml = SYLNoConfig $ soScriptExtraDeps opts } withDefaultBuildConfigAndLock go $ \lk -> do -- Some warnings in case the user somehow tries to set a @@ -53,7 +53,7 @@ scriptCmd opts go' = do "Ignoring override stack.yaml file for script command: " <> fromString fp SYLDefault -> return () - SYLNoConfig -> assert False (return ()) + SYLNoConfig _ -> assert False (return ()) config <- view configL menv <- liftIO $ configProcessContextSettings config defaultEnvSettings diff --git a/src/Stack/Types/Config.hs b/src/Stack/Types/Config.hs index cc027a4981..9137de309c 100644 --- a/src/Stack/Types/Config.hs +++ b/src/Stack/Types/Config.hs @@ -430,7 +430,8 @@ data GlobalOpts = GlobalOpts data StackYamlLoc filepath = SYLDefault | SYLOverride !filepath - | SYLNoConfig + | SYLNoConfig ![PackageIdentifierRevision] + -- ^ Extra dependencies included in the script command line. deriving (Show,Functor,Foldable,Traversable) -- | Parsed global command-line options monoid. 
diff --git a/test/integration/tests/script-extra-dep/Main.hs b/test/integration/tests/script-extra-dep/Main.hs new file mode 100644 index 0000000000..c2e9c88b99 --- /dev/null +++ b/test/integration/tests/script-extra-dep/Main.hs @@ -0,0 +1,4 @@ +import StackTest + +main :: IO () +main = stack ["script.hs"] diff --git a/test/integration/tests/script-extra-dep/files/script.hs b/test/integration/tests/script-extra-dep/files/script.hs new file mode 100644 index 0000000000..5652f3a433 --- /dev/null +++ b/test/integration/tests/script-extra-dep/files/script.hs @@ -0,0 +1,6 @@ +#!/usr/bin/env stack +-- stack --resolver ghc-8.2.2 script --extra-dep acme-missiles-0.3@rev:0 +import Acme.Missiles + +main :: IO () +main = launchMissiles From 1c246893ad7ed021830cbf863dec8aecb0fedb84 Mon Sep 17 00:00:00 2001 From: Michael Snoyman Date: Mon, 18 Feb 2019 12:22:48 +0200 Subject: [PATCH 41/80] Rename duplicated System.Permissions module Both the pantry and stack packages had the same module name, which prevented `stack ghci` from working. This rename resolves the problem. --- subs/pantry/src/Pantry/Repo.hs | 2 +- subs/pantry/src/unix/System/{Permissions.hs => IsWindows.hs} | 2 +- subs/pantry/src/windows/System/{Permissions.hs => IsWindows.hs} | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) rename subs/pantry/src/unix/System/{Permissions.hs => IsWindows.hs} (86%) rename subs/pantry/src/windows/System/{Permissions.hs => IsWindows.hs} (85%) diff --git a/subs/pantry/src/Pantry/Repo.hs b/subs/pantry/src/Pantry/Repo.hs index 95e989288e..7f104d55c0 100644 --- a/subs/pantry/src/Pantry/Repo.hs +++ b/subs/pantry/src/Pantry/Repo.hs @@ -19,7 +19,7 @@ import RIO.Process import Database.Persist (Entity (..)) import qualified RIO.Text as T import System.Console.ANSI (hSupportsANSIWithoutEmulation) -import System.Permissions (osIsWindows) +import System.IsWindows (osIsWindows) fetchReposRaw :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env) diff --git a/subs/pantry/src/unix/System/Permissions.hs b/subs/pantry/src/unix/System/IsWindows.hs similarity index 86% rename from subs/pantry/src/unix/System/Permissions.hs rename to subs/pantry/src/unix/System/IsWindows.hs index b3194ca979..b8ef69ef46 100644 --- a/subs/pantry/src/unix/System/Permissions.hs +++ b/subs/pantry/src/unix/System/IsWindows.hs @@ -1,5 +1,5 @@ {-# LANGUAGE NoImplicitPrelude #-} -module System.Permissions +module System.IsWindows ( osIsWindows ) where diff --git a/subs/pantry/src/windows/System/Permissions.hs b/subs/pantry/src/windows/System/IsWindows.hs similarity index 85% rename from subs/pantry/src/windows/System/Permissions.hs rename to subs/pantry/src/windows/System/IsWindows.hs index c679a67a19..d0b3d9dd0d 100644 --- a/subs/pantry/src/windows/System/Permissions.hs +++ b/subs/pantry/src/windows/System/IsWindows.hs @@ -1,5 +1,5 @@ {-# LANGUAGE NoImplicitPrelude #-} -module System.Permissions +module System.IsWindows ( osIsWindows ) where From 0a964c70704b1ef3c40010c1b4a8b079db6c4cce Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Mon, 18 Feb 2019 14:21:55 +0300 Subject: [PATCH 42/80] Proper log file handling on expected haddock failures --- src/Stack/Build/Execute.hs | 47 +++++++++++++++++++++++--------------- src/Stack/SDist.hs | 2 +- 2 files changed, 29 insertions(+), 20 deletions(-) diff --git a/src/Stack/Build/Execute.hs b/src/Stack/Build/Execute.hs index 454dbc8b77..d9211a4488 100644 --- a/src/Stack/Build/Execute.hs +++ b/src/Stack/Build/Execute.hs @@ -19,6 +19,7 @@ module Stack.Build.Execute , withExecuteEnv , 
withSingleContext , ExcludeTHLoading(..) + , KeepOutputOpen(..) ) where import Control.Concurrent.Execute @@ -930,7 +931,7 @@ withSingleContext :: forall env a. HasEnvConfig env -> Path Abs Dir -- Package root directory file path -- Note that the `Path Abs Dir` argument is redundant with the `Path Abs File` -- argument, but we provide both to avoid recalculating `parent` of the `File`. - -> (ExcludeTHLoading -> [String] -> RIO env ()) + -> (KeepOutputOpen -> ExcludeTHLoading -> [String] -> RIO env ()) -- Function to run Cabal with args -> (Text -> RIO env ()) -- An 'announce' function, for different build phases -> OutputType @@ -1018,7 +1019,7 @@ withSingleContext ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffi :: Package -> Path Abs Dir -> OutputType - -> ((ExcludeTHLoading -> [String] -> RIO env ()) -> RIO env a) + -> ((KeepOutputOpen -> ExcludeTHLoading -> [String] -> RIO env ()) -> RIO env a) -> RIO env a withCabal package pkgDir outputType inner = do config <- view configL @@ -1040,7 +1041,7 @@ withSingleContext ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffi case (packageBuildType package, eeSetupExe) of (C.Simple, Just setupExe) -> return $ Left setupExe _ -> liftIO $ Right <$> getSetupHs pkgDir - inner $ \stripTHLoading args -> do + inner $ \keepOutputOpen stripTHLoading args -> do let cabalPackageArg -- Omit cabal package dependency when building -- Cabal. See @@ -1164,14 +1165,17 @@ withSingleContext ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffi (mlogFile, bss) <- case outputType of OTConsole _ -> return (Nothing, []) - OTLogFile logFile h -> do - liftIO $ hClose h - fmap (Just logFile,) $ withSourceFile (toFilePath logFile) $ \src -> - runConduit - $ src - .| CT.decodeUtf8Lenient - .| mungeBuildOutput stripTHLoading makeAbsolute pkgDir compilerVer - .| CL.consume + OTLogFile logFile h -> + if keepOutputOpen == KeepOpen + then return (Nothing, []) -- expected failure build continues further + else do + liftIO $ hClose h + fmap (Just logFile,) $ withSourceFile (toFilePath logFile) $ \src -> + runConduit + $ src + .| CT.decodeUtf8Lenient + .| mungeBuildOutput stripTHLoading makeAbsolute pkgDir compilerVer + .| CL.consume throwM $ CabalExitedUnsuccessfully (eceExitCode ece) taskProvides @@ -1297,12 +1301,12 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap expectHaddockFailure mcurator = maybe False (Set.member pname . 
curatorExpectHaddockFailure) mcurator fulfillHaddockExpectations mcurator action | expectHaddockFailure mcurator = do - eres <- tryAny action + eres <- tryAny $ action KeepOpen case eres of Right () -> logWarn $ fromString (packageNameString pname) <> ": unexpected Haddock success" Left _ -> return () fulfillHaddockExpectations _ action = do - action + action CloseOnException buildingFinals = isFinalBuild || taskAllInOne enableTests = buildingFinals && any isCTest (taskComponents task) @@ -1426,7 +1430,8 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap bindir = toFilePath $ bcoSnapInstallRoot eeBaseConfigOpts bindirSuffix realConfigAndBuild cache mcurator allDepsMap = withSingleContext ac ee task (Just allDepsMap) Nothing - $ \package cabalfp pkgDir cabal announce _outputType -> do + $ \package cabalfp pkgDir cabal0 announce _outputType -> do + let cabal = cabal0 CloseOnException executableBuildStatuses <- getExecutableBuildStatuses package pkgDir when (not (cabalIsSatisfied executableBuildStatuses) && taskIsTarget task) (logInfo @@ -1453,7 +1458,7 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap initialBuildSteps executableBuildStatuses cabal announce return Nothing _ -> fulfillCuratorExpectations pname mcurator enableTests enableBenchmarks Nothing $ - Just <$> realBuild cache package pkgDir cabal announce executableBuildStatuses + Just <$> realBuild cache package pkgDir cabal0 announce executableBuildStatuses initialBuildSteps executableBuildStatuses cabal announce = do () <- announce ("initial-build-steps" <> annSuffix executableBuildStatuses) @@ -1463,11 +1468,12 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap :: ConfigCache -> Package -> Path Abs Dir - -> (ExcludeTHLoading -> [String] -> RIO env ()) + -> (KeepOutputOpen -> ExcludeTHLoading -> [String] -> RIO env ()) -> (Text -> RIO env ()) -> Map Text ExecutableBuildStatus -> RIO env Installed - realBuild cache package pkgDir cabal announce executableBuildStatuses = do + realBuild cache package pkgDir cabal0 announce executableBuildStatuses = do + let cabal = cabal0 CloseOnException wc <- view $ actualCompilerVersionL.whichCompilerL markExeNotInstalled (taskLocation task) taskProvides @@ -1557,7 +1563,7 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap | ghcVer >= mkVersion [8, 4] -> ["--haddock-option=--quickjump"] _ -> [] - fulfillHaddockExpectations mcurator $ cabal KeepTHLoading $ concat + fulfillHaddockExpectations mcurator $ \keep -> cabal0 keep KeepTHLoading $ concat [ ["haddock", "--html", "--hoogle", "--html-location=../$pkg-$version/"] , sourceFlag , ["--internal" | boptsHaddockInternal eeBuildOpts] @@ -1956,10 +1962,13 @@ singleBench beopts benchesToRun ac ee task installedMap = do when toRun $ do announce "benchmarks" - cabal KeepTHLoading ("bench" : args) + cabal CloseOnException KeepTHLoading ("bench" : args) data ExcludeTHLoading = ExcludeTHLoading | KeepTHLoading data ConvertPathsToAbsolute = ConvertPathsToAbsolute | KeepPathsAsIs +-- | special marker for expected failures in curator builds, using those +-- we need to keep log handle open as build continues further even after a failure +data KeepOutputOpen = KeepOpen | CloseOnException deriving Eq -- | Strip Template Haskell "Loading package" lines and making paths absolute. mungeBuildOutput :: forall m. 
MonadIO m diff --git a/src/Stack/SDist.hs b/src/Stack/SDist.hs index 75b55944ab..9d00f15159 100644 --- a/src/Stack/SDist.hs +++ b/src/Stack/SDist.hs @@ -330,7 +330,7 @@ getSDistFileList lp = $ \ee -> withSingleContext ac ee task Nothing (Just "sdist") $ \_package cabalfp _pkgDir cabal _announce _outputType -> do let outFile = toFilePath tmpdir FP. "source-files-list" - cabal KeepTHLoading ["sdist", "--list-sources", outFile] + cabal CloseOnException KeepTHLoading ["sdist", "--list-sources", outFile] contents <- liftIO (S.readFile outFile) return (T.unpack $ T.decodeUtf8With T.lenientDecode contents, cabalfp) where From 4c7cf82df524f75e00cf9232ea70c895ad500b2b Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Mon, 18 Feb 2019 14:28:39 +0300 Subject: [PATCH 43/80] Revert "Forward hidden packages from constraints to Stackage stack.yaml" This reverts commit 59acc7d127dec8603d1e0bedad9c61b2ca0e0b73. --- subs/curator/src/Curator/Unpack.hs | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/subs/curator/src/Curator/Unpack.hs b/subs/curator/src/Curator/Unpack.hs index f4d90aca2f..13c6198237 100644 --- a/subs/curator/src/Curator/Unpack.hs +++ b/subs/curator/src/Curator/Unpack.hs @@ -24,17 +24,17 @@ unpackSnapshot -> RIO env () unpackSnapshot cons snap root = do unpacked <- parseRelDir "unpacked" - (suffixes, (flags, hidden), (skipTest, expectTestFailure), (skipBench, expectBenchmarkFailure), + (suffixes, flags, (skipTest, expectTestFailure), (skipBench, expectBenchmarkFailure), (skipHaddock, expectHaddockFailure)) <- fmap fold $ for (rsPackages snap) $ \sp -> do let pl = rspLocation sp TreeKey (BlobKey sha _size) <- getRawPackageLocationTreeKey pl PackageIdentifier name version <- getRawPackageLocationIdent pl - let (flags, hide, skipBuild, test, bench, haddock) = + let (flags, skipBuild, test, bench, haddock) = case Map.lookup name $ consPackages cons of Nothing -> - (mempty, False, False, CAExpectSuccess, CAExpectSuccess, CAExpectSuccess) + (mempty, False, CAExpectSuccess, CAExpectSuccess, CAExpectSuccess) Just pc -> - (pcFlags pc, pcHide pc, pcSkipBuild pc, pcTests pc, pcBenchmarks pc, pcHaddock pc) + (pcFlags pc, pcSkipBuild pc, pcTests pc, pcBenchmarks pc, pcHaddock pc) unless (flags == rspFlags sp) $ error $ unlines [ "mismatched flags for " ++ show pl , " snapshot: " ++ show (rspFlags sp) @@ -62,9 +62,7 @@ unpackSnapshot cons snap root = do renameDir destTmp dest pure ( Set.singleton suffix - , ( if Map.null flags then Map.empty else Map.singleton name flags - , if hide then Map.singleton name True else Map.empty - ) + , if Map.null flags then Map.empty else Map.singleton name flags , case test of CAExpectSuccess -> mempty CAExpectFailure -> (mempty, Set.singleton name) @@ -84,7 +82,6 @@ unpackSnapshot cons snap root = do [ "resolver" .= ("ghc-" ++ versionString (consGhcVersion cons)) , "packages" .= Set.map (\suffix -> toFilePath (unpacked suffix)) suffixes , "flags" .= fmap toCabalStringMap (toCabalStringMap flags) - , "hidden" .= toCabalStringMap hidden , "curator" .= object [ "skip-test" .= Set.map CabalString skipTest , "expect-test-failure" .= Set.map CabalString expectTestFailure From 1c01bb6e1ca2fd179b0aa3a20b144ff5b941b27e Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Tue, 19 Feb 2019 14:57:37 +0300 Subject: [PATCH 44/80] Fix building haddocks for packages with internal libraries --- src/Stack/Build/Haddock.hs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/Stack/Build/Haddock.hs b/src/Stack/Build/Haddock.hs 
index 8135ffe583..8281fcdd77 100644 --- a/src/Stack/Build/Haddock.hs +++ b/src/Stack/Build/Haddock.hs @@ -25,6 +25,7 @@ import Data.Time (UTCTime) import Path import Path.Extra import Path.IO +import RIO.List (intercalate) import RIO.PrettyPrint import Stack.Constants import Stack.PackageDump @@ -235,6 +236,9 @@ generateHaddockIndex descr wc bco dumpPackages docRelFP destDir = do docRelFP FP. packageIdentifierString dpPackageIdent FP. (packageNameString name FP.<.> "haddock") + interfaces = intercalate "," $ + maybeToList dpHaddockHtml ++ [srcInterfaceFP] + destInterfaceAbsFile <- parseCollapsedAbsFile (toFilePath destDir FP. destInterfaceRelFP) esrcInterfaceModTime <- tryGetModificationTime srcInterfaceAbsFile return $ @@ -242,11 +246,7 @@ generateHaddockIndex descr wc bco dumpPackages docRelFP destDir = do Left _ -> Nothing Right srcInterfaceModTime -> Just - ( [ "-i" - , concat - [ docRelFP FP. packageIdentifierString dpPackageIdent - , "," - , destInterfaceRelFP ]] + ( [ "-i", interfaces ] , srcInterfaceModTime , srcInterfaceAbsFile , destInterfaceAbsFile ) From b24fa3272461e962497a4772eb1346c46911b861 Mon Sep 17 00:00:00 2001 From: Sylvain Henry Date: Tue, 5 Feb 2019 23:35:39 +0100 Subject: [PATCH 45/80] Add support for repositories with submodules `git archive` doesn't include files from git submodules in the generated archive. This patch uses `git submodule foreach` to explicitly generate an archive for each submodule and to append it to the main one with `tar` command. We also export `withRepo` which can be useful independently of repo archive creation. --- ChangeLog.md | 1 + subs/pantry/src/Pantry/Repo.hs | 86 ++++++++++++++----- test/integration/tests/git-submodules/Main.hs | 47 ++++++++++ 3 files changed, 114 insertions(+), 20 deletions(-) create mode 100644 test/integration/tests/git-submodules/Main.hs diff --git a/ChangeLog.md b/ChangeLog.md index 7033542d89..573fbdd720 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -16,6 +16,7 @@ Major changes: * Support for archives and repos in the `packages` section has been removed. Instead, you must use `extra-deps` for such dependencies. `packages` now only supports local filepaths. + * Add support for Git repositories containing (recursive) submodules. * Addition of new configuration options for specifying a "pantry tree" key, which provides more reproducibility around builds, and (in the future) will be used for more efficient package diff --git a/subs/pantry/src/Pantry/Repo.hs b/subs/pantry/src/Pantry/Repo.hs index 7f104d55c0..6db5927c95 100644 --- a/subs/pantry/src/Pantry/Repo.hs +++ b/subs/pantry/src/Pantry/Repo.hs @@ -6,6 +6,9 @@ module Pantry.Repo , fetchRepos , getRepo , getRepoKey + , createRepoArchive + , withRepoArchive + , withRepo ) where import Pantry.Types @@ -71,26 +74,82 @@ getRepo' => Repo -> RawPackageMetadata -> RIO env Package -getRepo' repo@(Repo url commit repoType' subdir) rpm = - withSystemTempDirectory "get-repo" $ +getRepo' repo rpm = do + withRepoArchive repo $ \tarball -> do + abs' <- resolveFile' tarball + getArchivePackage + (RPLIRepo repo rpm) + RawArchive + { raLocation = ALFilePath $ ResolvedPath + { resolvedRelative = RelFilePath $ T.pack tarball + , resolvedAbsolute = abs' + } + , raHash = Nothing + , raSize = Nothing + , raSubdir = repoSubdir repo + } + rpm + +-- | Fetch a repository and create a (temporary) tar archive from it. Pass the +-- path of the generated tarball to the given action. +withRepoArchive + :: forall env a. 
(HasLogFunc env, HasProcessContext env) + => Repo + -> (FilePath -> RIO env a) + -> RIO env a +withRepoArchive repo action = + withSystemTempDirectory "with-repo-archive" $ \tmpdir -> do + let tarball = tmpdir "foo.tar" + createRepoArchive repo tarball + action tarball + +-- | Create a tarball containing files from a repository +createRepoArchive + :: forall env. (HasLogFunc env, HasProcessContext env) + => Repo + -> FilePath -- ^ Output tar archive filename + -> RIO env () +createRepoArchive repo tarball = do + let runCommand cmd args = void $ proc cmd args readProcess_ + + withRepo repo $ case repoType repo of + RepoGit -> do + runCommand "git" ["-c", "core.autocrlf=false", "archive", "-o", tarball, "HEAD"] + -- also include submodules files: use `git submodule foreach` to + -- execute `git archive` in each submodule and to append the + -- generated archive to the main one with `tar -A` + runCommand "git" + [ "submodule", "foreach", "--recursive" + , "git -c core.autocrlf=false archive --prefix=$displaypath/ -o bar.tar HEAD" + <> " && if [ -f bar.tar ]; then tar -Af " <> tarball <> " bar.tar ; fi" + ] + RepoHg -> runCommand "hg" ["archive", tarball, "-X", ".hg_archival.txt"] + + +-- | Clone the repository and execute the action with the working +-- directory set to the repository root. +withRepo + :: forall env a. (HasLogFunc env, HasProcessContext env) + => Repo + -> RIO env a + -> RIO env a +withRepo repo@(Repo url commit repoType' _subdir) action = + withSystemTempDirectory "with-repo" $ \tmpdir -> withWorkingDir tmpdir $ do let suffix = "cloned" dir = tmpdir suffix - tarball = tmpdir "foo.tar" - let (commandName, resetArgs, submoduleArgs, archiveArgs) = + let (commandName, resetArgs, submoduleArgs) = case repoType' of RepoGit -> ( "git" , ["reset", "--hard", T.unpack commit] , Just ["submodule", "update", "--init", "--recursive"] - , ["-c", "core.autocrlf=false", "archive", "-o", tarball, "HEAD"] ) RepoHg -> ( "hg" , ["update", "-C", T.unpack commit] , Nothing - , ["archive", tarball, "-X", ".hg_archival.txt"] ) let runCommand args = void $ proc commandName args readProcess_ @@ -113,17 +172,4 @@ getRepo' repo@(Repo url commit repoType' subdir) rpm = -- ENABLE_VIRTUAL_TERMINAL_PROCESSING flag for native terminals. The -- folowing hack re-enables the lost ANSI-capability. when osIsWindows $ void $ liftIO $ hSupportsANSIWithoutEmulation stdout - runCommand archiveArgs - abs' <- resolveFile' tarball - getArchivePackage - (RPLIRepo repo rpm) - RawArchive - { raLocation = ALFilePath $ ResolvedPath - { resolvedRelative = RelFilePath $ T.pack tarball - , resolvedAbsolute = abs' - } - , raHash = Nothing - , raSize = Nothing - , raSubdir = subdir - } - rpm + action diff --git a/test/integration/tests/git-submodules/Main.hs b/test/integration/tests/git-submodules/Main.hs new file mode 100644 index 0000000000..219ec1d5ab --- /dev/null +++ b/test/integration/tests/git-submodules/Main.hs @@ -0,0 +1,47 @@ +import StackTest +import System.Directory (createDirectoryIfMissing,withCurrentDirectory) + +main :: IO () +main = do + let + gitInit = do + runShell "git init ." 
+ runShell "git config user.name Test" + runShell "git config user.email test@test.com" + + createDirectoryIfMissing True "tmpSubSubRepo" + withCurrentDirectory "tmpSubSubRepo" $ do + gitInit + stack ["new", "pkg ", defaultResolverArg] + runShell "git add pkg" + runShell "git commit -m SubSubCommit" + + createDirectoryIfMissing True "tmpSubRepo" + withCurrentDirectory "tmpSubRepo" $ do + gitInit + runShell "git submodule add ../tmpSubSubRepo sub" + runShell "git commit -a -m SubCommit" + + createDirectoryIfMissing True "tmpRepo" + withCurrentDirectory "tmpRepo" $ do + gitInit + runShell "git submodule add ../tmpSubRepo sub" + runShell "git commit -a -m Commit" + + stack ["new", defaultResolverArg, "tmpPackage"] + + withCurrentDirectory "tmpPackage" $ do + -- add git dependency on repo with recursive submodules + runShell "echo 'extra-deps:' >> stack.yaml" + runShell "echo \"- git: $(cd ../tmpRepo && pwd)\" >> stack.yaml" + runShell "echo \" commit: $(cd ../tmpRepo && git rev-parse HEAD)\" >> stack.yaml" + runShell "echo ' subdir: sub/sub/pkg' >> stack.yaml" + + -- Setup the package + stack ["setup"] + + -- cleanup + removeDirIgnore "tmpRepo" + removeDirIgnore "tmpSubRepo" + removeDirIgnore "tmpSubSubRepo" + removeDirIgnore "tmpPackage" From 93f17e6990456f57d7dbfbff3aed04103beb533d Mon Sep 17 00:00:00 2001 From: Michael Snoyman Date: Tue, 19 Feb 2019 16:29:18 +0200 Subject: [PATCH 46/80] Remove names from snapshots It doesn't seem to match our overall approach to other config files. Instead of giving a user-friendly name, I've modified the one place that was displaying the name to instead display the location of the snapshot. --- doc/pantry.md | 3 --- src/Stack/Config.hs | 2 +- src/Stack/Ghci.hs | 4 +++- src/Stack/Types/BuildPlan.hs | 5 +---- src/Stack/Types/SourceMap.hs | 3 ++- subs/pantry/src/Pantry.hs | 7 ------- subs/pantry/src/Pantry/Types.hs | 16 ---------------- 7 files changed, 7 insertions(+), 33 deletions(-) diff --git a/doc/pantry.md b/doc/pantry.md index 1b41c698c0..e4912e4126 100644 --- a/doc/pantry.md +++ b/doc/pantry.md @@ -306,8 +306,6 @@ directories is available in snapshots to ensure reproducibility. resolver: lts-8.21 # Inherits GHC version and package set compiler: ghc-8.0.1 # Overwrites GHC version in the resolver, optional -name: my-snapshot # User-friendly name - # Additional packages, follows extra-deps syntax packages: - unordered-containers-0.2.7.1 @@ -371,7 +369,6 @@ packages: pantry-tree: size: 7376 sha256: ac2601c49cf7bc0f5d66b2793eddc8352f51a6ee989980827a0d0d8169700a03 -name: my-snapshot hidden: warp: false wai: true diff --git a/src/Stack/Config.hs b/src/Stack/Config.hs index 9bd6699130..3700f41860 100644 --- a/src/Stack/Config.hs +++ b/src/Stack/Config.hs @@ -645,7 +645,7 @@ loadBuildConfig mproject maresolver mcompiler = do { smwCompiler = fromMaybe (snapshotCompiler snapshot) mcompiler , smwProject = packages , smwDeps = deps - , smwSnapshotName = snapshotName snapshot + , smwSnapshotLocation = projectResolver project } return BuildConfig diff --git a/src/Stack/Ghci.hs b/src/Stack/Ghci.hs index 4d9e606664..6c6d1ea721 100644 --- a/src/Stack/Ghci.hs +++ b/src/Stack/Ghci.hs @@ -857,7 +857,9 @@ targetWarnings localTargets nonLocalTargets mfileTargets = do prettyNote $ vsep [ flow "No local targets specified, so a plain ghci will be started with no package hiding or package options." 
, "" - , flow $ "You are using snapshot: " ++ T.unpack (smwSnapshotName smWanted) + , flow $ T.unpack $ utf8BuilderToText $ + "You are using snapshot: " <> + RIO.display (smwSnapshotLocation smWanted) , "" , flow "If you want to use package hiding and options, then you can try one of the following:" , "" diff --git a/src/Stack/Types/BuildPlan.hs b/src/Stack/Types/BuildPlan.hs index 164bbe362c..93c48a2e4c 100644 --- a/src/Stack/Types/BuildPlan.hs +++ b/src/Stack/Types/BuildPlan.hs @@ -57,10 +57,7 @@ instance Store SnapshotDef instance NFData SnapshotDef sdResolverName :: SnapshotDef -> Text -sdResolverName sd = - case sdSnapshot sd of - Nothing -> utf8BuilderToText $ display $ sdWantedCompilerVersion sd - Just (snapshot, _) -> rslName snapshot +sdResolverName = utf8BuilderToText . display . sdResolver sdSnapshots :: SnapshotDef -> [RawSnapshotLayer] sdSnapshots sd = diff --git a/src/Stack/Types/SourceMap.hs b/src/Stack/Types/SourceMap.hs index eee6c63b3b..b402a30ec3 100644 --- a/src/Stack/Types/SourceMap.hs +++ b/src/Stack/Types/SourceMap.hs @@ -87,7 +87,8 @@ data SMWanted = SMWanted { smwCompiler :: !WantedCompiler , smwProject :: !(Map PackageName ProjectPackage) , smwDeps :: !(Map PackageName DepPackage) - , smwSnapshotName :: !Text + , smwSnapshotLocation :: !RawSnapshotLocation + -- ^ Where this snapshot is loaded from. } -- | Adds in actual compiler information to 'SMWanted', in particular diff --git a/subs/pantry/src/Pantry.hs b/subs/pantry/src/Pantry.hs index a27bda97d4..de1242f393 100644 --- a/subs/pantry/src/Pantry.hs +++ b/subs/pantry/src/Pantry.hs @@ -803,7 +803,6 @@ completeSnapshotLayer rsnapshot = do { slParent = parent' , slLocations = pls , slCompiler= rslCompiler rsnapshot - , slName = rslName rsnapshot , slDropPackages = rslDropPackages rsnapshot , slFlags = rslFlags rsnapshot , slHidden = rslHidden rsnapshot @@ -893,7 +892,6 @@ loadSnapshotRaw loc = do Left wc -> pure RawSnapshot { rsCompiler = wc - , rsName = utf8BuilderToText $ display wc , rsPackages = mempty , rsDrop = mempty } @@ -913,7 +911,6 @@ loadSnapshotRaw loc = do warnUnusedAddPackagesConfig (display loc) unused pure RawSnapshot { rsCompiler = fromMaybe (rsCompiler snap0) (rslCompiler rsl) - , rsName = rslName rsl , rsPackages = packages , rsDrop = apcDrop unused } @@ -931,7 +928,6 @@ loadSnapshot loc = do Left wc -> pure RawSnapshot { rsCompiler = wc - , rsName = utf8BuilderToText $ display wc , rsPackages = mempty , rsDrop = mempty } @@ -951,7 +947,6 @@ loadSnapshot loc = do warnUnusedAddPackagesConfig (display loc) unused pure RawSnapshot { rsCompiler = fromMaybe (rsCompiler snap0) (rslCompiler rsl) - , rsName = rslName rsl , rsPackages = packages , rsDrop = apcDrop unused } @@ -983,7 +978,6 @@ loadAndCompleteSnapshotRaw loc = do Left wc -> let snapshot = Snapshot { snapshotCompiler = wc - , snapshotName = utf8BuilderToText $ display wc , snapshotPackages = mempty , snapshotDrop = mempty } @@ -1004,7 +998,6 @@ loadAndCompleteSnapshotRaw loc = do warnUnusedAddPackagesConfig (display loc) unused let snapshot = Snapshot { snapshotCompiler = fromMaybe (snapshotCompiler snap0) (rslCompiler rsl) - , snapshotName = rslName rsl , snapshotPackages = packages , snapshotDrop = apcDrop unused } diff --git a/subs/pantry/src/Pantry/Types.hs b/subs/pantry/src/Pantry/Types.hs index 49da897ac7..989006c099 100644 --- a/subs/pantry/src/Pantry/Types.hs +++ b/subs/pantry/src/Pantry/Types.hs @@ -1838,8 +1838,6 @@ toRawSL (SLFilePath fp) = RSLFilePath fp data RawSnapshot = RawSnapshot { rsCompiler :: !WantedCompiler -- ^ The 
compiler wanted for this snapshot. - , rsName :: !Text - -- ^ The 'slName' from the top 'SnapshotLayer'. , rsPackages :: !(Map PackageName RawSnapshotPackage) -- ^ Packages available in this snapshot for installation. This will be -- applied on top of any globally available packages. @@ -1853,8 +1851,6 @@ data RawSnapshot = RawSnapshot data Snapshot = Snapshot { snapshotCompiler :: !WantedCompiler -- ^ The compiler wanted for this snapshot. - , snapshotName :: !Text - -- ^ The 'slName' from the top 'SnapshotLayer'. , snapshotPackages :: !(Map PackageName SnapshotPackage) -- ^ Packages available in this snapshot for installation. This will be -- applied on top of any globally available packages. @@ -1907,10 +1903,6 @@ data RawSnapshotLayer = RawSnapshotLayer -- 'Nothing' if using 'SLCompiler'. -- -- @since 0.1.0.0 - , rslName :: !Text - -- ^ A user-friendly way of referring to this resolver. - -- - -- @since 0.1.0.0 , rslLocations :: ![RawPackageLocationImmutable] -- ^ Where to grab all of the packages from. -- @@ -1945,7 +1937,6 @@ instance ToJSON RawSnapshotLayer where toJSON rsnap = object $ concat [ ["resolver" .= rslParent rsnap] , maybe [] (\compiler -> ["compiler" .= compiler]) (rslCompiler rsnap) - , ["name" .= rslName rsnap] , ["packages" .= rslLocations rsnap] , if Set.null (rslDropPackages rsnap) then [] @@ -1975,7 +1966,6 @@ instance FromJSON (WithJSONWarnings (Unresolved RawSnapshotLayer)) where (RSLCompiler c1, Just c2) -> throwIO $ InvalidOverrideCompiler c1 c2 _ -> pure (sl, mcompiler) - rslName <- o ..: "name" unresolvedLocs <- jsonSubWarningsT (o ..:? "packages" ..!= []) rslDropPackages <- Set.map unCabalString <$> (o ..:? "drop-packages" ..!= Set.empty) rslFlags <- (unCabalStringMap . fmap unCabalStringMap) <$> (o ..:? "flags" ..!= Map.empty) @@ -2016,10 +2006,6 @@ data SnapshotLayer = SnapshotLayer -- 'Nothing' if using 'SLCompiler'. -- -- @since 0.1.0.0 - , slName :: !Text - -- ^ A user-friendly way of referring to this resolver. - -- - -- @since 0.1.0.0 , slLocations :: ![PackageLocationImmutable] -- ^ Where to grab all of the packages from. -- @@ -2051,7 +2037,6 @@ instance ToJSON SnapshotLayer where toJSON snap = object $ concat [ ["resolver" .= slParent snap] , ["compiler" .= slCompiler snap] - , ["name" .= slName snap] , ["packages" .= slLocations snap] , if Set.null (slDropPackages snap) then [] else ["drop-packages" .= Set.map CabalString (slDropPackages snap)] , if Map.null (slFlags snap) then [] else ["flags" .= fmap toCabalStringMap (toCabalStringMap (slFlags snap))] @@ -2066,7 +2051,6 @@ toRawSnapshotLayer :: SnapshotLayer -> RawSnapshotLayer toRawSnapshotLayer sl = RawSnapshotLayer { rslParent = toRawSL (slParent sl) , rslCompiler = slCompiler sl - , rslName = slName sl , rslLocations = map toRawPLI (slLocations sl) , rslDropPackages = slDropPackages sl , rslFlags = slFlags sl From f9095401f2c30d42088788a17b9858ead60c72c5 Mon Sep 17 00:00:00 2001 From: Michael Snoyman Date: Mon, 18 Feb 2019 11:01:38 +0200 Subject: [PATCH 47/80] Faster compiled scripts Note the changelog entry, which explains a downside to this approach. --- ChangeLog.md | 6 ++++++ src/Stack/Runners.hs | 1 + src/Stack/Script.hs | 36 ++++++++++++++++++++++++++++-------- 3 files changed, 35 insertions(+), 8 deletions(-) diff --git a/ChangeLog.md b/ChangeLog.md index 015600b74a..0e7caaaf4e 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -104,6 +104,12 @@ Other enhancements: * The script interpreter now accepts a `--extra-dep` flag for adding packages not present in the snapshot. 
Currently, this only works with packages from Hackage, not Git repos or archives. +* When using the script interpreter with `--optimize` or `--compile`, + Stack will perform an optimization of checking whether a newer + executable exists, making reruns significantly faster. There's a + downside to this, however: if you have a multifile script, and + change one of the dependency modules, Stack will not automatically + detect and recompile. Bug fixes: diff --git a/src/Stack/Runners.hs b/src/Stack/Runners.hs index 341780f9d1..20a2d25e1c 100644 --- a/src/Stack/Runners.hs +++ b/src/Stack/Runners.hs @@ -22,6 +22,7 @@ module Stack.Runners , loadCompilerVersion , withUserFileLock , munlockFile + , withRunnerGlobal ) where import Stack.Prelude diff --git a/src/Stack/Script.hs b/src/Stack/Script.hs index 21d905df7f..647f1efd84 100644 --- a/src/Stack/Script.hs +++ b/src/Stack/Script.hs @@ -28,6 +28,7 @@ import Stack.Types.Compiler import Stack.Types.Config import Stack.Types.SourceMap import System.FilePath (dropExtension, replaceExtension) +import qualified RIO.Directory as Dir import RIO.Process import qualified RIO.Text as T @@ -42,6 +43,25 @@ scriptCmd opts go' = do } , globalStackYaml = SYLNoConfig $ soScriptExtraDeps opts } + + -- Optimization: if we're compiling, and the executable is newer + -- than the source file, run it immediately. + case soCompile opts of + SEInterpret -> longWay file scriptDir go + SECompile -> shortCut file scriptDir go + SEOptimize -> shortCut file scriptDir go + + where + shortCut file scriptDir go = handleIO (const $ longWay file scriptDir go) $ do + srcMod <- getModificationTime file + exeMod <- Dir.getModificationTime $ toExeName $ toFilePath file + if srcMod < exeMod + then withRunnerGlobal go' $ \runner -> + runRIO runner $ + exec (toExeName $ toFilePath file) (soArgs opts) + else longWay file scriptDir go + + longWay file scriptDir go = do withDefaultBuildConfigAndLock go $ \lk -> do -- Some warnings in case the user somehow tries to set a -- stack.yaml location. Note that in this functions we use @@ -121,16 +141,16 @@ scriptCmd opts go' = do (ghcArgs ++ [toFilePath file]) (void . readProcessStdout_) exec (toExeName $ toFilePath file) (soArgs opts) - where - toPackageName = reverse . drop 1 . dropWhile (/= '-') . reverse - -- Like words, but splits on both commas and spaces - wordsComma = splitWhen (\c -> c == ' ' || c == ',') + toPackageName = reverse . drop 1 . dropWhile (/= '-') . 
reverse + + -- Like words, but splits on both commas and spaces + wordsComma = splitWhen (\c -> c == ' ' || c == ',') - toExeName fp = - if osIsWindows - then replaceExtension fp "exe" - else dropExtension fp + toExeName fp = + if osIsWindows + then replaceExtension fp "exe" + else dropExtension fp getPackagesFromModuleInfo :: ModuleInfo From 897e3e2fb4c04165b236f91a96aec45c95e3ec80 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Wed, 20 Feb 2019 10:24:06 +0300 Subject: [PATCH 48/80] Remove superfluous 'otherwise' --- src/Stack/Build/ConstructPlan.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Stack/Build/ConstructPlan.hs b/src/Stack/Build/ConstructPlan.hs index fefb3e9f0b..5367a686de 100644 --- a/src/Stack/Build/ConstructPlan.hs +++ b/src/Stack/Build/ConstructPlan.hs @@ -507,7 +507,7 @@ installPackage name ps minstalled = do resolveDepsAndInstall True (lpBuildHaddocks lp) ps (lpPackage lp) minstalled Just tb | splitRequired -> splitInstallSteps lp tb - Just tb | otherwise -> do + Just tb -> do -- Attempt to find a plan which performs an all-in-one -- build. Ignore the writer action + reset the state if -- it fails. From dcb4a36ea1281b4080f26f299c215ded85a8533f Mon Sep 17 00:00:00 2001 From: Michael Snoyman Date: Wed, 20 Feb 2019 11:09:09 +0200 Subject: [PATCH 49/80] Fix compilation of curator --- subs/curator/app/Main.hs | 2 +- subs/curator/src/Curator/Snapshot.hs | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/subs/curator/app/Main.hs b/subs/curator/app/Main.hs index be9e1c4d3b..8565d34f4a 100644 --- a/subs/curator/app/Main.hs +++ b/subs/curator/app/Main.hs @@ -54,7 +54,7 @@ snapshotIncomplete :: RIO PantryApp () snapshotIncomplete = do logInfo "Writing snapshot-incomplete.yaml" decodeFileThrow "constraints.yaml" >>= \constraints' -> - makeSnapshot constraints' "my-test-snapshot-2" >>= + makeSnapshot constraints' >>= liftIO . 
encodeFile "snapshot-incomplete.yaml" snapshot :: RIO PantryApp () diff --git a/subs/curator/src/Curator/Snapshot.hs b/subs/curator/src/Curator/Snapshot.hs index fda357f465..48559c935c 100644 --- a/subs/curator/src/Curator/Snapshot.hs +++ b/subs/curator/src/Curator/Snapshot.hs @@ -41,9 +41,8 @@ import qualified RIO.Text.Partial as TP makeSnapshot :: (HasPantryConfig env, HasLogFunc env, HasProcessContext env) => Constraints - -> Text -- ^ name -> RIO env RawSnapshotLayer -makeSnapshot cons name = do +makeSnapshot cons = do locs <- traverseValidate (\(pn, pc) -> (pn,) <$> toLoc pn pc) $ Map.toList $ consPackages cons @@ -53,7 +52,6 @@ makeSnapshot cons name = do RawSnapshotLayer { rslParent = RSLCompiler $ WCGhc $ consGhcVersion cons , rslCompiler = Nothing - , rslName = name , rslLocations = mapMaybe snd locs , rslDropPackages = mempty , rslFlags = Map.mapMaybeWithKey (\pn pc -> if (inSnapshot pn) then getFlags pc else Nothing) From 5f3b44966904397d460e3e87e3cd1cbaa179920a Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Thu, 21 Feb 2019 09:33:06 +0300 Subject: [PATCH 50/80] Pass proper build haddocks flag --- src/Stack/Build/ConstructPlan.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Stack/Build/ConstructPlan.hs b/src/Stack/Build/ConstructPlan.hs index 5367a686de..5be056daef 100644 --- a/src/Stack/Build/ConstructPlan.hs +++ b/src/Stack/Build/ConstructPlan.hs @@ -521,7 +521,7 @@ installPackage name ps minstalled = do case res of Right deps -> do planDebug $ "installPackage: For " ++ show name ++ ", successfully added package deps" - adr <- installPackageGivenDeps True False ps tb minstalled deps + adr <- installPackageGivenDeps True (lpBuildHaddocks lp) ps tb minstalled deps -- FIXME: this redundantly adds the deps (but -- they'll all just get looked up in the map) addFinal lp tb True False From cd68816373d4521c0f252cab7bb3f89b5412bf61 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Fri, 22 Feb 2019 13:01:52 +0300 Subject: [PATCH 51/80] More correct handlding of curator expectations --- src/Stack/Build/Execute.hs | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/src/Stack/Build/Execute.hs b/src/Stack/Build/Execute.hs index be163318f8..99a51eec57 100644 --- a/src/Stack/Build/Execute.hs +++ b/src/Stack/Build/Execute.hs @@ -1458,7 +1458,7 @@ singleBuild ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap (_, True) | null acDownstream || installedMapHasThisPkg -> do initialBuildSteps executableBuildStatuses cabal announce return Nothing - _ -> fulfillCuratorExpectations pname mcurator enableTests enableBenchmarks Nothing $ + _ -> fulfillCuratorBuildExpectations pname mcurator enableTests enableBenchmarks Nothing $ Just <$> realBuild cache package pkgDir cabal0 announce executableBuildStatuses initialBuildSteps executableBuildStatuses cabal announce = do @@ -1834,7 +1834,7 @@ singleTest topts testsToRun ac ee task installedMap = do } let emptyResult = Map.singleton testName Nothing withProcessContext menv $ if exists - then fulfillCuratorExpectations pname mcurator True False emptyResult $ do + then do -- We clear out the .tix files before doing a run. 
when needHpc $ do tixexists <- doesFileExist tixPath @@ -1904,7 +1904,9 @@ singleTest topts testsToRun ac ee task installedMap = do return Map.empty _ -> do announceResult "failed" - return $ Map.singleton testName (Just ec) + if expectFailure + then return Map.empty + else return $ Map.singleton testName (Just ec) else do unless expectFailure $ logError $ displayShow $ TestSuiteExeMissing (packageBuildType package == C.Simple) @@ -2210,8 +2212,8 @@ expectBenchmarkFailure :: PackageName -> Maybe Curator -> Bool expectBenchmarkFailure pname mcurator = maybe False (Set.member pname . curatorExpectBenchmarkFailure) mcurator -fulfillCuratorExpectations :: - (HasLogFunc env) +fulfillCuratorBuildExpectations :: + (HasLogFunc env, HasCallStack) => PackageName -> Maybe Curator -> Bool @@ -2219,21 +2221,21 @@ fulfillCuratorExpectations :: -> b -> RIO env b -> RIO env b -fulfillCuratorExpectations pname mcurator enableTests _ defValue action | enableTests && +fulfillCuratorBuildExpectations pname mcurator enableTests _ defValue action | enableTests && expectTestFailure pname mcurator = do eres <- tryAny action case eres of Right res -> do - logWarn $ fromString (packageNameString pname) <> ": unexpected test success" + logWarn $ fromString (packageNameString pname) <> ": unexpected test build success" return res Left _ -> return defValue -fulfillCuratorExpectations pname mcurator _ enableBench defValue action | enableBench && +fulfillCuratorBuildExpectations pname mcurator _ enableBench defValue action | enableBench && expectBenchmarkFailure pname mcurator = do eres <- tryAny action case eres of Right res -> do - logWarn $ fromString (packageNameString pname) <> ": unexpected benchmark success" + logWarn $ fromString (packageNameString pname) <> ": unexpected benchmark build success" return res Left _ -> return defValue -fulfillCuratorExpectations _ _ _ _ _ action = do +fulfillCuratorBuildExpectations _ _ _ _ _ action = do action From 62d71bd700275470a620226e727dbe9c42d6fc16 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Fri, 22 Feb 2019 16:10:28 +0300 Subject: [PATCH 52/80] Removed InstalledCache as is should be guaranteed by implicit snapshots --- package.yaml | 1 - src/Stack/Build.hs | 11 +-- src/Stack/Build/ConstructPlan.hs | 6 +- src/Stack/Build/Execute.hs | 20 ++-- src/Stack/Build/Haddock.hs | 20 ++-- src/Stack/Build/Installed.hs | 89 +++-------------- src/Stack/Constants.hs | 4 - src/Stack/Dot.hs | 7 +- src/Stack/Ghci.hs | 8 +- src/Stack/PackageDump.hs | 153 ++---------------------------- src/Stack/SDist.hs | 7 +- src/Stack/Script.hs | 8 +- src/Stack/Setup.hs | 3 - src/Stack/Snapshot.hs | 4 +- src/Stack/SourceMap.hs | 2 +- src/Stack/StoreTH.hs | 18 ---- src/Stack/Types/Config.hs | 5 - src/Stack/Types/PackageDump.hs | 32 ------- src/test/Stack/PackageDumpSpec.hs | 31 +----- 19 files changed, 54 insertions(+), 375 deletions(-) delete mode 100644 src/Stack/Types/PackageDump.hs diff --git a/package.yaml b/package.yaml index 2d26fa2771..b54ed6eea8 100644 --- a/package.yaml +++ b/package.yaml @@ -242,7 +242,6 @@ library: - Stack.Types.NamedComponent - Stack.Types.Nix - Stack.Types.Package - - Stack.Types.PackageDump - Stack.Types.PackageName - Stack.Types.Resolver - Stack.Types.Runner diff --git a/src/Stack/Build.hs b/src/Stack/Build.hs index 40133b50d6..db7b2f6b85 100644 --- a/src/Stack/Build.hs +++ b/src/Stack/Build.hs @@ -35,7 +35,6 @@ import Distribution.Version (mkVersion) import Path (parent) import Stack.Build.ConstructPlan import Stack.Build.Execute -import Stack.Build.Haddock import 
Stack.Build.Installed import Stack.Build.Source import Stack.Package @@ -63,9 +62,6 @@ build msetLocalFiles mbuildLk = do ghcVersion <- view $ actualCompilerVersionL.to getGhcVersion fixCodePage mcp ghcVersion $ do bopts <- view buildOptsL - let profiling = boptsLibProfile bopts || boptsExeProfile bopts - let symbols = not (boptsLibStrip bopts || boptsExeStrip bopts) - sourceMap <- view $ envConfigL.to envConfigSourceMap locals <- projectLocalPackages depsLocals <- localDependencies @@ -82,12 +78,7 @@ build msetLocalFiles mbuildLk = do installMap <- toInstallMap sourceMap (installedMap, globalDumpPkgs, snapshotDumpPkgs, localDumpPkgs) <- - getInstalled - GetInstalledOpts - { getInstalledProfiling = profiling - , getInstalledHaddock = shouldHaddockDeps bopts - , getInstalledSymbols = symbols } - installMap + getInstalled installMap boptsCli <- view $ envConfigL.to envConfigBuildOptsCLI baseConfigOpts <- mkBaseConfigOpts boptsCli diff --git a/src/Stack/Build/ConstructPlan.hs b/src/Stack/Build/ConstructPlan.hs index 5be056daef..27b07a929e 100644 --- a/src/Stack/Build/ConstructPlan.hs +++ b/src/Stack/Build/ConstructPlan.hs @@ -167,7 +167,7 @@ instance HasEnvConfig Ctx where -- some of its dependencies have changed. constructPlan :: forall env. HasEnvConfig env => BaseConfigOpts - -> [DumpPackage () () ()] -- ^ locally registered + -> [DumpPackage] -- ^ locally registered -> (PackageLocationImmutable -> Map FlagName Bool -> [Text] -> RIO EnvConfig Package) -- ^ load upstream package -> SourceMap -> InstalledMap @@ -264,7 +264,7 @@ constructPlan baseConfigOpts0 localDumpPkgs loadPackage0 sourceMap installedMap -- to unregister. data UnregisterState = UnregisterState { usToUnregister :: !(Map GhcPkgId (PackageIdentifier, Text)) - , usKeep :: ![DumpPackage () () ()] + , usKeep :: ![DumpPackage] , usAnyAdded :: !Bool } @@ -274,7 +274,7 @@ mkUnregisterLocal :: Map PackageName Task -- ^ Tasks -> Map PackageName Text -- ^ Reasons why packages are dirty and must be rebuilt - -> [DumpPackage () () ()] + -> [DumpPackage] -- ^ Local package database dump -> Bool -- ^ If true, we're doing a special initialBuildSteps diff --git a/src/Stack/Build/Execute.hs b/src/Stack/Build/Execute.hs index 99a51eec57..5661c001d9 100644 --- a/src/Stack/Build/Execute.hs +++ b/src/Stack/Build/Execute.hs @@ -201,9 +201,9 @@ data ExecuteEnv = ExecuteEnv , eeTotalWanted :: !Int , eeLocals :: ![LocalPackage] , eeGlobalDB :: !(Path Abs Dir) - , eeGlobalDumpPkgs :: !(Map GhcPkgId (DumpPackage () () ())) - , eeSnapshotDumpPkgs :: !(TVar (Map GhcPkgId (DumpPackage () () ()))) - , eeLocalDumpPkgs :: !(TVar (Map GhcPkgId (DumpPackage () () ()))) + , eeGlobalDumpPkgs :: !(Map GhcPkgId DumpPackage) + , eeSnapshotDumpPkgs :: !(TVar (Map GhcPkgId DumpPackage)) + , eeLocalDumpPkgs :: !(TVar (Map GhcPkgId DumpPackage)) , eeLogFiles :: !(TChan (Path Abs Dir, Path Abs File)) , eeGetGhcPath :: !(forall m. MonadIO m => m (Path Abs File)) , eeGetGhcjsPath :: !(forall m. MonadIO m => m (Path Abs File)) @@ -306,9 +306,9 @@ withExecuteEnv :: forall env a. 
HasEnvConfig env -> BuildOptsCLI -> BaseConfigOpts -> [LocalPackage] - -> [DumpPackage () () ()] -- ^ global packages - -> [DumpPackage () () ()] -- ^ snapshot packages - -> [DumpPackage () () ()] -- ^ local packages + -> [DumpPackage] -- ^ global packages + -> [DumpPackage] -- ^ snapshot packages + -> [DumpPackage] -- ^ local packages -> (ExecuteEnv -> RIO env a) -> RIO env a withExecuteEnv bopts boptsCli baseConfigOpts locals globalPackages snapshotPackages localPackages inner = @@ -471,9 +471,9 @@ executePlan :: HasEnvConfig env => BuildOptsCLI -> BaseConfigOpts -> [LocalPackage] - -> [DumpPackage () () ()] -- ^ global packages - -> [DumpPackage () () ()] -- ^ snapshot packages - -> [DumpPackage () () ()] -- ^ local packages + -> [DumpPackage] -- ^ global packages + -> [DumpPackage] -- ^ snapshot packages + -> [DumpPackage] -- ^ local packages -> InstalledMap -> Map PackageName Target -> Plan @@ -2147,7 +2147,7 @@ taskComponents task = -- -- * https://github.com/commercialhaskell/stack/issues/949 addGlobalPackages :: Map PackageIdentifier GhcPkgId -- ^ dependencies of the package - -> [DumpPackage () () ()] -- ^ global packages + -> [DumpPackage] -- ^ global packages -> Set GhcPkgId addGlobalPackages deps globals0 = res diff --git a/src/Stack/Build/Haddock.hs b/src/Stack/Build/Haddock.hs index 8281fcdd77..6c01019bd1 100644 --- a/src/Stack/Build/Haddock.hs +++ b/src/Stack/Build/Haddock.hs @@ -103,7 +103,7 @@ generateLocalHaddockIndex :: (HasProcessContext env, HasLogFunc env) => WhichCompiler -> BaseConfigOpts - -> Map GhcPkgId (DumpPackage () () ()) -- ^ Local package dump + -> Map GhcPkgId DumpPackage -- ^ Local package dump -> [LocalPackage] -> RIO env () generateLocalHaddockIndex wc bco localDumpPkgs locals = do @@ -127,9 +127,9 @@ generateDepsHaddockIndex :: (HasProcessContext env, HasLogFunc env) => WhichCompiler -> BaseConfigOpts - -> Map GhcPkgId (DumpPackage () () ()) -- ^ Global dump information - -> Map GhcPkgId (DumpPackage () () ()) -- ^ Snapshot dump information - -> Map GhcPkgId (DumpPackage () () ()) -- ^ Local dump information + -> Map GhcPkgId DumpPackage -- ^ Global dump information + -> Map GhcPkgId DumpPackage -- ^ Snapshot dump information + -> Map GhcPkgId DumpPackage -- ^ Local dump information -> [LocalPackage] -> RIO env () generateDepsHaddockIndex wc bco globalDumpPkgs snapshotDumpPkgs localDumpPkgs locals = do @@ -170,8 +170,8 @@ generateSnapHaddockIndex :: (HasProcessContext env, HasLogFunc env) => WhichCompiler -> BaseConfigOpts - -> Map GhcPkgId (DumpPackage () () ()) -- ^ Global package dump - -> Map GhcPkgId (DumpPackage () () ()) -- ^ Snapshot package dump + -> Map GhcPkgId DumpPackage -- ^ Global package dump + -> Map GhcPkgId DumpPackage -- ^ Snapshot package dump -> RIO env () generateSnapHaddockIndex wc bco globalDumpPkgs snapshotDumpPkgs = generateHaddockIndex @@ -188,7 +188,7 @@ generateHaddockIndex => Text -> WhichCompiler -> BaseConfigOpts - -> [DumpPackage () () ()] + -> [DumpPackage] -> FilePath -> Path Abs Dir -> RIO env () @@ -225,7 +225,7 @@ generateHaddockIndex descr wc bco dumpPackages docRelFP destDir = do " already up to date at:\n" <> fromString (toFilePath destIndexFile) where - toInterfaceOpt :: DumpPackage a b c -> IO (Maybe ([String], UTCTime, Path Abs File, Path Abs File)) + toInterfaceOpt :: DumpPackage -> IO (Maybe ([String], UTCTime, Path Abs File, Path Abs File)) toInterfaceOpt DumpPackage {..} = case dpHaddockInterfaces of [] -> return Nothing @@ -275,8 +275,8 @@ generateHaddockIndex descr wc bco dumpPackages docRelFP destDir 
= do -- | Find first DumpPackage matching the GhcPkgId lookupDumpPackage :: GhcPkgId - -> [Map GhcPkgId (DumpPackage () () ())] - -> Maybe (DumpPackage () () ()) + -> [Map GhcPkgId DumpPackage] + -> Maybe DumpPackage lookupDumpPackage ghcPkgId dumpPkgs = listToMaybe $ mapMaybe (Map.lookup ghcPkgId) dumpPkgs diff --git a/src/Stack/Build/Installed.hs b/src/Stack/Build/Installed.hs index 36c82e55cd..bed4643188 100644 --- a/src/Stack/Build/Installed.hs +++ b/src/Stack/Build/Installed.hs @@ -7,7 +7,6 @@ module Stack.Build.Installed ( InstalledMap , Installed (..) - , GetInstalledOpts (..) , getInstalled , InstallMap , toInstallMap @@ -15,7 +14,6 @@ module Stack.Build.Installed import Data.Conduit import qualified Data.Conduit.List as CL -import qualified Data.Foldable as F import qualified Data.Set as Set import Data.List import qualified Data.Map.Strict as Map @@ -30,19 +28,8 @@ import Stack.Types.Compiler import Stack.Types.Config import Stack.Types.GhcPkgId import Stack.Types.Package -import Stack.Types.PackageDump import Stack.Types.SourceMap --- | Options for 'getInstalled'. -data GetInstalledOpts = GetInstalledOpts - { getInstalledProfiling :: !Bool - -- ^ Require profiling libraries? - , getInstalledHaddock :: !Bool - -- ^ Require haddocks? - , getInstalledSymbols :: !Bool - -- ^ Require debugging symbols? - } - toInstallMap :: MonadIO m => SourceMap -> m InstallMap toInstallMap sourceMap = do projectInstalls <- @@ -60,26 +47,20 @@ toInstallMap sourceMap = do -- | Returns the new InstalledMap and all of the locally registered packages. getInstalled :: HasEnvConfig env - => GetInstalledOpts - -> InstallMap -- ^ does not contain any installed information + => InstallMap -- ^ does not contain any installed information -> RIO env ( InstalledMap - , [DumpPackage () () ()] -- globally installed - , [DumpPackage () () ()] -- snapshot installed - , [DumpPackage () () ()] -- locally installed + , [DumpPackage] -- globally installed + , [DumpPackage] -- snapshot installed + , [DumpPackage] -- locally installed ) -getInstalled opts installMap = do +getInstalled {-opts-} installMap = do logDebug "Finding out which packages are already installed" snapDBPath <- packageDatabaseDeps localDBPath <- packageDatabaseLocal extraDBPaths <- packageDatabaseExtra - mcache <- - if getInstalledProfiling opts || getInstalledHaddock opts - then configInstalledCache >>= liftM Just . loadInstalledCache - else return Nothing - - let loadDatabase' = loadDatabase opts mcache installMap + let loadDatabase' = loadDatabase {-opts mcache-} installMap (installedLibs0, globalDumpPkgs) <- loadDatabase' Nothing [] (installedLibs1, _extraInstalled) <- @@ -92,10 +73,6 @@ getInstalled opts installMap = do loadDatabase' (Just (InstalledTo Local, localDBPath)) installedLibs2 let installedLibs = Map.fromList $ map lhPair installedLibs3 - F.forM_ mcache $ \cache -> do - icache <- configInstalledCache - saveInstalledCache icache cache - -- Add in the executables that are installed, making sure to only trust a -- listed installation under the right circumstances (see below) let exesToSM loc = Map.unions . 
map (exeToSM loc) @@ -134,13 +111,11 @@ getInstalled opts installMap = do -- that it has profiling if necessary, and that it matches the version and -- location needed by the SourceMap loadDatabase :: HasEnvConfig env - => GetInstalledOpts - -> Maybe InstalledCache -- ^ if Just, profiling or haddock is required - -> InstallMap -- ^ to determine which installed things we should include + => InstallMap -- ^ to determine which installed things we should include -> Maybe (InstalledPackageLocation, Path Abs Dir) -- ^ package database, Nothing for global -> [LoadHelper] -- ^ from parent databases - -> RIO env ([LoadHelper], [DumpPackage () () ()]) -loadDatabase opts mcache installMap mdb lhs0 = do + -> RIO env ([LoadHelper], [DumpPackage]) +loadDatabase installMap mdb lhs0 = do wc <- view $ actualCompilerVersionL.to whichCompiler (lhs1', dps) <- ghcPkgDump wc (fmap snd (maybeToList mdb)) $ conduitDumpPackage .| sink @@ -154,29 +129,8 @@ loadDatabase opts mcache installMap mdb lhs0 = do (lhs0 ++ lhs1) return (map (\lh -> lh { lhDeps = [] }) $ Map.elems lhs, dps) where - conduitProfilingCache = - case mcache of - Just cache | getInstalledProfiling opts -> addProfiling cache - -- Just an optimization to avoid calculating the profiling - -- values when they aren't necessary - _ -> CL.map (\dp -> dp { dpProfiling = False }) - conduitHaddockCache = - case mcache of - Just cache | getInstalledHaddock opts -> addHaddock cache - -- Just an optimization to avoid calculating the haddock - -- values when they aren't necessary - _ -> CL.map (\dp -> dp { dpHaddock = False }) - conduitSymbolsCache = - case mcache of - Just cache | getInstalledSymbols opts -> addSymbols cache - -- Just an optimization to avoid calculating the debugging - -- symbol values when they aren't necessary - _ -> CL.map (\dp -> dp { dpSymbols = False }) mloc = fmap fst mdb - sinkDP = conduitProfilingCache - .| conduitHaddockCache - .| conduitSymbolsCache - .| CL.map (isAllowed opts mcache installMap mloc &&& toLoadHelper mloc) + sinkDP = CL.map (isAllowed installMap mloc &&& toLoadHelper mloc) .| CL.consume sink = getZipSink $ (,) <$> ZipSink sinkDP @@ -208,9 +162,6 @@ processLoadResult mdb _ (reason, lh) = do " due to" <> case reason of Allowed -> " the impossible?!?!" - NeedsProfiling -> " it needing profiling." - NeedsHaddock -> " it needing haddocks." - NeedsSymbols -> " it needing debugging symbols." UnknownPkg -> " it being unknown to the resolver / extra-deps." WrongLocation mloc loc -> " wrong location: " <> displayShow (mloc, loc) WrongVersion actual wanted -> @@ -222,9 +173,6 @@ processLoadResult mdb _ (reason, lh) = do data Allowed = Allowed - | NeedsProfiling - | NeedsHaddock - | NeedsSymbols | UnknownPkg | WrongLocation (Maybe InstalledPackageLocation) InstallLocation | WrongVersion Version Version @@ -233,20 +181,11 @@ data Allowed -- | Check if a can be included in the set of installed packages or not, based -- on the package selections made by the user. This does not perform any -- dirtiness or flag change checks. 
-isAllowed :: GetInstalledOpts - -> Maybe InstalledCache - -> InstallMap +isAllowed :: InstallMap -> Maybe InstalledPackageLocation - -> DumpPackage Bool Bool Bool + -> DumpPackage -> Allowed -isAllowed opts mcache installMap mloc dp - -- Check that it can do profiling if necessary - | getInstalledProfiling opts && isJust mcache && not (dpProfiling dp) = NeedsProfiling - -- Check that it has haddocks if necessary - | getInstalledHaddock opts && isJust mcache && not (dpHaddock dp) = NeedsHaddock - -- Check that it has haddocks if necessary - | getInstalledSymbols opts && isJust mcache && not (dpSymbols dp) = NeedsSymbols - | otherwise = +isAllowed installMap mloc dp = case Map.lookup name installMap of Nothing -> -- If the sourceMap has nothing to say about this package, @@ -288,7 +227,7 @@ data LoadHelper = LoadHelper } deriving Show -toLoadHelper :: Maybe InstalledPackageLocation -> DumpPackage Bool Bool Bool -> LoadHelper +toLoadHelper :: Maybe InstalledPackageLocation -> DumpPackage -> LoadHelper toLoadHelper mloc dp = LoadHelper { lhId = gid , lhDeps = diff --git a/src/Stack/Constants.hs b/src/Stack/Constants.hs index 75e8c1deea..de32a9380a 100644 --- a/src/Stack/Constants.hs +++ b/src/Stack/Constants.hs @@ -50,7 +50,6 @@ module Stack.Constants ,relFileReadmeTxt ,relDirScript ,relFileConfigYaml - ,relFileInstalledCacheBin ,relDirSnapshots ,relDirGlobalHints ,relFileGlobalHintsYaml @@ -377,9 +376,6 @@ relDirScript = $(mkRelDir "script") relFileConfigYaml :: Path Rel File relFileConfigYaml = $(mkRelFile "config.yaml") -relFileInstalledCacheBin :: Path Rel File -relFileInstalledCacheBin = $(mkRelFile "installed-cache.bin") - relDirSnapshots :: Path Rel Dir relDirSnapshots = $(mkRelDir "snapshots") diff --git a/src/Stack/Dot.hs b/src/Stack/Dot.hs index 08a7c687d8..c64ff0cd1d 100644 --- a/src/Stack/Dot.hs +++ b/src/Stack/Dot.hs @@ -27,7 +27,7 @@ import qualified Distribution.SPDX.License as SPDX import Distribution.License (License(BSD3), licenseFromSPDX) import Distribution.Types.PackageName (mkPackageName) import Stack.Build (loadPackage) -import Stack.Build.Installed (getInstalled, GetInstalledOpts(..), toInstallMap) +import Stack.Build.Installed (getInstalled, toInstallMap) import Stack.Build.Source import Stack.Constants import Stack.Package @@ -115,8 +115,7 @@ createDependencyGraph dotOpts = do locals <- projectLocalPackages let graph = Map.fromList $ projectPackageDependencies dotOpts (filter lpWanted locals) installMap <- toInstallMap sourceMap - (installedMap, globalDump, _, _) <- getInstalled (GetInstalledOpts False False False) - installMap + (installedMap, globalDump, _, _) <- getInstalled installMap -- TODO: Can there be multiple entries for wired-in-packages? If so, -- this will choose one arbitrarily.. 
let globalDumpMap = Map.fromList $ map (\dp -> (Stack.Prelude.pkgName (dpPackageIdent dp), dp)) globalDump @@ -249,7 +248,7 @@ resolveDependencies limit graph loadPackageDeps = do createDepLoader :: HasEnvConfig env => SourceMap -> Map PackageName (InstallLocation, Installed) - -> Map PackageName (DumpPackage () () ()) + -> Map PackageName DumpPackage -> Map GhcPkgId PackageIdentifier -> (PackageName -> Version -> PackageLocationImmutable -> Map FlagName Bool -> [Text] -> RIO env (Set PackageName, DotPayload)) diff --git a/src/Stack/Ghci.hs b/src/Stack/Ghci.hs index 4d9e606664..63ef0b2daf 100644 --- a/src/Stack/Ghci.hs +++ b/src/Stack/Ghci.hs @@ -675,13 +675,7 @@ getGhciPkgInfos -> [GhciPkgDesc] -> RIO env [GhciPkgInfo] getGhciPkgInfos installMap addPkgs mfileTargets localTargets = do - (installedMap, _, _, _) <- getInstalled - GetInstalledOpts - { getInstalledProfiling = False - , getInstalledHaddock = False - , getInstalledSymbols = False - } - installMap + (installedMap, _, _, _) <- getInstalled installMap let localLibs = [ packageName (ghciDescPkg desc) | desc <- localTargets diff --git a/src/Stack/PackageDump.hs b/src/Stack/PackageDump.hs index a16fa768af..1427c645bf 100644 --- a/src/Stack/PackageDump.hs +++ b/src/Stack/PackageDump.hs @@ -12,12 +12,6 @@ module Stack.PackageDump , conduitDumpPackage , ghcPkgDump , ghcPkgDescribe - , newInstalledCache - , loadInstalledCache - , saveInstalledCache - , addProfiling - , addHaddock - , addSymbols , sinkMatching , pruneDeps ) where @@ -28,22 +22,16 @@ import Data.Attoparsec.Text as P import Data.Conduit import qualified Data.Conduit.List as CL import qualified Data.Conduit.Text as CT -import Data.List (isPrefixOf) import qualified Data.Map as Map import qualified Data.Set as Set import qualified RIO.Text as T import qualified Distribution.License as C import Distribution.ModuleName (ModuleName) -import qualified Distribution.System as OS import qualified Distribution.Text as C import Path.Extra (toFilePathNoTrailingSep) import Stack.GhcPkg -import Stack.StoreTH import Stack.Types.Compiler import Stack.Types.GhcPkgId -import Stack.Types.PackageDump -import System.Directory (getDirectoryContents, doesFileExist) -import System.Process (readProcess) -- FIXME confirm that this is correct import RIO.Process hiding (readProcess) -- | Call ghc-pkg dump with appropriate flags and stream to the given @Sink@, for a single database @@ -89,22 +77,6 @@ ghcPkgCmdArgs cmd wc mpkgDbs sink = do ] sink' = CT.decodeUtf8 .| sink --- | Create a new, empty @InstalledCache@ -newInstalledCache :: MonadIO m => m InstalledCache -newInstalledCache = liftIO $ InstalledCache <$> newIORef (InstalledCacheInner Map.empty) - --- | Load a @InstalledCache@ from disk, swallowing any errors and returning an --- empty cache. -loadInstalledCache :: HasLogFunc env => Path Abs File -> RIO env InstalledCache -loadInstalledCache path = do - m <- decodeOrLoadInstalledCache path (return $ InstalledCacheInner Map.empty) - liftIO $ InstalledCache <$> newIORef m - --- | Save a @InstalledCache@ to disk -saveInstalledCache :: HasLogFunc env => Path Abs File -> InstalledCache -> RIO env () -saveInstalledCache path (InstalledCache ref) = - readIORef ref >>= encodeInstalledCache path - -- | Prune a list of possible packages down to those whose dependencies are met. -- -- * id uniquely identifies an item @@ -148,14 +120,9 @@ pruneDeps getName getId getDepends chooseBest = -- | Find the package IDs matching the given constraints with all dependencies installed. 
-- Packages not mentioned in the provided @Map@ are allowed to be present too. sinkMatching :: Monad m - => Bool -- ^ require profiling? - -> Bool -- ^ require haddock? - -> Bool -- ^ require debugging symbols? - -> Map PackageName Version -- ^ allowed versions - -> ConduitM (DumpPackage Bool Bool Bool) o - m - (Map PackageName (DumpPackage Bool Bool Bool)) -sinkMatching reqProfiling reqHaddock reqSymbols allowed = + => Map PackageName Version -- ^ allowed versions + -> ConduitM DumpPackage o m (Map PackageName DumpPackage) +sinkMatching allowed = Map.fromList . map (pkgName . dpPackageIdent &&& id) . Map.elems @@ -164,117 +131,15 @@ sinkMatching reqProfiling reqHaddock reqSymbols allowed = dpGhcPkgId dpDepends const -- Could consider a better comparison in the future - <$> (CL.filter predicate .| CL.consume) + <$> (CL.filter (isAllowed . dpPackageIdent) .| CL.consume) where - predicate dp = - isAllowed (dpPackageIdent dp) && - (not reqProfiling || dpProfiling dp) && - (not reqHaddock || dpHaddock dp) && - (not reqSymbols || dpSymbols dp) - isAllowed (PackageIdentifier name version) = case Map.lookup name allowed of Just version' | version /= version' -> False _ -> True --- | Add profiling information to the stream of @DumpPackage@s -addProfiling :: MonadIO m - => InstalledCache - -> ConduitM (DumpPackage a b c) (DumpPackage Bool b c) m () -addProfiling (InstalledCache ref) = - CL.mapM go - where - go dp = liftIO $ do - InstalledCacheInner m <- readIORef ref - let gid = dpGhcPkgId dp - p <- case Map.lookup gid m of - Just installed -> return (installedCacheProfiling installed) - Nothing | null (dpLibraries dp) -> return True - Nothing -> do - let loop [] = return False - loop (dir:dirs) = do - econtents <- tryIO $ getDirectoryContents dir - let contents = either (const []) id econtents - if or [isProfiling content lib - | content <- contents - , lib <- dpLibraries dp - ] && not (null contents) - then return True - else loop dirs - loop $ dpLibDirs dp - return dp { dpProfiling = p } - -isProfiling :: FilePath -- ^ entry in directory - -> Text -- ^ name of library - -> Bool -isProfiling content lib = - prefix `T.isPrefixOf` T.pack content - where - prefix = T.concat ["lib", lib, "_p"] - --- | Add haddock information to the stream of @DumpPackage@s -addHaddock :: MonadIO m - => InstalledCache - -> ConduitM (DumpPackage a b c) (DumpPackage a Bool c) m () -addHaddock (InstalledCache ref) = - CL.mapM go - where - go dp = liftIO $ do - InstalledCacheInner m <- readIORef ref - let gid = dpGhcPkgId dp - h <- case Map.lookup gid m of - Just installed -> return (installedCacheHaddock installed) - Nothing | not (dpHasExposedModules dp) -> return True - Nothing -> do - let loop [] = return False - loop (ifc:ifcs) = do - exists <- doesFileExist ifc - if exists - then return True - else loop ifcs - loop $ dpHaddockInterfaces dp - return dp { dpHaddock = h } - --- | Add debugging symbol information to the stream of @DumpPackage@s -addSymbols :: MonadIO m - => InstalledCache - -> ConduitM (DumpPackage a b c) (DumpPackage a b Bool) m () -addSymbols (InstalledCache ref) = - CL.mapM go - where - go dp = do - InstalledCacheInner m <- liftIO $ readIORef ref - let gid = dpGhcPkgId dp - s <- case Map.lookup gid m of - Just installed -> return (installedCacheSymbols installed) - Nothing | null (dpLibraries dp) -> return True - Nothing -> - case dpLibraries dp of - [] -> return True - lib:_ -> - liftM or . 
mapM (\dir -> liftIO $ hasDebuggingSymbols dir (T.unpack lib)) $ dpLibDirs dp - return dp { dpSymbols = s } - -hasDebuggingSymbols :: FilePath -- ^ library directory - -> String -- ^ name of library - -> IO Bool -hasDebuggingSymbols dir lib = do - let path = concat [dir, "/lib", lib, ".a"] - exists <- doesFileExist path - if not exists then return False - else case OS.buildOS of - OS.OSX -> liftM (any (isPrefixOf "0x") . lines) $ - readProcess "dwarfdump" [path] "" - OS.Linux -> liftM (any (isPrefixOf "Contents") . lines) $ - readProcess "readelf" ["--debug-dump=info", "--dwarf-depth=1", path] "" - OS.FreeBSD -> liftM (any (isPrefixOf "Contents") . lines) $ - readProcess "readelf" ["--debug-dump=info", "--dwarf-depth=1", path] "" - OS.Windows -> return False -- No support, so it can't be there. - _ -> return False - - -- | Dump information for a single package -data DumpPackage profiling haddock symbols = DumpPackage +data DumpPackage = DumpPackage { dpGhcPkgId :: !GhcPkgId , dpPackageIdent :: !PackageIdentifier , dpParentLibIdent :: !(Maybe PackageIdentifier) @@ -286,9 +151,6 @@ data DumpPackage profiling haddock symbols = DumpPackage , dpDepends :: ![GhcPkgId] , dpHaddockInterfaces :: ![FilePath] , dpHaddockHtml :: !(Maybe FilePath) - , dpProfiling :: !profiling - , dpHaddock :: !haddock - , dpSymbols :: !symbols , dpIsExposed :: !Bool } deriving (Show, Eq) @@ -310,7 +172,7 @@ instance Show PackageDumpException where -- | Convert a stream of bytes into a stream of @DumpPackage@s conduitDumpPackage :: MonadThrow m - => ConduitM Text (DumpPackage () () ()) m () + => ConduitM Text DumpPackage m () conduitDumpPackage = (.| CL.catMaybes) $ eachSection $ do pairs <- eachPair (\k -> (k, ) <$> CL.consume) .| CL.consume let m = Map.fromList pairs @@ -388,9 +250,6 @@ conduitDumpPackage = (.| CL.catMaybes) $ eachSection $ do , dpDepends = depends , dpHaddockInterfaces = haddockInterfaces , dpHaddockHtml = listToMaybe haddockHtml - , dpProfiling = () - , dpHaddock = () - , dpSymbols = () , dpIsExposed = exposed == ["True"] } diff --git a/src/Stack/SDist.hs b/src/Stack/SDist.hs index 9d00f15159..12e858654b 100644 --- a/src/Stack/SDist.hs +++ b/src/Stack/SDist.hs @@ -172,12 +172,7 @@ getCabalLbs pvpBounds mrev cabalfp sourceMap = do unless (cabalfp == cabalfp') $ error $ "getCabalLbs: cabalfp /= cabalfp': " ++ show (cabalfp, cabalfp') installMap <- toInstallMap sourceMap - (installedMap, _, _, _) <- getInstalled GetInstalledOpts - { getInstalledProfiling = False - , getInstalledHaddock = False - , getInstalledSymbols = False - } - installMap + (installedMap, _, _, _) <- getInstalled installMap let internalPackages = Set.fromList $ gpdPackageName gpd : map (Cabal.unqualComponentNameToPackageName . fst) (Cabal.condSubLibraries gpd) diff --git a/src/Stack/Script.hs b/src/Stack/Script.hs index 72b0ea06b6..addf22491a 100644 --- a/src/Stack/Script.hs +++ b/src/Stack/Script.hs @@ -215,13 +215,7 @@ getModuleInfo = do sourceMap <- view $ envConfigL . 
to envConfigSourceMap installMap <- toInstallMap sourceMap (_installedMap, globalDumpPkgs, snapshotDumpPkgs, _localDumpPkgs) <- - getInstalled - GetInstalledOpts - { getInstalledProfiling = False - , getInstalledHaddock = False - , getInstalledSymbols = False - } - installMap + getInstalled installMap let globals = toModuleInfo (smGlobal sourceMap) globalDumpPkgs notHiddenDeps = notHidden $ smDeps sourceMap installedDeps = toModuleInfo notHiddenDeps snapshotDumpPkgs diff --git a/src/Stack/Setup.hs b/src/Stack/Setup.hs index 21015e631b..c25d4cc0ec 100644 --- a/src/Stack/Setup.hs +++ b/src/Stack/Setup.hs @@ -293,9 +293,6 @@ setupEnv needTargets boptsCLI mResolveMissingGHC = do , dpDepends = [] , dpHaddockInterfaces = [] , dpHaddockHtml = Nothing - , dpProfiling = () - , dpHaddock = () - , dpSymbols = () , dpIsExposed = True } fakeDump = sma { diff --git a/src/Stack/Snapshot.hs b/src/Stack/Snapshot.hs index 3fd4792d16..fa50b8e9c5 100644 --- a/src/Stack/Snapshot.hs +++ b/src/Stack/Snapshot.hs @@ -406,14 +406,14 @@ loadCompiler cv = do , lsPackages = Map.empty } where - toGlobals :: Map GhcPkgId (DumpPackage () () ()) + toGlobals :: Map GhcPkgId DumpPackage -> Map PackageName (LoadedPackageInfo GhcPkgId) toGlobals m = Map.fromList $ map go $ Map.elems m where identMap = Map.map dpPackageIdent m - go :: DumpPackage () () () -> (PackageName, LoadedPackageInfo GhcPkgId) + go :: DumpPackage -> (PackageName, LoadedPackageInfo GhcPkgId) go dp = (name, lpi) where diff --git a/src/Stack/SourceMap.hs b/src/Stack/SourceMap.hs index 6a4ddea2de..d920ddf4e1 100644 --- a/src/Stack/SourceMap.hs +++ b/src/Stack/SourceMap.hs @@ -138,7 +138,7 @@ globalsFromHints compiler = do logWarn $ "Unable to load global hints for " <> RIO.display compiler pure mempty -type DumpedGlobalPackage = DumpPackage () () () +type DumpedGlobalPackage = DumpPackage actualFromGhc :: (HasConfig env) diff --git a/src/Stack/StoreTH.hs b/src/Stack/StoreTH.hs index 276b0c46dc..5733e63697 100644 --- a/src/Stack/StoreTH.hs +++ b/src/Stack/StoreTH.hs @@ -8,9 +8,6 @@ module Stack.StoreTH , decodePrecompiledCache , encodePrecompiledCache - , decodeOrLoadInstalledCache - , encodeInstalledCache - , decodeOrLoadLoadedSnapshot ) where @@ -18,7 +15,6 @@ import Data.Store.Version import Stack.Prelude import Stack.Types.Build import Stack.Types.BuildPlan -import Stack.Types.PackageDump decodeConfigCache :: HasLogFunc env @@ -46,20 +42,6 @@ encodePrecompiledCache -> RIO env () encodePrecompiledCache = $(versionedEncodeFile precompiledCacheVC) -decodeOrLoadInstalledCache - :: HasLogFunc env - => Path Abs File - -> RIO env InstalledCacheInner - -> RIO env InstalledCacheInner -decodeOrLoadInstalledCache = $(versionedDecodeOrLoad installedCacheVC) - -encodeInstalledCache - :: HasLogFunc env - => Path Abs File - -> InstalledCacheInner - -> RIO env () -encodeInstalledCache = $(versionedEncodeFile installedCacheVC) - decodeOrLoadLoadedSnapshot :: HasLogFunc env => Path Abs File diff --git a/src/Stack/Types/Config.hs b/src/Stack/Types/Config.hs index fee917948b..e42fcca637 100644 --- a/src/Stack/Types/Config.hs +++ b/src/Stack/Types/Config.hs @@ -103,7 +103,6 @@ module Stack.Types.Config ,SCM(..) -- * Paths ,bindirSuffix - ,configInstalledCache ,configLoadedSnapshotCache ,GlobalInfoSource(..) 
,getProjectWorkDir @@ -1174,10 +1173,6 @@ getProjectWorkDir = do workDir <- view workDirL return (root workDir) --- | File containing the installed cache, see "Stack.PackageDump" -configInstalledCache :: (HasBuildConfig env, MonadReader env m) => m (Path Abs File) -configInstalledCache = liftM ( relFileInstalledCacheBin) getProjectWorkDir - -- | Relative directory for the platform identifier platformOnlyRelDir :: (MonadReader env m, HasPlatform env, MonadThrow m) diff --git a/src/Stack/Types/PackageDump.hs b/src/Stack/Types/PackageDump.hs deleted file mode 100644 index 9e72c7105a..0000000000 --- a/src/Stack/Types/PackageDump.hs +++ /dev/null @@ -1,32 +0,0 @@ -{-# LANGUAGE NoImplicitPrelude #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -module Stack.Types.PackageDump - ( InstalledCache(..) - , InstalledCacheInner(..) - , InstalledCacheEntry(..) - , installedCacheVC - ) where - -import Data.Store -import Data.Store.Version -import Stack.Prelude -import Stack.Types.GhcPkgId - --- | Cached information on whether package have profiling libraries and haddocks. -newtype InstalledCache = InstalledCache (IORef InstalledCacheInner) -newtype InstalledCacheInner = InstalledCacheInner (Map GhcPkgId InstalledCacheEntry) - deriving (Store, Generic, Eq, Show, Data, Typeable) - --- | Cached information on whether a package has profiling libraries and haddocks. -data InstalledCacheEntry = InstalledCacheEntry - { installedCacheProfiling :: !Bool - , installedCacheHaddock :: !Bool - , installedCacheSymbols :: !Bool - , installedCacheIdent :: !PackageIdentifier } - deriving (Eq, Generic, Show, Data, Typeable) -instance Store InstalledCacheEntry - -installedCacheVC :: VersionConfig InstalledCacheInner -installedCacheVC = storeVersionConfig "installed-v2" "eHLVmgbOWvPSm1X3wLfclM-XiXc=" diff --git a/src/test/Stack/PackageDumpSpec.hs b/src/test/Stack/PackageDumpSpec.hs index 2cb1809a8d..fae5741f40 100644 --- a/src/test/Stack/PackageDumpSpec.hs +++ b/src/test/Stack/PackageDumpSpec.hs @@ -88,9 +88,6 @@ spec = do , dpHasExposedModules = True , dpHaddockInterfaces = ["/opt/ghc/7.8.4/share/doc/ghc/html/libraries/haskell2010-1.1.2.0/haskell2010.haddock"] , dpHaddockHtml = Just "/opt/ghc/7.8.4/share/doc/ghc/html/libraries/haskell2010-1.1.2.0" - , dpProfiling = () - , dpHaddock = () - , dpSymbols = () , dpIsExposed = False , dpExposedModules = mempty } @@ -133,9 +130,6 @@ spec = do , dpDepends = depends , dpLibraries = ["HSghc-7.10.1-EMlWrQ42XY0BNVbSrKixqY"] , dpHasExposedModules = True - , dpProfiling = () - , dpHaddock = () - , dpSymbols = () , dpIsExposed = False , dpExposedModules = mempty } @@ -175,9 +169,6 @@ spec = do , dpDepends = depends , dpLibraries = ["HShmatrix-0.16.1.5"] , dpHasExposedModules = True - , dpProfiling = () - , dpHaddock = () - , dpSymbols = () , dpIsExposed = True , dpExposedModules = Set.fromList ["Data.Packed","Data.Packed.Vector","Data.Packed.Matrix","Data.Packed.Foreign","Data.Packed.ST","Data.Packed.Development","Numeric.LinearAlgebra","Numeric.LinearAlgebra.LAPACK","Numeric.LinearAlgebra.Algorithms","Numeric.Container","Numeric.LinearAlgebra.Util","Numeric.LinearAlgebra.Devel","Numeric.LinearAlgebra.Data","Numeric.LinearAlgebra.HMatrix","Numeric.LinearAlgebra.Static"] } @@ -211,31 +202,15 @@ spec = do , dpDepends = depends , dpLibraries = ["HSghc-boot-0.0.0.0"] , dpHasExposedModules = True - , dpProfiling = () - , dpHaddock = () - , dpSymbols = () , dpIsExposed = True , dpExposedModules = Set.fromList ["GHC.Lexeme", 
"GHC.PackageDb"] } - it "ghcPkgDump + addProfiling + addHaddock" $ runEnvNoLogging $ do - icache <- newInstalledCache - ghcPkgDump Ghc [] - $ conduitDumpPackage - .| addProfiling icache - .| addHaddock icache - .| fakeAddSymbols - .| CL.sinkNull - it "sinkMatching" $ runEnvNoLogging $ do - icache <- newInstalledCache m <- ghcPkgDump Ghc [] $ conduitDumpPackage - .| addProfiling icache - .| addHaddock icache - .| fakeAddSymbols - .| sinkMatching False False False (Map.singleton (mkPackageName "transformers") (mkVersion [0, 0, 0, 0, 0, 0, 1])) + .| sinkMatching (Map.singleton (mkPackageName "transformers") (mkVersion [0, 0, 0, 0, 0, 0, 1])) case Map.lookup (mkPackageName "base") m of Nothing -> error "base not present" Just _ -> return () @@ -284,10 +259,6 @@ checkDepsPresent prunes selected = Nothing -> error "checkDepsPresent: missing in depMap" Just deps -> Set.null $ Set.difference (Set.fromList deps) allIds --- addSymbols can't be reasonably tested like this -fakeAddSymbols :: Monad m => ConduitM (DumpPackage a b c) (DumpPackage a b Bool) m () -fakeAddSymbols = CL.map (\dp -> dp { dpSymbols = False }) - runEnvNoLogging :: RIO LoggedProcessContext a -> IO a runEnvNoLogging inner = do envVars <- view envVarsL <$> mkDefaultProcessContext From bf5288a42c7d3d9a31cdf4467424a7feefd8452d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Niklas=20Hamb=C3=BCchen?= Date: Fri, 22 Feb 2019 14:07:15 +0100 Subject: [PATCH 53/80] stack ghci: Make shown options easily copy-pastable. Turns 1. Package `p' component exe:myexe with main-is file ... into 1. Package `p' component p:exe:myexe with main-is file ... so that you can easily copy-paste it into stack ghci p:exe:myexe --- ChangeLog.md | 2 ++ src/Stack/Ghci.hs | 7 +++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/ChangeLog.md b/ChangeLog.md index 0e7caaaf4e..3d2a543d8f 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -83,6 +83,8 @@ Other enhancements: * Stack parses and respects the `preferred-versions` information from Hackage for choosing latest version of a package in some cases, e.g. `stack unpack packagename`. +* The components output in the `The main module to load is ambiguous` message + now include package names so they can be more easily copy-pasted. * Git repos are shared across multiple projects. See [#3551](https://github.com/commercialhaskell/stack/issues/3551) * Use en_US.UTF-8 locale by default in pure Nix mode so programs won't diff --git a/src/Stack/Ghci.hs b/src/Stack/Ghci.hs index 4d9e606664..5bcb2804d0 100644 --- a/src/Stack/Ghci.hs +++ b/src/Stack/Ghci.hs @@ -561,10 +561,13 @@ figureOutMainFile bopts mainIsTargets targets0 packages = do wantedPackageComponents bopts target (ghciPkgPackage pkg) renderCandidate c@(pkgName,namedComponent,mainIs) = let candidateIndex = T.pack . show . (+1) . fromMaybe 0 . elemIndex c + pkgNameText = T.pack (packageNameString pkgName) in candidateIndex candidates <> ". Package `" <> - T.pack (packageNameString pkgName) <> + pkgNameText <> "' component " <> - renderComp namedComponent <> + -- This is the format that can be directly copy-pasted as + -- an argument to `stack ghci`. + pkgNameText <> ":" <> renderComp namedComponent <> " with main-is file: " <> T.pack (toFilePath mainIs) candidateIndices = take (length candidates) [1 :: Int ..] 
From 2e075f438effba7f2f1ceae05bbbccb7f74a20a4 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Mon, 25 Feb 2019 12:21:59 +0300 Subject: [PATCH 54/80] Remove commented out parameter --- src/Stack/Build/ConstructPlan.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Stack/Build/ConstructPlan.hs b/src/Stack/Build/ConstructPlan.hs index 27b07a929e..3890cd46b1 100644 --- a/src/Stack/Build/ConstructPlan.hs +++ b/src/Stack/Build/ConstructPlan.hs @@ -351,7 +351,7 @@ mkUnregisterLocal tasks dirtyReason localDumpPkgs initialBuildSteps = -- step. addFinal :: LocalPackage -> Package -> Bool -> Bool -> M () addFinal lp package isAllInOne buildHaddocks = do - depsRes <- addPackageDeps {-False-} package + depsRes <- addPackageDeps package res <- case depsRes of Left e -> return $ Left e Right (missing, present, _minLoc) -> do From c02a34848c377be8d12bc203c4081a00ae454454 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Mon, 25 Feb 2019 12:42:28 +0300 Subject: [PATCH 55/80] Proper all-in-one for expected test failures Prevent all-in-one build only for not yet installed library when test failures are expected --- src/Stack/Build/ConstructPlan.hs | 31 +++++++++++++++---------------- 1 file changed, 15 insertions(+), 16 deletions(-) diff --git a/src/Stack/Build/ConstructPlan.hs b/src/Stack/Build/ConstructPlan.hs index 3890cd46b1..76df04b56c 100644 --- a/src/Stack/Build/ConstructPlan.hs +++ b/src/Stack/Build/ConstructPlan.hs @@ -498,15 +498,10 @@ installPackage name ps minstalled = do package <- loadPackage ctx pkgLoc (cpFlags cp) (cpGhcOptions cp) resolveDepsAndInstall True (cpHaddocks cp) ps package minstalled PSFilePath lp -> do - -- in curator builds we can't do all-in-one build as test/benchmark failure - -- could prevent library from being available to its dependencies - splitRequired <- expectedTestOrBenchFailures <$> asks mcurator case lpTestBench lp of Nothing -> do planDebug $ "installPackage: No test / bench component for " ++ show name ++ " so doing an all-in-one build." resolveDepsAndInstall True (lpBuildHaddocks lp) ps (lpPackage lp) minstalled - Just tb | splitRequired -> - splitInstallSteps lp tb Just tb -> do -- Attempt to find a plan which performs an all-in-one -- build. Ignore the writer action + reset the state if @@ -522,9 +517,16 @@ installPackage name ps minstalled = do Right deps -> do planDebug $ "installPackage: For " ++ show name ++ ", successfully added package deps" adr <- installPackageGivenDeps True (lpBuildHaddocks lp) ps tb minstalled deps + -- in curator builds we can't do all-in-one build as test/benchmark failure + -- could prevent library from being available to its dependencies + -- but when it's already available it's OK to do that + splitRequired <- expectedTestOrBenchFailures <$> asks mcurator + let finalAllInOne = case adr of + ADRToInstall _ | splitRequired -> False + _ -> True -- FIXME: this redundantly adds the deps (but -- they'll all just get looked up in the map) - addFinal lp tb True False + addFinal lp tb finalAllInOne False return $ Right adr Left _ -> do -- Reset the state to how it was before @@ -534,22 +536,19 @@ installPackage name ps minstalled = do put s -- Otherwise, fall back on building the -- tests / benchmarks in a separate step. - splitInstallSteps lp tb + res' <- resolveDepsAndInstall False (lpBuildHaddocks lp) ps (lpPackage lp) minstalled + when (isRight res') $ do + -- Insert it into the map so that it's + -- available for addFinal. 
+ updateLibMap name res' + addFinal lp tb False False + return res' where expectedTestOrBenchFailures maybeCurator = fromMaybe False $ do curator <- maybeCurator pure $ Set.member name (curatorExpectTestFailure curator) || Set.member name (curatorExpectBenchmarkFailure curator) - splitInstallSteps lp tb = do - res' <- resolveDepsAndInstall False (lpBuildHaddocks lp) ps (lpPackage lp) minstalled - when (isRight res') $ do - -- Insert it into the map so that it's - -- available for addFinal. - updateLibMap name res' - addFinal lp tb False False - return res' - resolveDepsAndInstall :: Bool -> Bool -> PackageSource From e09f467e15b131a0f54d252db400bbeff4afb572 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Mon, 25 Feb 2019 12:46:44 +0300 Subject: [PATCH 56/80] Better handling of test timeouts --- src/Stack/Build/Execute.hs | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/Stack/Build/Execute.hs b/src/Stack/Build/Execute.hs index 5661c001d9..9797a40e4e 100644 --- a/src/Stack/Build/Execute.hs +++ b/src/Stack/Build/Execute.hs @@ -1863,15 +1863,10 @@ singleTest topts testsToRun ac ee task installedMap = do OTLogFile _ h -> setter (useHandleOpen h) optionalTimeout action | Just maxSecs <- toMaximumTimeSeconds topts, maxSecs > 0 = do - mres <- timeout (maxSecs * 1000000) action - case mres of - Nothing -> throwString $ "test suite timed out, package " <> - packageNameString pname <> ", suite: " <> - T.unpack testName <> T.unpack argsDisplay - Just res -> return res - | otherwise = action - - ec <- withWorkingDir (toFilePath pkgDir) $ + timeout (maxSecs * 1000000) action + | otherwise = Just <$> action + + mec <- withWorkingDir (toFilePath pkgDir) $ optionalTimeout $ proc (toFilePath exePath) args $ \pc0 -> do stdinBS <- if isTestTypeLib @@ -1898,11 +1893,16 @@ singleTest topts testsToRun ac ee task installedMap = do when needHpc $ updateTixFile (packageName package) tixPath testName' let announceResult result = announce $ "Test suite " <> testName <> " " <> result - case ec of - ExitSuccess -> do + case mec of + Just ExitSuccess -> do announceResult "passed" return Map.empty - _ -> do + Nothing -> do + announceResult "timed out" + if expectFailure + then return Map.empty + else return $ Map.singleton testName Nothing + Just ec -> do announceResult "failed" if expectFailure then return Map.empty From 455996da00c4977cd641dc7e5663cea8caeb9d5b Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Mon, 25 Feb 2019 15:22:20 +0300 Subject: [PATCH 57/80] Fix all-in-one for tasks with expected failures --- src/Stack/Build/ConstructPlan.hs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/Stack/Build/ConstructPlan.hs b/src/Stack/Build/ConstructPlan.hs index 76df04b56c..70af6156f0 100644 --- a/src/Stack/Build/ConstructPlan.hs +++ b/src/Stack/Build/ConstructPlan.hs @@ -516,11 +516,12 @@ installPackage name ps minstalled = do case res of Right deps -> do planDebug $ "installPackage: For " ++ show name ++ ", successfully added package deps" - adr <- installPackageGivenDeps True (lpBuildHaddocks lp) ps tb minstalled deps -- in curator builds we can't do all-in-one build as test/benchmark failure -- could prevent library from being available to its dependencies -- but when it's already available it's OK to do that splitRequired <- expectedTestOrBenchFailures <$> asks mcurator + let isAllInOne = not splitRequired + adr <- installPackageGivenDeps isAllInOne (lpBuildHaddocks lp) ps tb minstalled deps let finalAllInOne = case adr of ADRToInstall 
_ | splitRequired -> False _ -> True From b4d6ffd592ca1badf404c1d4ee168a03b228c70c Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Tue, 26 Feb 2019 09:57:42 +0300 Subject: [PATCH 58/80] Disable colored output when running Stackage builds --- subs/curator/app/Main.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/subs/curator/app/Main.hs b/subs/curator/app/Main.hs index d48673bc45..bca69f3844 100644 --- a/subs/curator/app/Main.hs +++ b/subs/curator/app/Main.hs @@ -124,7 +124,7 @@ build = do logInfo "Building" withWorkingDir "unpack-dir" $ proc "stack" - (words "build --test --bench --test-suite-timeout=600 --no-rerun-tests --no-run-benchmarks --haddock") + (words "build --test --bench --test-suite-timeout=600 --no-rerun-tests --no-run-benchmarks --haddock --color never") runProcess_ loadPantrySnapshotLayerFile :: FilePath -> RIO PantryApp RawSnapshotLayer From d1d9b5f50caa69a36ef06c6ece4b992f0aeb7b0f Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Thu, 28 Feb 2019 10:16:50 +0300 Subject: [PATCH 59/80] Extract smRelDir helper giving relative dir name for a source map --- src/Stack/Types/Config.hs | 4 ++-- src/Stack/Types/SourceMap.hs | 10 ++++++++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/Stack/Types/Config.hs b/src/Stack/Types/Config.hs index e42fcca637..c15c9ae53c 100644 --- a/src/Stack/Types/Config.hs +++ b/src/Stack/Types/Config.hs @@ -1243,9 +1243,9 @@ platformSnapAndCompilerRel :: (HasEnvConfig env) => RIO env (Path Rel Dir) platformSnapAndCompilerRel = do - SourceMapHash smh <- view $ envConfigL.to envConfigSourceMap.to smHash platform <- platformGhcRelDir - name <- parseRelDir $ T.unpack $ SHA256.toHexText smh + sm <- view $ envConfigL.to envConfigSourceMap + name <- smRelDir sm ghc <- compilerVersionDir useShaPathOnWindows (platform name ghc) diff --git a/src/Stack/Types/SourceMap.hs b/src/Stack/Types/SourceMap.hs index eee6c63b3b..46333e467c 100644 --- a/src/Stack/Types/SourceMap.hs +++ b/src/Stack/Types/SourceMap.hs @@ -20,8 +20,12 @@ module Stack.Types.SourceMap , GlobalPackage (..) , isReplacedGlobal , SourceMapHash (..) + , smRelDir ) where +import qualified Data.Text as T +import qualified Pantry.SHA256 as SHA256 +import Path import Stack.Prelude import Stack.Types.Compiler import Stack.Types.NamedComponent @@ -150,3 +154,9 @@ data SourceMap = SourceMap -- | A unique hash for the immutable portions of a 'SourceMap'. newtype SourceMapHash = SourceMapHash SHA256 + +-- | Returns relative directory name with source map's hash +smRelDir :: (MonadThrow m) => SourceMap -> m (Path Rel Dir) +smRelDir sm = do + let SourceMapHash smh = smHash sm + parseRelDir $ T.unpack $ SHA256.toHexText smh From cc6acc9d4d3c6a2fb102634ae542284b5b486a36 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Thu, 28 Feb 2019 12:06:44 +0300 Subject: [PATCH 60/80] Minor refactoring of hashSourceMapData --- src/Stack/Build/Source.hs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/Stack/Build/Source.hs b/src/Stack/Build/Source.hs index 402e4c83ca..cde2287f2c 100644 --- a/src/Stack/Build/Source.hs +++ b/src/Stack/Build/Source.hs @@ -144,14 +144,15 @@ hashSourceMapData -> Map PackageName DepPackage -> RIO env SourceMapHash hashSourceMapData wc smDeps = do - path <- encodeUtf8 . T.pack . toFilePath <$> getCompilerPath wc + compilerPath <- encodeUtf8 . T.pack . toFilePath <$> getCompilerPath wc let compilerExe = case wc of Ghc -> "ghc" Ghcjs -> "ghcjs" - info <- BL.toStrict . 
fst <$> proc compilerExe ["--info"] readProcess_ + compilerInfo <- BL.toStrict . fst <$> proc compilerExe ["--info"] readProcess_ immDeps <- forM (Map.elems smDeps) depPackageHashableContent - return $ SourceMapHash (SHA256.hashLazyBytes $ BL.fromChunks (path:info:immDeps)) + let hashedContent = compilerPath:compilerInfo:immDeps + return $ SourceMapHash (SHA256.hashLazyBytes $ BL.fromChunks hashedContent) depPackageHashableContent :: (HasConfig env) => DepPackage -> RIO env ByteString depPackageHashableContent DepPackage {..} = do From 9f2ebc55e15a7f38edee880b35c2fbbdaee867f5 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Thu, 28 Feb 2019 12:50:31 +0300 Subject: [PATCH 61/80] Add GHC options which are supposed to be applied to GHC boot libs --- src/Stack/Build/Source.hs | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/src/Stack/Build/Source.hs b/src/Stack/Build/Source.hs index cde2287f2c..c1aac1abbc 100644 --- a/src/Stack/Build/Source.hs +++ b/src/Stack/Build/Source.hs @@ -107,7 +107,7 @@ loadSourceMap smt boptsCli sma = do maybeProjectFlags _ = Nothing globals = pruneGlobals (smaGlobal sma) (Map.keysSet deps) checkFlagsUsedThrowing packageCliFlags FSCommandLine project deps - smh <- hashSourceMapData (whichCompiler compiler) deps + smh <- hashSourceMapData bconfig boptsCli (whichCompiler compiler) deps return SourceMap { smTargets = smt @@ -140,10 +140,12 @@ loadSourceMap smt boptsCli sma = do -- hashSourceMapData :: (HasConfig env) - => WhichCompiler + => BuildConfig + -> BuildOptsCLI + -> WhichCompiler -> Map PackageName DepPackage -> RIO env SourceMapHash -hashSourceMapData wc smDeps = do +hashSourceMapData bc boptsCli wc smDeps = do compilerPath <- encodeUtf8 . T.pack . toFilePath <$> getCompilerPath wc let compilerExe = case wc of @@ -151,7 +153,12 @@ hashSourceMapData wc smDeps = do Ghcjs -> "ghcjs" compilerInfo <- BL.toStrict . fst <$> proc compilerExe ["--info"] readProcess_ immDeps <- forM (Map.elems smDeps) depPackageHashableContent - let hashedContent = compilerPath:compilerInfo:immDeps + let -- extra bytestring specifying GHC options supposed to be applied to + -- GHC boot packages so we'll have differrent hashes when bare + -- resolver 'ghc-X.Y.Z' is used, no extra-deps and e.g. 
user wants builds + -- with profiling or without + bootGhcOpts = B.concat $ map encodeUtf8 (generalGhcOptions bc boptsCli False False) + hashedContent = compilerPath:compilerInfo:bootGhcOpts:immDeps return $ SourceMapHash (SHA256.hashLazyBytes $ BL.fromChunks hashedContent) depPackageHashableContent :: (HasConfig env) => DepPackage -> RIO env ByteString From 58c2cd71dc5814ec9ed941f629b3b5c8ee4695aa Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Thu, 28 Feb 2019 14:38:04 +0300 Subject: [PATCH 62/80] Add source map hash into build cache path to allow proper switch Also includes a test for this functionality --- src/Stack/Build/Cache.hs | 5 ++++- .../integration/tests/proper-rebuilds/Main.hs | 20 +++++++++++++++++++ .../tests/proper-rebuilds/files/app/Main.hs | 6 ++++++ .../tests/proper-rebuilds/files/files.cabal | 17 ++++++++++++++++ .../tests/proper-rebuilds/files/src/Lib.hs | 4 ++++ .../tests/proper-rebuilds/files/src/Lib.hs.v2 | 4 ++++ .../tests/proper-rebuilds/files/stack.yaml | 1 + 7 files changed, 56 insertions(+), 1 deletion(-) create mode 100644 test/integration/tests/proper-rebuilds/Main.hs create mode 100644 test/integration/tests/proper-rebuilds/files/app/Main.hs create mode 100644 test/integration/tests/proper-rebuilds/files/files.cabal create mode 100644 test/integration/tests/proper-rebuilds/files/src/Lib.hs create mode 100644 test/integration/tests/proper-rebuilds/files/src/Lib.hs.v2 create mode 100644 test/integration/tests/proper-rebuilds/files/stack.yaml diff --git a/src/Stack/Build/Cache.hs b/src/Stack/Build/Cache.hs index 8c69eba43a..49adc27dd0 100644 --- a/src/Stack/Build/Cache.hs +++ b/src/Stack/Build/Cache.hs @@ -51,6 +51,7 @@ import Stack.Types.Compiler import Stack.Types.Config import Stack.Types.GhcPkgId import Stack.Types.NamedComponent +import Stack.Types.SourceMap (smRelDir) import qualified System.FilePath as FP import System.PosixCompat.Files (modificationTime, getFileStatus, setFileTimes) @@ -108,6 +109,8 @@ buildCacheFile :: (HasEnvConfig env, MonadReader env m, MonadThrow m) -> m (Path Abs File) buildCacheFile dir component = do cachesDir <- buildCachesDir dir + sm <- view $ envConfigL.to envConfigSourceMap + smDirName <- smRelDir sm let nonLibComponent prefix name = prefix <> "-" <> T.unpack name cacheFileName <- parseRelFile $ case component of CLib -> "lib" @@ -115,7 +118,7 @@ buildCacheFile dir component = do CExe name -> nonLibComponent "exe" name CTest name -> nonLibComponent "test" name CBench name -> nonLibComponent "bench" name - return $ cachesDir cacheFileName + return $ cachesDir smDirName cacheFileName -- | Try to read the dirtiness cache for the given package directory. 
tryGetBuildCache :: HasEnvConfig env diff --git a/test/integration/tests/proper-rebuilds/Main.hs b/test/integration/tests/proper-rebuilds/Main.hs new file mode 100644 index 0000000000..229fd86711 --- /dev/null +++ b/test/integration/tests/proper-rebuilds/Main.hs @@ -0,0 +1,20 @@ +import Control.Monad (unless, when) +import Data.List (isInfixOf) +import StackTest +import System.Directory + +main :: IO () +main = do + let expectRecompilation stderr = + unless ("files-1.0.0: build" `isInfixOf` stderr) $ + error $ "package recompilation was expected" + expectNoRecompilation stderr = + when ("files-1.0.0: build" `isInfixOf` stderr) $ + error "package recompilation was not expected" + stackCheckStderr ["build"] expectRecompilation + stackCheckStderr ["build" , "--profile"] expectRecompilation + stackCheckStderr ["build" , "--profile"] expectNoRecompilation + -- changing source file to trigger recompilation + copyFile "src/Lib.hs.v2" "src/Lib.hs" + stackCheckStderr ["build" , "--profile"] expectRecompilation + stackCheckStderr ["build"] expectRecompilation diff --git a/test/integration/tests/proper-rebuilds/files/app/Main.hs b/test/integration/tests/proper-rebuilds/files/app/Main.hs new file mode 100644 index 0000000000..a2fa21e3ac --- /dev/null +++ b/test/integration/tests/proper-rebuilds/files/app/Main.hs @@ -0,0 +1,6 @@ +module Main where + +import Lib + +main = do + putStrLn $ "Sample strings: " ++ show someStrings diff --git a/test/integration/tests/proper-rebuilds/files/files.cabal b/test/integration/tests/proper-rebuilds/files/files.cabal new file mode 100644 index 0000000000..b04858a5fd --- /dev/null +++ b/test/integration/tests/proper-rebuilds/files/files.cabal @@ -0,0 +1,17 @@ +name: files +version: 1.0.0 +build-type: Simple +cabal-version: >= 1.10 + +library + hs-source-dirs: src + exposed-modules: Lib + build-depends: base + default-language: Haskell2010 + +executable test-exe + hs-source-dirs: app + main-is: Main.hs + ghc-options: -rtsopts + build-depends: base, files + default-language: Haskell2010 \ No newline at end of file diff --git a/test/integration/tests/proper-rebuilds/files/src/Lib.hs b/test/integration/tests/proper-rebuilds/files/src/Lib.hs new file mode 100644 index 0000000000..fc0ad60719 --- /dev/null +++ b/test/integration/tests/proper-rebuilds/files/src/Lib.hs @@ -0,0 +1,4 @@ +module Lib where + +someStrings :: [String] +someStrings = ["Hello", "world!"] diff --git a/test/integration/tests/proper-rebuilds/files/src/Lib.hs.v2 b/test/integration/tests/proper-rebuilds/files/src/Lib.hs.v2 new file mode 100644 index 0000000000..59c5f8c548 --- /dev/null +++ b/test/integration/tests/proper-rebuilds/files/src/Lib.hs.v2 @@ -0,0 +1,4 @@ +module Lib where + +someStrings :: [String] +someStrings = ["Hello", "other", "world!"] diff --git a/test/integration/tests/proper-rebuilds/files/stack.yaml b/test/integration/tests/proper-rebuilds/files/stack.yaml new file mode 100644 index 0000000000..a95908b164 --- /dev/null +++ b/test/integration/tests/proper-rebuilds/files/stack.yaml @@ -0,0 +1 @@ +resolver: ghc-8.2.2 From 4b316b82550bf055a434e435969fe17e85f315ab Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Thu, 28 Feb 2019 16:31:39 +0300 Subject: [PATCH 63/80] Mutable dependencies integration test --- test/integration/tests/mutable-deps/Main.hs | 21 + .../tests/mutable-deps/files/app/Main.hs | 7 + .../files/filepath-1.4.1.2/LICENSE | 30 + .../files/filepath-1.4.1.2/README.md | 19 + .../files/filepath-1.4.1.2/Setup.hs | 2 + .../files/filepath-1.4.1.2/System/FilePath.hs | 29 + 
.../System/FilePath/Internal.hs | 1029 +++++++++++++++++ .../filepath-1.4.1.2/System/FilePath/Posix.hs | 4 + .../System/FilePath/Windows.hs | 4 + .../files/filepath-1.4.1.2/changelog.md | 81 ++ .../files/filepath-1.4.1.2/filepath.cabal | 67 ++ .../files/filepath-1.4.1.2/tests/Test.hs | 30 + .../files/filepath-1.4.1.2/tests/TestGen.hs | 448 +++++++ .../files/filepath-1.4.1.2/tests/TestUtil.hs | 52 + .../tests/mutable-deps/files/files.cabal | 17 + .../tests/mutable-deps/files/src/Files.hs | 6 + .../tests/mutable-deps/files/stack.yaml | 6 + 17 files changed, 1852 insertions(+) create mode 100644 test/integration/tests/mutable-deps/Main.hs create mode 100644 test/integration/tests/mutable-deps/files/app/Main.hs create mode 100644 test/integration/tests/mutable-deps/files/filepath-1.4.1.2/LICENSE create mode 100644 test/integration/tests/mutable-deps/files/filepath-1.4.1.2/README.md create mode 100644 test/integration/tests/mutable-deps/files/filepath-1.4.1.2/Setup.hs create mode 100644 test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath.hs create mode 100644 test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Internal.hs create mode 100644 test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Posix.hs create mode 100644 test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Windows.hs create mode 100644 test/integration/tests/mutable-deps/files/filepath-1.4.1.2/changelog.md create mode 100644 test/integration/tests/mutable-deps/files/filepath-1.4.1.2/filepath.cabal create mode 100644 test/integration/tests/mutable-deps/files/filepath-1.4.1.2/tests/Test.hs create mode 100644 test/integration/tests/mutable-deps/files/filepath-1.4.1.2/tests/TestGen.hs create mode 100644 test/integration/tests/mutable-deps/files/filepath-1.4.1.2/tests/TestUtil.hs create mode 100644 test/integration/tests/mutable-deps/files/files.cabal create mode 100644 test/integration/tests/mutable-deps/files/src/Files.hs create mode 100644 test/integration/tests/mutable-deps/files/stack.yaml diff --git a/test/integration/tests/mutable-deps/Main.hs b/test/integration/tests/mutable-deps/Main.hs new file mode 100644 index 0000000000..c9e93edd38 --- /dev/null +++ b/test/integration/tests/mutable-deps/Main.hs @@ -0,0 +1,21 @@ +import Control.Monad (forM_, unless, when) +import Data.List (isInfixOf) +import StackTest + +main :: IO () +main = do + let expectRecompilation pkgs stderr = forM_ pkgs $ \p -> + unless ((p ++ ": build") `isInfixOf` stderr) $ + error $ "package " ++ show p ++ " recompilation was expected" + expectNoRecompilation pkgs stderr = forM_ pkgs $ \p -> + when ((p ++ ": build") `isInfixOf` stderr) $ + error $ "package " ++ show p ++ " recompilation was not expected" + mutablePackages = [ "filepath-1.4.1.2" + , "directory-1.3.0.2" + , "filemanip-0.3.6.3" + , "files-1.0.0" + ] + stackCheckStderr ["build"] $ expectRecompilation mutablePackages + stackCheckStderr ["build" , "--profile"] $ expectRecompilation mutablePackages + stackCheckStderr ["build"] $ expectNoRecompilation mutablePackages + stackCheckStderr ["build" , "--profile"] $ expectNoRecompilation mutablePackages diff --git a/test/integration/tests/mutable-deps/files/app/Main.hs b/test/integration/tests/mutable-deps/files/app/Main.hs new file mode 100644 index 0000000000..5e18155cea --- /dev/null +++ b/test/integration/tests/mutable-deps/files/app/Main.hs @@ -0,0 +1,7 @@ +module Main where + +import Files + +main = do + cFiles <- allCFiles + putStrLn $ "C files:" ++ show 
cFiles diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/LICENSE b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/LICENSE new file mode 100644 index 0000000000..e38555498e --- /dev/null +++ b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/LICENSE @@ -0,0 +1,30 @@ +Copyright Neil Mitchell 2005-2017. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of Neil Mitchell nor the names of other + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/README.md b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/README.md new file mode 100644 index 0000000000..f059998854 --- /dev/null +++ b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/README.md @@ -0,0 +1,19 @@ +# FilePath [![Hackage version](https://img.shields.io/hackage/v/filepath.svg?label=Hackage)](https://hackage.haskell.org/package/filepath) [![Linux Build Status](https://img.shields.io/travis/haskell/filepath.svg?label=Linux%20build)](https://travis-ci.org/haskell/filepath) [![Windows Build Status](https://img.shields.io/appveyor/ci/ndmitchell/filepath.svg?label=Windows%20build)](https://ci.appveyor.com/project/ndmitchell/filepath) + +The `filepath` package provides functionality for manipulating `FilePath` values, and is shipped with both [GHC](https://www.haskell.org/ghc/) and the [Haskell Platform](https://www.haskell.org/platform/). It provides three modules: + +* [`System.FilePath.Posix`](http://hackage.haskell.org/package/filepath/docs/System-FilePath-Posix.html) manipulates POSIX/Linux style `FilePath` values (with `/` as the path separator). +* [`System.FilePath.Windows`](http://hackage.haskell.org/package/filepath/docs/System-FilePath-Windows.html) manipulates Windows style `FilePath` values (with either `\` or `/` as the path separator, and deals with drives). +* [`System.FilePath`](http://hackage.haskell.org/package/filepath/docs/System-FilePath.html) is an alias for the module appropriate to your platform. + +All three modules provide the same API, and the same documentation (calling out differences in the different variants). + +### Should `FilePath` be an abstract data type? 
+ +The answer for this library is "no". While an abstract `FilePath` has some advantages (mostly type safety), it also has some disadvantages: + +* In Haskell the definition is `type FilePath = String`, and all file-oriented functions operate on this type alias, e.g. `readFile`/`writeFile`. Any abstract type would require wrappers for these functions or lots of casts between `String` and the abstraction. +* It is not immediately obvious what a `FilePath` is, and what is just a pure `String`. For example, `/path/file.ext` is a `FilePath`. Is `/`? `/path`? `path`? `file.ext`? `.ext`? `file`? +* Often it is useful to represent invalid files, e.g. `/foo/*.txt` probably isn't an actual file, but a glob pattern. Other programs use `foo//bar` for globs, which is definitely not a file, but might want to be stored as a `FilePath`. +* Some programs use syntactic non-semantic details of the `FilePath` to change their behaviour. For example, `foo`, `foo/` and `foo/.` are all similar, and refer to the same location on disk, but may behave differently when passed to command-line tools. +* A useful step to introducing an abstract `FilePath` is to reduce the amount of manipulating `FilePath` values like lists. This library hopes to help in that effort. diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/Setup.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/Setup.hs new file mode 100644 index 0000000000..9a994af677 --- /dev/null +++ b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/Setup.hs @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMain diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath.hs new file mode 100644 index 0000000000..331ae81818 --- /dev/null +++ b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath.hs @@ -0,0 +1,29 @@ +{-# LANGUAGE CPP #-} +#if __GLASGOW_HASKELL__ >= 704 +{-# LANGUAGE Safe #-} +#endif +{- | +Module : System.FilePath +Copyright : (c) Neil Mitchell 2005-2014 +License : BSD3 + +Maintainer : ndmitchell@gmail.com +Stability : stable +Portability : portable + +A library for 'FilePath' manipulations, using Posix or Windows filepaths +depending on the platform. + +Both "System.FilePath.Posix" and "System.FilePath.Windows" provide the +same interface. See either for examples and a list of the available +functions. 
+-} + + +#if defined(mingw32_HOST_OS) || defined(__MINGW32__) +module System.FilePath(module System.FilePath.Windows) where +import System.FilePath.Windows +#else +module System.FilePath(module System.FilePath.Posix) where +import System.FilePath.Posix +#endif diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Internal.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Internal.hs new file mode 100644 index 0000000000..4a376b33b1 --- /dev/null +++ b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Internal.hs @@ -0,0 +1,1029 @@ +#if __GLASGOW_HASKELL__ >= 704 +{-# LANGUAGE Safe #-} +#endif +{-# LANGUAGE PatternGuards #-} + +-- This template expects CPP definitions for: +-- MODULE_NAME = Posix | Windows +-- IS_WINDOWS = False | True + +-- | +-- Module : System.FilePath.MODULE_NAME +-- Copyright : (c) Neil Mitchell 2005-2014 +-- License : BSD3 +-- +-- Maintainer : ndmitchell@gmail.com +-- Stability : stable +-- Portability : portable +-- +-- A library for 'FilePath' manipulations, using MODULE_NAME style paths on +-- all platforms. Importing "System.FilePath" is usually better. +-- +-- Given the example 'FilePath': @\/directory\/file.ext@ +-- +-- We can use the following functions to extract pieces. +-- +-- * 'takeFileName' gives @\"file.ext\"@ +-- +-- * 'takeDirectory' gives @\"\/directory\"@ +-- +-- * 'takeExtension' gives @\".ext\"@ +-- +-- * 'dropExtension' gives @\"\/directory\/file\"@ +-- +-- * 'takeBaseName' gives @\"file\"@ +-- +-- And we could have built an equivalent path with the following expressions: +-- +-- * @\"\/directory\" '' \"file.ext\"@. +-- +-- * @\"\/directory\/file" '<.>' \"ext\"@. +-- +-- * @\"\/directory\/file.txt" '-<.>' \"ext\"@. +-- +-- Each function in this module is documented with several examples, +-- which are also used as tests. 
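To make the decomposition bullets above concrete, a short GHCi sketch over the same example path (POSIX flavour assumed; these simply restate the documented results):

ghci> import System.FilePath.Posix
ghci> takeFileName "/directory/file.ext"
"file.ext"
ghci> takeDirectory "/directory/file.ext"
"/directory"
ghci> (takeExtension "/directory/file.ext", takeBaseName "/directory/file.ext")
(".ext","file")
ghci> dropExtension "/directory/file.ext"
"/directory/file"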
+-- +-- Here are a few examples of using the @filepath@ functions together: +-- +-- /Example 1:/ Find the possible locations of a Haskell module @Test@ imported from module @Main@: +-- +-- @['replaceFileName' path_to_main \"Test\" '<.>' ext | ext <- [\"hs\",\"lhs\"] ]@ +-- +-- /Example 2:/ Download a file from @url@ and save it to disk: +-- +-- @do let file = 'makeValid' url +-- System.IO.createDirectoryIfMissing True ('takeDirectory' file)@ +-- +-- /Example 3:/ Compile a Haskell file, putting the @.hi@ file under @interface@: +-- +-- @'takeDirectory' file '' \"interface\" '' ('takeFileName' file '-<.>' \"hi\")@ +-- +-- References: +-- [1] (Microsoft MSDN) +module System.FilePath.MODULE_NAME + ( + -- * Separator predicates + FilePath, + pathSeparator, pathSeparators, isPathSeparator, + searchPathSeparator, isSearchPathSeparator, + extSeparator, isExtSeparator, + + -- * @$PATH@ methods + splitSearchPath, getSearchPath, + + -- * Extension functions + splitExtension, + takeExtension, replaceExtension, (-<.>), dropExtension, addExtension, hasExtension, (<.>), + splitExtensions, dropExtensions, takeExtensions, replaceExtensions, + stripExtension, + + -- * Filename\/directory functions + splitFileName, + takeFileName, replaceFileName, dropFileName, + takeBaseName, replaceBaseName, + takeDirectory, replaceDirectory, + combine, (), + splitPath, joinPath, splitDirectories, + + -- * Drive functions + splitDrive, joinDrive, + takeDrive, hasDrive, dropDrive, isDrive, + + -- * Trailing slash functions + hasTrailingPathSeparator, + addTrailingPathSeparator, + dropTrailingPathSeparator, + + -- * File name manipulations + normalise, equalFilePath, + makeRelative, + isRelative, isAbsolute, + isValid, makeValid + ) + where + +import Data.Char(toLower, toUpper, isAsciiLower, isAsciiUpper) +import Data.Maybe(isJust) +import Data.List(stripPrefix) + +import System.Environment(getEnv) + + +infixr 7 <.>, -<.> +infixr 5 + + + + + +--------------------------------------------------------------------- +-- Platform Abstraction Methods (private) + +-- | Is the operating system Unix or Linux like +isPosix :: Bool +isPosix = not isWindows + +-- | Is the operating system Windows like +isWindows :: Bool +isWindows = IS_WINDOWS + + +--------------------------------------------------------------------- +-- The basic functions + +-- | The character that separates directories. In the case where more than +-- one character is possible, 'pathSeparator' is the \'ideal\' one. +-- +-- > Windows: pathSeparator == '\\' +-- > Posix: pathSeparator == '/' +-- > isPathSeparator pathSeparator +pathSeparator :: Char +pathSeparator = if isWindows then '\\' else '/' + +-- | The list of all possible separators. +-- +-- > Windows: pathSeparators == ['\\', '/'] +-- > Posix: pathSeparators == ['/'] +-- > pathSeparator `elem` pathSeparators +pathSeparators :: [Char] +pathSeparators = if isWindows then "\\/" else "/" + +-- | Rather than using @(== 'pathSeparator')@, use this. Test if something +-- is a path separator. +-- +-- > isPathSeparator a == (a `elem` pathSeparators) +isPathSeparator :: Char -> Bool +isPathSeparator '/' = True +isPathSeparator '\\' = isWindows +isPathSeparator _ = False + + +-- | The character that is used to separate the entries in the $PATH environment variable. +-- +-- > Windows: searchPathSeparator == ';' +-- > Posix: searchPathSeparator == ':' +searchPathSeparator :: Char +searchPathSeparator = if isWindows then ';' else ':' + +-- | Is the character a file separator? 
+-- +-- > isSearchPathSeparator a == (a == searchPathSeparator) +isSearchPathSeparator :: Char -> Bool +isSearchPathSeparator = (== searchPathSeparator) + + +-- | File extension character +-- +-- > extSeparator == '.' +extSeparator :: Char +extSeparator = '.' + +-- | Is the character an extension character? +-- +-- > isExtSeparator a == (a == extSeparator) +isExtSeparator :: Char -> Bool +isExtSeparator = (== extSeparator) + + +--------------------------------------------------------------------- +-- Path methods (environment $PATH) + +-- | Take a string, split it on the 'searchPathSeparator' character. +-- Blank items are ignored on Windows, and converted to @.@ on Posix. +-- On Windows path elements are stripped of quotes. +-- +-- Follows the recommendations in +-- +-- +-- > Posix: splitSearchPath "File1:File2:File3" == ["File1","File2","File3"] +-- > Posix: splitSearchPath "File1::File2:File3" == ["File1",".","File2","File3"] +-- > Windows: splitSearchPath "File1;File2;File3" == ["File1","File2","File3"] +-- > Windows: splitSearchPath "File1;;File2;File3" == ["File1","File2","File3"] +-- > Windows: splitSearchPath "File1;\"File2\";File3" == ["File1","File2","File3"] +splitSearchPath :: String -> [FilePath] +splitSearchPath = f + where + f xs = case break isSearchPathSeparator xs of + (pre, [] ) -> g pre + (pre, _:post) -> g pre ++ f post + + g "" = ["." | isPosix] + g ('\"':x@(_:_)) | isWindows && last x == '\"' = [init x] + g x = [x] + + +-- | Get a list of 'FilePath's in the $PATH variable. +getSearchPath :: IO [FilePath] +getSearchPath = fmap splitSearchPath (getEnv "PATH") + + +--------------------------------------------------------------------- +-- Extension methods + +-- | Split on the extension. 'addExtension' is the inverse. +-- +-- > splitExtension "/directory/path.ext" == ("/directory/path",".ext") +-- > uncurry (++) (splitExtension x) == x +-- > Valid x => uncurry addExtension (splitExtension x) == x +-- > splitExtension "file.txt" == ("file",".txt") +-- > splitExtension "file" == ("file","") +-- > splitExtension "file/file.txt" == ("file/file",".txt") +-- > splitExtension "file.txt/boris" == ("file.txt/boris","") +-- > splitExtension "file.txt/boris.ext" == ("file.txt/boris",".ext") +-- > splitExtension "file/path.txt.bob.fred" == ("file/path.txt.bob",".fred") +-- > splitExtension "file/path.txt/" == ("file/path.txt/","") +splitExtension :: FilePath -> (String, String) +splitExtension x = case nameDot of + "" -> (x,"") + _ -> (dir ++ init nameDot, extSeparator : ext) + where + (dir,file) = splitFileName_ x + (nameDot,ext) = breakEnd isExtSeparator file + +-- | Get the extension of a file, returns @\"\"@ for no extension, @.ext@ otherwise. +-- +-- > takeExtension "/directory/path.ext" == ".ext" +-- > takeExtension x == snd (splitExtension x) +-- > Valid x => takeExtension (addExtension x "ext") == ".ext" +-- > Valid x => takeExtension (replaceExtension x "ext") == ".ext" +takeExtension :: FilePath -> String +takeExtension = snd . splitExtension + +-- | Remove the current extension and add another, equivalent to 'replaceExtension'. +-- +-- > "/directory/path.txt" -<.> "ext" == "/directory/path.ext" +-- > "/directory/path.txt" -<.> ".ext" == "/directory/path.ext" +-- > "foo.o" -<.> "c" == "foo.c" +(-<.>) :: FilePath -> String -> FilePath +(-<.>) = replaceExtension + +-- | Set the extension of a file, overwriting one if already present, equivalent to '-<.>'. 
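Before the replaceExtension examples that follow, a small sketch of the difference between replacing and adding an extension (the file names are arbitrary; no path separators are involved, so the POSIX and Windows flavours agree):

ghci> import System.FilePath
ghci> "report.txt" -<.> "pdf"   -- replace, same as replaceExtension
"report.pdf"
ghci> "report.txt" <.> "pdf"    -- add, same as addExtension
"report.txt.pdf"
ghci> splitExtension "report.txt.pdf"
("report.txt",".pdf")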
+-- +-- > replaceExtension "/directory/path.txt" "ext" == "/directory/path.ext" +-- > replaceExtension "/directory/path.txt" ".ext" == "/directory/path.ext" +-- > replaceExtension "file.txt" ".bob" == "file.bob" +-- > replaceExtension "file.txt" "bob" == "file.bob" +-- > replaceExtension "file" ".bob" == "file.bob" +-- > replaceExtension "file.txt" "" == "file" +-- > replaceExtension "file.fred.bob" "txt" == "file.fred.txt" +-- > replaceExtension x y == addExtension (dropExtension x) y +replaceExtension :: FilePath -> String -> FilePath +replaceExtension x y = dropExtension x <.> y + +-- | Add an extension, even if there is already one there, equivalent to 'addExtension'. +-- +-- > "/directory/path" <.> "ext" == "/directory/path.ext" +-- > "/directory/path" <.> ".ext" == "/directory/path.ext" +(<.>) :: FilePath -> String -> FilePath +(<.>) = addExtension + +-- | Remove last extension, and the \".\" preceding it. +-- +-- > dropExtension "/directory/path.ext" == "/directory/path" +-- > dropExtension x == fst (splitExtension x) +dropExtension :: FilePath -> FilePath +dropExtension = fst . splitExtension + +-- | Add an extension, even if there is already one there, equivalent to '<.>'. +-- +-- > addExtension "/directory/path" "ext" == "/directory/path.ext" +-- > addExtension "file.txt" "bib" == "file.txt.bib" +-- > addExtension "file." ".bib" == "file..bib" +-- > addExtension "file" ".bib" == "file.bib" +-- > addExtension "/" "x" == "/.x" +-- > addExtension x "" == x +-- > Valid x => takeFileName (addExtension (addTrailingPathSeparator x) "ext") == ".ext" +-- > Windows: addExtension "\\\\share" ".txt" == "\\\\share\\.txt" +addExtension :: FilePath -> String -> FilePath +addExtension file "" = file +addExtension file xs@(x:_) = joinDrive a res + where + res = if isExtSeparator x then b ++ xs + else b ++ [extSeparator] ++ xs + + (a,b) = splitDrive file + +-- | Does the given filename have an extension? +-- +-- > hasExtension "/directory/path.ext" == True +-- > hasExtension "/directory/path" == False +-- > null (takeExtension x) == not (hasExtension x) +hasExtension :: FilePath -> Bool +hasExtension = any isExtSeparator . takeFileName + + +-- | Drop the given extension from a FilePath, and the @\".\"@ preceding it. +-- Returns 'Nothing' if the FilePath does not have the given extension, or +-- 'Just' and the part before the extension if it does. +-- +-- This function can be more predictable than 'dropExtensions', especially if the filename +-- might itself contain @.@ characters. +-- +-- > stripExtension "hs.o" "foo.x.hs.o" == Just "foo.x" +-- > stripExtension "hi.o" "foo.x.hs.o" == Nothing +-- > dropExtension x == fromJust (stripExtension (takeExtension x) x) +-- > dropExtensions x == fromJust (stripExtension (takeExtensions x) x) +-- > stripExtension ".c.d" "a.b.c.d" == Just "a.b" +-- > stripExtension ".c.d" "a.b..c.d" == Just "a.b." +-- > stripExtension "baz" "foo.bar" == Nothing +-- > stripExtension "bar" "foobar" == Nothing +-- > stripExtension "" x == Just x +stripExtension :: String -> FilePath -> Maybe FilePath +stripExtension [] path = Just path +stripExtension ext@(x:_) path = stripSuffix dotExt path + where dotExt = if isExtSeparator x then ext else '.':ext + + +-- | Split on all extensions. 
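Multi-dot names are where the single-extension and all-extension variants diverge, and where stripExtension (noted above as the more predictable option) helps; a brief sketch with an arbitrary archive name:

ghci> import System.FilePath
ghci> splitExtension "backup.tar.gz"
("backup.tar",".gz")
ghci> splitExtensions "backup.tar.gz"
("backup",".tar.gz")
ghci> stripExtension "tar.gz" "backup.tar.gz"
Just "backup"
ghci> stripExtension "zip" "backup.tar.gz"
Nothing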
+-- +-- > splitExtensions "/directory/path.ext" == ("/directory/path",".ext") +-- > splitExtensions "file.tar.gz" == ("file",".tar.gz") +-- > uncurry (++) (splitExtensions x) == x +-- > Valid x => uncurry addExtension (splitExtensions x) == x +-- > splitExtensions "file.tar.gz" == ("file",".tar.gz") +splitExtensions :: FilePath -> (FilePath, String) +splitExtensions x = (a ++ c, d) + where + (a,b) = splitFileName_ x + (c,d) = break isExtSeparator b + +-- | Drop all extensions. +-- +-- > dropExtensions "/directory/path.ext" == "/directory/path" +-- > dropExtensions "file.tar.gz" == "file" +-- > not $ hasExtension $ dropExtensions x +-- > not $ any isExtSeparator $ takeFileName $ dropExtensions x +dropExtensions :: FilePath -> FilePath +dropExtensions = fst . splitExtensions + +-- | Get all extensions. +-- +-- > takeExtensions "/directory/path.ext" == ".ext" +-- > takeExtensions "file.tar.gz" == ".tar.gz" +takeExtensions :: FilePath -> String +takeExtensions = snd . splitExtensions + + +-- | Replace all extensions of a file with a new extension. Note +-- that 'replaceExtension' and 'addExtension' both work for adding +-- multiple extensions, so only required when you need to drop +-- all extensions first. +-- +-- > replaceExtensions "file.fred.bob" "txt" == "file.txt" +-- > replaceExtensions "file.fred.bob" "tar.gz" == "file.tar.gz" +replaceExtensions :: FilePath -> String -> FilePath +replaceExtensions x y = dropExtensions x <.> y + + + +--------------------------------------------------------------------- +-- Drive methods + +-- | Is the given character a valid drive letter? +-- only a-z and A-Z are letters, not isAlpha which is more unicodey +isLetter :: Char -> Bool +isLetter x = isAsciiLower x || isAsciiUpper x + + +-- | Split a path into a drive and a path. +-- On Posix, \/ is a Drive. +-- +-- > uncurry (++) (splitDrive x) == x +-- > Windows: splitDrive "file" == ("","file") +-- > Windows: splitDrive "c:/file" == ("c:/","file") +-- > Windows: splitDrive "c:\\file" == ("c:\\","file") +-- > Windows: splitDrive "\\\\shared\\test" == ("\\\\shared\\","test") +-- > Windows: splitDrive "\\\\shared" == ("\\\\shared","") +-- > Windows: splitDrive "\\\\?\\UNC\\shared\\file" == ("\\\\?\\UNC\\shared\\","file") +-- > Windows: splitDrive "\\\\?\\UNCshared\\file" == ("\\\\?\\","UNCshared\\file") +-- > Windows: splitDrive "\\\\?\\d:\\file" == ("\\\\?\\d:\\","file") +-- > Windows: splitDrive "/d" == ("","/d") +-- > Posix: splitDrive "/test" == ("/","test") +-- > Posix: splitDrive "//test" == ("//","test") +-- > Posix: splitDrive "test/file" == ("","test/file") +-- > Posix: splitDrive "file" == ("","file") +splitDrive :: FilePath -> (FilePath, FilePath) +splitDrive x | isPosix = span (== '/') x +splitDrive x | Just y <- readDriveLetter x = y +splitDrive x | Just y <- readDriveUNC x = y +splitDrive x | Just y <- readDriveShare x = y +splitDrive x = ("",x) + +addSlash :: FilePath -> FilePath -> (FilePath, FilePath) +addSlash a xs = (a++c,d) + where (c,d) = span isPathSeparator xs + +-- See [1]. +-- "\\?\D:\" or "\\?\UNC\\" +readDriveUNC :: FilePath -> Maybe (FilePath, FilePath) +readDriveUNC (s1:s2:'?':s3:xs) | all isPathSeparator [s1,s2,s3] = + case map toUpper xs of + ('U':'N':'C':s4:_) | isPathSeparator s4 -> + let (a,b) = readDriveShareName (drop 4 xs) + in Just (s1:s2:'?':s3:take 4 xs ++ a, b) + _ -> case readDriveLetter xs of + -- Extended-length path. 
+ Just (a,b) -> Just (s1:s2:'?':s3:a,b) + Nothing -> Nothing +readDriveUNC _ = Nothing + +{- c:\ -} +readDriveLetter :: String -> Maybe (FilePath, FilePath) +readDriveLetter (x:':':y:xs) | isLetter x && isPathSeparator y = Just $ addSlash [x,':'] (y:xs) +readDriveLetter (x:':':xs) | isLetter x = Just ([x,':'], xs) +readDriveLetter _ = Nothing + +{- \\sharename\ -} +readDriveShare :: String -> Maybe (FilePath, FilePath) +readDriveShare (s1:s2:xs) | isPathSeparator s1 && isPathSeparator s2 = + Just (s1:s2:a,b) + where (a,b) = readDriveShareName xs +readDriveShare _ = Nothing + +{- assume you have already seen \\ -} +{- share\bob -> "share\", "bob" -} +readDriveShareName :: String -> (FilePath, FilePath) +readDriveShareName name = addSlash a b + where (a,b) = break isPathSeparator name + + + +-- | Join a drive and the rest of the path. +-- +-- > Valid x => uncurry joinDrive (splitDrive x) == x +-- > Windows: joinDrive "C:" "foo" == "C:foo" +-- > Windows: joinDrive "C:\\" "bar" == "C:\\bar" +-- > Windows: joinDrive "\\\\share" "foo" == "\\\\share\\foo" +-- > Windows: joinDrive "/:" "foo" == "/:\\foo" +joinDrive :: FilePath -> FilePath -> FilePath +joinDrive = combineAlways + +-- | Get the drive from a filepath. +-- +-- > takeDrive x == fst (splitDrive x) +takeDrive :: FilePath -> FilePath +takeDrive = fst . splitDrive + +-- | Delete the drive, if it exists. +-- +-- > dropDrive x == snd (splitDrive x) +dropDrive :: FilePath -> FilePath +dropDrive = snd . splitDrive + +-- | Does a path have a drive. +-- +-- > not (hasDrive x) == null (takeDrive x) +-- > Posix: hasDrive "/foo" == True +-- > Windows: hasDrive "C:\\foo" == True +-- > Windows: hasDrive "C:foo" == True +-- > hasDrive "foo" == False +-- > hasDrive "" == False +hasDrive :: FilePath -> Bool +hasDrive = not . null . takeDrive + + +-- | Is an element a drive +-- +-- > Posix: isDrive "/" == True +-- > Posix: isDrive "/foo" == False +-- > Windows: isDrive "C:\\" == True +-- > Windows: isDrive "C:\\foo" == False +-- > isDrive "" == False +isDrive :: FilePath -> Bool +isDrive x = not (null x) && null (dropDrive x) + + +--------------------------------------------------------------------- +-- Operations on a filepath, as a list of directories + +-- | Split a filename into directory and file. '' is the inverse. +-- The first component will often end with a trailing slash. +-- +-- > splitFileName "/directory/file.ext" == ("/directory/","file.ext") +-- > Valid x => uncurry () (splitFileName x) == x || fst (splitFileName x) == "./" +-- > Valid x => isValid (fst (splitFileName x)) +-- > splitFileName "file/bob.txt" == ("file/", "bob.txt") +-- > splitFileName "file/" == ("file/", "") +-- > splitFileName "bob" == ("./", "bob") +-- > Posix: splitFileName "/" == ("/","") +-- > Windows: splitFileName "c:" == ("c:","") +splitFileName :: FilePath -> (String, String) +splitFileName x = (if null dir then "./" else dir, name) + where + (dir, name) = splitFileName_ x + +-- version of splitFileName where, if the FilePath has no directory +-- component, the returned directory is "" rather than "./". This +-- is used in cases where we are going to combine the returned +-- directory to make a valid FilePath, and having a "./" appear would +-- look strange and upset simple equality properties. See +-- e.g. replaceFileName. +splitFileName_ :: FilePath -> (String, String) +splitFileName_ x = (drv ++ dir, file) + where + (drv,pth) = splitDrive x + (dir,file) = breakEnd isPathSeparator pth + +-- | Set the filename. 
+-- +-- > replaceFileName "/directory/other.txt" "file.ext" == "/directory/file.ext" +-- > Valid x => replaceFileName x (takeFileName x) == x +replaceFileName :: FilePath -> String -> FilePath +replaceFileName x y = a y where (a,_) = splitFileName_ x + +-- | Drop the filename. Unlike 'takeDirectory', this function will leave +-- a trailing path separator on the directory. +-- +-- > dropFileName "/directory/file.ext" == "/directory/" +-- > dropFileName x == fst (splitFileName x) +dropFileName :: FilePath -> FilePath +dropFileName = fst . splitFileName + + +-- | Get the file name. +-- +-- > takeFileName "/directory/file.ext" == "file.ext" +-- > takeFileName "test/" == "" +-- > takeFileName x `isSuffixOf` x +-- > takeFileName x == snd (splitFileName x) +-- > Valid x => takeFileName (replaceFileName x "fred") == "fred" +-- > Valid x => takeFileName (x "fred") == "fred" +-- > Valid x => isRelative (takeFileName x) +takeFileName :: FilePath -> FilePath +takeFileName = snd . splitFileName + +-- | Get the base name, without an extension or path. +-- +-- > takeBaseName "/directory/file.ext" == "file" +-- > takeBaseName "file/test.txt" == "test" +-- > takeBaseName "dave.ext" == "dave" +-- > takeBaseName "" == "" +-- > takeBaseName "test" == "test" +-- > takeBaseName (addTrailingPathSeparator x) == "" +-- > takeBaseName "file/file.tar.gz" == "file.tar" +takeBaseName :: FilePath -> String +takeBaseName = dropExtension . takeFileName + +-- | Set the base name. +-- +-- > replaceBaseName "/directory/other.ext" "file" == "/directory/file.ext" +-- > replaceBaseName "file/test.txt" "bob" == "file/bob.txt" +-- > replaceBaseName "fred" "bill" == "bill" +-- > replaceBaseName "/dave/fred/bob.gz.tar" "new" == "/dave/fred/new.tar" +-- > Valid x => replaceBaseName x (takeBaseName x) == x +replaceBaseName :: FilePath -> String -> FilePath +replaceBaseName pth nam = combineAlways a (nam <.> ext) + where + (a,b) = splitFileName_ pth + ext = takeExtension b + +-- | Is an item either a directory or the last character a path separator? +-- +-- > hasTrailingPathSeparator "test" == False +-- > hasTrailingPathSeparator "test/" == True +hasTrailingPathSeparator :: FilePath -> Bool +hasTrailingPathSeparator "" = False +hasTrailingPathSeparator x = isPathSeparator (last x) + + +hasLeadingPathSeparator :: FilePath -> Bool +hasLeadingPathSeparator "" = False +hasLeadingPathSeparator x = isPathSeparator (head x) + + +-- | Add a trailing file path separator if one is not already present. +-- +-- > hasTrailingPathSeparator (addTrailingPathSeparator x) +-- > hasTrailingPathSeparator x ==> addTrailingPathSeparator x == x +-- > Posix: addTrailingPathSeparator "test/rest" == "test/rest/" +addTrailingPathSeparator :: FilePath -> FilePath +addTrailingPathSeparator x = if hasTrailingPathSeparator x then x else x ++ [pathSeparator] + + +-- | Remove any trailing path separators +-- +-- > dropTrailingPathSeparator "file/test/" == "file/test" +-- > dropTrailingPathSeparator "/" == "/" +-- > Windows: dropTrailingPathSeparator "\\" == "\\" +-- > Posix: not (hasTrailingPathSeparator (dropTrailingPathSeparator x)) || isDrive x +dropTrailingPathSeparator :: FilePath -> FilePath +dropTrailingPathSeparator x = + if hasTrailingPathSeparator x && not (isDrive x) + then let x' = dropWhileEnd isPathSeparator x + in if null x' then [last x] else x' + else x + + +-- | Get the directory name, move up one level. +-- +-- > takeDirectory "/directory/other.ext" == "/directory" +-- > takeDirectory x `isPrefixOf` x || takeDirectory x == "." 
+-- > takeDirectory "foo" == "."
+-- > takeDirectory "/" == "/"
+-- > takeDirectory "/foo" == "/"
+-- > takeDirectory "/foo/bar/baz" == "/foo/bar"
+-- > takeDirectory "/foo/bar/baz/" == "/foo/bar/baz"
+-- > takeDirectory "foo/bar/baz" == "foo/bar"
+-- > Windows: takeDirectory "foo\\bar" == "foo"
+-- > Windows: takeDirectory "foo\\bar\\\\" == "foo\\bar"
+-- > Windows: takeDirectory "C:\\" == "C:\\"
+takeDirectory :: FilePath -> FilePath
+takeDirectory = dropTrailingPathSeparator . dropFileName
+
+-- | Set the directory, keeping the filename the same.
+--
+-- > replaceDirectory "root/file.ext" "/directory/" == "/directory/file.ext"
+-- > Valid x => replaceDirectory x (takeDirectory x) `equalFilePath` x
+replaceDirectory :: FilePath -> String -> FilePath
+replaceDirectory x dir = combineAlways dir (takeFileName x)
+
+
+-- | An alias for '</>'.
+combine :: FilePath -> FilePath -> FilePath
+combine a b | hasLeadingPathSeparator b || hasDrive b = b
+            | otherwise = combineAlways a b
+
+-- | Combine two paths, assuming rhs is NOT absolute.
+combineAlways :: FilePath -> FilePath -> FilePath
+combineAlways a b | null a = b
+                  | null b = a
+                  | hasTrailingPathSeparator a = a ++ b
+                  | otherwise = case a of
+                      [a1,':'] | isWindows && isLetter a1 -> a ++ b
+                      _ -> a ++ [pathSeparator] ++ b
+
+
+-- | Combine two paths with a path separator.
+-- If the second path starts with a path separator or a drive letter, then it returns the second.
+-- The intention is that @readFile (dir '</>' file)@ will access the same file as
+-- @setCurrentDirectory dir; readFile file@.
+--
+-- > Posix: "/directory" </> "file.ext" == "/directory/file.ext"
+-- > Windows: "/directory" </> "file.ext" == "/directory\\file.ext"
+-- > "directory" </> "/file.ext" == "/file.ext"
+-- > Valid x => (takeDirectory x </> takeFileName x) `equalFilePath` x
+--
+-- Combined:
+--
+-- > Posix: "/" </> "test" == "/test"
+-- > Posix: "home" </> "bob" == "home/bob"
+-- > Posix: "x:" </> "foo" == "x:/foo"
+-- > Windows: "C:\\foo" </> "bar" == "C:\\foo\\bar"
+-- > Windows: "home" </> "bob" == "home\\bob"
+--
+-- Not combined:
+--
+-- > Posix: "home" </> "/bob" == "/bob"
+-- > Windows: "home" </> "C:\\bob" == "C:\\bob"
+--
+-- Not combined (tricky):
+--
+-- On Windows, if a filepath starts with a single slash, it is relative to the
+-- root of the current drive. In [1], this is (confusingly) referred to as an
+-- absolute path.
+-- The current behavior of '</>' is to never combine these forms.
+--
+-- > Windows: "home" </> "/bob" == "/bob"
+-- > Windows: "home" </> "\\bob" == "\\bob"
+-- > Windows: "C:\\home" </> "\\bob" == "\\bob"
+--
+-- On Windows, from [1]: "If a file name begins with only a disk designator
+-- but not the backslash after the colon, it is interpreted as a relative path
+-- to the current directory on the drive with the specified letter."
+-- The current behavior of '</>' is to never combine these forms.
+--
+-- > Windows: "D:\\foo" </> "C:bar" == "C:bar"
+-- > Windows: "C:\\foo" </> "C:bar" == "C:bar"
+(</>) :: FilePath -> FilePath -> FilePath
+(</>) = combine
+
+
+-- | Split a path by the directory separator.
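As a bridge between the combining operator above and the splitting functions documented next, a small POSIX-flavoured round trip (the directory names are arbitrary):

ghci> import System.FilePath.Posix
ghci> "/srv" </> "data" </> "file.ext"
"/srv/data/file.ext"
ghci> splitPath "/srv/data/file.ext"
["/","srv/","data/","file.ext"]
ghci> joinPath (splitPath "/srv/data/file.ext")
"/srv/data/file.ext"
ghci> splitDirectories "/srv/data/file.ext"
["/","srv","data","file.ext"]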
+-- +-- > splitPath "/directory/file.ext" == ["/","directory/","file.ext"] +-- > concat (splitPath x) == x +-- > splitPath "test//item/" == ["test//","item/"] +-- > splitPath "test/item/file" == ["test/","item/","file"] +-- > splitPath "" == [] +-- > Windows: splitPath "c:\\test\\path" == ["c:\\","test\\","path"] +-- > Posix: splitPath "/file/test" == ["/","file/","test"] +splitPath :: FilePath -> [FilePath] +splitPath x = [drive | drive /= ""] ++ f path + where + (drive,path) = splitDrive x + + f "" = [] + f y = (a++c) : f d + where + (a,b) = break isPathSeparator y + (c,d) = span isPathSeparator b + +-- | Just as 'splitPath', but don't add the trailing slashes to each element. +-- +-- > splitDirectories "/directory/file.ext" == ["/","directory","file.ext"] +-- > splitDirectories "test/file" == ["test","file"] +-- > splitDirectories "/test/file" == ["/","test","file"] +-- > Windows: splitDirectories "C:\\test\\file" == ["C:\\", "test", "file"] +-- > Valid x => joinPath (splitDirectories x) `equalFilePath` x +-- > splitDirectories "" == [] +-- > Windows: splitDirectories "C:\\test\\\\\\file" == ["C:\\", "test", "file"] +-- > splitDirectories "/test///file" == ["/","test","file"] +splitDirectories :: FilePath -> [FilePath] +splitDirectories = map dropTrailingPathSeparator . splitPath + + +-- | Join path elements back together. +-- +-- > joinPath ["/","directory/","file.ext"] == "/directory/file.ext" +-- > Valid x => joinPath (splitPath x) == x +-- > joinPath [] == "" +-- > Posix: joinPath ["test","file","path"] == "test/file/path" +joinPath :: [FilePath] -> FilePath +-- Note that this definition on c:\\c:\\, join then split will give c:\\. +joinPath = foldr combine "" + + + + + + +--------------------------------------------------------------------- +-- File name manipulators + +-- | Equality of two 'FilePath's. +-- If you call @System.Directory.canonicalizePath@ +-- first this has a much better chance of working. +-- Note that this doesn't follow symlinks or DOSNAM~1s. +-- +-- > x == y ==> equalFilePath x y +-- > normalise x == normalise y ==> equalFilePath x y +-- > equalFilePath "foo" "foo/" +-- > not (equalFilePath "foo" "/foo") +-- > Posix: not (equalFilePath "foo" "FOO") +-- > Windows: equalFilePath "foo" "FOO" +-- > Windows: not (equalFilePath "C:" "C:/") +equalFilePath :: FilePath -> FilePath -> Bool +equalFilePath a b = f a == f b + where + f x | isWindows = dropTrailingPathSeparator $ map toLower $ normalise x + | otherwise = dropTrailingPathSeparator $ normalise x + + +-- | Contract a filename, based on a relative path. Note that the resulting path +-- will never introduce @..@ paths, as the presence of symlinks means @..\/b@ +-- may not reach @a\/b@ if it starts from @a\/c@. For a worked example see +-- . +-- +-- The corresponding @makeAbsolute@ function can be found in +-- @System.Directory@. +-- +-- > makeRelative "/directory" "/directory/file.ext" == "file.ext" +-- > Valid x => makeRelative (takeDirectory x) x `equalFilePath` takeFileName x +-- > makeRelative x x == "." 
+-- > Valid x y => equalFilePath x y || (isRelative x && makeRelative y x == x) || equalFilePath (y makeRelative y x) x +-- > Windows: makeRelative "C:\\Home" "c:\\home\\bob" == "bob" +-- > Windows: makeRelative "C:\\Home" "c:/home/bob" == "bob" +-- > Windows: makeRelative "C:\\Home" "D:\\Home\\Bob" == "D:\\Home\\Bob" +-- > Windows: makeRelative "C:\\Home" "C:Home\\Bob" == "C:Home\\Bob" +-- > Windows: makeRelative "/Home" "/home/bob" == "bob" +-- > Windows: makeRelative "/" "//" == "//" +-- > Posix: makeRelative "/Home" "/home/bob" == "/home/bob" +-- > Posix: makeRelative "/home/" "/home/bob/foo/bar" == "bob/foo/bar" +-- > Posix: makeRelative "/fred" "bob" == "bob" +-- > Posix: makeRelative "/file/test" "/file/test/fred" == "fred" +-- > Posix: makeRelative "/file/test" "/file/test/fred/" == "fred/" +-- > Posix: makeRelative "some/path" "some/path/a/b/c" == "a/b/c" +makeRelative :: FilePath -> FilePath -> FilePath +makeRelative root path + | equalFilePath root path = "." + | takeAbs root /= takeAbs path = path + | otherwise = f (dropAbs root) (dropAbs path) + where + f "" y = dropWhile isPathSeparator y + f x y = let (x1,x2) = g x + (y1,y2) = g y + in if equalFilePath x1 y1 then f x2 y2 else path + + g x = (dropWhile isPathSeparator a, dropWhile isPathSeparator b) + where (a,b) = break isPathSeparator $ dropWhile isPathSeparator x + + -- on windows, need to drop '/' which is kind of absolute, but not a drive + dropAbs x | hasLeadingPathSeparator x && not (hasDrive x) = tail x + dropAbs x = dropDrive x + + takeAbs x | hasLeadingPathSeparator x && not (hasDrive x) = [pathSeparator] + takeAbs x = map (\y -> if isPathSeparator y then pathSeparator else toLower y) $ takeDrive x + +-- | Normalise a file +-- +-- * \/\/ outside of the drive can be made blank +-- +-- * \/ -> 'pathSeparator' +-- +-- * .\/ -> \"\" +-- +-- > Posix: normalise "/file/\\test////" == "/file/\\test/" +-- > Posix: normalise "/file/./test" == "/file/test" +-- > Posix: normalise "/test/file/../bob/fred/" == "/test/file/../bob/fred/" +-- > Posix: normalise "../bob/fred/" == "../bob/fred/" +-- > Posix: normalise "./bob/fred/" == "bob/fred/" +-- > Windows: normalise "c:\\file/bob\\" == "C:\\file\\bob\\" +-- > Windows: normalise "c:\\" == "C:\\" +-- > Windows: normalise "C:.\\" == "C:" +-- > Windows: normalise "\\\\server\\test" == "\\\\server\\test" +-- > Windows: normalise "//server/test" == "\\\\server\\test" +-- > Windows: normalise "c:/file" == "C:\\file" +-- > Windows: normalise "/file" == "\\file" +-- > Windows: normalise "\\" == "\\" +-- > Windows: normalise "/./" == "\\" +-- > normalise "." == "." +-- > Posix: normalise "./" == "./" +-- > Posix: normalise "./." == "./" +-- > Posix: normalise "/./" == "/" +-- > Posix: normalise "/" == "/" +-- > Posix: normalise "bob/fred/." == "bob/fred/" +-- > Posix: normalise "//home" == "/home" +normalise :: FilePath -> FilePath +normalise path = result ++ [pathSeparator | addPathSeparator] + where + (drv,pth) = splitDrive path + result = joinDrive' (normaliseDrive drv) (f pth) + + joinDrive' "" "" = "." + joinDrive' d p = joinDrive d p + + addPathSeparator = isDirPath pth + && not (hasTrailingPathSeparator result) + && not (isRelativeDrive drv) + + isDirPath xs = hasTrailingPathSeparator xs + || not (null xs) && last xs == '.' && hasTrailingPathSeparator (init xs) + + f = joinPath . dropDots . propSep . splitDirectories + + propSep (x:xs) | all isPathSeparator x = [pathSeparator] : xs + | otherwise = x : xs + propSep [] = [] + + dropDots = filter ("." 
/=) + +normaliseDrive :: FilePath -> FilePath +normaliseDrive "" = "" +normaliseDrive _ | isPosix = [pathSeparator] +normaliseDrive drive = if isJust $ readDriveLetter x2 + then map toUpper x2 + else x2 + where + x2 = map repSlash drive + + repSlash x = if isPathSeparator x then pathSeparator else x + +-- Information for validity functions on Windows. See [1]. +isBadCharacter :: Char -> Bool +isBadCharacter x = x >= '\0' && x <= '\31' || x `elem` ":*?><|\"" + +badElements :: [FilePath] +badElements = + ["CON","PRN","AUX","NUL","CLOCK$" + ,"COM1","COM2","COM3","COM4","COM5","COM6","COM7","COM8","COM9" + ,"LPT1","LPT2","LPT3","LPT4","LPT5","LPT6","LPT7","LPT8","LPT9"] + + +-- | Is a FilePath valid, i.e. could you create a file like it? This function checks for invalid names, +-- and invalid characters, but does not check if length limits are exceeded, as these are typically +-- filesystem dependent. +-- +-- > isValid "" == False +-- > isValid "\0" == False +-- > Posix: isValid "/random_ path:*" == True +-- > Posix: isValid x == not (null x) +-- > Windows: isValid "c:\\test" == True +-- > Windows: isValid "c:\\test:of_test" == False +-- > Windows: isValid "test*" == False +-- > Windows: isValid "c:\\test\\nul" == False +-- > Windows: isValid "c:\\test\\prn.txt" == False +-- > Windows: isValid "c:\\nul\\file" == False +-- > Windows: isValid "\\\\" == False +-- > Windows: isValid "\\\\\\foo" == False +-- > Windows: isValid "\\\\?\\D:file" == False +-- > Windows: isValid "foo\tbar" == False +-- > Windows: isValid "nul .txt" == False +-- > Windows: isValid " nul.txt" == True +isValid :: FilePath -> Bool +isValid "" = False +isValid x | '\0' `elem` x = False +isValid _ | isPosix = True +isValid path = + not (any isBadCharacter x2) && + not (any f $ splitDirectories x2) && + not (isJust (readDriveShare x1) && all isPathSeparator x1) && + not (isJust (readDriveUNC x1) && not (hasTrailingPathSeparator x1)) + where + (x1,x2) = splitDrive path + f x = map toUpper (dropWhileEnd (== ' ') $ dropExtensions x) `elem` badElements + + +-- | Take a FilePath and make it valid; does not change already valid FilePaths. 
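The Windows validity rules above are easiest to see next to makeValid, whose examples follow; a brief sketch using the Windows flavour explicitly (the file names are arbitrary):

ghci> import qualified System.FilePath.Windows as W
ghci> W.isValid "c:\\temp\\report.txt"
True
ghci> W.isValid "c:\\temp\\nul.txt"
False
ghci> W.makeValid "c:\\temp\\nul.txt"
"c:\\temp\\nul_.txt"
ghci> W.makeValid "what?.txt"
"what_.txt"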
+-- +-- > isValid (makeValid x) +-- > isValid x ==> makeValid x == x +-- > makeValid "" == "_" +-- > makeValid "file\0name" == "file_name" +-- > Windows: makeValid "c:\\already\\/valid" == "c:\\already\\/valid" +-- > Windows: makeValid "c:\\test:of_test" == "c:\\test_of_test" +-- > Windows: makeValid "test*" == "test_" +-- > Windows: makeValid "c:\\test\\nul" == "c:\\test\\nul_" +-- > Windows: makeValid "c:\\test\\prn.txt" == "c:\\test\\prn_.txt" +-- > Windows: makeValid "c:\\test/prn.txt" == "c:\\test/prn_.txt" +-- > Windows: makeValid "c:\\nul\\file" == "c:\\nul_\\file" +-- > Windows: makeValid "\\\\\\foo" == "\\\\drive" +-- > Windows: makeValid "\\\\?\\D:file" == "\\\\?\\D:\\file" +-- > Windows: makeValid "nul .txt" == "nul _.txt" +makeValid :: FilePath -> FilePath +makeValid "" = "_" +makeValid path + | isPosix = map (\x -> if x == '\0' then '_' else x) path + | isJust (readDriveShare drv) && all isPathSeparator drv = take 2 drv ++ "drive" + | isJust (readDriveUNC drv) && not (hasTrailingPathSeparator drv) = + makeValid (drv ++ [pathSeparator] ++ pth) + | otherwise = joinDrive drv $ validElements $ validChars pth + where + (drv,pth) = splitDrive path + + validChars = map f + f x = if isBadCharacter x then '_' else x + + validElements x = joinPath $ map g $ splitPath x + g x = h a ++ b + where (a,b) = break isPathSeparator x + h x = if map toUpper (dropWhileEnd (== ' ') a) `elem` badElements then a ++ "_" <.> b else x + where (a,b) = splitExtensions x + + +-- | Is a path relative, or is it fixed to the root? +-- +-- > Windows: isRelative "path\\test" == True +-- > Windows: isRelative "c:\\test" == False +-- > Windows: isRelative "c:test" == True +-- > Windows: isRelative "c:\\" == False +-- > Windows: isRelative "c:/" == False +-- > Windows: isRelative "c:" == True +-- > Windows: isRelative "\\\\foo" == False +-- > Windows: isRelative "\\\\?\\foo" == False +-- > Windows: isRelative "\\\\?\\UNC\\foo" == False +-- > Windows: isRelative "/foo" == True +-- > Windows: isRelative "\\foo" == True +-- > Posix: isRelative "test/path" == True +-- > Posix: isRelative "/test" == False +-- > Posix: isRelative "/" == False +-- +-- According to [1]: +-- +-- * "A UNC name of any format [is never relative]." +-- +-- * "You cannot use the "\\?\" prefix with a relative path." +isRelative :: FilePath -> Bool +isRelative x = null drive || isRelativeDrive drive + where drive = takeDrive x + + +{- c:foo -} +-- From [1]: "If a file name begins with only a disk designator but not the +-- backslash after the colon, it is interpreted as a relative path to the +-- current directory on the drive with the specified letter." +isRelativeDrive :: String -> Bool +isRelativeDrive x = + maybe False (not . hasTrailingPathSeparator . fst) (readDriveLetter x) + + +-- | @not . 'isRelative'@ +-- +-- > isAbsolute x == not (isRelative x) +isAbsolute :: FilePath -> Bool +isAbsolute = not . isRelative + + +----------------------------------------------------------------------------- +-- dropWhileEnd (>2) [1,2,3,4,1,2,3,4] == [1,2,3,4,1,2]) +-- Note that Data.List.dropWhileEnd is only available in base >= 4.5. +dropWhileEnd :: (a -> Bool) -> [a] -> [a] +dropWhileEnd p = reverse . dropWhile p . reverse + +-- takeWhileEnd (>2) [1,2,3,4,1,2,3,4] == [3,4]) +takeWhileEnd :: (a -> Bool) -> [a] -> [a] +takeWhileEnd p = reverse . takeWhile p . 
reverse + +-- spanEnd (>2) [1,2,3,4,1,2,3,4] = ([1,2,3,4,1,2], [3,4]) +spanEnd :: (a -> Bool) -> [a] -> ([a], [a]) +spanEnd p xs = (dropWhileEnd p xs, takeWhileEnd p xs) + +-- breakEnd (< 2) [1,2,3,4,1,2,3,4] == ([1,2,3,4,1],[2,3,4]) +breakEnd :: (a -> Bool) -> [a] -> ([a], [a]) +breakEnd p = spanEnd (not . p) + +-- | The stripSuffix function drops the given suffix from a list. It returns +-- Nothing if the list did not end with the suffix given, or Just the list +-- before the suffix, if it does. +stripSuffix :: Eq a => [a] -> [a] -> Maybe [a] +stripSuffix xs ys = fmap reverse $ stripPrefix (reverse xs) (reverse ys) diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Posix.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Posix.hs new file mode 100644 index 0000000000..3fbd0ffcb1 --- /dev/null +++ b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Posix.hs @@ -0,0 +1,4 @@ +{-# LANGUAGE CPP #-} +#define MODULE_NAME Posix +#define IS_WINDOWS False +#include "Internal.hs" diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Windows.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Windows.hs new file mode 100644 index 0000000000..3e3e9d672e --- /dev/null +++ b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Windows.hs @@ -0,0 +1,4 @@ +{-# LANGUAGE CPP #-} +#define MODULE_NAME Windows +#define IS_WINDOWS True +#include "Internal.hs" diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/changelog.md b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/changelog.md new file mode 100644 index 0000000000..edecd177f0 --- /dev/null +++ b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/changelog.md @@ -0,0 +1,81 @@ +# Changelog for [`filepath` package](http://hackage.haskell.org/package/filepath) + +_Note: below all `FilePath` values are unquoted, so `\\` really means two backslashes._ + +## 1.4.1.2 *Feb 2017* + + * Bundled with GHC 8.2.1 + +## 1.4.1.1 *Nov 2016* + + * Bundled with GHC 8.0.2 + + * Documentation improvements + + * Allow QuickCheck-2.9 + +## 1.4.1.0 *Dec 2015* + + * Bundled with GHC 8.0.1 + + * Add `replaceExtensions` and `stripExtension` functions. + + * Make `isValid` detect more invalid Windows paths, e.g. `nul .txt` and `foo\nbar`. + + * Improve the documentation. + + * Bug fix: `isValid "\0"` now returns `False`, instead of `True` + +## 1.4.0.0 *Mar 2015* + + * Bundled with GHC 7.10.1 + + * New function: Add `-<.>` as an alias for `replaceExtension`. 
+ + * Semantic change: `joinDrive /foo bar` now returns `/foo/bar`, instead of `/foobar` + + * Semantic change: on Windows, `splitSearchPath File1;\"File 2\"` now returns `[File1,File2]` instead of `[File1,\"File2\"]` + + * Bug fix: on Posix systems, `normalise //home` now returns `/home`, instead of `//home` + + * Bug fix: `normalise /./` now returns `/` on Posix and `\` on Windows, instead of `//` and `\\` + + * Bug fix: `isDrive ""` now returns `False`, instead of `True` + + * Bug fix: on Windows, `dropTrailingPathSeparator /` now returns `/` unchanged, instead of the normalised `\` + + * Bug fix: on Windows, `equalFilePath C:\ C:` now returns `False`, instead of `True` + + * Bug fix: on Windows, `isValid \\\foo` now returns `False`, instead of `True` + + * Bug fix: on Windows, `isValid \\?\D:file` now returns `False`, instead of `True` + + * Bug fix: on Windows, `normalise \` now returns `\` unchanged, instead of `\\` + + * Bug fix: on Windows, `normalise C:.\` now returns `C:`, instead of `C:\\` + + * Bug fix: on Windows, `normalise //server/test` now returns `\\server\test`, instead of `//server/test` unchanged + + * Bug fix: on Windows, `makeRelative / //` now returns `//`, instead of `""` + +## 1.3.0.2 *Mar 2014* + + * Bundled with GHC 7.8.1 + + * Update to Cabal 1.10 format + + * Minor Haddock cleanups + +## 1.3.0.1 *Sep 2012* + + * Bundled with GHC 7.6.1 + + * No changes + +## 1.3.0.0 *Feb 2012* + + * Bundled with GHC 7.4.1 + + * Add support for SafeHaskell + + * Bug fix: `normalise /` now returns `/`, instead of `/.` diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/filepath.cabal b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/filepath.cabal new file mode 100644 index 0000000000..93d64056bf --- /dev/null +++ b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/filepath.cabal @@ -0,0 +1,67 @@ +cabal-version: >= 1.18 +name: filepath +version: 1.4.1.2 +-- NOTE: Don't forget to update ./changelog.md +license: BSD3 +license-file: LICENSE +author: Neil Mitchell +maintainer: Neil Mitchell +copyright: Neil Mitchell 2005-2017 +bug-reports: https://github.com/haskell/filepath/issues +homepage: https://github.com/haskell/filepath#readme +category: System +build-type: Simple +synopsis: Library for manipulating FilePaths in a cross platform way. +tested-with: GHC==8.0.2, GHC==7.10.3, GHC==7.8.4, GHC==7.6.3, GHC==7.4.2 +description: + This package provides functionality for manipulating @FilePath@ values, and is shipped with both and the . It provides three modules: + . + * "System.FilePath.Posix" manipulates POSIX\/Linux style @FilePath@ values (with @\/@ as the path separator). + . + * "System.FilePath.Windows" manipulates Windows style @FilePath@ values (with either @\\@ or @\/@ as the path separator, and deals with drives). + . + * "System.FilePath" is an alias for the module appropriate to your platform. + . + All three modules provide the same API, and the same documentation (calling out differences in the different variants). 
+ +extra-source-files: + System/FilePath/Internal.hs +extra-doc-files: + README.md + changelog.md + +source-repository head + type: git + location: https://github.com/haskell/filepath.git + +library + default-language: Haskell2010 + other-extensions: + CPP + PatternGuards + if impl(GHC >= 7.2) + other-extensions: Safe + + exposed-modules: + System.FilePath + System.FilePath.Posix + System.FilePath.Windows + + build-depends: + base >= 4 && < 4.11 + + ghc-options: -Wall + +test-suite filepath-tests + type: exitcode-stdio-1.0 + default-language: Haskell2010 + main-is: Test.hs + ghc-options: -main-is Test + hs-source-dirs: tests + other-modules: + TestGen + TestUtil + build-depends: + filepath, + base, + QuickCheck >= 2.7 && < 2.10 diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/tests/Test.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/tests/Test.hs new file mode 100644 index 0000000000..b9b695b56b --- /dev/null +++ b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/tests/Test.hs @@ -0,0 +1,30 @@ + +module Test(main) where + +import System.Environment +import TestGen +import Control.Monad +import Data.Maybe +import Test.QuickCheck + + +main :: IO () +main = do + args <- getArgs + let count = case args of i:_ -> read i; _ -> 10000 + putStrLn $ "Testing with " ++ show count ++ " repetitions" + let total = length tests + let showOutput x = show x{output=""} ++ "\n" ++ output x + bad <- fmap catMaybes $ forM (zip [1..] tests) $ \(i,(msg,prop)) -> do + putStrLn $ "Test " ++ show i ++ " of " ++ show total ++ ": " ++ msg + res <- quickCheckWithResult stdArgs{chatty=False, maxSuccess=count} prop + case res of + Success{} -> return Nothing + bad -> do putStrLn $ showOutput bad; putStrLn "TEST FAILURE!"; return $ Just (msg,bad) + if null bad then + putStrLn $ "Success, " ++ show total ++ " tests passed" + else do + putStrLn $ show (length bad) ++ " FAILURES\n" + forM_ (zip [1..] 
bad) $ \(i,(a,b)) -> + putStrLn $ "FAILURE " ++ show i ++ ": " ++ a ++ "\n" ++ showOutput b ++ "\n" + fail $ "FAILURE, failed " ++ show (length bad) ++ " of " ++ show total ++ " tests" diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/tests/TestGen.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/tests/TestGen.hs new file mode 100644 index 0000000000..848ae5b7c2 --- /dev/null +++ b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/tests/TestGen.hs @@ -0,0 +1,448 @@ +-- GENERATED CODE: See ../Generate.hs +module TestGen(tests) where +import TestUtil +import qualified System.FilePath.Windows as W +import qualified System.FilePath.Posix as P +tests :: [(String, Property)] +tests = + [("W.pathSeparator == '\\\\'", property $ W.pathSeparator == '\\') + ,("P.pathSeparator == '/'", property $ P.pathSeparator == '/') + ,("P.isPathSeparator P.pathSeparator", property $ P.isPathSeparator P.pathSeparator) + ,("W.isPathSeparator W.pathSeparator", property $ W.isPathSeparator W.pathSeparator) + ,("W.pathSeparators == ['\\\\', '/']", property $ W.pathSeparators == ['\\', '/']) + ,("P.pathSeparators == ['/']", property $ P.pathSeparators == ['/']) + ,("P.pathSeparator `elem` P.pathSeparators", property $ P.pathSeparator `elem` P.pathSeparators) + ,("W.pathSeparator `elem` W.pathSeparators", property $ W.pathSeparator `elem` W.pathSeparators) + ,("P.isPathSeparator a == (a `elem` P.pathSeparators)", property $ \a -> P.isPathSeparator a == (a `elem` P.pathSeparators)) + ,("W.isPathSeparator a == (a `elem` W.pathSeparators)", property $ \a -> W.isPathSeparator a == (a `elem` W.pathSeparators)) + ,("W.searchPathSeparator == ';'", property $ W.searchPathSeparator == ';') + ,("P.searchPathSeparator == ':'", property $ P.searchPathSeparator == ':') + ,("P.isSearchPathSeparator a == (a == P.searchPathSeparator)", property $ \a -> P.isSearchPathSeparator a == (a == P.searchPathSeparator)) + ,("W.isSearchPathSeparator a == (a == W.searchPathSeparator)", property $ \a -> W.isSearchPathSeparator a == (a == W.searchPathSeparator)) + ,("P.extSeparator == '.'", property $ P.extSeparator == '.') + ,("W.extSeparator == '.'", property $ W.extSeparator == '.') + ,("P.isExtSeparator a == (a == P.extSeparator)", property $ \a -> P.isExtSeparator a == (a == P.extSeparator)) + ,("W.isExtSeparator a == (a == W.extSeparator)", property $ \a -> W.isExtSeparator a == (a == W.extSeparator)) + ,("P.splitSearchPath \"File1:File2:File3\" == [\"File1\", \"File2\", \"File3\"]", property $ P.splitSearchPath "File1:File2:File3" == ["File1", "File2", "File3"]) + ,("P.splitSearchPath \"File1::File2:File3\" == [\"File1\", \".\", \"File2\", \"File3\"]", property $ P.splitSearchPath "File1::File2:File3" == ["File1", ".", "File2", "File3"]) + ,("W.splitSearchPath \"File1;File2;File3\" == [\"File1\", \"File2\", \"File3\"]", property $ W.splitSearchPath "File1;File2;File3" == ["File1", "File2", "File3"]) + ,("W.splitSearchPath \"File1;;File2;File3\" == [\"File1\", \"File2\", \"File3\"]", property $ W.splitSearchPath "File1;;File2;File3" == ["File1", "File2", "File3"]) + ,("W.splitSearchPath \"File1;\\\"File2\\\";File3\" == [\"File1\", \"File2\", \"File3\"]", property $ W.splitSearchPath "File1;\"File2\";File3" == ["File1", "File2", "File3"]) + ,("P.splitExtension \"/directory/path.ext\" == (\"/directory/path\", \".ext\")", property $ P.splitExtension "/directory/path.ext" == ("/directory/path", ".ext")) + ,("W.splitExtension \"/directory/path.ext\" == (\"/directory/path\", \".ext\")", property $ 
W.splitExtension "/directory/path.ext" == ("/directory/path", ".ext")) + ,("uncurry (++) (P.splitExtension x) == x", property $ \(QFilePath x) -> uncurry (++) (P.splitExtension x) == x) + ,("uncurry (++) (W.splitExtension x) == x", property $ \(QFilePath x) -> uncurry (++) (W.splitExtension x) == x) + ,("uncurry P.addExtension (P.splitExtension x) == x", property $ \(QFilePathValidP x) -> uncurry P.addExtension (P.splitExtension x) == x) + ,("uncurry W.addExtension (W.splitExtension x) == x", property $ \(QFilePathValidW x) -> uncurry W.addExtension (W.splitExtension x) == x) + ,("P.splitExtension \"file.txt\" == (\"file\", \".txt\")", property $ P.splitExtension "file.txt" == ("file", ".txt")) + ,("W.splitExtension \"file.txt\" == (\"file\", \".txt\")", property $ W.splitExtension "file.txt" == ("file", ".txt")) + ,("P.splitExtension \"file\" == (\"file\", \"\")", property $ P.splitExtension "file" == ("file", "")) + ,("W.splitExtension \"file\" == (\"file\", \"\")", property $ W.splitExtension "file" == ("file", "")) + ,("P.splitExtension \"file/file.txt\" == (\"file/file\", \".txt\")", property $ P.splitExtension "file/file.txt" == ("file/file", ".txt")) + ,("W.splitExtension \"file/file.txt\" == (\"file/file\", \".txt\")", property $ W.splitExtension "file/file.txt" == ("file/file", ".txt")) + ,("P.splitExtension \"file.txt/boris\" == (\"file.txt/boris\", \"\")", property $ P.splitExtension "file.txt/boris" == ("file.txt/boris", "")) + ,("W.splitExtension \"file.txt/boris\" == (\"file.txt/boris\", \"\")", property $ W.splitExtension "file.txt/boris" == ("file.txt/boris", "")) + ,("P.splitExtension \"file.txt/boris.ext\" == (\"file.txt/boris\", \".ext\")", property $ P.splitExtension "file.txt/boris.ext" == ("file.txt/boris", ".ext")) + ,("W.splitExtension \"file.txt/boris.ext\" == (\"file.txt/boris\", \".ext\")", property $ W.splitExtension "file.txt/boris.ext" == ("file.txt/boris", ".ext")) + ,("P.splitExtension \"file/path.txt.bob.fred\" == (\"file/path.txt.bob\", \".fred\")", property $ P.splitExtension "file/path.txt.bob.fred" == ("file/path.txt.bob", ".fred")) + ,("W.splitExtension \"file/path.txt.bob.fred\" == (\"file/path.txt.bob\", \".fred\")", property $ W.splitExtension "file/path.txt.bob.fred" == ("file/path.txt.bob", ".fred")) + ,("P.splitExtension \"file/path.txt/\" == (\"file/path.txt/\", \"\")", property $ P.splitExtension "file/path.txt/" == ("file/path.txt/", "")) + ,("W.splitExtension \"file/path.txt/\" == (\"file/path.txt/\", \"\")", property $ W.splitExtension "file/path.txt/" == ("file/path.txt/", "")) + ,("P.takeExtension \"/directory/path.ext\" == \".ext\"", property $ P.takeExtension "/directory/path.ext" == ".ext") + ,("W.takeExtension \"/directory/path.ext\" == \".ext\"", property $ W.takeExtension "/directory/path.ext" == ".ext") + ,("P.takeExtension x == snd (P.splitExtension x)", property $ \(QFilePath x) -> P.takeExtension x == snd (P.splitExtension x)) + ,("W.takeExtension x == snd (W.splitExtension x)", property $ \(QFilePath x) -> W.takeExtension x == snd (W.splitExtension x)) + ,("P.takeExtension (P.addExtension x \"ext\") == \".ext\"", property $ \(QFilePathValidP x) -> P.takeExtension (P.addExtension x "ext") == ".ext") + ,("W.takeExtension (W.addExtension x \"ext\") == \".ext\"", property $ \(QFilePathValidW x) -> W.takeExtension (W.addExtension x "ext") == ".ext") + ,("P.takeExtension (P.replaceExtension x \"ext\") == \".ext\"", property $ \(QFilePathValidP x) -> P.takeExtension (P.replaceExtension x "ext") == ".ext") + ,("W.takeExtension 
(W.replaceExtension x \"ext\") == \".ext\"", property $ \(QFilePathValidW x) -> W.takeExtension (W.replaceExtension x "ext") == ".ext") + ,("\"/directory/path.txt\" P.-<.> \"ext\" == \"/directory/path.ext\"", property $ "/directory/path.txt" P.-<.> "ext" == "/directory/path.ext") + ,("\"/directory/path.txt\" W.-<.> \"ext\" == \"/directory/path.ext\"", property $ "/directory/path.txt" W.-<.> "ext" == "/directory/path.ext") + ,("\"/directory/path.txt\" P.-<.> \".ext\" == \"/directory/path.ext\"", property $ "/directory/path.txt" P.-<.> ".ext" == "/directory/path.ext") + ,("\"/directory/path.txt\" W.-<.> \".ext\" == \"/directory/path.ext\"", property $ "/directory/path.txt" W.-<.> ".ext" == "/directory/path.ext") + ,("\"foo.o\" P.-<.> \"c\" == \"foo.c\"", property $ "foo.o" P.-<.> "c" == "foo.c") + ,("\"foo.o\" W.-<.> \"c\" == \"foo.c\"", property $ "foo.o" W.-<.> "c" == "foo.c") + ,("P.replaceExtension \"/directory/path.txt\" \"ext\" == \"/directory/path.ext\"", property $ P.replaceExtension "/directory/path.txt" "ext" == "/directory/path.ext") + ,("W.replaceExtension \"/directory/path.txt\" \"ext\" == \"/directory/path.ext\"", property $ W.replaceExtension "/directory/path.txt" "ext" == "/directory/path.ext") + ,("P.replaceExtension \"/directory/path.txt\" \".ext\" == \"/directory/path.ext\"", property $ P.replaceExtension "/directory/path.txt" ".ext" == "/directory/path.ext") + ,("W.replaceExtension \"/directory/path.txt\" \".ext\" == \"/directory/path.ext\"", property $ W.replaceExtension "/directory/path.txt" ".ext" == "/directory/path.ext") + ,("P.replaceExtension \"file.txt\" \".bob\" == \"file.bob\"", property $ P.replaceExtension "file.txt" ".bob" == "file.bob") + ,("W.replaceExtension \"file.txt\" \".bob\" == \"file.bob\"", property $ W.replaceExtension "file.txt" ".bob" == "file.bob") + ,("P.replaceExtension \"file.txt\" \"bob\" == \"file.bob\"", property $ P.replaceExtension "file.txt" "bob" == "file.bob") + ,("W.replaceExtension \"file.txt\" \"bob\" == \"file.bob\"", property $ W.replaceExtension "file.txt" "bob" == "file.bob") + ,("P.replaceExtension \"file\" \".bob\" == \"file.bob\"", property $ P.replaceExtension "file" ".bob" == "file.bob") + ,("W.replaceExtension \"file\" \".bob\" == \"file.bob\"", property $ W.replaceExtension "file" ".bob" == "file.bob") + ,("P.replaceExtension \"file.txt\" \"\" == \"file\"", property $ P.replaceExtension "file.txt" "" == "file") + ,("W.replaceExtension \"file.txt\" \"\" == \"file\"", property $ W.replaceExtension "file.txt" "" == "file") + ,("P.replaceExtension \"file.fred.bob\" \"txt\" == \"file.fred.txt\"", property $ P.replaceExtension "file.fred.bob" "txt" == "file.fred.txt") + ,("W.replaceExtension \"file.fred.bob\" \"txt\" == \"file.fred.txt\"", property $ W.replaceExtension "file.fred.bob" "txt" == "file.fred.txt") + ,("P.replaceExtension x y == P.addExtension (P.dropExtension x) y", property $ \(QFilePath x) (QFilePath y) -> P.replaceExtension x y == P.addExtension (P.dropExtension x) y) + ,("W.replaceExtension x y == W.addExtension (W.dropExtension x) y", property $ \(QFilePath x) (QFilePath y) -> W.replaceExtension x y == W.addExtension (W.dropExtension x) y) + ,("\"/directory/path\" P.<.> \"ext\" == \"/directory/path.ext\"", property $ "/directory/path" P.<.> "ext" == "/directory/path.ext") + ,("\"/directory/path\" W.<.> \"ext\" == \"/directory/path.ext\"", property $ "/directory/path" W.<.> "ext" == "/directory/path.ext") + ,("\"/directory/path\" P.<.> \".ext\" == \"/directory/path.ext\"", property $ "/directory/path" P.<.> 
".ext" == "/directory/path.ext") + ,("\"/directory/path\" W.<.> \".ext\" == \"/directory/path.ext\"", property $ "/directory/path" W.<.> ".ext" == "/directory/path.ext") + ,("P.dropExtension \"/directory/path.ext\" == \"/directory/path\"", property $ P.dropExtension "/directory/path.ext" == "/directory/path") + ,("W.dropExtension \"/directory/path.ext\" == \"/directory/path\"", property $ W.dropExtension "/directory/path.ext" == "/directory/path") + ,("P.dropExtension x == fst (P.splitExtension x)", property $ \(QFilePath x) -> P.dropExtension x == fst (P.splitExtension x)) + ,("W.dropExtension x == fst (W.splitExtension x)", property $ \(QFilePath x) -> W.dropExtension x == fst (W.splitExtension x)) + ,("P.addExtension \"/directory/path\" \"ext\" == \"/directory/path.ext\"", property $ P.addExtension "/directory/path" "ext" == "/directory/path.ext") + ,("W.addExtension \"/directory/path\" \"ext\" == \"/directory/path.ext\"", property $ W.addExtension "/directory/path" "ext" == "/directory/path.ext") + ,("P.addExtension \"file.txt\" \"bib\" == \"file.txt.bib\"", property $ P.addExtension "file.txt" "bib" == "file.txt.bib") + ,("W.addExtension \"file.txt\" \"bib\" == \"file.txt.bib\"", property $ W.addExtension "file.txt" "bib" == "file.txt.bib") + ,("P.addExtension \"file.\" \".bib\" == \"file..bib\"", property $ P.addExtension "file." ".bib" == "file..bib") + ,("W.addExtension \"file.\" \".bib\" == \"file..bib\"", property $ W.addExtension "file." ".bib" == "file..bib") + ,("P.addExtension \"file\" \".bib\" == \"file.bib\"", property $ P.addExtension "file" ".bib" == "file.bib") + ,("W.addExtension \"file\" \".bib\" == \"file.bib\"", property $ W.addExtension "file" ".bib" == "file.bib") + ,("P.addExtension \"/\" \"x\" == \"/.x\"", property $ P.addExtension "/" "x" == "/.x") + ,("W.addExtension \"/\" \"x\" == \"/.x\"", property $ W.addExtension "/" "x" == "/.x") + ,("P.addExtension x \"\" == x", property $ \(QFilePath x) -> P.addExtension x "" == x) + ,("W.addExtension x \"\" == x", property $ \(QFilePath x) -> W.addExtension x "" == x) + ,("P.takeFileName (P.addExtension (P.addTrailingPathSeparator x) \"ext\") == \".ext\"", property $ \(QFilePathValidP x) -> P.takeFileName (P.addExtension (P.addTrailingPathSeparator x) "ext") == ".ext") + ,("W.takeFileName (W.addExtension (W.addTrailingPathSeparator x) \"ext\") == \".ext\"", property $ \(QFilePathValidW x) -> W.takeFileName (W.addExtension (W.addTrailingPathSeparator x) "ext") == ".ext") + ,("W.addExtension \"\\\\\\\\share\" \".txt\" == \"\\\\\\\\share\\\\.txt\"", property $ W.addExtension "\\\\share" ".txt" == "\\\\share\\.txt") + ,("P.hasExtension \"/directory/path.ext\" == True", property $ P.hasExtension "/directory/path.ext" == True) + ,("W.hasExtension \"/directory/path.ext\" == True", property $ W.hasExtension "/directory/path.ext" == True) + ,("P.hasExtension \"/directory/path\" == False", property $ P.hasExtension "/directory/path" == False) + ,("W.hasExtension \"/directory/path\" == False", property $ W.hasExtension "/directory/path" == False) + ,("null (P.takeExtension x) == not (P.hasExtension x)", property $ \(QFilePath x) -> null (P.takeExtension x) == not (P.hasExtension x)) + ,("null (W.takeExtension x) == not (W.hasExtension x)", property $ \(QFilePath x) -> null (W.takeExtension x) == not (W.hasExtension x)) + ,("P.stripExtension \"hs.o\" \"foo.x.hs.o\" == Just \"foo.x\"", property $ P.stripExtension "hs.o" "foo.x.hs.o" == Just "foo.x") + ,("W.stripExtension \"hs.o\" \"foo.x.hs.o\" == Just \"foo.x\"", property $ 
W.stripExtension "hs.o" "foo.x.hs.o" == Just "foo.x") + ,("P.stripExtension \"hi.o\" \"foo.x.hs.o\" == Nothing", property $ P.stripExtension "hi.o" "foo.x.hs.o" == Nothing) + ,("W.stripExtension \"hi.o\" \"foo.x.hs.o\" == Nothing", property $ W.stripExtension "hi.o" "foo.x.hs.o" == Nothing) + ,("P.dropExtension x == fromJust (P.stripExtension (P.takeExtension x) x)", property $ \(QFilePath x) -> P.dropExtension x == fromJust (P.stripExtension (P.takeExtension x) x)) + ,("W.dropExtension x == fromJust (W.stripExtension (W.takeExtension x) x)", property $ \(QFilePath x) -> W.dropExtension x == fromJust (W.stripExtension (W.takeExtension x) x)) + ,("P.dropExtensions x == fromJust (P.stripExtension (P.takeExtensions x) x)", property $ \(QFilePath x) -> P.dropExtensions x == fromJust (P.stripExtension (P.takeExtensions x) x)) + ,("W.dropExtensions x == fromJust (W.stripExtension (W.takeExtensions x) x)", property $ \(QFilePath x) -> W.dropExtensions x == fromJust (W.stripExtension (W.takeExtensions x) x)) + ,("P.stripExtension \".c.d\" \"a.b.c.d\" == Just \"a.b\"", property $ P.stripExtension ".c.d" "a.b.c.d" == Just "a.b") + ,("W.stripExtension \".c.d\" \"a.b.c.d\" == Just \"a.b\"", property $ W.stripExtension ".c.d" "a.b.c.d" == Just "a.b") + ,("P.stripExtension \".c.d\" \"a.b..c.d\" == Just \"a.b.\"", property $ P.stripExtension ".c.d" "a.b..c.d" == Just "a.b.") + ,("W.stripExtension \".c.d\" \"a.b..c.d\" == Just \"a.b.\"", property $ W.stripExtension ".c.d" "a.b..c.d" == Just "a.b.") + ,("P.stripExtension \"baz\" \"foo.bar\" == Nothing", property $ P.stripExtension "baz" "foo.bar" == Nothing) + ,("W.stripExtension \"baz\" \"foo.bar\" == Nothing", property $ W.stripExtension "baz" "foo.bar" == Nothing) + ,("P.stripExtension \"bar\" \"foobar\" == Nothing", property $ P.stripExtension "bar" "foobar" == Nothing) + ,("W.stripExtension \"bar\" \"foobar\" == Nothing", property $ W.stripExtension "bar" "foobar" == Nothing) + ,("P.stripExtension \"\" x == Just x", property $ \(QFilePath x) -> P.stripExtension "" x == Just x) + ,("W.stripExtension \"\" x == Just x", property $ \(QFilePath x) -> W.stripExtension "" x == Just x) + ,("P.splitExtensions \"/directory/path.ext\" == (\"/directory/path\", \".ext\")", property $ P.splitExtensions "/directory/path.ext" == ("/directory/path", ".ext")) + ,("W.splitExtensions \"/directory/path.ext\" == (\"/directory/path\", \".ext\")", property $ W.splitExtensions "/directory/path.ext" == ("/directory/path", ".ext")) + ,("P.splitExtensions \"file.tar.gz\" == (\"file\", \".tar.gz\")", property $ P.splitExtensions "file.tar.gz" == ("file", ".tar.gz")) + ,("W.splitExtensions \"file.tar.gz\" == (\"file\", \".tar.gz\")", property $ W.splitExtensions "file.tar.gz" == ("file", ".tar.gz")) + ,("uncurry (++) (P.splitExtensions x) == x", property $ \(QFilePath x) -> uncurry (++) (P.splitExtensions x) == x) + ,("uncurry (++) (W.splitExtensions x) == x", property $ \(QFilePath x) -> uncurry (++) (W.splitExtensions x) == x) + ,("uncurry P.addExtension (P.splitExtensions x) == x", property $ \(QFilePathValidP x) -> uncurry P.addExtension (P.splitExtensions x) == x) + ,("uncurry W.addExtension (W.splitExtensions x) == x", property $ \(QFilePathValidW x) -> uncurry W.addExtension (W.splitExtensions x) == x) + ,("P.splitExtensions \"file.tar.gz\" == (\"file\", \".tar.gz\")", property $ P.splitExtensions "file.tar.gz" == ("file", ".tar.gz")) + ,("W.splitExtensions \"file.tar.gz\" == (\"file\", \".tar.gz\")", property $ W.splitExtensions "file.tar.gz" == ("file", ".tar.gz")) + 
,("P.dropExtensions \"/directory/path.ext\" == \"/directory/path\"", property $ P.dropExtensions "/directory/path.ext" == "/directory/path") + ,("W.dropExtensions \"/directory/path.ext\" == \"/directory/path\"", property $ W.dropExtensions "/directory/path.ext" == "/directory/path") + ,("P.dropExtensions \"file.tar.gz\" == \"file\"", property $ P.dropExtensions "file.tar.gz" == "file") + ,("W.dropExtensions \"file.tar.gz\" == \"file\"", property $ W.dropExtensions "file.tar.gz" == "file") + ,("not $ P.hasExtension $ P.dropExtensions x", property $ \(QFilePath x) -> not $ P.hasExtension $ P.dropExtensions x) + ,("not $ W.hasExtension $ W.dropExtensions x", property $ \(QFilePath x) -> not $ W.hasExtension $ W.dropExtensions x) + ,("not $ any P.isExtSeparator $ P.takeFileName $ P.dropExtensions x", property $ \(QFilePath x) -> not $ any P.isExtSeparator $ P.takeFileName $ P.dropExtensions x) + ,("not $ any W.isExtSeparator $ W.takeFileName $ W.dropExtensions x", property $ \(QFilePath x) -> not $ any W.isExtSeparator $ W.takeFileName $ W.dropExtensions x) + ,("P.takeExtensions \"/directory/path.ext\" == \".ext\"", property $ P.takeExtensions "/directory/path.ext" == ".ext") + ,("W.takeExtensions \"/directory/path.ext\" == \".ext\"", property $ W.takeExtensions "/directory/path.ext" == ".ext") + ,("P.takeExtensions \"file.tar.gz\" == \".tar.gz\"", property $ P.takeExtensions "file.tar.gz" == ".tar.gz") + ,("W.takeExtensions \"file.tar.gz\" == \".tar.gz\"", property $ W.takeExtensions "file.tar.gz" == ".tar.gz") + ,("P.replaceExtensions \"file.fred.bob\" \"txt\" == \"file.txt\"", property $ P.replaceExtensions "file.fred.bob" "txt" == "file.txt") + ,("W.replaceExtensions \"file.fred.bob\" \"txt\" == \"file.txt\"", property $ W.replaceExtensions "file.fred.bob" "txt" == "file.txt") + ,("P.replaceExtensions \"file.fred.bob\" \"tar.gz\" == \"file.tar.gz\"", property $ P.replaceExtensions "file.fred.bob" "tar.gz" == "file.tar.gz") + ,("W.replaceExtensions \"file.fred.bob\" \"tar.gz\" == \"file.tar.gz\"", property $ W.replaceExtensions "file.fred.bob" "tar.gz" == "file.tar.gz") + ,("uncurry (++) (P.splitDrive x) == x", property $ \(QFilePath x) -> uncurry (++) (P.splitDrive x) == x) + ,("uncurry (++) (W.splitDrive x) == x", property $ \(QFilePath x) -> uncurry (++) (W.splitDrive x) == x) + ,("W.splitDrive \"file\" == (\"\", \"file\")", property $ W.splitDrive "file" == ("", "file")) + ,("W.splitDrive \"c:/file\" == (\"c:/\", \"file\")", property $ W.splitDrive "c:/file" == ("c:/", "file")) + ,("W.splitDrive \"c:\\\\file\" == (\"c:\\\\\", \"file\")", property $ W.splitDrive "c:\\file" == ("c:\\", "file")) + ,("W.splitDrive \"\\\\\\\\shared\\\\test\" == (\"\\\\\\\\shared\\\\\", \"test\")", property $ W.splitDrive "\\\\shared\\test" == ("\\\\shared\\", "test")) + ,("W.splitDrive \"\\\\\\\\shared\" == (\"\\\\\\\\shared\", \"\")", property $ W.splitDrive "\\\\shared" == ("\\\\shared", "")) + ,("W.splitDrive \"\\\\\\\\?\\\\UNC\\\\shared\\\\file\" == (\"\\\\\\\\?\\\\UNC\\\\shared\\\\\", \"file\")", property $ W.splitDrive "\\\\?\\UNC\\shared\\file" == ("\\\\?\\UNC\\shared\\", "file")) + ,("W.splitDrive \"\\\\\\\\?\\\\UNCshared\\\\file\" == (\"\\\\\\\\?\\\\\", \"UNCshared\\\\file\")", property $ W.splitDrive "\\\\?\\UNCshared\\file" == ("\\\\?\\", "UNCshared\\file")) + ,("W.splitDrive \"\\\\\\\\?\\\\d:\\\\file\" == (\"\\\\\\\\?\\\\d:\\\\\", \"file\")", property $ W.splitDrive "\\\\?\\d:\\file" == ("\\\\?\\d:\\", "file")) + ,("W.splitDrive \"/d\" == (\"\", \"/d\")", property $ W.splitDrive "/d" == ("", 
"/d")) + ,("P.splitDrive \"/test\" == (\"/\", \"test\")", property $ P.splitDrive "/test" == ("/", "test")) + ,("P.splitDrive \"//test\" == (\"//\", \"test\")", property $ P.splitDrive "//test" == ("//", "test")) + ,("P.splitDrive \"test/file\" == (\"\", \"test/file\")", property $ P.splitDrive "test/file" == ("", "test/file")) + ,("P.splitDrive \"file\" == (\"\", \"file\")", property $ P.splitDrive "file" == ("", "file")) + ,("uncurry P.joinDrive (P.splitDrive x) == x", property $ \(QFilePathValidP x) -> uncurry P.joinDrive (P.splitDrive x) == x) + ,("uncurry W.joinDrive (W.splitDrive x) == x", property $ \(QFilePathValidW x) -> uncurry W.joinDrive (W.splitDrive x) == x) + ,("W.joinDrive \"C:\" \"foo\" == \"C:foo\"", property $ W.joinDrive "C:" "foo" == "C:foo") + ,("W.joinDrive \"C:\\\\\" \"bar\" == \"C:\\\\bar\"", property $ W.joinDrive "C:\\" "bar" == "C:\\bar") + ,("W.joinDrive \"\\\\\\\\share\" \"foo\" == \"\\\\\\\\share\\\\foo\"", property $ W.joinDrive "\\\\share" "foo" == "\\\\share\\foo") + ,("W.joinDrive \"/:\" \"foo\" == \"/:\\\\foo\"", property $ W.joinDrive "/:" "foo" == "/:\\foo") + ,("P.takeDrive x == fst (P.splitDrive x)", property $ \(QFilePath x) -> P.takeDrive x == fst (P.splitDrive x)) + ,("W.takeDrive x == fst (W.splitDrive x)", property $ \(QFilePath x) -> W.takeDrive x == fst (W.splitDrive x)) + ,("P.dropDrive x == snd (P.splitDrive x)", property $ \(QFilePath x) -> P.dropDrive x == snd (P.splitDrive x)) + ,("W.dropDrive x == snd (W.splitDrive x)", property $ \(QFilePath x) -> W.dropDrive x == snd (W.splitDrive x)) + ,("not (P.hasDrive x) == null (P.takeDrive x)", property $ \(QFilePath x) -> not (P.hasDrive x) == null (P.takeDrive x)) + ,("not (W.hasDrive x) == null (W.takeDrive x)", property $ \(QFilePath x) -> not (W.hasDrive x) == null (W.takeDrive x)) + ,("P.hasDrive \"/foo\" == True", property $ P.hasDrive "/foo" == True) + ,("W.hasDrive \"C:\\\\foo\" == True", property $ W.hasDrive "C:\\foo" == True) + ,("W.hasDrive \"C:foo\" == True", property $ W.hasDrive "C:foo" == True) + ,("P.hasDrive \"foo\" == False", property $ P.hasDrive "foo" == False) + ,("W.hasDrive \"foo\" == False", property $ W.hasDrive "foo" == False) + ,("P.hasDrive \"\" == False", property $ P.hasDrive "" == False) + ,("W.hasDrive \"\" == False", property $ W.hasDrive "" == False) + ,("P.isDrive \"/\" == True", property $ P.isDrive "/" == True) + ,("P.isDrive \"/foo\" == False", property $ P.isDrive "/foo" == False) + ,("W.isDrive \"C:\\\\\" == True", property $ W.isDrive "C:\\" == True) + ,("W.isDrive \"C:\\\\foo\" == False", property $ W.isDrive "C:\\foo" == False) + ,("P.isDrive \"\" == False", property $ P.isDrive "" == False) + ,("W.isDrive \"\" == False", property $ W.isDrive "" == False) + ,("P.splitFileName \"/directory/file.ext\" == (\"/directory/\", \"file.ext\")", property $ P.splitFileName "/directory/file.ext" == ("/directory/", "file.ext")) + ,("W.splitFileName \"/directory/file.ext\" == (\"/directory/\", \"file.ext\")", property $ W.splitFileName "/directory/file.ext" == ("/directory/", "file.ext")) + ,("uncurry (P.) (P.splitFileName x) == x || fst (P.splitFileName x) == \"./\"", property $ \(QFilePathValidP x) -> uncurry (P.) (P.splitFileName x) == x || fst (P.splitFileName x) == "./") + ,("uncurry (W.) (W.splitFileName x) == x || fst (W.splitFileName x) == \"./\"", property $ \(QFilePathValidW x) -> uncurry (W.) 
(W.splitFileName x) == x || fst (W.splitFileName x) == "./") + ,("P.isValid (fst (P.splitFileName x))", property $ \(QFilePathValidP x) -> P.isValid (fst (P.splitFileName x))) + ,("W.isValid (fst (W.splitFileName x))", property $ \(QFilePathValidW x) -> W.isValid (fst (W.splitFileName x))) + ,("P.splitFileName \"file/bob.txt\" == (\"file/\", \"bob.txt\")", property $ P.splitFileName "file/bob.txt" == ("file/", "bob.txt")) + ,("W.splitFileName \"file/bob.txt\" == (\"file/\", \"bob.txt\")", property $ W.splitFileName "file/bob.txt" == ("file/", "bob.txt")) + ,("P.splitFileName \"file/\" == (\"file/\", \"\")", property $ P.splitFileName "file/" == ("file/", "")) + ,("W.splitFileName \"file/\" == (\"file/\", \"\")", property $ W.splitFileName "file/" == ("file/", "")) + ,("P.splitFileName \"bob\" == (\"./\", \"bob\")", property $ P.splitFileName "bob" == ("./", "bob")) + ,("W.splitFileName \"bob\" == (\"./\", \"bob\")", property $ W.splitFileName "bob" == ("./", "bob")) + ,("P.splitFileName \"/\" == (\"/\", \"\")", property $ P.splitFileName "/" == ("/", "")) + ,("W.splitFileName \"c:\" == (\"c:\", \"\")", property $ W.splitFileName "c:" == ("c:", "")) + ,("P.replaceFileName \"/directory/other.txt\" \"file.ext\" == \"/directory/file.ext\"", property $ P.replaceFileName "/directory/other.txt" "file.ext" == "/directory/file.ext") + ,("W.replaceFileName \"/directory/other.txt\" \"file.ext\" == \"/directory/file.ext\"", property $ W.replaceFileName "/directory/other.txt" "file.ext" == "/directory/file.ext") + ,("P.replaceFileName x (P.takeFileName x) == x", property $ \(QFilePathValidP x) -> P.replaceFileName x (P.takeFileName x) == x) + ,("W.replaceFileName x (W.takeFileName x) == x", property $ \(QFilePathValidW x) -> W.replaceFileName x (W.takeFileName x) == x) + ,("P.dropFileName \"/directory/file.ext\" == \"/directory/\"", property $ P.dropFileName "/directory/file.ext" == "/directory/") + ,("W.dropFileName \"/directory/file.ext\" == \"/directory/\"", property $ W.dropFileName "/directory/file.ext" == "/directory/") + ,("P.dropFileName x == fst (P.splitFileName x)", property $ \(QFilePath x) -> P.dropFileName x == fst (P.splitFileName x)) + ,("W.dropFileName x == fst (W.splitFileName x)", property $ \(QFilePath x) -> W.dropFileName x == fst (W.splitFileName x)) + ,("P.takeFileName \"/directory/file.ext\" == \"file.ext\"", property $ P.takeFileName "/directory/file.ext" == "file.ext") + ,("W.takeFileName \"/directory/file.ext\" == \"file.ext\"", property $ W.takeFileName "/directory/file.ext" == "file.ext") + ,("P.takeFileName \"test/\" == \"\"", property $ P.takeFileName "test/" == "") + ,("W.takeFileName \"test/\" == \"\"", property $ W.takeFileName "test/" == "") + ,("P.takeFileName x `isSuffixOf` x", property $ \(QFilePath x) -> P.takeFileName x `isSuffixOf` x) + ,("W.takeFileName x `isSuffixOf` x", property $ \(QFilePath x) -> W.takeFileName x `isSuffixOf` x) + ,("P.takeFileName x == snd (P.splitFileName x)", property $ \(QFilePath x) -> P.takeFileName x == snd (P.splitFileName x)) + ,("W.takeFileName x == snd (W.splitFileName x)", property $ \(QFilePath x) -> W.takeFileName x == snd (W.splitFileName x)) + ,("P.takeFileName (P.replaceFileName x \"fred\") == \"fred\"", property $ \(QFilePathValidP x) -> P.takeFileName (P.replaceFileName x "fred") == "fred") + ,("W.takeFileName (W.replaceFileName x \"fred\") == \"fred\"", property $ \(QFilePathValidW x) -> W.takeFileName (W.replaceFileName x "fred") == "fred") + ,("P.takeFileName (x P. 
\"fred\") == \"fred\"", property $ \(QFilePathValidP x) -> P.takeFileName (x P. "fred") == "fred") + ,("W.takeFileName (x W. \"fred\") == \"fred\"", property $ \(QFilePathValidW x) -> W.takeFileName (x W. "fred") == "fred") + ,("P.isRelative (P.takeFileName x)", property $ \(QFilePathValidP x) -> P.isRelative (P.takeFileName x)) + ,("W.isRelative (W.takeFileName x)", property $ \(QFilePathValidW x) -> W.isRelative (W.takeFileName x)) + ,("P.takeBaseName \"/directory/file.ext\" == \"file\"", property $ P.takeBaseName "/directory/file.ext" == "file") + ,("W.takeBaseName \"/directory/file.ext\" == \"file\"", property $ W.takeBaseName "/directory/file.ext" == "file") + ,("P.takeBaseName \"file/test.txt\" == \"test\"", property $ P.takeBaseName "file/test.txt" == "test") + ,("W.takeBaseName \"file/test.txt\" == \"test\"", property $ W.takeBaseName "file/test.txt" == "test") + ,("P.takeBaseName \"dave.ext\" == \"dave\"", property $ P.takeBaseName "dave.ext" == "dave") + ,("W.takeBaseName \"dave.ext\" == \"dave\"", property $ W.takeBaseName "dave.ext" == "dave") + ,("P.takeBaseName \"\" == \"\"", property $ P.takeBaseName "" == "") + ,("W.takeBaseName \"\" == \"\"", property $ W.takeBaseName "" == "") + ,("P.takeBaseName \"test\" == \"test\"", property $ P.takeBaseName "test" == "test") + ,("W.takeBaseName \"test\" == \"test\"", property $ W.takeBaseName "test" == "test") + ,("P.takeBaseName (P.addTrailingPathSeparator x) == \"\"", property $ \(QFilePath x) -> P.takeBaseName (P.addTrailingPathSeparator x) == "") + ,("W.takeBaseName (W.addTrailingPathSeparator x) == \"\"", property $ \(QFilePath x) -> W.takeBaseName (W.addTrailingPathSeparator x) == "") + ,("P.takeBaseName \"file/file.tar.gz\" == \"file.tar\"", property $ P.takeBaseName "file/file.tar.gz" == "file.tar") + ,("W.takeBaseName \"file/file.tar.gz\" == \"file.tar\"", property $ W.takeBaseName "file/file.tar.gz" == "file.tar") + ,("P.replaceBaseName \"/directory/other.ext\" \"file\" == \"/directory/file.ext\"", property $ P.replaceBaseName "/directory/other.ext" "file" == "/directory/file.ext") + ,("W.replaceBaseName \"/directory/other.ext\" \"file\" == \"/directory/file.ext\"", property $ W.replaceBaseName "/directory/other.ext" "file" == "/directory/file.ext") + ,("P.replaceBaseName \"file/test.txt\" \"bob\" == \"file/bob.txt\"", property $ P.replaceBaseName "file/test.txt" "bob" == "file/bob.txt") + ,("W.replaceBaseName \"file/test.txt\" \"bob\" == \"file/bob.txt\"", property $ W.replaceBaseName "file/test.txt" "bob" == "file/bob.txt") + ,("P.replaceBaseName \"fred\" \"bill\" == \"bill\"", property $ P.replaceBaseName "fred" "bill" == "bill") + ,("W.replaceBaseName \"fred\" \"bill\" == \"bill\"", property $ W.replaceBaseName "fred" "bill" == "bill") + ,("P.replaceBaseName \"/dave/fred/bob.gz.tar\" \"new\" == \"/dave/fred/new.tar\"", property $ P.replaceBaseName "/dave/fred/bob.gz.tar" "new" == "/dave/fred/new.tar") + ,("W.replaceBaseName \"/dave/fred/bob.gz.tar\" \"new\" == \"/dave/fred/new.tar\"", property $ W.replaceBaseName "/dave/fred/bob.gz.tar" "new" == "/dave/fred/new.tar") + ,("P.replaceBaseName x (P.takeBaseName x) == x", property $ \(QFilePathValidP x) -> P.replaceBaseName x (P.takeBaseName x) == x) + ,("W.replaceBaseName x (W.takeBaseName x) == x", property $ \(QFilePathValidW x) -> W.replaceBaseName x (W.takeBaseName x) == x) + ,("P.hasTrailingPathSeparator \"test\" == False", property $ P.hasTrailingPathSeparator "test" == False) + ,("W.hasTrailingPathSeparator \"test\" == False", property $ W.hasTrailingPathSeparator 
"test" == False) + ,("P.hasTrailingPathSeparator \"test/\" == True", property $ P.hasTrailingPathSeparator "test/" == True) + ,("W.hasTrailingPathSeparator \"test/\" == True", property $ W.hasTrailingPathSeparator "test/" == True) + ,("P.hasTrailingPathSeparator (P.addTrailingPathSeparator x)", property $ \(QFilePath x) -> P.hasTrailingPathSeparator (P.addTrailingPathSeparator x)) + ,("W.hasTrailingPathSeparator (W.addTrailingPathSeparator x)", property $ \(QFilePath x) -> W.hasTrailingPathSeparator (W.addTrailingPathSeparator x)) + ,("P.hasTrailingPathSeparator x ==> P.addTrailingPathSeparator x == x", property $ \(QFilePath x) -> P.hasTrailingPathSeparator x ==> P.addTrailingPathSeparator x == x) + ,("W.hasTrailingPathSeparator x ==> W.addTrailingPathSeparator x == x", property $ \(QFilePath x) -> W.hasTrailingPathSeparator x ==> W.addTrailingPathSeparator x == x) + ,("P.addTrailingPathSeparator \"test/rest\" == \"test/rest/\"", property $ P.addTrailingPathSeparator "test/rest" == "test/rest/") + ,("P.dropTrailingPathSeparator \"file/test/\" == \"file/test\"", property $ P.dropTrailingPathSeparator "file/test/" == "file/test") + ,("W.dropTrailingPathSeparator \"file/test/\" == \"file/test\"", property $ W.dropTrailingPathSeparator "file/test/" == "file/test") + ,("P.dropTrailingPathSeparator \"/\" == \"/\"", property $ P.dropTrailingPathSeparator "/" == "/") + ,("W.dropTrailingPathSeparator \"/\" == \"/\"", property $ W.dropTrailingPathSeparator "/" == "/") + ,("W.dropTrailingPathSeparator \"\\\\\" == \"\\\\\"", property $ W.dropTrailingPathSeparator "\\" == "\\") + ,("not (P.hasTrailingPathSeparator (P.dropTrailingPathSeparator x)) || P.isDrive x", property $ \(QFilePath x) -> not (P.hasTrailingPathSeparator (P.dropTrailingPathSeparator x)) || P.isDrive x) + ,("P.takeDirectory \"/directory/other.ext\" == \"/directory\"", property $ P.takeDirectory "/directory/other.ext" == "/directory") + ,("W.takeDirectory \"/directory/other.ext\" == \"/directory\"", property $ W.takeDirectory "/directory/other.ext" == "/directory") + ,("P.takeDirectory x `isPrefixOf` x || P.takeDirectory x == \".\"", property $ \(QFilePath x) -> P.takeDirectory x `isPrefixOf` x || P.takeDirectory x == ".") + ,("W.takeDirectory x `isPrefixOf` x || W.takeDirectory x == \".\"", property $ \(QFilePath x) -> W.takeDirectory x `isPrefixOf` x || W.takeDirectory x == ".") + ,("P.takeDirectory \"foo\" == \".\"", property $ P.takeDirectory "foo" == ".") + ,("W.takeDirectory \"foo\" == \".\"", property $ W.takeDirectory "foo" == ".") + ,("P.takeDirectory \"/\" == \"/\"", property $ P.takeDirectory "/" == "/") + ,("W.takeDirectory \"/\" == \"/\"", property $ W.takeDirectory "/" == "/") + ,("P.takeDirectory \"/foo\" == \"/\"", property $ P.takeDirectory "/foo" == "/") + ,("W.takeDirectory \"/foo\" == \"/\"", property $ W.takeDirectory "/foo" == "/") + ,("P.takeDirectory \"/foo/bar/baz\" == \"/foo/bar\"", property $ P.takeDirectory "/foo/bar/baz" == "/foo/bar") + ,("W.takeDirectory \"/foo/bar/baz\" == \"/foo/bar\"", property $ W.takeDirectory "/foo/bar/baz" == "/foo/bar") + ,("P.takeDirectory \"/foo/bar/baz/\" == \"/foo/bar/baz\"", property $ P.takeDirectory "/foo/bar/baz/" == "/foo/bar/baz") + ,("W.takeDirectory \"/foo/bar/baz/\" == \"/foo/bar/baz\"", property $ W.takeDirectory "/foo/bar/baz/" == "/foo/bar/baz") + ,("P.takeDirectory \"foo/bar/baz\" == \"foo/bar\"", property $ P.takeDirectory "foo/bar/baz" == "foo/bar") + ,("W.takeDirectory \"foo/bar/baz\" == \"foo/bar\"", property $ W.takeDirectory "foo/bar/baz" == "foo/bar") + 
,("W.takeDirectory \"foo\\\\bar\" == \"foo\"", property $ W.takeDirectory "foo\\bar" == "foo") + ,("W.takeDirectory \"foo\\\\bar\\\\\\\\\" == \"foo\\\\bar\"", property $ W.takeDirectory "foo\\bar\\\\" == "foo\\bar") + ,("W.takeDirectory \"C:\\\\\" == \"C:\\\\\"", property $ W.takeDirectory "C:\\" == "C:\\") + ,("P.replaceDirectory \"root/file.ext\" \"/directory/\" == \"/directory/file.ext\"", property $ P.replaceDirectory "root/file.ext" "/directory/" == "/directory/file.ext") + ,("W.replaceDirectory \"root/file.ext\" \"/directory/\" == \"/directory/file.ext\"", property $ W.replaceDirectory "root/file.ext" "/directory/" == "/directory/file.ext") + ,("P.replaceDirectory x (P.takeDirectory x) `P.equalFilePath` x", property $ \(QFilePathValidP x) -> P.replaceDirectory x (P.takeDirectory x) `P.equalFilePath` x) + ,("W.replaceDirectory x (W.takeDirectory x) `W.equalFilePath` x", property $ \(QFilePathValidW x) -> W.replaceDirectory x (W.takeDirectory x) `W.equalFilePath` x) + ,("\"/directory\" P. \"file.ext\" == \"/directory/file.ext\"", property $ "/directory" P. "file.ext" == "/directory/file.ext") + ,("\"/directory\" W. \"file.ext\" == \"/directory\\\\file.ext\"", property $ "/directory" W. "file.ext" == "/directory\\file.ext") + ,("\"directory\" P. \"/file.ext\" == \"/file.ext\"", property $ "directory" P. "/file.ext" == "/file.ext") + ,("\"directory\" W. \"/file.ext\" == \"/file.ext\"", property $ "directory" W. "/file.ext" == "/file.ext") + ,("(P.takeDirectory x P. P.takeFileName x) `P.equalFilePath` x", property $ \(QFilePathValidP x) -> (P.takeDirectory x P. P.takeFileName x) `P.equalFilePath` x) + ,("(W.takeDirectory x W. W.takeFileName x) `W.equalFilePath` x", property $ \(QFilePathValidW x) -> (W.takeDirectory x W. W.takeFileName x) `W.equalFilePath` x) + ,("\"/\" P. \"test\" == \"/test\"", property $ "/" P. "test" == "/test") + ,("\"home\" P. \"bob\" == \"home/bob\"", property $ "home" P. "bob" == "home/bob") + ,("\"x:\" P. \"foo\" == \"x:/foo\"", property $ "x:" P. "foo" == "x:/foo") + ,("\"C:\\\\foo\" W. \"bar\" == \"C:\\\\foo\\\\bar\"", property $ "C:\\foo" W. "bar" == "C:\\foo\\bar") + ,("\"home\" W. \"bob\" == \"home\\\\bob\"", property $ "home" W. "bob" == "home\\bob") + ,("\"home\" P. \"/bob\" == \"/bob\"", property $ "home" P. "/bob" == "/bob") + ,("\"home\" W. \"C:\\\\bob\" == \"C:\\\\bob\"", property $ "home" W. "C:\\bob" == "C:\\bob") + ,("\"home\" W. \"/bob\" == \"/bob\"", property $ "home" W. "/bob" == "/bob") + ,("\"home\" W. \"\\\\bob\" == \"\\\\bob\"", property $ "home" W. "\\bob" == "\\bob") + ,("\"C:\\\\home\" W. \"\\\\bob\" == \"\\\\bob\"", property $ "C:\\home" W. "\\bob" == "\\bob") + ,("\"D:\\\\foo\" W. \"C:bar\" == \"C:bar\"", property $ "D:\\foo" W. "C:bar" == "C:bar") + ,("\"C:\\\\foo\" W. \"C:bar\" == \"C:bar\"", property $ "C:\\foo" W. 
"C:bar" == "C:bar") + ,("P.splitPath \"/directory/file.ext\" == [\"/\", \"directory/\", \"file.ext\"]", property $ P.splitPath "/directory/file.ext" == ["/", "directory/", "file.ext"]) + ,("W.splitPath \"/directory/file.ext\" == [\"/\", \"directory/\", \"file.ext\"]", property $ W.splitPath "/directory/file.ext" == ["/", "directory/", "file.ext"]) + ,("concat (P.splitPath x) == x", property $ \(QFilePath x) -> concat (P.splitPath x) == x) + ,("concat (W.splitPath x) == x", property $ \(QFilePath x) -> concat (W.splitPath x) == x) + ,("P.splitPath \"test//item/\" == [\"test//\", \"item/\"]", property $ P.splitPath "test//item/" == ["test//", "item/"]) + ,("W.splitPath \"test//item/\" == [\"test//\", \"item/\"]", property $ W.splitPath "test//item/" == ["test//", "item/"]) + ,("P.splitPath \"test/item/file\" == [\"test/\", \"item/\", \"file\"]", property $ P.splitPath "test/item/file" == ["test/", "item/", "file"]) + ,("W.splitPath \"test/item/file\" == [\"test/\", \"item/\", \"file\"]", property $ W.splitPath "test/item/file" == ["test/", "item/", "file"]) + ,("P.splitPath \"\" == []", property $ P.splitPath "" == []) + ,("W.splitPath \"\" == []", property $ W.splitPath "" == []) + ,("W.splitPath \"c:\\\\test\\\\path\" == [\"c:\\\\\", \"test\\\\\", \"path\"]", property $ W.splitPath "c:\\test\\path" == ["c:\\", "test\\", "path"]) + ,("P.splitPath \"/file/test\" == [\"/\", \"file/\", \"test\"]", property $ P.splitPath "/file/test" == ["/", "file/", "test"]) + ,("P.splitDirectories \"/directory/file.ext\" == [\"/\", \"directory\", \"file.ext\"]", property $ P.splitDirectories "/directory/file.ext" == ["/", "directory", "file.ext"]) + ,("W.splitDirectories \"/directory/file.ext\" == [\"/\", \"directory\", \"file.ext\"]", property $ W.splitDirectories "/directory/file.ext" == ["/", "directory", "file.ext"]) + ,("P.splitDirectories \"test/file\" == [\"test\", \"file\"]", property $ P.splitDirectories "test/file" == ["test", "file"]) + ,("W.splitDirectories \"test/file\" == [\"test\", \"file\"]", property $ W.splitDirectories "test/file" == ["test", "file"]) + ,("P.splitDirectories \"/test/file\" == [\"/\", \"test\", \"file\"]", property $ P.splitDirectories "/test/file" == ["/", "test", "file"]) + ,("W.splitDirectories \"/test/file\" == [\"/\", \"test\", \"file\"]", property $ W.splitDirectories "/test/file" == ["/", "test", "file"]) + ,("W.splitDirectories \"C:\\\\test\\\\file\" == [\"C:\\\\\", \"test\", \"file\"]", property $ W.splitDirectories "C:\\test\\file" == ["C:\\", "test", "file"]) + ,("P.joinPath (P.splitDirectories x) `P.equalFilePath` x", property $ \(QFilePathValidP x) -> P.joinPath (P.splitDirectories x) `P.equalFilePath` x) + ,("W.joinPath (W.splitDirectories x) `W.equalFilePath` x", property $ \(QFilePathValidW x) -> W.joinPath (W.splitDirectories x) `W.equalFilePath` x) + ,("P.splitDirectories \"\" == []", property $ P.splitDirectories "" == []) + ,("W.splitDirectories \"\" == []", property $ W.splitDirectories "" == []) + ,("W.splitDirectories \"C:\\\\test\\\\\\\\\\\\file\" == [\"C:\\\\\", \"test\", \"file\"]", property $ W.splitDirectories "C:\\test\\\\\\file" == ["C:\\", "test", "file"]) + ,("P.splitDirectories \"/test///file\" == [\"/\", \"test\", \"file\"]", property $ P.splitDirectories "/test///file" == ["/", "test", "file"]) + ,("W.splitDirectories \"/test///file\" == [\"/\", \"test\", \"file\"]", property $ W.splitDirectories "/test///file" == ["/", "test", "file"]) + ,("P.joinPath [\"/\", \"directory/\", \"file.ext\"] == \"/directory/file.ext\"", property $ P.joinPath 
["/", "directory/", "file.ext"] == "/directory/file.ext") + ,("W.joinPath [\"/\", \"directory/\", \"file.ext\"] == \"/directory/file.ext\"", property $ W.joinPath ["/", "directory/", "file.ext"] == "/directory/file.ext") + ,("P.joinPath (P.splitPath x) == x", property $ \(QFilePathValidP x) -> P.joinPath (P.splitPath x) == x) + ,("W.joinPath (W.splitPath x) == x", property $ \(QFilePathValidW x) -> W.joinPath (W.splitPath x) == x) + ,("P.joinPath [] == \"\"", property $ P.joinPath [] == "") + ,("W.joinPath [] == \"\"", property $ W.joinPath [] == "") + ,("P.joinPath [\"test\", \"file\", \"path\"] == \"test/file/path\"", property $ P.joinPath ["test", "file", "path"] == "test/file/path") + ,("x == y ==> P.equalFilePath x y", property $ \(QFilePath x) (QFilePath y) -> x == y ==> P.equalFilePath x y) + ,("x == y ==> W.equalFilePath x y", property $ \(QFilePath x) (QFilePath y) -> x == y ==> W.equalFilePath x y) + ,("P.normalise x == P.normalise y ==> P.equalFilePath x y", property $ \(QFilePath x) (QFilePath y) -> P.normalise x == P.normalise y ==> P.equalFilePath x y) + ,("W.normalise x == W.normalise y ==> W.equalFilePath x y", property $ \(QFilePath x) (QFilePath y) -> W.normalise x == W.normalise y ==> W.equalFilePath x y) + ,("P.equalFilePath \"foo\" \"foo/\"", property $ P.equalFilePath "foo" "foo/") + ,("W.equalFilePath \"foo\" \"foo/\"", property $ W.equalFilePath "foo" "foo/") + ,("not (P.equalFilePath \"foo\" \"/foo\")", property $ not (P.equalFilePath "foo" "/foo")) + ,("not (W.equalFilePath \"foo\" \"/foo\")", property $ not (W.equalFilePath "foo" "/foo")) + ,("not (P.equalFilePath \"foo\" \"FOO\")", property $ not (P.equalFilePath "foo" "FOO")) + ,("W.equalFilePath \"foo\" \"FOO\"", property $ W.equalFilePath "foo" "FOO") + ,("not (W.equalFilePath \"C:\" \"C:/\")", property $ not (W.equalFilePath "C:" "C:/")) + ,("P.makeRelative \"/directory\" \"/directory/file.ext\" == \"file.ext\"", property $ P.makeRelative "/directory" "/directory/file.ext" == "file.ext") + ,("W.makeRelative \"/directory\" \"/directory/file.ext\" == \"file.ext\"", property $ W.makeRelative "/directory" "/directory/file.ext" == "file.ext") + ,("P.makeRelative (P.takeDirectory x) x `P.equalFilePath` P.takeFileName x", property $ \(QFilePathValidP x) -> P.makeRelative (P.takeDirectory x) x `P.equalFilePath` P.takeFileName x) + ,("W.makeRelative (W.takeDirectory x) x `W.equalFilePath` W.takeFileName x", property $ \(QFilePathValidW x) -> W.makeRelative (W.takeDirectory x) x `W.equalFilePath` W.takeFileName x) + ,("P.makeRelative x x == \".\"", property $ \(QFilePath x) -> P.makeRelative x x == ".") + ,("W.makeRelative x x == \".\"", property $ \(QFilePath x) -> W.makeRelative x x == ".") + ,("P.equalFilePath x y || (P.isRelative x && P.makeRelative y x == x) || P.equalFilePath (y P. P.makeRelative y x) x", property $ \(QFilePathValidP x) (QFilePathValidP y) -> P.equalFilePath x y || (P.isRelative x && P.makeRelative y x == x) || P.equalFilePath (y P. P.makeRelative y x) x) + ,("W.equalFilePath x y || (W.isRelative x && W.makeRelative y x == x) || W.equalFilePath (y W. W.makeRelative y x) x", property $ \(QFilePathValidW x) (QFilePathValidW y) -> W.equalFilePath x y || (W.isRelative x && W.makeRelative y x == x) || W.equalFilePath (y W. 
W.makeRelative y x) x) + ,("W.makeRelative \"C:\\\\Home\" \"c:\\\\home\\\\bob\" == \"bob\"", property $ W.makeRelative "C:\\Home" "c:\\home\\bob" == "bob") + ,("W.makeRelative \"C:\\\\Home\" \"c:/home/bob\" == \"bob\"", property $ W.makeRelative "C:\\Home" "c:/home/bob" == "bob") + ,("W.makeRelative \"C:\\\\Home\" \"D:\\\\Home\\\\Bob\" == \"D:\\\\Home\\\\Bob\"", property $ W.makeRelative "C:\\Home" "D:\\Home\\Bob" == "D:\\Home\\Bob") + ,("W.makeRelative \"C:\\\\Home\" \"C:Home\\\\Bob\" == \"C:Home\\\\Bob\"", property $ W.makeRelative "C:\\Home" "C:Home\\Bob" == "C:Home\\Bob") + ,("W.makeRelative \"/Home\" \"/home/bob\" == \"bob\"", property $ W.makeRelative "/Home" "/home/bob" == "bob") + ,("W.makeRelative \"/\" \"//\" == \"//\"", property $ W.makeRelative "/" "//" == "//") + ,("P.makeRelative \"/Home\" \"/home/bob\" == \"/home/bob\"", property $ P.makeRelative "/Home" "/home/bob" == "/home/bob") + ,("P.makeRelative \"/home/\" \"/home/bob/foo/bar\" == \"bob/foo/bar\"", property $ P.makeRelative "/home/" "/home/bob/foo/bar" == "bob/foo/bar") + ,("P.makeRelative \"/fred\" \"bob\" == \"bob\"", property $ P.makeRelative "/fred" "bob" == "bob") + ,("P.makeRelative \"/file/test\" \"/file/test/fred\" == \"fred\"", property $ P.makeRelative "/file/test" "/file/test/fred" == "fred") + ,("P.makeRelative \"/file/test\" \"/file/test/fred/\" == \"fred/\"", property $ P.makeRelative "/file/test" "/file/test/fred/" == "fred/") + ,("P.makeRelative \"some/path\" \"some/path/a/b/c\" == \"a/b/c\"", property $ P.makeRelative "some/path" "some/path/a/b/c" == "a/b/c") + ,("P.normalise \"/file/\\\\test////\" == \"/file/\\\\test/\"", property $ P.normalise "/file/\\test////" == "/file/\\test/") + ,("P.normalise \"/file/./test\" == \"/file/test\"", property $ P.normalise "/file/./test" == "/file/test") + ,("P.normalise \"/test/file/../bob/fred/\" == \"/test/file/../bob/fred/\"", property $ P.normalise "/test/file/../bob/fred/" == "/test/file/../bob/fred/") + ,("P.normalise \"../bob/fred/\" == \"../bob/fred/\"", property $ P.normalise "../bob/fred/" == "../bob/fred/") + ,("P.normalise \"./bob/fred/\" == \"bob/fred/\"", property $ P.normalise "./bob/fred/" == "bob/fred/") + ,("W.normalise \"c:\\\\file/bob\\\\\" == \"C:\\\\file\\\\bob\\\\\"", property $ W.normalise "c:\\file/bob\\" == "C:\\file\\bob\\") + ,("W.normalise \"c:\\\\\" == \"C:\\\\\"", property $ W.normalise "c:\\" == "C:\\") + ,("W.normalise \"C:.\\\\\" == \"C:\"", property $ W.normalise "C:.\\" == "C:") + ,("W.normalise \"\\\\\\\\server\\\\test\" == \"\\\\\\\\server\\\\test\"", property $ W.normalise "\\\\server\\test" == "\\\\server\\test") + ,("W.normalise \"//server/test\" == \"\\\\\\\\server\\\\test\"", property $ W.normalise "//server/test" == "\\\\server\\test") + ,("W.normalise \"c:/file\" == \"C:\\\\file\"", property $ W.normalise "c:/file" == "C:\\file") + ,("W.normalise \"/file\" == \"\\\\file\"", property $ W.normalise "/file" == "\\file") + ,("W.normalise \"\\\\\" == \"\\\\\"", property $ W.normalise "\\" == "\\") + ,("W.normalise \"/./\" == \"\\\\\"", property $ W.normalise "/./" == "\\") + ,("P.normalise \".\" == \".\"", property $ P.normalise "." == ".") + ,("W.normalise \".\" == \".\"", property $ W.normalise "." == ".") + ,("P.normalise \"./\" == \"./\"", property $ P.normalise "./" == "./") + ,("P.normalise \"./.\" == \"./\"", property $ P.normalise "./." 
== "./") + ,("P.normalise \"/./\" == \"/\"", property $ P.normalise "/./" == "/") + ,("P.normalise \"/\" == \"/\"", property $ P.normalise "/" == "/") + ,("P.normalise \"bob/fred/.\" == \"bob/fred/\"", property $ P.normalise "bob/fred/." == "bob/fred/") + ,("P.normalise \"//home\" == \"/home\"", property $ P.normalise "//home" == "/home") + ,("P.isValid \"\" == False", property $ P.isValid "" == False) + ,("W.isValid \"\" == False", property $ W.isValid "" == False) + ,("P.isValid \"\\0\" == False", property $ P.isValid "\0" == False) + ,("W.isValid \"\\0\" == False", property $ W.isValid "\0" == False) + ,("P.isValid \"/random_ path:*\" == True", property $ P.isValid "/random_ path:*" == True) + ,("P.isValid x == not (null x)", property $ \(QFilePath x) -> P.isValid x == not (null x)) + ,("W.isValid \"c:\\\\test\" == True", property $ W.isValid "c:\\test" == True) + ,("W.isValid \"c:\\\\test:of_test\" == False", property $ W.isValid "c:\\test:of_test" == False) + ,("W.isValid \"test*\" == False", property $ W.isValid "test*" == False) + ,("W.isValid \"c:\\\\test\\\\nul\" == False", property $ W.isValid "c:\\test\\nul" == False) + ,("W.isValid \"c:\\\\test\\\\prn.txt\" == False", property $ W.isValid "c:\\test\\prn.txt" == False) + ,("W.isValid \"c:\\\\nul\\\\file\" == False", property $ W.isValid "c:\\nul\\file" == False) + ,("W.isValid \"\\\\\\\\\" == False", property $ W.isValid "\\\\" == False) + ,("W.isValid \"\\\\\\\\\\\\foo\" == False", property $ W.isValid "\\\\\\foo" == False) + ,("W.isValid \"\\\\\\\\?\\\\D:file\" == False", property $ W.isValid "\\\\?\\D:file" == False) + ,("W.isValid \"foo\\tbar\" == False", property $ W.isValid "foo\tbar" == False) + ,("W.isValid \"nul .txt\" == False", property $ W.isValid "nul .txt" == False) + ,("W.isValid \" nul.txt\" == True", property $ W.isValid " nul.txt" == True) + ,("P.isValid (P.makeValid x)", property $ \(QFilePath x) -> P.isValid (P.makeValid x)) + ,("W.isValid (W.makeValid x)", property $ \(QFilePath x) -> W.isValid (W.makeValid x)) + ,("P.isValid x ==> P.makeValid x == x", property $ \(QFilePath x) -> P.isValid x ==> P.makeValid x == x) + ,("W.isValid x ==> W.makeValid x == x", property $ \(QFilePath x) -> W.isValid x ==> W.makeValid x == x) + ,("P.makeValid \"\" == \"_\"", property $ P.makeValid "" == "_") + ,("W.makeValid \"\" == \"_\"", property $ W.makeValid "" == "_") + ,("P.makeValid \"file\\0name\" == \"file_name\"", property $ P.makeValid "file\0name" == "file_name") + ,("W.makeValid \"file\\0name\" == \"file_name\"", property $ W.makeValid "file\0name" == "file_name") + ,("W.makeValid \"c:\\\\already\\\\/valid\" == \"c:\\\\already\\\\/valid\"", property $ W.makeValid "c:\\already\\/valid" == "c:\\already\\/valid") + ,("W.makeValid \"c:\\\\test:of_test\" == \"c:\\\\test_of_test\"", property $ W.makeValid "c:\\test:of_test" == "c:\\test_of_test") + ,("W.makeValid \"test*\" == \"test_\"", property $ W.makeValid "test*" == "test_") + ,("W.makeValid \"c:\\\\test\\\\nul\" == \"c:\\\\test\\\\nul_\"", property $ W.makeValid "c:\\test\\nul" == "c:\\test\\nul_") + ,("W.makeValid \"c:\\\\test\\\\prn.txt\" == \"c:\\\\test\\\\prn_.txt\"", property $ W.makeValid "c:\\test\\prn.txt" == "c:\\test\\prn_.txt") + ,("W.makeValid \"c:\\\\test/prn.txt\" == \"c:\\\\test/prn_.txt\"", property $ W.makeValid "c:\\test/prn.txt" == "c:\\test/prn_.txt") + ,("W.makeValid \"c:\\\\nul\\\\file\" == \"c:\\\\nul_\\\\file\"", property $ W.makeValid "c:\\nul\\file" == "c:\\nul_\\file") + ,("W.makeValid \"\\\\\\\\\\\\foo\" == \"\\\\\\\\drive\"", property $ 
W.makeValid "\\\\\\foo" == "\\\\drive") + ,("W.makeValid \"\\\\\\\\?\\\\D:file\" == \"\\\\\\\\?\\\\D:\\\\file\"", property $ W.makeValid "\\\\?\\D:file" == "\\\\?\\D:\\file") + ,("W.makeValid \"nul .txt\" == \"nul _.txt\"", property $ W.makeValid "nul .txt" == "nul _.txt") + ,("W.isRelative \"path\\\\test\" == True", property $ W.isRelative "path\\test" == True) + ,("W.isRelative \"c:\\\\test\" == False", property $ W.isRelative "c:\\test" == False) + ,("W.isRelative \"c:test\" == True", property $ W.isRelative "c:test" == True) + ,("W.isRelative \"c:\\\\\" == False", property $ W.isRelative "c:\\" == False) + ,("W.isRelative \"c:/\" == False", property $ W.isRelative "c:/" == False) + ,("W.isRelative \"c:\" == True", property $ W.isRelative "c:" == True) + ,("W.isRelative \"\\\\\\\\foo\" == False", property $ W.isRelative "\\\\foo" == False) + ,("W.isRelative \"\\\\\\\\?\\\\foo\" == False", property $ W.isRelative "\\\\?\\foo" == False) + ,("W.isRelative \"\\\\\\\\?\\\\UNC\\\\foo\" == False", property $ W.isRelative "\\\\?\\UNC\\foo" == False) + ,("W.isRelative \"/foo\" == True", property $ W.isRelative "/foo" == True) + ,("W.isRelative \"\\\\foo\" == True", property $ W.isRelative "\\foo" == True) + ,("P.isRelative \"test/path\" == True", property $ P.isRelative "test/path" == True) + ,("P.isRelative \"/test\" == False", property $ P.isRelative "/test" == False) + ,("P.isRelative \"/\" == False", property $ P.isRelative "/" == False) + ,("P.isAbsolute x == not (P.isRelative x)", property $ \(QFilePath x) -> P.isAbsolute x == not (P.isRelative x)) + ,("W.isAbsolute x == not (W.isRelative x)", property $ \(QFilePath x) -> W.isAbsolute x == not (W.isRelative x)) + ] diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/tests/TestUtil.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/tests/TestUtil.hs new file mode 100644 index 0000000000..b237acd99e --- /dev/null +++ b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/tests/TestUtil.hs @@ -0,0 +1,52 @@ + +module TestUtil( + (==>), QFilePath(..), QFilePathValidW(..), QFilePathValidP(..), + module Test.QuickCheck, + module Data.List, + module Data.Maybe + ) where + +import Test.QuickCheck hiding ((==>)) +import Data.List +import Data.Maybe +import Control.Monad +import qualified System.FilePath.Windows as W +import qualified System.FilePath.Posix as P + +infixr 0 ==> +a ==> b = not a || b + + +newtype QFilePathValidW = QFilePathValidW FilePath deriving Show + +instance Arbitrary QFilePathValidW where + arbitrary = fmap (QFilePathValidW . W.makeValid) arbitraryFilePath + shrink (QFilePathValidW x) = shrinkValid QFilePathValidW W.makeValid x + +newtype QFilePathValidP = QFilePathValidP FilePath deriving Show + +instance Arbitrary QFilePathValidP where + arbitrary = fmap (QFilePathValidP . P.makeValid) arbitraryFilePath + shrink (QFilePathValidP x) = shrinkValid QFilePathValidP P.makeValid x + +newtype QFilePath = QFilePath FilePath deriving Show + +instance Arbitrary QFilePath where + arbitrary = fmap QFilePath arbitraryFilePath + shrink (QFilePath x) = shrinkValid QFilePath id x + + +-- | Generate an arbitrary FilePath use a few special (interesting) characters. +arbitraryFilePath :: Gen FilePath +arbitraryFilePath = sized $ \n -> do + k <- choose (0,n) + replicateM k $ elements "?./:\\a ;_" + +-- | Shrink, but also apply a validity function. Try and make shorter, or use more +-- @a@ (since @a@ is pretty dull), but make sure you terminate even after valid. 
+shrinkValid :: (FilePath -> a) -> (FilePath -> FilePath) -> FilePath -> [a] +shrinkValid wrap valid o = + [ wrap y + | y <- map valid $ shrinkList (\x -> ['a' | x /= 'a']) o + , length y < length o || (length y == length o && countA y > countA o)] + where countA = length . filter (== 'a') diff --git a/test/integration/tests/mutable-deps/files/files.cabal b/test/integration/tests/mutable-deps/files/files.cabal new file mode 100644 index 0000000000..cdd7a98a9a --- /dev/null +++ b/test/integration/tests/mutable-deps/files/files.cabal @@ -0,0 +1,17 @@ +name: files +version: 1.0.0 +build-type: Simple +cabal-version: >= 1.10 + +library + hs-source-dirs: src + exposed-modules: Files + build-depends: base + , filemanip + default-language: Haskell2010 + +executable test-exe + hs-source-dirs: app + main-is: Main.hs + build-depends: base, files + default-language: Haskell2010 \ No newline at end of file diff --git a/test/integration/tests/mutable-deps/files/src/Files.hs b/test/integration/tests/mutable-deps/files/src/Files.hs new file mode 100644 index 0000000000..5e3452f0b5 --- /dev/null +++ b/test/integration/tests/mutable-deps/files/src/Files.hs @@ -0,0 +1,6 @@ +module Files where + +import System.FilePath.Glob + +allCFiles :: IO [FilePath] +allCFiles = namesMatching "*.c" diff --git a/test/integration/tests/mutable-deps/files/stack.yaml b/test/integration/tests/mutable-deps/files/stack.yaml new file mode 100644 index 0000000000..0b1ec10e62 --- /dev/null +++ b/test/integration/tests/mutable-deps/files/stack.yaml @@ -0,0 +1,6 @@ +resolver: lts-11.22 +packages: +- . +extra-deps: +- ./filepath-1.4.1.2 +- directory-1.3.0.2 From b6bb6b2ef4df8237e1d0beda06bcb372721a7dec Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Thu, 28 Feb 2019 18:13:31 +0300 Subject: [PATCH 64/80] Hlint fixes --- .../files/filepath-1.4.1.2/System/FilePath/Internal.hs | 1 + .../tests/mutable-deps/files/filepath-1.4.1.2/tests/TestGen.hs | 1 + test/integration/tests/proper-rebuilds/Main.hs | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Internal.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Internal.hs index 4a376b33b1..5a431e6626 100644 --- a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Internal.hs +++ b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Internal.hs @@ -1,3 +1,4 @@ +{-# ANN module "HLint: ignore" #-} #if __GLASGOW_HASKELL__ >= 704 {-# LANGUAGE Safe #-} #endif diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/tests/TestGen.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/tests/TestGen.hs index 848ae5b7c2..13aba3e2d5 100644 --- a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/tests/TestGen.hs +++ b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/tests/TestGen.hs @@ -1,3 +1,4 @@ +{-# ANN module "HLint: ignore" #-} -- GENERATED CODE: See ../Generate.hs module TestGen(tests) where import TestUtil diff --git a/test/integration/tests/proper-rebuilds/Main.hs b/test/integration/tests/proper-rebuilds/Main.hs index 229fd86711..1ff1f0fed2 100644 --- a/test/integration/tests/proper-rebuilds/Main.hs +++ b/test/integration/tests/proper-rebuilds/Main.hs @@ -7,7 +7,7 @@ main :: IO () main = do let expectRecompilation stderr = unless ("files-1.0.0: build" `isInfixOf` stderr) $ - error $ "package recompilation was expected" + error "package recompilation was expected" expectNoRecompilation 
stderr = when ("files-1.0.0: build" `isInfixOf` stderr) $ error "package recompilation was not expected" From fe99d884bf11bf76f250a52f372ed4a51f90df05 Mon Sep 17 00:00:00 2001 From: favonia Date: Sat, 2 Mar 2019 12:52:00 -0600 Subject: [PATCH 65/80] Add the missing "https" in install_and_upgrade.md. --- doc/install_and_upgrade.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/install_and_upgrade.md b/doc/install_and_upgrade.md index b3cb0844c4..3df5724415 100644 --- a/doc/install_and_upgrade.md +++ b/doc/install_and_upgrade.md @@ -21,7 +21,7 @@ future, we are open to supporting more OSes (to request one, please Binary packages are signed with this [signing key](SIGNING_KEY.md). If you are writing a script that needs to download the latest binary, you can -use URLs like `https://get.haskellstack.org/stable/.` (e.g. //get.haskellstack.org/stable/linux-x86_64.tar.gz) that always point to the latest stable release. +use URLs like `https://get.haskellstack.org/stable/.` (e.g. https://get.haskellstack.org/stable/linux-x86_64.tar.gz) that always point to the latest stable release. ## Windows From 6afccc9e6ab3b16f7c651c4353424763911a728c Mon Sep 17 00:00:00 2001 From: Michael Snoyman Date: Mon, 4 Mar 2019 10:26:05 +0200 Subject: [PATCH 66/80] Add documentation for lock files --- doc/lock_files.md | 239 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 239 insertions(+) create mode 100644 doc/lock_files.md diff --git a/doc/lock_files.md b/doc/lock_files.md new file mode 100644 index 0000000000..739bcf76a3 --- /dev/null +++ b/doc/lock_files.md @@ -0,0 +1,239 @@ +
+ +# Lock Files + +Stack attempts to provide reproducible build plans. This involves +reproducibly getting the exact same contents of source packages and +configuration options (like cabal flags and GHC options) for a given +set of input files. There are a few problems with making this work: + +* Entering all of the information to fully provide reproducibility is + tedious. This would include things like Hackage revisions, hashes of + remote tarballs, etc. Users don't want to enter this information. +* Many operations in Stack rely upon a "snapshot hash," which + transitively includes the completed information for all of these + dependencies. If any of that information is missing when parsing the + `stack.yaml` file or snapshot files, it could be expensive for Stack + to calculate it. + +To address this, we follow the (fairly standard) approach of having a +_lock file_. The goal of the lock file is to cache completed +information about all packages and snapshot files so that: + +* These files can be stored in source control +* Users on other machines can reuse these lock files and get identical + build plans +* Rerunning `stack build` in the future is deterministic in the build + plan, not depending on mutable state in the world like Hackage + revisions + * **NOTE** If, for example, a tarball available remotely is + deleted or the hash changes, it will not be possible for Stack + to perform the build. However, by deterministic, we mean it + either performs the same build or fails, never accidentally + doing something different. +* Stack can quickly determine the build plan in the common case of no + changes to `stack.yaml` or snapshot files + +This document explains the contents of a lock file, how they are used, +and how they are created and updated. + +## stack.yaml and snapshot files + +Relevant to this discussion, the `stack.yaml` file specifies: + +* Resolver (the parent snapshot) +* Compiler override +* `extra-deps` +* Flags +* GHC options +* Hidden packages + +The resolver can either specify a compiler version or another snapshot +file. This snapshot file can contain the same information referenced +above for a `stack.yaml`, with the following differences: + +* The `extra-deps` are called `packages` +* Drop packages can be included + +Some of this information is, by its nature, complete. For example, the +"flags" field cannot be influenced by anything outside of the file +itself. + +On the other hand, some information in these files can be +incomplete. Consider: + +```yaml +resolver: lts-13.9 +packages: [] +extra-deps: +- https://hackage.haskell.org/package/acme-missiles-0.3.tar.gz +``` + +This information is _incomplete_, since the contents of that URL may +change in the future. Instead, you could specify enough information in +the `stack.yaml` file to fully resolve that package. That looks like: + +```yaml +extra-deps: +- size: 1442 + url: https://hackage.haskell.org/package/acme-missiles-0.3.tar.gz + cabal-file: + size: 613 + sha256: 2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1 + name: acme-missiles + version: '0.3' + sha256: e563d8b524017a06b32768c4db8eff1f822f3fb22a90320b7e414402647b735b + pantry-tree: + size: 226 + sha256: 614bc0cca76937507ea0a5ccc17a504c997ce458d7f2f9e43b15a10c8eaeb033 +``` + +Users don't particularly feel like writing all of that. Therefore, +it's common to see _incomplete_ information in a `stack.yaml` file. + +Additionally, the `lts-13.9` information is _also_ incomplete. 
While +we assume in general that LTS snapshots never change, there's nothing +that technically prohibits that from happening. Instead, the complete +version of that field is: + +```yaml +resolver: + size: 496662 + url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/13/9.yaml + sha256: 83de9017d911cf7795f19353dba4d04bd24cd40622b7567ff61fc3f7223aa3ea +``` + +Also something people don't feel like writing by hand. + +## Recursive snapshot layers + +Snapshot files can be _recursive_, where `stack.yaml` refers to +`foo.yaml`, which refers to `bar.yaml`, which refers to `baz.yaml`. A +local snapshot file can refer to a remote snapshot file (available via +an HTTP(S) URL). + +We need to encode information from _all_ of these snapshot layers and +the `stack.yaml` file in the lock file, to ensure that we can detect +if anything changes. + +## Performance + +In addition to acting as a pure correctness mechanism, the design of a +lock file given here also works as a performance improvement. Instead +of requiring that all snapshot files be fully parsed on each Stack +invocation, we can store information in the lock file and bypass +parsing of the additional files in the common case of no changes. + +## Lock file contents + +The lock file contains the following information: + +* The full snapshot definition information, including completed + package locations + * **NOTE** This only applies to _immutable_ packages. Mutable + packages are not included in the lock file. +* Completed information for the snapshot locations +* A hash of the `stack.yaml` file +* The snapshot hash, to bypass the need to recalculate this on each + run of Stack + +It looks like the following: + +```yaml +# Lock file, some message about the file being auto-generated +stack-yaml: + sha256: XXXX + size: XXXX # in bytes + +snapshots: + # Starts with the snapshot specified in stack.yaml, + # then continues with the snapshot specified in each + # subsequent snapshot file + - original: + foo.yaml # raw content specified in a snapshot file + completed: + file: foo.yaml + sha256: XXXX # QUESTION: do we really need hashes of local files? + size: XXXX + - original: + lts-13.9 + completed: + size: 496662 + url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/13/9.yaml + sha256: 83de9017d911cf7795f19353dba4d04bd24cd40622b7567ff61fc3f7223aa3ea + +compiler: ghc-X.Y.Z + +packages: + acme-missiles: + location: + # QUESTION: any reason we need to specify which snapshot file it came from? I don't think so... + original: https://hackage.haskell.org/package/acme-missiles-0.3.tar.gz + completed: + size: 1442 + url: https://hackage.haskell.org/package/acme-missiles-0.3.tar.gz + cabal-file: + size: 613 + sha256: 2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1 + name: acme-missiles + version: '0.3' + sha256: e563d8b524017a06b32768c4db8eff1f822f3fb22a90320b7e414402647b735b + pantry-tree: + size: 226 + sha256: 614bc0cca76937507ea0a5ccc17a504c997ce458d7f2f9e43b15a10c8eaeb033 + flags: ... + hidden: true/false + ghc-options: [...] +``` + +**NOTE** The `original` fields may seem superfluous at first. See the +update procedure below for an explanation. + +## Creation + +Whenever a `stack.yaml` file is loaded, Stack checks for a lock file +in the same file path, with a `.lock` extension added. For example, if +you run `stack build --stack-yaml stack-11.yaml`, it will use a lock +file in the location `stack-11.yaml.lock`. 
For the rest of this +document, we'll assume that the files are simply `stack.yaml` and +`stack.yaml.lock`. + +If the lock file does not exist, it will be created by: + +* Loading the `stack.yaml` +* Loading all snapshot files +* Completing all missing information +* Writing out the new `stack.yaml.lock` file + +## Dirtiness checking + +If the `stack.yaml.lock` file exists, its last modification time is +compared against the last modification time of the `stack.yaml` file +and any local snapshot files. If any of those files is more recent +than the `stack.yaml` file, then the update procedure is +triggered. Otherwise, the `stack.yaml.lock` file can be used as the +definition of the snapshot. + +## Update procedure + +The simplest possible implementation is: ignore the lock file entirely +and create a new one following the creation steps above. There's a +significant downside to this, however: it may cause a larger delta in +the lock file than intended, by causing more packages to be +updated. For example, many packages from Hackage may have their +Hackage revision information updated unnecessarily. + +The more complicated update procedure is described below. **QUESTION** +Do we want to go the easy way at first and later implement the more +complicated update procedure? + +1. Create a map from original package location to completed package + location in the lock file +2. Load up each snapshot file +3. For each incomplete package location: + * Lookup the value in the map created in (1) + * If present: use that completed information + * Otherwise: complete the information using the procedure in + "creation" + +This should minimize the number of changes to packages incurred. From f5d6fd6b08bd1d7ff07dd4faafd295c376e3100a Mon Sep 17 00:00:00 2001 From: Michael Snoyman Date: Mon, 4 Mar 2019 16:24:10 +0200 Subject: [PATCH 67/80] Some clarifications from feedback --- doc/lock_files.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/doc/lock_files.md b/doc/lock_files.md index 739bcf76a3..8e83805d7c 100644 --- a/doc/lock_files.md +++ b/doc/lock_files.md @@ -129,7 +129,8 @@ parsing of the additional files in the common case of no changes. The lock file contains the following information: * The full snapshot definition information, including completed - package locations + package locations for both `extra-deps` and packages in + snapshot files * **NOTE** This only applies to _immutable_ packages. Mutable packages are not included in the lock file. @@ -153,7 +154,7 @@ snapshots: foo.yaml # raw content specified in a snapshot file completed: file: foo.yaml - sha256: XXXX # QUESTION: do we really need hashes of local files? + sha256: XXXX size: XXXX - original: lts-13.9 @@ -210,7 +211,8 @@ If the lock file does not exist, it will be created by: If the `stack.yaml.lock` file exists, its last modification time is compared against the last modification time of the `stack.yaml` file and any local snapshot files. If any of those files is more recent -than the `stack.yaml` file, then the update procedure is +than the `stack.yaml` file, and the file hashes in the lock file +do not match the files on the filesystem, then the update procedure is triggered. Otherwise, the `stack.yaml.lock` file can be used as the definition of the snapshot.
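To make the update procedure described above concrete, here is a minimal Haskell sketch of step 3, the cache lookup. It is illustrative only and not Stack's actual code: `RawPackageLocation`, `PackageLocation`, and `completeLocation` are hypothetical stand-ins for the corresponding Pantry machinery that resolves hashes, sizes, and Hackage revisions.

```haskell
import           Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map

-- Hypothetical stand-ins for an incomplete ("raw") package location as
-- written in stack.yaml or a snapshot file, and its completed form.
newtype RawPackageLocation = RawPackageLocation String
  deriving (Eq, Ord, Show)

newtype PackageLocation = PackageLocation String
  deriving (Eq, Show)

-- Step 1: the cache built from the existing lock file, keyed on the
-- original (raw) location exactly as the user wrote it.
type LockCache = Map RawPackageLocation PackageLocation

-- Hypothetical completion action, standing in for the expensive work of
-- fetching a tarball or Hackage metadata to fill in hashes and sizes.
completeLocation :: RawPackageLocation -> IO PackageLocation
completeLocation (RawPackageLocation raw) = pure (PackageLocation raw)

-- Step 3: reuse the cached completion when the raw location is unchanged;
-- otherwise fall back to the full completion procedure from "Creation".
reuseOrComplete :: LockCache -> RawPackageLocation -> IO PackageLocation
reuseOrComplete cache raw =
  case Map.lookup raw cache of
    Just completed -> pure completed        -- unchanged: no network access
    Nothing        -> completeLocation raw  -- new or edited: complete anew
```

Keying the cache on the raw `original` value is what makes the `original` fields in the lock file worth storing: an untouched `extra-deps` entry keeps its previously pinned completion, including its Hackage revision, instead of being re-resolved against whatever Hackage currently serves.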
From 04257a78009739de8bbe2f6e95e8014f3384d18d Mon Sep 17 00:00:00 2001 From: Michael Snoyman Date: Mon, 4 Mar 2019 19:36:43 +0200 Subject: [PATCH 68/80] Another update from @qrilka --- doc/lock_files.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/lock_files.md b/doc/lock_files.md index 8e83805d7c..fc6260158b 100644 --- a/doc/lock_files.md +++ b/doc/lock_files.md @@ -235,7 +235,7 @@ complicated update procedure? 3. For each incomplete package location: * Lookup the value in the map created in (1) * If present: use that completed information - * Otherwise: complete the information using the procedure in - "creation" + * Otherwise: complete the information using the same completion + procedure from Pantry as in "creation" This should minimize the number of changes to packages incurred. From 59086c18140a83be79beb6cad5af5d42350b9e6b Mon Sep 17 00:00:00 2001 From: Michael Snoyman Date: Wed, 6 Mar 2019 12:37:48 +0200 Subject: [PATCH 69/80] Integration test: compiler override does not work Confirmed that this test passes with Stack 1.9.3 and fails with master. --- test/integration/tests/override-compiler/Main.hs | 7 +++++++ test/integration/tests/override-compiler/files/stack.yaml | 3 +++ 2 files changed, 10 insertions(+) create mode 100644 test/integration/tests/override-compiler/Main.hs create mode 100644 test/integration/tests/override-compiler/files/stack.yaml diff --git a/test/integration/tests/override-compiler/Main.hs b/test/integration/tests/override-compiler/Main.hs new file mode 100644 index 0000000000..77e75275a8 --- /dev/null +++ b/test/integration/tests/override-compiler/Main.hs @@ -0,0 +1,7 @@ +import StackTest +import Control.Monad (unless) + +main :: IO () +main = stackCheckStdout ["exec", "--", "ghc", "--numeric-version"] $ \ver -> + -- get rid of the newline character + unless (concat (lines ver) == "8.2.2") $ error $ "Invalid version: " ++ show ver diff --git a/test/integration/tests/override-compiler/files/stack.yaml b/test/integration/tests/override-compiler/files/stack.yaml new file mode 100644 index 0000000000..cbd151a11d --- /dev/null +++ b/test/integration/tests/override-compiler/files/stack.yaml @@ -0,0 +1,3 @@ +resolver: lts-13.10 +compiler: ghc-8.2.2 +packages: [] From 64159d142793cd43f9f4db80e0131e032c49892c Mon Sep 17 00:00:00 2001 From: Michael Snoyman Date: Wed, 6 Mar 2019 14:13:34 +0200 Subject: [PATCH 70/80] Avoid a warning for names in snapshots Since we no longer require this field, the previous code would spit out a warning if it was present. This is extremely noisy for a field that used to be required. Instead: just ignore the field if present. --- subs/pantry/src/Pantry/Types.hs | 1 + 1 file changed, 1 insertion(+) diff --git a/subs/pantry/src/Pantry/Types.hs b/subs/pantry/src/Pantry/Types.hs index 989006c099..67d49e7c3d 100644 --- a/subs/pantry/src/Pantry/Types.hs +++ b/subs/pantry/src/Pantry/Types.hs @@ -1954,6 +1954,7 @@ instance ToJSON RawSnapshotLayer where instance FromJSON (WithJSONWarnings (Unresolved RawSnapshotLayer)) where parseJSON = withObjectWarnings "Snapshot" $ \o -> do + _ :: Maybe Text <- o ..:? "name" -- avoid warnings for old snapshot format mcompiler <- o ..:? "compiler" mresolver <- jsonSubWarningsT $ o ...:? 
["snapshot", "resolver"] unresolvedSnapshotParent <- From 0a9aea1540ca97532f58eb9e1615ae367b6fb548 Mon Sep 17 00:00:00 2001 From: Michael Snoyman Date: Wed, 6 Mar 2019 15:04:08 +0200 Subject: [PATCH 71/80] stack clean does not need ghc-pkg (fixes #4480) When running `stack clean`, the entire .stack-work/dist directory will now be deleted, not just the subdirectory for the current GHC/Cabal combination. --- ChangeLog.md | 3 ++ src/Stack/Clean.hs | 17 ++++++----- src/Stack/Config.hs | 1 - src/Stack/Constants/Config.hs | 28 ++++++++++++++---- src/Stack/Runners.hs | 55 ++++++++++++++--------------------- src/Stack/Setup.hs | 53 +++++---------------------------- src/Stack/Types/Config.hs | 7 ----- src/main/Main.hs | 8 +---- 8 files changed, 65 insertions(+), 107 deletions(-) diff --git a/ChangeLog.md b/ChangeLog.md index c2ceb5744e..9069219be9 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -113,6 +113,9 @@ Other enhancements: downside to this, however: if you have a multifile script, and change one of the dependency modules, Stack will not automatically detect and recompile. +* `stack clean` will delete the entire `.stack-work/dist` directory, + not just the relevant subdirectory for the current GHC version. See + [#4480](https://github.com/commercialhaskell/stack/issues/4480). Bug fixes: diff --git a/src/Stack/Clean.hs b/src/Stack/Clean.hs index c14f583b6c..6a8087649f 100644 --- a/src/Stack/Clean.hs +++ b/src/Stack/Clean.hs @@ -15,7 +15,7 @@ import Stack.Prelude import Data.List ((\\),intercalate) import qualified Data.Map.Strict as Map import Path.IO (ignoringAbsence, removeDirRecur) -import Stack.Constants.Config (distDirFromDir, workDirFromDir) +import Stack.Constants.Config (rootDistDirFromDir, workDirFromDir) import Stack.Types.Config import Stack.Types.SourceMap import System.Exit (exitFailure) @@ -23,29 +23,32 @@ import System.Exit (exitFailure) -- | Deletes build artifacts in the current project. -- -- Throws 'StackCleanException'. -clean :: HasEnvConfig env => CleanOpts -> RIO env () +clean :: HasBuildConfig env => CleanOpts -> RIO env () clean cleanOpts = do - failures <- mapM cleanDir =<< dirsToDelete cleanOpts + toDelete <- dirsToDelete cleanOpts + logDebug $ "Need to delete: " <> fromString (show (map toFilePath toDelete)) + failures <- mapM cleanDir toDelete when (or failures) $ liftIO exitFailure where - cleanDir dir = + cleanDir dir = do + logDebug $ "Deleting directory: " <> fromString (toFilePath dir) liftIO (ignoringAbsence (removeDirRecur dir) >> return False) `catchAny` \ex -> do logError $ "Exception while recursively deleting " <> fromString (toFilePath dir) <> "\n" <> displayShow ex logError "Perhaps you do not have permission to delete these files or they are in use?" return True -dirsToDelete :: HasEnvConfig env => CleanOpts -> RIO env [Path Abs Dir] +dirsToDelete :: HasBuildConfig env => CleanOpts -> RIO env [Path Abs Dir] dirsToDelete cleanOpts = do packages <- view $ buildConfigL.to (smwProject . bcSMWanted) case cleanOpts of CleanShallow [] -> -- Filter out packages listed as extra-deps - mapM (distDirFromDir . ppRoot) $ Map.elems packages + mapM (rootDistDirFromDir . ppRoot) $ Map.elems packages CleanShallow targets -> do let localPkgNames = Map.keys packages getPkgDir pkgName' = fmap ppRoot (Map.lookup pkgName' packages) case targets \\ localPkgNames of - [] -> mapM distDirFromDir (mapMaybe getPkgDir targets) + [] -> mapM rootDistDirFromDir (mapMaybe getPkgDir targets) xs -> throwM (NonLocalPackages xs) CleanFull -> do pkgWorkDirs <- mapM (workDirFromDir . 
ppRoot) $ Map.elems packages diff --git a/src/Stack/Config.hs b/src/Stack/Config.hs index 3c30adff7a..24293e2fd6 100644 --- a/src/Stack/Config.hs +++ b/src/Stack/Config.hs @@ -660,7 +660,6 @@ loadBuildConfig mproject maresolver mcompiler = do LCSProject _ -> False LCSNoConfig _extraDeps -> False , bcCurator = projectCurator project - , bcDownloadCompiler = WithDownloadCompiler } where getEmptyProject :: Maybe RawSnapshotLocation -> [PackageIdentifierRevision] -> RIO Config Project diff --git a/src/Stack/Constants/Config.hs b/src/Stack/Constants/Config.hs index c1ca27fcab..65dd865fcc 100644 --- a/src/Stack/Constants/Config.hs +++ b/src/Stack/Constants/Config.hs @@ -3,6 +3,7 @@ {-# LANGUAGE OverloadedStrings #-} module Stack.Constants.Config ( distDirFromDir + , rootDistDirFromDir , workDirFromDir , distRelativeDir , imageStagingDir @@ -105,8 +106,26 @@ distDirFromDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env) distDirFromDir fp = liftM (fp </>) distRelativeDir +-- | The directory containing all dist directories, including all +-- different GHC/Cabal combos. +rootDistDirFromDir + :: (MonadReader env m, HasConfig env) + => Path Abs Dir + -> m (Path Abs Dir) +rootDistDirFromDir fp = + liftM (fp </>) rootDistRelativeDir + +-- | Relative directory to the top dist directory, containing +-- individual GHC/Cabal combo as subdirs. +rootDistRelativeDir + :: (MonadReader env m, HasConfig env) + => m (Path Rel Dir) +rootDistRelativeDir = do + workDir <- view workDirL + return $ workDir </> $(mkRelDir "dist") + -- | Package's working directory. -workDirFromDir :: (MonadReader env m, HasEnvConfig env) +workDirFromDir :: (MonadReader env m, HasConfig env) => Path Abs Dir -> m (Path Abs Dir) workDirFromDir fp = view $ workDirL.to (fp </>) @@ -129,11 +148,8 @@ distRelativeDir = do packageIdentifierString $ PackageIdentifier cabalPackageName cabalPkgVer platformAndCabal <- useShaPathOnWindows (platform </> envDir) - workDir <- view workDirL - return $ - workDir </> - $(mkRelDir "dist") </> - platformAndCabal + allDist <- rootDistRelativeDir + return $ allDist </> platformAndCabal -- | Docker sandbox from project root. projectDockerSandboxDir :: (MonadReader env m, HasConfig env) diff --git a/src/Stack/Runners.hs b/src/Stack/Runners.hs index 20a2d25e1c..37866a5b16 100644 --- a/src/Stack/Runners.hs +++ b/src/Stack/Runners.hs @@ -11,9 +11,7 @@ module Stack.Runners , withMiniConfigAndLock , withBuildConfigAndLock , withDefaultBuildConfigAndLock - , withDefaultBuildConfigAndLockNoDocker - , withBuildConfigAndLockInClean - , withBuildConfigAndLockNoDockerInClean + , withCleanConfig , withBuildConfig , withDefaultBuildConfig , withBuildConfigExt @@ -143,7 +141,7 @@ withDefaultBuildConfigAndLock -> (Maybe FileLock -> RIO EnvConfig ()) -> IO () withDefaultBuildConfigAndLock go inner = - withBuildConfigExt WithDocker WithDownloadCompiler go AllowNoTargets defaultBuildOptsCLI Nothing inner Nothing + withBuildConfigExt WithDocker go AllowNoTargets defaultBuildOptsCLI Nothing inner Nothing withBuildConfigAndLock :: GlobalOpts @@ -152,36 +150,28 @@ withBuildConfigAndLock -> (Maybe FileLock -> RIO EnvConfig ()) -> IO () withBuildConfigAndLock go needTargets boptsCLI inner = - withBuildConfigExt WithDocker WithDownloadCompiler go needTargets boptsCLI Nothing inner Nothing + withBuildConfigExt WithDocker go needTargets boptsCLI Nothing inner Nothing --- | See issue #2010 for why this exists. Currently just used for the --- specific case of "stack clean --full". 
-withDefaultBuildConfigAndLockNoDocker - :: GlobalOpts - -> (Maybe FileLock -> RIO EnvConfig ()) - -> IO () -withDefaultBuildConfigAndLockNoDocker go inner = - withBuildConfigExt SkipDocker WithDownloadCompiler go AllowNoTargets defaultBuildOptsCLI Nothing inner Nothing - -withBuildConfigAndLockInClean - :: GlobalOpts - -> (Maybe FileLock -> RIO EnvConfig ()) - -> IO () -withBuildConfigAndLockInClean go inner = - withBuildConfigExt WithDocker SkipDownloadCompiler go AllowNoTargets defaultBuildOptsCLI Nothing inner Nothing - --- | See issue #2010 for why this exists. Currently just used for the --- specific case of "stack clean --full". -withBuildConfigAndLockNoDockerInClean - :: GlobalOpts - -> (Maybe FileLock -> RIO EnvConfig ()) - -> IO () -withBuildConfigAndLockNoDockerInClean go inner = - withBuildConfigExt SkipDocker SkipDownloadCompiler go AllowNoTargets defaultBuildOptsCLI Nothing inner Nothing +-- | A runner specially built for the "stack clean" use case. For some +-- reason (hysterical raisins?), all of the functions in this module +-- which say BuildConfig actually work on an EnvConfig, while the +-- clean command legitimately only needs a BuildConfig. At some point +-- in the future, we could consider renaming everything for more +-- consistency. +-- +-- /NOTE/ This command always runs outside of the Docker environment, +-- since it does not need to run any commands to get information on +-- the project. This is a change as of #4480. For previous behavior, +-- see issue #2010. +withCleanConfig :: GlobalOpts -> RIO BuildConfig () -> IO () +withCleanConfig go inner = + loadConfigWithOpts go $ \lc -> + withUserFileLock go (view stackRootL lc) $ \_lk0 -> do + bconfig <- lcLoadBuildConfig lc $ globalCompiler go + runRIO bconfig inner withBuildConfigExt :: WithDocker - -> WithDownloadCompiler -- ^ bypassed download compiler if SkipDownloadCompiler. -> GlobalOpts -> NeedTargets -> BuildOptsCLI @@ -199,7 +189,7 @@ withBuildConfigExt -- available in this action, since that would require build tools to be -- installed on the host OS. 
-> IO () -withBuildConfigExt skipDocker downloadCompiler go@GlobalOpts{..} needTargets boptsCLI mbefore inner mafter = loadConfigWithOpts go $ \lc -> do +withBuildConfigExt skipDocker go@GlobalOpts{..} needTargets boptsCLI mbefore inner mafter = loadConfigWithOpts go $ \lc -> do withUserFileLock go (view stackRootL lc) $ \lk0 -> do -- A local bit of state for communication between callbacks: curLk <- newIORef lk0 @@ -217,8 +207,7 @@ withBuildConfigExt skipDocker downloadCompiler go@GlobalOpts{..} needTargets bop let inner'' lk = do bconfig <- lcLoadBuildConfig lc globalCompiler - let bconfig' = bconfig { bcDownloadCompiler = downloadCompiler } - envConfig <- runRIO bconfig' (setupEnv needTargets boptsCLI Nothing) + envConfig <- runRIO bconfig (setupEnv needTargets boptsCLI Nothing) runRIO envConfig (inner' lk) let getCompilerVersion = loadCompilerVersion go lc diff --git a/src/Stack/Setup.hs b/src/Stack/Setup.hs index c25d4cc0ec..49f53a9c57 100644 --- a/src/Stack/Setup.hs +++ b/src/Stack/Setup.hs @@ -63,7 +63,7 @@ import qualified Data.Yaml as Yaml import Distribution.System (OS, Arch (..), Platform (..)) import qualified Distribution.System as Cabal import Distribution.Text (simpleParse) -import Distribution.Types.PackageName (mkPackageName, unPackageName) +import Distribution.Types.PackageName (mkPackageName) import Distribution.Version (mkVersion) import Lens.Micro (set) import Network.HTTP.StackClient (CheckHexDigest (..), DownloadRequest (..), HashCheck (..), @@ -87,7 +87,6 @@ import Stack.Config (loadConfig) import Stack.Constants import Stack.Constants.Config (distRelativeDir) import Stack.GhcPkg (createDatabase, getCabalPkgVer, getGlobalDB, mkGhcPackagePath, ghcPkgPathEnvVar) -import Stack.PackageDump (DumpPackage (..)) import Stack.Prelude hiding (Display (..)) import Stack.SourceMap import Stack.Setup.Installed @@ -96,7 +95,6 @@ import Stack.Types.Compiler import Stack.Types.CompilerBuild import Stack.Types.Config import Stack.Types.Docker -import Stack.Types.GhcPkgId (parseGhcPkgId) import Stack.Types.Runner import Stack.Types.SourceMap import Stack.Types.Version @@ -246,10 +244,7 @@ setupEnv needTargets boptsCLI mResolveMissingGHC = do , soptsGHCJSBootOpts = ["--clean"] } - (mghcBin, mCompilerBuild, _) <- - case bcDownloadCompiler bc of - SkipDownloadCompiler -> return (Nothing, Nothing, False) - WithDownloadCompiler -> ensureCompiler sopts + (mghcBin, mCompilerBuild, _) <- ensureCompiler sopts -- Modify the initial environment to include the GHC path, if a local GHC -- is being used @@ -275,45 +270,11 @@ setupEnv needTargets boptsCLI mResolveMissingGHC = do bcPath = set envOverrideSettingsL (\_ -> return menv) $ set processContextL menv bc sourceMap <- runRIO bcPath $ do - (smActual, prunedActual) <- case bcDownloadCompiler bc of - SkipDownloadCompiler -> do - -- FIXME temprorary version, should be resolved the same way as getCompilerVersion above - sma <- actualFromHints (bcSMWanted bc) compilerVer - let noDepsDump :: PackageName -> a -> DumpedGlobalPackage - noDepsDump pname _ = DumpPackage - { dpGhcPkgId = fromMaybe (error "bad package name") $ - parseGhcPkgId (T.pack $ unPackageName pname) - , dpPackageIdent = PackageIdentifier pname (mkVersion []) - , dpParentLibIdent = Nothing - , dpLicense = Nothing - , dpLibDirs = [] - , dpLibraries = [] - , dpHasExposedModules = True - , dpExposedModules = mempty - , dpDepends = [] - , dpHaddockInterfaces = [] - , dpHaddockHtml = Nothing - , dpIsExposed = True - } - fakeDump = sma { - smaGlobal = Map.mapWithKey noDepsDump (smaGlobal 
sma) - } - fakePruned = sma { - smaGlobal = Map.map (\(GlobalPackageVersion v) -> GlobalPackage v) - (smaGlobal sma) - } - return (fakeDump, fakePruned) - WithDownloadCompiler -> do - sma <- actualFromGhc (bcSMWanted bc) compilerVer - let actualPkgs = Map.keysSet (smaDeps sma) <> - Map.keysSet (smaProject sma) - return ( sma - , sma { - smaGlobal = pruneGlobals (smaGlobal sma) actualPkgs - } - ) - - let haddockDeps = shouldHaddockDeps (configBuild config) + smActual <- actualFromGhc (bcSMWanted bc) compilerVer + let actualPkgs = Map.keysSet (smaDeps smActual) <> + Map.keysSet (smaProject smActual) + prunedActual = smActual { smaGlobal = pruneGlobals (smaGlobal smActual) actualPkgs } + haddockDeps = shouldHaddockDeps (configBuild config) targets <- parseTargets needTargets haddockDeps boptsCLI prunedActual loadSourceMap targets boptsCLI smActual diff --git a/src/Stack/Types/Config.hs b/src/Stack/Types/Config.hs index 769c12cd8e..9f2ac0c7df 100644 --- a/src/Stack/Types/Config.hs +++ b/src/Stack/Types/Config.hs @@ -83,8 +83,6 @@ module Stack.Types.Config ,LoadConfig(..) -- ** WithDocker ,WithDocker(..) - -- ** WithDownloadCompiler - ,WithDownloadCompiler(..) -- ** Project & ProjectAndConfigMonoid ,Project(..) @@ -495,17 +493,12 @@ data BuildConfig = BuildConfig -- ^ Are we loading from the implicit global stack.yaml? This is useful -- for providing better error messages. , bcCurator :: !(Maybe Curator) - , bcDownloadCompiler :: !WithDownloadCompiler } data WithDocker = SkipDocker | WithDocker -data WithDownloadCompiler - = SkipDownloadCompiler - | WithDownloadCompiler - stackYamlL :: HasBuildConfig env => Lens' env (Path Abs File) stackYamlL = buildConfigL.lens bcStackYaml (\x y -> x { bcStackYaml = y }) diff --git a/src/main/Main.hs b/src/main/Main.hs index 9670bd32d2..53834746e6 100644 --- a/src/main/Main.hs +++ b/src/main/Main.hs @@ -658,12 +658,7 @@ setupCmd sco@SetupCmdOpts{..} go@GlobalOpts{..} = loadConfigWithOpts go $ \lc -> (Just $ munlockFile lk) cleanCmd :: CleanOpts -> GlobalOpts -> IO () -cleanCmd opts go = - -- See issues #2010 and #3468 for why "stack clean --full" is not used - -- within docker. - case opts of - CleanFull{} -> withBuildConfigAndLockNoDockerInClean go (const (clean opts)) - CleanShallow{} -> withBuildConfigAndLockInClean go (const (clean opts)) +cleanCmd opts go = withCleanConfig go (clean opts) -- | Helper for build and install commands buildCmd :: BuildOptsCLI -> GlobalOpts -> IO () @@ -998,7 +993,6 @@ imgDockerCmd (rebuild,images) go@GlobalOpts{..} = loadConfigWithOpts go $ \lc -> let mProjectRoot = lcProjectRoot lc withBuildConfigExt WithDocker - WithDownloadCompiler go NeedTargets defaultBuildOptsCLI From 1cabb987d4214a55fbd5a18a4788c4a4757bd562 Mon Sep 17 00:00:00 2001 From: Michael Snoyman Date: Wed, 6 Mar 2019 15:13:43 +0200 Subject: [PATCH 72/80] Fix up docs With the changes in previous commits, purge will in fact now work with Docker. --- doc/GUIDE.md | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/doc/GUIDE.md b/doc/GUIDE.md index a558176323..1250832e04 100644 --- a/doc/GUIDE.md +++ b/doc/GUIDE.md @@ -286,20 +286,18 @@ You can clean up build artifacts for your project using the `stack clean` and `s ### `stack clean` -`stack clean` deletes the local working directories containing compiler output. +`stack clean` deletes the local working directories containing compiler output. By default, that means the contents of directories in `.stack-work/dist`, for all the `.stack-work` directories within a project. 
Use `stack clean <specific-package>` to delete the output for the package _specific-package_ only. ### `stack purge` -`stack purge` deletes the local stack working directories, including extra-deps, git dependencies and the compiler output (including logs). +`stack purge` deletes the local stack working directories, including extra-deps, git dependencies and the compiler output (including logs). It does not delete any snapshot packages, compilers or programs installed using `stack install`. This essentially -reverts the project to a completely fresh state, as if it had never been built. +reverts the project to a completely fresh state, as if it had never been built. `stack purge` is just a shortcut for `stack clean --full` -- Note: `stack purge` is not available when used in docker - ### The build command The build command is the heart and soul of stack. It is the engine that powers From ba50a70879f72e7185097a7885a6599fb02b7605 Mon Sep 17 00:00:00 2001 From: Kirill Zaborsky Date: Wed, 6 Mar 2019 16:56:21 +0300 Subject: [PATCH 73/80] Enable back compiler override in stack.yaml --- src/Stack/Config.hs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/Stack/Config.hs b/src/Stack/Config.hs index 3c30adff7a..8bce95dd54 100644 --- a/src/Stack/Config.hs +++ b/src/Stack/Config.hs @@ -584,7 +584,8 @@ loadBuildConfig mproject maresolver mcompiler = do , "outside of a real project.\n" ] return (p, dest) let project = project' - { projectResolver = fromMaybe (projectResolver project') mresolver + { projectCompiler = mcompiler <|> projectCompiler project' + , projectResolver = fromMaybe (projectResolver project') mresolver } resolver <- completeSnapshotLocation $ projectResolver project @@ -642,7 +643,7 @@ loadBuildConfig mproject maresolver mcompiler = do throwM $ InvalidGhcOptionsSpecification (Map.keys unusedPkgGhcOptions) let wanted = SMWanted - { smwCompiler = fromMaybe (snapshotCompiler snapshot) mcompiler + { smwCompiler = fromMaybe (snapshotCompiler snapshot) (projectCompiler project) , smwProject = packages , smwDeps = deps , smwSnapshotLocation = projectResolver project From 89481325cfd427ef4c613abafffdeec40c6b671b Mon Sep 17 00:00:00 2001 From: Michael Snoyman Date: Thu, 7 Mar 2019 08:56:40 +0200 Subject: [PATCH 74/80] Add a ChangeLog entry --- ChangeLog.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ChangeLog.md b/ChangeLog.md index 9069219be9..fe1af6c780 100644 --- a/ChangeLog.md +++ b/ChangeLog.md @@ -116,6 +116,8 @@ Other enhancements: * `stack clean` will delete the entire `.stack-work/dist` directory, not just the relevant subdirectory for the current GHC version. See [#4480](https://github.com/commercialhaskell/stack/issues/4480). +* Add `stack purge` as a shortcut for `stack clean --full`. See + [#3863](https://github.com/commercialhaskell/stack/issues/3863). Bug fixes: From c7e49d538e15653b30fd3f196859dac959c40476 Mon Sep 17 00:00:00 2001 From: Sibi Prabakaran Date: Thu, 7 Mar 2019 16:12:32 +0530 Subject: [PATCH 75/80] Fix test failure for script-extra-dep This PR fixes this failure: ``` shellsession Running test script-extra-dep Running: /home/sibi/.local/bin/stack script.hs Error: While constructing the build plan, the following exceptions were encountered: In the dependencies for acme-missiles-0.3: stm needed, but the stack configuration has no specified version (latest matching version is 2.5.0.0) needed since acme-missiles is a build target. 
Some different approaches to resolving this: * Consider trying 'stack solver', which uses the cabal-install solver to attempt to find some working build configuration. This can be convenient when dealing with many complicated constraint errors, but results may be unpredictable. * Recommended action: try adding the following to your extra-deps in /home/sibi/.stack/script/048d6ed9f71bbafeb2cf702cda487d65dbd1010070852364425533166dcd99bc/config.yaml: stm-2.5.0.0@sha256:1fb8bd117550d560d1b33a6404e27fdb090e70921e0f6434722cdbbce20d8256,2086 Plan construction failed. Main.hs: Exited with exit code: ExitFailure 1 CallStack (from HasCallStack): error, called at ../../../lib/StackTest.hs:52:34 in main:StackTest stack, called at ../Main.hs:4:8 in main:Main ``` The `stm` package is one of the dependencies of acme-missiles, so that also needs to be added as part of `extra-dep` for the test to run properly --- test/integration/tests/script-extra-dep/files/script.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/tests/script-extra-dep/files/script.hs b/test/integration/tests/script-extra-dep/files/script.hs index 5652f3a433..99dd964a0d 100644 --- a/test/integration/tests/script-extra-dep/files/script.hs +++ b/test/integration/tests/script-extra-dep/files/script.hs @@ -1,5 +1,5 @@ #!/usr/bin/env stack --- stack --resolver ghc-8.2.2 script --extra-dep acme-missiles-0.3@rev:0 +-- stack --resolver ghc-8.2.2 script --extra-dep acme-missiles-0.3@rev:0 --extra-dep stm-2.5.0.0@rev:0 import Acme.Missiles main :: IO () From 2c08a24061484b52dfa5458427ffb8f05e1418f3 Mon Sep 17 00:00:00 2001 From: Sibi Prabakaran Date: Thu, 7 Mar 2019 16:18:57 +0530 Subject: [PATCH 76/80] Fix test failure for relative-script-snapshots The `stm` package is one of the dependencies of `acme-missiles`, so that also needs to be added as part of the `packages` section of the `snapshot.yaml` file. 
--- .../tests/relative-script-snapshots/files/subdir/snapshot.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/test/integration/tests/relative-script-snapshots/files/subdir/snapshot.yaml b/test/integration/tests/relative-script-snapshots/files/subdir/snapshot.yaml index 6182f40ef9..a4882909c3 100644 --- a/test/integration/tests/relative-script-snapshots/files/subdir/snapshot.yaml +++ b/test/integration/tests/relative-script-snapshots/files/subdir/snapshot.yaml @@ -2,3 +2,4 @@ resolver: ghc-8.2.2 name: snapshot packages: - acme-missiles-0.3@rev:0 +- stm-2.5.0.0@rev:0 From 50b9f85b6531a69889538b37759a1d020bea4851 Mon Sep 17 00:00:00 2001 From: Sibi Prabakaran Date: Thu, 7 Mar 2019 17:16:45 +0530 Subject: [PATCH 77/80] Fix integration test for mutable-deps MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Without this, this results in a build error like this: ``` shellsession $ ./run-single-test.sh mutable-deps Running test mutable-deps Running: /home/sibi/.local/bin/stack build directory-1.3.0.2: unregistering (missing dependencies: filepath) filemanip-0.3.6.3: unregistering (missing dependencies: directory, filepath) filepath-1.4.1.2: unregistering (local file changes: System/FilePath/Internal.hs) files-1.0.0: unregistering (missing dependencies: filemanip) filepath-1.4.1.2: configure (lib) filepath-1.4.1.2: build (lib) -- While building package filepath-1.4.1.2 using: /home/sibi/.stack/setup-exe-cache/x86_64-linux/Cabal-simple_mPHDZzAJ_2.0.1.0_ghc-8.2.2 --builddir=.stack-work/dist/x86_64-linux/Cabal-2.0.1.0 build lib:filepath --ghc-options " -ddump-hi -ddump-to-file" Process exited with code: ExitFailure 1 Logs have been written to: /home/sibi/github/stack/test/integration/tests/mutable-deps/files/.stack-work/logs/filepath-1.4.1.2.log Configuring filepath-1.4.1.2... Preprocessing library for filepath-1.4.1.2.. Building library for filepath-1.4.1.2.. 
/home/sibi/github/stack/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Posix.hs:1:1: error: File name does not match module name: Saw: ‘Main’ Expected: ‘System.FilePath.Posix’ | 1 | {-# LANGUAGE CPP #-} | ^ Main.hs: Exited with exit code: ExitFailure 1 CallStack (from HasCallStack): error, called at ../../../lib/StackTest.hs:132:14 in main:StackTest ``` --- .../files/filepath-1.4.1.2/System/FilePath/Internal.hs | 1 - 1 file changed, 1 deletion(-) diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Internal.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Internal.hs index 5a431e6626..4a376b33b1 100644 --- a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Internal.hs +++ b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Internal.hs @@ -1,4 +1,3 @@ -{-# ANN module "HLint: ignore" #-} #if __GLASGOW_HASKELL__ >= 704 {-# LANGUAGE Safe #-} #endif From 2c5e2360a8a23ab22044e5b24f166fcbfb0ea2b5 Mon Sep 17 00:00:00 2001 From: Sibi Prabakaran Date: Thu, 7 Mar 2019 19:20:37 +0530 Subject: [PATCH 78/80] Fix the style issue --- .../files/filepath-1.4.1.2/System/FilePath/Internal.hs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Internal.hs b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Internal.hs index 4a376b33b1..54a38c37fa 100644 --- a/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Internal.hs +++ b/test/integration/tests/mutable-deps/files/filepath-1.4.1.2/System/FilePath/Internal.hs @@ -1,6 +1,3 @@ -#if __GLASGOW_HASKELL__ >= 704 -{-# LANGUAGE Safe #-} -#endif {-# LANGUAGE PatternGuards #-} -- This template expects CPP definitions for: @@ -146,6 +143,7 @@ pathSeparator = if isWindows then '\\' else '/' -- > Windows: pathSeparators == ['\\', '/'] -- > Posix: pathSeparators == ['/'] -- > pathSeparator `elem` pathSeparators +{-# ANN pathSeparators "HLint: ignore" #-} pathSeparators :: [Char] pathSeparators = if isWindows then "\\/" else "/" @@ -1025,5 +1023,6 @@ breakEnd p = spanEnd (not . p) -- | The stripSuffix function drops the given suffix from a list. It returns -- Nothing if the list did not end with the suffix given, or Just the list -- before the suffix, if it does. 
+{-# ANN stripSuffix "HLint: ignore" #-} stripSuffix :: Eq a => [a] -> [a] -> Maybe [a] stripSuffix xs ys = fmap reverse $ stripPrefix (reverse xs) (reverse ys) From 0912e45b1fe31e17c5a528701a0c5a0812867cc7 Mon Sep 17 00:00:00 2001 From: Michael Snoyman Date: Tue, 12 Mar 2019 16:05:26 +0200 Subject: [PATCH 79/80] SkipDocker data constructor never used --- src/Stack/Runners.hs | 32 +++++++++++++------------------- src/Stack/Types/Config.hs | 7 ------- src/main/Main.hs | 1 - 3 files changed, 13 insertions(+), 27 deletions(-) diff --git a/src/Stack/Runners.hs b/src/Stack/Runners.hs index 37866a5b16..4bb4fccf40 100644 --- a/src/Stack/Runners.hs +++ b/src/Stack/Runners.hs @@ -141,7 +141,7 @@ withDefaultBuildConfigAndLock -> (Maybe FileLock -> RIO EnvConfig ()) -> IO () withDefaultBuildConfigAndLock go inner = - withBuildConfigExt WithDocker go AllowNoTargets defaultBuildOptsCLI Nothing inner Nothing + withBuildConfigExt go AllowNoTargets defaultBuildOptsCLI Nothing inner Nothing withBuildConfigAndLock :: GlobalOpts @@ -150,7 +150,7 @@ withBuildConfigAndLock -> (Maybe FileLock -> RIO EnvConfig ()) -> IO () withBuildConfigAndLock go needTargets boptsCLI inner = - withBuildConfigExt WithDocker go needTargets boptsCLI Nothing inner Nothing + withBuildConfigExt go needTargets boptsCLI Nothing inner Nothing -- | A runner specially built for the "stack clean" use case. For some -- reason (hysterical raisins?), all of the functions in this module @@ -171,8 +171,7 @@ withCleanConfig go inner = runRIO bconfig inner withBuildConfigExt - :: WithDocker - -> GlobalOpts + :: GlobalOpts -> NeedTargets -> BuildOptsCLI -> Maybe (RIO Config ()) @@ -189,7 +188,7 @@ withBuildConfigExt -- available in this action, since that would require build tools to be -- installed on the host OS. -> IO () -withBuildConfigExt skipDocker go@GlobalOpts{..} needTargets boptsCLI mbefore inner mafter = loadConfigWithOpts go $ \lc -> do +withBuildConfigExt go@GlobalOpts{..} needTargets boptsCLI mbefore inner mafter = loadConfigWithOpts go $ \lc -> do withUserFileLock go (view stackRootL lc) $ \lk0 -> do -- A local bit of state for communication between callbacks: curLk <- newIORef lk0 @@ -212,20 +211,15 @@ withBuildConfigExt skipDocker go@GlobalOpts{..} needTargets boptsCLI mbefore inn let getCompilerVersion = loadCompilerVersion go lc runRIO (lcConfig lc) $ - case skipDocker of - SkipDocker -> do - forM_ mbefore id - Nix.reexecWithOptionalShell (lcProjectRoot lc) getCompilerVersion (inner'' lk0) - forM_ mafter id - WithDocker -> Docker.reexecWithOptionalContainer - (lcProjectRoot lc) - mbefore - (runRIO (lcConfig lc) $ - Nix.reexecWithOptionalShell (lcProjectRoot lc) getCompilerVersion (inner'' lk0)) - mafter - (Just $ liftIO $ - do lk' <- readIORef curLk - munlockFile lk') + Docker.reexecWithOptionalContainer + (lcProjectRoot lc) + mbefore + (runRIO (lcConfig lc) $ + Nix.reexecWithOptionalShell (lcProjectRoot lc) getCompilerVersion (inner'' lk0)) + mafter + (Just $ liftIO $ + do lk' <- readIORef curLk + munlockFile lk') -- | Load the configuration. Convenience function used -- throughout this module. diff --git a/src/Stack/Types/Config.hs b/src/Stack/Types/Config.hs index 9f2ac0c7df..eff74f636b 100644 --- a/src/Stack/Types/Config.hs +++ b/src/Stack/Types/Config.hs @@ -81,9 +81,6 @@ module Stack.Types.Config ,defaultLogLevel -- ** LoadConfig ,LoadConfig(..) - -- ** WithDocker - ,WithDocker(..) - -- ** Project & ProjectAndConfigMonoid ,Project(..) ,Curator(..) 
@@ -495,10 +492,6 @@ data BuildConfig = BuildConfig , bcCurator :: !(Maybe Curator) } -data WithDocker - = SkipDocker - | WithDocker - stackYamlL :: HasBuildConfig env => Lens' env (Path Abs File) stackYamlL = buildConfigL.lens bcStackYaml (\x y -> x { bcStackYaml = y }) diff --git a/src/main/Main.hs b/src/main/Main.hs index 53834746e6..9c805ff0ff 100644 --- a/src/main/Main.hs +++ b/src/main/Main.hs @@ -992,7 +992,6 @@ imgDockerCmd :: (Bool, [Text]) -> GlobalOpts -> IO () imgDockerCmd (rebuild,images) go@GlobalOpts{..} = loadConfigWithOpts go $ \lc -> do let mProjectRoot = lcProjectRoot lc withBuildConfigExt - WithDocker go NeedTargets defaultBuildOptsCLI From 91b8bc4da461d7d438df0e2d966135ef6767ac85 Mon Sep 17 00:00:00 2001 From: Michael Snoyman Date: Wed, 13 Mar 2019 11:49:04 +0200 Subject: [PATCH 80/80] Add an integration test for #4324 and #4500 --- .../tests/4324-dot-includes-boot-packages/Main.hs | 9 +++++++++ .../4324-dot-includes-boot-packages/files/.gitignore | 1 + .../4324-dot-includes-boot-packages/files/package.yaml | 7 +++++++ .../4324-dot-includes-boot-packages/files/stack.yaml | 1 + 4 files changed, 18 insertions(+) create mode 100644 test/integration/tests/4324-dot-includes-boot-packages/Main.hs create mode 100644 test/integration/tests/4324-dot-includes-boot-packages/files/.gitignore create mode 100644 test/integration/tests/4324-dot-includes-boot-packages/files/package.yaml create mode 100644 test/integration/tests/4324-dot-includes-boot-packages/files/stack.yaml diff --git a/test/integration/tests/4324-dot-includes-boot-packages/Main.hs b/test/integration/tests/4324-dot-includes-boot-packages/Main.hs new file mode 100644 index 0000000000..493cd2b8ca --- /dev/null +++ b/test/integration/tests/4324-dot-includes-boot-packages/Main.hs @@ -0,0 +1,9 @@ +import StackTest +import Control.Monad (unless) +import Data.List (isInfixOf) + +main :: IO () +main = do + stackCheckStdout ["dot", "--external"] $ \str -> + unless ("\n\"process\" ->" `isInfixOf` str) $ + error "Not showing dependencies of process" diff --git a/test/integration/tests/4324-dot-includes-boot-packages/files/.gitignore b/test/integration/tests/4324-dot-includes-boot-packages/files/.gitignore new file mode 100644 index 0000000000..d43d807c0d --- /dev/null +++ b/test/integration/tests/4324-dot-includes-boot-packages/files/.gitignore @@ -0,0 +1 @@ +*.cabal diff --git a/test/integration/tests/4324-dot-includes-boot-packages/files/package.yaml b/test/integration/tests/4324-dot-includes-boot-packages/files/package.yaml new file mode 100644 index 0000000000..0708d2f2d3 --- /dev/null +++ b/test/integration/tests/4324-dot-includes-boot-packages/files/package.yaml @@ -0,0 +1,7 @@ +name: foo + +dependencies: +- base +- process + +library: {} diff --git a/test/integration/tests/4324-dot-includes-boot-packages/files/stack.yaml b/test/integration/tests/4324-dot-includes-boot-packages/files/stack.yaml new file mode 100644 index 0000000000..a95908b164 --- /dev/null +++ b/test/integration/tests/4324-dot-includes-boot-packages/files/stack.yaml @@ -0,0 +1 @@ +resolver: ghc-8.2.2
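The integration tests above share one pattern: run a stack command through `stackCheckStdout` and assert on its captured stdout. As a closing illustration, here is a sketch of how the final #4324 test could be generalized to several external packages at once. This is only a hypothetical variant, not part of the patch series; it assumes the same `StackTest` helper module used by the tests above, and the extra package names checked here are assumptions about what the dot graph contains.

```haskell
import StackTest (stackCheckStdout)
import Control.Monad (forM_, unless)
import Data.List (isInfixOf)

-- Hypothetical variant of the 4324-dot-includes-boot-packages test:
-- require every listed package to show up as a node in the graph,
-- not just process.
main :: IO ()
main = stackCheckStdout ["dot", "--external"] $ \str ->
  forM_ ["process", "directory", "filepath"] $ \pkg ->
    unless (("\"" ++ pkg ++ "\"") `isInfixOf` str) $
      error ("Dependency graph does not mention " ++ pkg)
```

The original test anchors its needle on an outgoing edge (`"\n\"process\" ->"`); checking only for the quoted node name, as in this sketch, is a weaker but simpler assertion.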