Mirror of https://github.com/commercialhaskell/stackage.git (synced 2026-01-15 08:48:31 +01:00)
commit 4e5ba05a27

Merge branch 'master' into ghc7.10

Conflicts:
    build-constraints.yaml
@@ -1,3 +1,11 @@
+## 0.6.1
+
+* Switch to V2 upload by default
+
+## 0.6.0
+
+* Upload bundle V2 stuff
+
 ## 0.5.2

 * Upload LTS to Hackage with the name LTSHaskell
@@ -14,9 +14,9 @@ ADD debian-bootstrap.sh /tmp/debian-bootstrap.sh
 RUN DEBIAN_FRONTEND=noninteractive bash /tmp/debian-bootstrap.sh
 RUN rm /tmp/debian-bootstrap.sh

-RUN DEBIAN_FRONTEND=noninteractive apt-get install -y cabal-install-1.20 ghc-7.8.4
+RUN DEBIAN_FRONTEND=noninteractive apt-get install -y cabal-install-1.20 ghc-7.8.4 alex-3.1.3 happy-1.19.4

-ENV PATH /home/stackage/.cabal/bin:/usr/local/sbin:/usr/local/bin:/opt/ghc/7.8.4/bin:/opt/cabal/1.20/bin:/usr/sbin:/usr/bin:/sbin:/bin
+ENV PATH /home/stackage/.cabal/bin:/usr/local/sbin:/usr/local/bin:/opt/ghc/7.8.4/bin:/opt/cabal/1.20/bin:/opt/alex/3.1.3/bin:/opt/happy/1.19.4/bin:/usr/sbin:/usr/bin:/sbin:/bin

 RUN cabal update
 ADD . /tmp/stackage
@@ -120,7 +120,7 @@ instance FromJSON PackageConstraints where
         pcBuildBenchmarks <- o .: "build-benchmarks"
         pcFlagOverrides <- Map.mapKeysWith const mkFlagName <$> o .: "flags"
         pcMaintainer <- o .:? "maintainer"
-        pcEnableLibProfile <- fmap (fromMaybe False) (o .:? "library-profiling")
+        pcEnableLibProfile <- fmap (fromMaybe True) (o .:? "library-profiling")
         return PackageConstraints {..}

 -- | The proposed plan from the requirements provided by contributors.
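The hunk above flips the default for the `library-profiling` constraint from `False` to `True`, so profiled libraries are now built unless a package explicitly opts out. A standalone sketch of that parsing behaviour (not the project's actual module; the `Constraints` wrapper and field name below are illustrative):

```haskell
{-# LANGUAGE OverloadedStrings #-}
import Data.Aeson
import Data.Maybe (fromMaybe)

-- Illustrative stand-in for PackageConstraints: only the profiling flag.
newtype Constraints = Constraints { enableLibProfile :: Bool }
    deriving Show

instance FromJSON Constraints where
    parseJSON = withObject "Constraints" $ \o ->
        -- Absent key now defaults to True; an explicit false still opts out.
        Constraints . fromMaybe True <$> o .:? "library-profiling"

main :: IO ()
main = do
    print (decode "{}" :: Maybe Constraints)
    -- Just (Constraints {enableLibProfile = True})
    print (decode "{\"library-profiling\": false}" :: Maybe Constraints)
    -- Just (Constraints {enableLibProfile = False})
```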
@@ -8,6 +8,7 @@ module Stackage.CompleteBuild
     , completeBuild
     , justCheck
     , justUploadNightly
+    , getStackageAuthToken
     ) where

 import Control.Concurrent (threadDelay)
@@ -33,10 +34,14 @@ import System.IO (BufferMode (LineBuffering), hSetBuffering)
 -- | Flags passed in from the command line.
 data BuildFlags = BuildFlags
     { bfEnableTests :: !Bool
+    , bfEnableHaddock :: !Bool
     , bfDoUpload :: !Bool
     , bfEnableLibProfile :: !Bool
+    , bfEnableExecDyn :: !Bool
     , bfVerbose :: !Bool
     , bfSkipCheck :: !Bool
+    , bfUploadV1 :: !Bool
+    , bfServer :: !StackageServer
     } deriving (Show)

 data BuildType = Nightly | LTS BumpType
@@ -55,6 +60,8 @@ data Settings = Settings
     , setArgs :: Text -> UploadBundle -> UploadBundle
     , postBuild :: IO ()
     , distroName :: Text -- ^ distro name on Hackage
+    , snapshotType :: SnapshotType
+    , bundleDest :: FilePath
     }

 nightlyPlanFile :: Text -- ^ day
@@ -66,7 +73,7 @@ nightlySettings :: Text -- ^ day
                 -> Settings
 nightlySettings day plan' = Settings
     { planFile = nightlyPlanFile day
-    , buildDir = fpFromText $ "builds/stackage-nightly-" ++ day
+    , buildDir = fpFromText $ "builds/nightly"
     , logDir = fpFromText $ "logs/stackage-nightly-" ++ day
     , title = \ghcVer -> concat
         [ "Stackage Nightly "
@@ -79,6 +86,8 @@ nightlySettings day plan' = Settings
     , plan = plan'
     , postBuild = return ()
     , distroName = "Stackage"
+    , snapshotType = STNightly
+    , bundleDest = fpFromText $ "stackage-nightly-" ++ day ++ ".bundle"
     }
   where
     slug' = "nightly-" ++ day
@@ -119,7 +128,7 @@ getSettings man (LTS bumpType) = do

     return Settings
         { planFile = newfile
-        , buildDir = fpFromText $ "builds/stackage-lts-" ++ tshow new
+        , buildDir = fpFromText $ "builds/lts"
         , logDir = fpFromText $ "logs/stackage-lts-" ++ tshow new
         , title = \ghcVer -> concat
             [ "LTS Haskell "
@@ -140,6 +149,10 @@ getSettings man (LTS bumpType) = do
                 putStrLn "Pushing to Git repository"
                 git ["push"]
         , distroName = "LTSHaskell"
+        , snapshotType =
+            case new of
+                LTSVer x y -> STLTS x y
+        , bundleDest = fpFromText $ "stackage-lts-" ++ tshow new ++ ".bundle"
         }

 data LTSVer = LTSVer !Int !Int
@@ -203,7 +216,9 @@ getPerformBuild buildFlags Settings {..} = PerformBuild
     , pbJobs = 8
     , pbGlobalInstall = False
     , pbEnableTests = bfEnableTests buildFlags
+    , pbEnableHaddock = bfEnableHaddock buildFlags
     , pbEnableLibProfiling = bfEnableLibProfile buildFlags
+    , pbEnableExecDyn = bfEnableExecDyn buildFlags
     , pbVerbose = bfVerbose buildFlags
     , pbAllowNewer = bfSkipCheck buildFlags
     }
@@ -227,10 +242,23 @@ completeBuild buildType buildFlags = withManager tlsManagerSettings $ \man -> do
     checkBuildPlan plan

     putStrLn "Performing build"
-    performBuild (getPerformBuild buildFlags settings) >>= mapM_ putStrLn
+    let pb = getPerformBuild buildFlags settings
+    performBuild pb >>= mapM_ putStrLn

+    putStrLn $ "Creating bundle (v2) at: " ++ fpToText bundleDest
+    createBundleV2 CreateBundleV2
+        { cb2Plan = plan
+        , cb2Type = snapshotType
+        , cb2DocsDir = pbDocDir pb
+        , cb2Dest = bundleDest
+        }
+
     when (bfDoUpload buildFlags) $
-        finallyUpload settings man
+        finallyUpload
+            (not $ bfUploadV1 buildFlags)
+            (bfServer buildFlags)
+            settings
+            man

 justUploadNightly
     :: Text -- ^ nightly date
@@ -238,41 +266,64 @@ justUploadNightly
 justUploadNightly day = do
     plan <- decodeFileEither (fpToString $ nightlyPlanFile day)
         >>= either throwM return
-    withManager tlsManagerSettings $ finallyUpload $ nightlySettings day plan
+    withManager tlsManagerSettings $ finallyUpload False def $ nightlySettings day plan

+getStackageAuthToken :: IO Text
+getStackageAuthToken = do
+    mtoken <- lookupEnv "STACKAGE_AUTH_TOKEN"
+    case mtoken of
+        Nothing -> decodeUtf8 <$> readFile "/auth-token"
+        Just token -> return $ pack token
+
 -- | The final part of the complete build process: uploading a bundle,
 -- docs and a distro to hackage.
-finallyUpload :: Settings -> Manager -> IO ()
-finallyUpload settings@Settings{..} man = do
+finallyUpload :: Bool -- ^ use v2 upload
+              -> StackageServer
+              -> Settings -> Manager -> IO ()
+finallyUpload useV2 server settings@Settings{..} man = do
     putStrLn "Uploading bundle to Stackage Server"

-    mtoken <- lookupEnv "STACKAGE_AUTH_TOKEN"
-    token <-
-        case mtoken of
-            Nothing -> decodeUtf8 <$> readFile "/auth-token"
-            Just token -> return $ pack token
+    token <- getStackageAuthToken

-    now <- epochTime
-    let ghcVer = display $ siGhcVersion $ bpSystemInfo plan
-    (ident, mloc) <- flip uploadBundle man $ setArgs ghcVer def
-        { ubContents = serverBundle now (title ghcVer) slug plan
-        , ubAuthToken = token
-        }
-    putStrLn $ "New ident: " ++ unSnapshotIdent ident
-    forM_ mloc $ \loc ->
-        putStrLn $ "Track progress at: " ++ loc
+    if useV2
+        then do
+            res <- flip uploadBundleV2 man UploadBundleV2
+                { ub2Server = server
+                , ub2AuthToken = token
+                , ub2Bundle = bundleDest
+                }
+            putStrLn $ "New snapshot available at: " ++ res
+        else do
+            now <- epochTime
+            let ghcVer = display $ siGhcVersion $ bpSystemInfo plan
+            (ident, mloc) <- flip uploadBundle man $ setArgs ghcVer def
+                { ubContents = serverBundle now (title ghcVer) slug plan
+                , ubAuthToken = token
+                }
+            putStrLn $ "New ident: " ++ unSnapshotIdent ident
+            forM_ mloc $ \loc ->
+                putStrLn $ "Track progress at: " ++ loc
+
+            putStrLn "Uploading docs to Stackage Server"
+            res1 <- tryAny $ uploadDocs UploadDocs
+                { udServer = def
+                , udAuthToken = token
+                , udDocs = pbDocDir pb
+                , udSnapshot = ident
+                } man
+            putStrLn $ "Doc upload response: " ++ tshow res1
+
+            putStrLn "Uploading doc map"
+            tryAny (uploadDocMap UploadDocMap
+                { udmServer = def
+                , udmAuthToken = token
+                , udmSnapshot = ident
+                , udmDocDir = pbDocDir pb
+                , udmPlan = plan
+                } man) >>= print

     postBuild `catchAny` print

-    putStrLn "Uploading docs to Stackage Server"
-    res1 <- uploadDocs UploadDocs
-        { udServer = def
-        , udAuthToken = token
-        , udDocs = pbDocDir pb
-        , udSnapshot = ident
-        } man
-    putStrLn $ "Doc upload response: " ++ tshow res1
-
     ecreds <- tryIO $ readFile "/hackage-creds"
     case map encodeUtf8 $ words $ decodeUtf8 $ either (const "") id ecreds of
         [username, password] -> do
@@ -280,14 +331,5 @@ finallyUpload settings@Settings{..} man = do
             res2 <- uploadHackageDistroNamed distroName plan username password man
             putStrLn $ "Distro upload response: " ++ tshow res2
         _ -> putStrLn "No creds found, skipping Hackage distro upload"
-
-    putStrLn "Uploading doc map"
-    uploadDocMap UploadDocMap
-        { udmServer = def
-        , udmAuthToken = token
-        , udmSnapshot = ident
-        , udmDocDir = pbDocDir pb
-        , udmPlan = plan
-        } man >>= print
   where
     pb = getPerformBuild (error "finallyUpload.buildFlags") settings
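The new `getStackageAuthToken` above reads the upload token from the `STACKAGE_AUTH_TOKEN` environment variable and falls back to the `/auth-token` file. A standalone sketch of the same lookup (the `getAuthToken` name and the `T.strip` call are illustrative additions; the variable name and fallback path come from the diff):

```haskell
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import System.Environment (lookupEnv)

-- Prefer the environment variable, fall back to the token file.
getAuthToken :: IO T.Text
getAuthToken = do
    mtoken <- lookupEnv "STACKAGE_AUTH_TOKEN"
    case mtoken of
        Just token -> return (T.pack token)
        Nothing    -> T.strip <$> TIO.readFile "/auth-token"

main :: IO ()
main = getAuthToken >>= TIO.putStrLn
```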
Stackage/GhcPkg.hs (new file, 104 lines)
@@ -0,0 +1,104 @@
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+-- | General commands related to ghc-pkg.
+
+module Stackage.GhcPkg
+    ( setupPackageDatabase
+    ) where
+
+import Data.Conduit
+import qualified Data.Conduit.List as CL
+import Data.Conduit.Process
+import qualified Data.Conduit.Text as CT
+import Data.Maybe
+import Data.Text (Text)
+import qualified Data.Text as T
+import Distribution.Compat.ReadP
+import Distribution.Package
+import Distribution.Text (parse)
+import Filesystem.Path.CurrentOS (FilePath)
+import qualified Filesystem.Path.CurrentOS as FP
+import Data.Map (Map)
+import Data.Version (Version)
+import Stackage.Prelude
+import Filesystem (removeTree)
+
+setupPackageDatabase
+    :: Maybe FilePath -- ^ database location, Nothing if using global DB
+    -> FilePath -- ^ documentation root
+    -> (ByteString -> IO ()) -- ^ logging
+    -> Map PackageName Version -- ^ packages and versions to be installed
+    -> (PackageIdentifier -> IO ()) -- ^ callback to be used when unregistering a package
+    -> IO (Set PackageName) -- ^ packages remaining in the database after cleanup
+setupPackageDatabase mdb docDir log' toInstall onUnregister = do
+    registered1 <- getRegisteredPackages flags
+    forM_ registered1 $ \pi@(PackageIdentifier name version) ->
+        case lookup name toInstall of
+            Just version' | version /= version' -> unregisterPackage log' onUnregister docDir flags pi
+            _ -> return ()
+    broken <- getBrokenPackages flags
+    forM_ broken $ unregisterPackage log' onUnregister docDir flags
+    foldMap (\(PackageIdentifier name _) -> singletonSet name)
+        <$> getRegisteredPackages flags
+  where
+    flags = ghcPkgFlags mdb
+
+ghcPkgFlags :: Maybe FilePath -> [String]
+ghcPkgFlags mdb =
+    "--no-user-package-db" :
+    case mdb of
+        Nothing -> ["--global"]
+        Just fp -> ["--package-db=" ++ fpToString fp]
+
+-- | Get broken packages.
+getBrokenPackages :: [String] -> IO [PackageIdentifier]
+getBrokenPackages flags = do
+    (_,ps) <- sourceProcessWithConsumer
+        (proc
+            "ghc-pkg"
+            ("check" : "--simple-output" : flags))
+        (CT.decodeUtf8 $= CT.lines $= CL.consume)
+    return (mapMaybe parsePackageIdent (T.words (T.unlines ps)))
+
+-- | Get available packages.
+getRegisteredPackages :: [String] -> IO [PackageIdentifier]
+getRegisteredPackages flags = do
+    (_,ps) <- sourceProcessWithConsumer
+        (proc
+            "ghc-pkg"
+            ("list" : "--simple-output" : flags))
+        (CT.decodeUtf8 $= CT.lines $= CL.consume)
+    return (mapMaybe parsePackageIdent (T.words (T.unlines ps)))
+
+-- | Parse a package identifier: foo-1.2.3
+parsePackageIdent :: Text -> Maybe PackageIdentifier
+parsePackageIdent = fmap fst .
+    listToMaybe .
+    filter (null . snd) .
+    readP_to_S parse . T.unpack
+
+-- | Unregister a package.
+unregisterPackage :: (ByteString -> IO ()) -- ^ log func
+                  -> (PackageIdentifier -> IO ()) -- ^ callback to be used when unregistering a package
+                  -> FilePath -- ^ doc directory
+                  -> [String] -> PackageIdentifier -> IO ()
+unregisterPackage log' onUnregister docDir flags ident@(PackageIdentifier name _) = do
+    log' $ "Unregistering " ++ encodeUtf8 (display ident) ++ "\n"
+    onUnregister ident
+
+    -- Delete libraries
+    sourceProcessWithConsumer
+        (proc "ghc-pkg" ("describe" : flags ++ [unpack $ display ident]))
+        (CT.decodeUtf8
+            $= CT.lines
+            $= CL.mapMaybe parseLibraryDir
+            $= CL.mapM_ (void . tryIO . removeTree))
+
+    void (readProcessWithExitCode
+        "ghc-pkg"
+        ("unregister": flags ++ ["--force", unpack $ display name])
+        "")
+
+    void $ tryIO $ removeTree $ docDir </> fpFromText (display ident)
+  where
+    parseLibraryDir = fmap fpFromText . stripPrefix "library-dirs: "
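`getRegisteredPackages` and `getBrokenPackages` in the new module above both shell out to `ghc-pkg ... --simple-output`, whose output is a whitespace-separated list of `name-version` identifiers, and parse it through `Distribution.Text`. A minimal sketch of just that parsing step, using Cabal's `simpleParse` as an assumed stand-in for the `readP_to_S parse` pipeline in the diff:

```haskell
import Data.Maybe (mapMaybe)
import Distribution.Package (PackageIdentifier)
import Distribution.Text (display, simpleParse)

-- Split "--simple-output" text on whitespace and keep the identifiers
-- that parse as name-version pairs.
parseSimpleOutput :: String -> [PackageIdentifier]
parseSimpleOutput = mapMaybe simpleParse . words

main :: IO ()
main = mapM_ (putStrLn . display)
             (parseSimpleOutput "base-4.7.0.2 bytestring-0.10.4.0")
```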
@@ -28,7 +28,9 @@ data InstallFlags = InstallFlags
     , ifJobs :: !Int
     , ifGlobalInstall :: !Bool
     , ifEnableTests :: !Bool
+    , ifEnableHaddock :: !Bool
     , ifEnableLibProfiling :: !Bool
+    , ifEnableExecDyn :: !Bool
     , ifVerbose :: !Bool
     , ifSkipCheck :: !Bool
     } deriving (Show)
@@ -48,7 +50,9 @@ getPerformBuild plan InstallFlags{..} =
     , pbJobs = ifJobs
     , pbGlobalInstall = ifGlobalInstall
     , pbEnableTests = ifEnableTests
+    , pbEnableHaddock = ifEnableHaddock
     , pbEnableLibProfiling = ifEnableLibProfiling
+    , pbEnableExecDyn = ifEnableExecDyn
     , pbVerbose = ifVerbose
     , pbAllowNewer = ifSkipCheck
     }
@@ -19,17 +19,18 @@ import qualified Data.Map as Map
 import Data.NonNull (fromNullable)
 import Filesystem (canonicalizePath, createTree,
                    getWorkingDirectory, isDirectory,
-                   removeTree, rename)
+                   removeTree, rename, isFile, removeFile)
 import Filesystem.Path (parent)
 import qualified Filesystem.Path as F
 import Stackage.BuildConstraints
 import Stackage.BuildPlan
+import Stackage.GhcPkg
 import Stackage.PackageDescription
 import Stackage.Prelude hiding (pi)
 import System.Directory (findExecutable)
 import System.Environment (getEnvironment)
 import System.IO (IOMode (WriteMode),
-                  withBinaryFile)
+                  openBinaryFile)
 import System.IO.Temp (withSystemTempDirectory)

 data BuildException = BuildException (Map PackageName BuildFailure) [Text]
@@ -62,7 +63,9 @@ data PerformBuild = PerformBuild
     , pbGlobalInstall :: Bool
     -- ^ Register packages in the global database
     , pbEnableTests :: Bool
+    , pbEnableHaddock :: Bool
     , pbEnableLibProfiling :: Bool
+    , pbEnableExecDyn :: Bool
     , pbVerbose :: Bool
     , pbAllowNewer :: Bool
     -- ^ Pass --allow-newer to cabal configure
@@ -89,7 +92,7 @@ waitForDeps toolMap packageMap activeComps bp pi action = do
             Nothing
                 | isCoreExe exe -> return ()
                 -- https://github.com/jgm/zip-archive/issues/23
-                -- | otherwise -> throwSTM $ ToolMissing exe
+                -- - | otherwise -> throwSTM $ ToolMissing exe
                 | otherwise -> return ()
             Just packages -> ofoldl1' (<|>) packages
     action
@@ -133,6 +136,10 @@ pbLibDir pb = pbInstallDest pb </> "lib"
 pbDataDir pb = pbInstallDest pb </> "share"
 pbDocDir pb = pbInstallDest pb </> "doc"

+-- | Directory keeping previous result info
+pbPrevResDir :: PerformBuild -> FilePath
+pbPrevResDir pb = pbInstallDest pb </> "prevres"
+
 performBuild :: PerformBuild -> IO [Text]
 performBuild pb = do
     cwd <- getWorkingDirectory
@@ -160,12 +167,13 @@ performBuild' pb@PerformBuild {..} = withBuildDir $ \builddir -> do
         $ \ClosedStream Inherited Inherited -> return ()

     let removeTree' fp = whenM (isDirectory fp) (removeTree fp)
-    mapM_ removeTree' [pbInstallDest, pbLogDir]
+    removeTree' pbLogDir

-    forM_ (pbDatabase pb) $ \db -> do
-        createTree $ parent db
-        withCheckedProcess (proc "ghc-pkg" ["init", fpToString db])
-            $ \ClosedStream Inherited Inherited -> return ()
+    forM_ (pbDatabase pb) $ \db ->
+        unlessM (isFile $ db </> "package.cache") $ do
+            createTree $ parent db
+            withCheckedProcess (proc "ghc-pkg" ["init", fpToString db])
+                $ \ClosedStream Inherited Inherited -> return ()
     pbLog $ encodeUtf8 "Copying built-in Haddocks\n"
     copyBuiltInHaddocks (pbDocDir pb)
     pbLog $ encodeUtf8 "Finished copying built-in Haddocks\n"
@@ -186,7 +194,15 @@ performBuild' pb@PerformBuild {..} = withBuildDir $ \builddir -> do
     env <- getEnvironment
     haddockFiles <- newTVarIO mempty

-    forM_ packageMap $ \pi -> void $ async $ singleBuild pb SingleBuild
+    registeredPackages <- setupPackageDatabase
+        (pbDatabase pb)
+        (pbDocDir pb)
+        pbLog
+        (ppVersion <$> bpPackages pbPlan)
+        (deletePreviousResults pb)
+
+    forM_ packageMap $ \pi -> void $ async $ singleBuild pb registeredPackages
+        SingleBuild
         { sbSem = sem
         , sbErrsVar = errsVar
         , sbWarningsVar = warningsVar
@@ -248,8 +264,10 @@ data SingleBuild = SingleBuild
     , sbHaddockFiles :: TVar (Map Text FilePath) -- ^ package-version, .haddock file
     }

-singleBuild :: PerformBuild -> SingleBuild -> IO ()
-singleBuild pb@PerformBuild {..} SingleBuild {..} =
+singleBuild :: PerformBuild
+            -> Set PackageName -- ^ registered packages
+            -> SingleBuild -> IO ()
+singleBuild pb@PerformBuild {..} registeredPackages SingleBuild {..} =
     withCounter sbActive
     $ handle updateErrs
     $ (`finally` void (atomically $ tryPutTMVar (piResult sbPackageInfo) False))
@@ -261,22 +279,25 @@ singleBuild pb@PerformBuild {..} SingleBuild {..} =
         let wfd comps =
                 waitForDeps sbToolMap sbPackageMap comps pbPlan sbPackageInfo
               . withTSem sbSem
-        wfd libComps buildLibrary
+        withUnpacked <- wfd libComps buildLibrary

-        wfd testComps runTests
+        wfd testComps (runTests withUnpacked)

-    name = display $ piName sbPackageInfo
+    pname = piName sbPackageInfo
+    pident = PackageIdentifier pname (ppVersion $ piPlan sbPackageInfo)
+    name = display pname
     namever = concat
         [ name
         , "-"
        , display $ ppVersion $ piPlan sbPackageInfo
        ]

-    runIn wdir outH cmd args =
-        withCheckedProcess cp $ \ClosedStream UseProvidedHandle UseProvidedHandle ->
+    runIn wdir getOutH cmd args = do
+        outH <- getOutH
+        withCheckedProcess (cp outH) $ \ClosedStream UseProvidedHandle UseProvidedHandle ->
            (return () :: IO ())
      where
-        cp = (proc (unpack $ asText cmd) (map (unpack . asText) args))
+        cp outH = (proc (unpack $ asText cmd) (map (unpack . asText) args))
            { cwd = Just $ fpToString wdir
            , std_out = UseHandle outH
            , std_err = UseHandle outH
@@ -302,8 +323,21 @@ singleBuild pb@PerformBuild {..} SingleBuild {..} =
     testRunOut = pbLogDir </> fpFromText namever </> "test-run.out"

     wf fp inner' = do
-        createTree $ parent fp
-        withBinaryFile (fpToString fp) WriteMode inner'
+        ref <- newIORef Nothing
+        let cleanup = do
+                mh <- readIORef ref
+                forM_ mh hClose
+            getH = do
+                mh <- readIORef ref
+                case mh of
+                    Just h -> return h
+                    Nothing -> mask_ $ do
+                        createTree $ parent fp
+                        h <- openBinaryFile (fpToString fp) WriteMode
+                        writeIORef ref $ Just h
+                        return h
+
+        inner' getH `finally` cleanup

     configArgs = ($ []) $ execWriter $ do
         when pbAllowNewer $ tell' "--allow-newer"
@@ -317,6 +351,7 @@ singleBuild pb@PerformBuild {..} SingleBuild {..} =
             tell' $ "--flags=" ++ flags
         when (pbEnableLibProfiling && pcEnableLibProfile) $
             tell' "--enable-library-profiling"
+        when pbEnableExecDyn $ tell' "--enable-executable-dynamic"
       where
         tell' x = tell (x:)

@@ -330,22 +365,40 @@ singleBuild pb@PerformBuild {..} SingleBuild {..} =

     PackageConstraints {..} = ppConstraints $ piPlan sbPackageInfo

-    buildLibrary = wf libOut $ \outH -> do
+    buildLibrary = wf libOut $ \getOutH -> do
         let run a b = do when pbVerbose $ log' (unwords (a : b))
-                         runChild outH a b
-        log' $ "Unpacking " ++ namever
-        runParent outH "cabal" ["unpack", namever]
+                         runChild getOutH a b

-        log' $ "Configuring " ++ namever
-        run "cabal" $ "configure" : configArgs
+        isUnpacked <- newIORef False
+        let withUnpacked inner = do
+                unlessM (readIORef isUnpacked) $ do
+                    log' $ "Unpacking " ++ namever
+                    runParent getOutH "cabal" ["unpack", namever]
+                    writeIORef isUnpacked True
+                inner

-        log' $ "Building " ++ namever
-        run "cabal" ["build"]
+        isConfiged <- newIORef False
+        let withConfiged inner = withUnpacked $ do
+                unlessM (readIORef isConfiged) $ do
+                    log' $ "Configuring " ++ namever
+                    run "cabal" $ "configure" : configArgs
+                    writeIORef isConfiged True
+                inner

-        log' $ "Copying/registering " ++ namever
-        run "cabal" ["copy"]
-        withMVar sbRegisterMutex $ const $
-            run "cabal" ["register"]
+        prevBuildResult <- getPreviousResult pb Build pident
+        unless (prevBuildResult == PRSuccess) $ withConfiged $ do
+            assert (pname `notMember` registeredPackages) $ do
+                deletePreviousResults pb pident
+
+                log' $ "Building " ++ namever
+                run "cabal" ["build"]
+
+                log' $ "Copying/registering " ++ namever
+                run "cabal" ["copy"]
+                withMVar sbRegisterMutex $ const $
+                    run "cabal" ["register"]
+
+                savePreviousResult pb Build pident True

         -- Even if the tests later fail, we can allow other libraries to build
         -- on top of our successful results
@@ -355,7 +408,11 @@ singleBuild pb@PerformBuild {..} SingleBuild {..} =
         -- dependency's haddocks before this finishes
         atomically $ putTMVar (piResult sbPackageInfo) True

-        when (pcHaddocks /= Don'tBuild && not (null $ sdModules $ ppDesc $ piPlan sbPackageInfo)) $ do
+        prevHaddockResult <- getPreviousResult pb Haddock pident
+        let needHaddock = pbEnableHaddock
+                       && checkPrevResult prevHaddockResult pcHaddocks
+                       && not (null $ sdModules $ ppDesc $ piPlan sbPackageInfo)
+        when needHaddock $ withConfiged $ do
             log' $ "Haddocks " ++ namever
             hfs <- readTVarIO sbHaddockFiles
             let hfsOpts = flip map (mapToList hfs) $ \(pkgVer, hf) -> concat
@@ -389,15 +446,21 @@ singleBuild pb@PerformBuild {..} SingleBuild {..} =
                 $ modifyTVar sbHaddockFiles
                 $ insertMap namever newPath

+            savePreviousResult pb Haddock pident $ either (const False) (const True) eres
             case (eres, pcHaddocks) of
                 (Left e, ExpectSuccess) -> throwM e
                 (Right (), ExpectFailure) -> warn $ namever ++ ": unexpected Haddock success"
                 _ -> return ()

-    runTests = wf testOut $ \outH -> do
-        let run = runChild outH
+        return withUnpacked

-        when (pbEnableTests && pcTests /= Don'tBuild) $ do
+    runTests withUnpacked = wf testOut $ \getOutH -> do
+        let run = runChild getOutH
+
+        prevTestResult <- getPreviousResult pb Test pident
+        let needTest = pbEnableTests
+                    && checkPrevResult prevTestResult pcTests
+        when needTest $ withUnpacked $ do
             log' $ "Test configure " ++ namever
             run "cabal" $ "configure" : "--enable-tests" : configArgs

@@ -408,6 +471,7 @@ singleBuild pb@PerformBuild {..} SingleBuild {..} =
             log' $ "Test run " ++ namever
             run "cabal" ["test", "--log=" ++ fpToText testRunOut]

+            savePreviousResult pb Test pident $ either (const False) (const True) eres
             case (eres, pcTests) of
                 (Left e, ExpectSuccess) -> throwM e
                 (Right (), ExpectFailure) -> warn $ namever ++ ": unexpected test success"
@@ -434,16 +498,6 @@ singleBuild pb@PerformBuild {..} SingleBuild {..} =
 renameOrCopy :: FilePath -> FilePath -> IO ()
 renameOrCopy src dest = rename src dest `catchIO` \_ -> copyDir src dest
-
-copyDir :: FilePath -> FilePath -> IO ()
-copyDir src dest =
-    runResourceT $ sourceDirectoryDeep False src $$ mapM_C go
-  where
-    src' = src </> ""
-    go fp = forM_ (F.stripPrefix src' fp) $ \suffix -> do
-        let dest' = dest </> suffix
-        liftIO $ createTree $ parent dest'
-        sourceFile fp $$ (sinkFile dest' :: Sink ByteString (ResourceT IO) ())

 copyBuiltInHaddocks :: FilePath -> IO ()
 copyBuiltInHaddocks docdir = do
     mghc <- findExecutable "ghc"
@@ -453,3 +507,52 @@ copyBuiltInHaddocks docdir = do
     src <- canonicalizePath
         (parent (fpFromString ghc) </> "../share/doc/ghc/html/libraries")
     copyDir src docdir
+
+------------- Previous results
+
+-- | The previous actions that can be run
+data ResultType = Build | Haddock | Test
+    deriving (Show, Enum, Eq, Ord, Bounded, Read)
+
+-- | The result generated on a previous run
+data PrevResult = PRNoResult | PRSuccess | PRFailure
+    deriving (Show, Enum, Eq, Ord, Bounded, Read)
+
+-- | Check if we should rerun based on a PrevResult and the expected status
+checkPrevResult :: PrevResult -> TestState -> Bool
+checkPrevResult _ Don'tBuild = False
+checkPrevResult PRNoResult _ = True
+checkPrevResult PRSuccess _ = False
+checkPrevResult PRFailure ExpectSuccess = True
+checkPrevResult PRFailure _ = False
+
+withPRPath :: PerformBuild -> ResultType -> PackageIdentifier -> (FilePath -> IO a) -> IO a
+withPRPath pb rt ident inner = do
+    createTree $ parent fp
+    inner fp
+  where
+    fp = pbPrevResDir pb </> fpFromString (show rt) </> fpFromText (display ident)
+
+successBS, failureBS :: ByteString
+successBS = "success"
+failureBS = "failure"
+
+getPreviousResult :: PerformBuild -> ResultType -> PackageIdentifier -> IO PrevResult
+getPreviousResult w x y = withPRPath w x y $ \fp -> do
+    eres <- tryIO $ readFile fp
+    return $ case eres of
+        Right bs
+            | bs == successBS -> PRSuccess
+            | bs == failureBS -> PRFailure
+        _ -> PRNoResult
+
+savePreviousResult :: PerformBuild -> ResultType -> PackageIdentifier -> Bool -> IO ()
+savePreviousResult pb rt ident res =
+    withPRPath pb rt ident $ \fp -> writeFile fp $
+        if res then successBS else failureBS
+
+deletePreviousResults :: PerformBuild -> PackageIdentifier -> IO ()
+deletePreviousResults pb name =
+    forM_ [minBound..maxBound] $ \rt ->
+        withPRPath pb rt name $ \fp ->
+            void $ tryIO $ removeFile fp
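The previous-results machinery added above records a `success`/`failure` marker per package and step (`Build`, `Haddock`, `Test`) under `prevres/`, and `checkPrevResult` decides whether a step has to be rerun. A pure, self-contained restatement of that decision (constructor names follow the diff; `shouldRerun` and the table-printing `main` are illustrative):

```haskell
data PrevResult = PRNoResult | PRSuccess | PRFailure deriving (Show, Eq)
data TestState  = Don'tBuild | ExpectSuccess | ExpectFailure deriving (Show, Eq)

-- Rerun only when the step is expected to be built at all, and the previous
-- run either left no record or failed while success was expected.
shouldRerun :: PrevResult -> TestState -> Bool
shouldRerun _          Don'tBuild    = False
shouldRerun PRNoResult _             = True
shouldRerun PRSuccess  _             = False
shouldRerun PRFailure  ExpectSuccess = True
shouldRerun PRFailure  _             = False

main :: IO ()
main = mapM_ print
    [ (r, s, shouldRerun r s)
    | r <- [PRNoResult, PRSuccess, PRFailure]
    , s <- [Don'tBuild, ExpectSuccess, ExpectFailure]
    ]
```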
@@ -20,6 +20,9 @@ import Distribution.Version as X (Version (..),
                                   VersionRange)
 import Distribution.Version as X (withinRange)
 import qualified Distribution.Version as C
+import Filesystem (createTree)
+import Filesystem.Path (parent)
+import qualified Filesystem.Path as F

 unPackageName :: PackageName -> Text
 unPackageName (PackageName str) = pack str
@@ -101,3 +104,13 @@ topologicalSort toFinal toDeps =
 data TopologicalSortException key = NoEmptyDeps (Map key (Set key))
     deriving (Show, Typeable)
 instance (Show key, Typeable key) => Exception (TopologicalSortException key)
+
+copyDir :: FilePath -> FilePath -> IO ()
+copyDir src dest =
+    runResourceT $ sourceDirectoryDeep False src $$ mapM_C go
+  where
+    src' = src </> ""
+    go fp = forM_ (F.stripPrefix src' fp) $ \suffix -> do
+        let dest' = dest </> suffix
+        liftIO $ createTree $ parent dest'
+        sourceFile fp $$ (sinkFile dest' :: Sink ByteString (ResourceT IO) ())
@@ -7,21 +7,31 @@ module Stackage.ServerBundle
     , epochTime
     , bpAllPackages
     , docsListing
+    , createBundleV2
+    , CreateBundleV2 (..)
+    , SnapshotType (..)
+    , writeIndexStyle
+    , DocMap
+    , PackageDocs (..)
     ) where

 import qualified Codec.Archive.Tar as Tar
 import qualified Codec.Archive.Tar.Entry as Tar
 import qualified Codec.Compression.GZip as GZip
 import qualified Data.Map as M
+import Data.Aeson (ToJSON (..), (.=), object, FromJSON (..), (.:), withObject)
+import System.IO.Temp (withTempDirectory)
 import qualified Data.Yaml as Y
-import Filesystem (isFile)
+import Filesystem (isFile, getWorkingDirectory, listDirectory, isDirectory, canonicalizePath)
 import Foreign.C.Types (CTime (CTime))
 import Stackage.BuildConstraints
 import Stackage.BuildPlan
 import Stackage.Prelude
-import System.IO.Temp (withTempDirectory)
 import qualified System.PosixCompat.Time as PC
 import qualified Text.XML as X
 import Text.XML.Cursor
+import System.PosixCompat.Files (createSymbolicLink)

 -- | Get current time
 epochTime :: IO Tar.EpochTime
@@ -73,13 +83,30 @@ serverBundle time title slug bp@BuildPlan {..} = GZip.compress $ Tar.write
         map (\(PackageName name) -> name)
             (M.keys $ siCorePackages bpSystemInfo)

+-- | Package name is key
+type DocMap = Map Text PackageDocs
+data PackageDocs = PackageDocs
+    { pdVersion :: Text
+    , pdModules :: Map Text [Text]
+    -- ^ module name, path
+    }
+instance ToJSON PackageDocs where
+    toJSON PackageDocs {..} = object
+        [ "version" .= pdVersion
+        , "modules" .= pdModules
+        ]
+instance FromJSON PackageDocs where
+    parseJSON = withObject "PackageDocs" $ \o -> PackageDocs
+        <$> o .: "version"
+        <*> o .: "modules"
+
 docsListing :: BuildPlan
             -> FilePath -- ^ docs directory
-            -> IO ByteString
+            -> IO DocMap
 docsListing bp docsDir =
-    fmap (Y.encode . fold) $ mapM go $ mapToList $ bpAllPackages bp
+    fmap fold $ mapM go $ mapToList $ bpAllPackages bp
   where
-    go :: (PackageName, Version) -> IO (Map Text Y.Value)
+    go :: (PackageName, Version) -> IO DocMap
     go (package, version) = do -- handleAny (const $ return mempty) $ do
         let dirname = fpFromText (concat
                 [ display package
@@ -107,8 +134,138 @@ docsListing bp docsDir =
                 return $ if e
                     then asMap $ singletonMap name [fpToText dirname, href]
                     else mempty
-            return $ singletonMap (display package) $ Y.object
-                [ "version" Y..= display version
-                , "modules" Y..= m
-                ]
+            return $ singletonMap (display package) $ PackageDocs
+                { pdVersion = display version
+                , pdModules = m
+                }
             else return mempty
+
+data SnapshotType = STNightly
+                  | STLTS !Int !Int -- ^ major, minor
+    deriving (Show, Read, Eq, Ord)
+
+instance ToJSON SnapshotType where
+    toJSON STNightly = object
+        [ "type" .= asText "nightly"
+        ]
+    toJSON (STLTS major minor) = object
+        [ "type" .= asText "lts"
+        , "major" .= major
+        , "minor" .= minor
+        ]
+instance FromJSON SnapshotType where
+    parseJSON = withObject "SnapshotType" $ \o -> do
+        t <- o .: "type"
+        case asText t of
+            "nightly" -> return STNightly
+            "lts" -> STLTS
+                <$> o .: "major"
+                <*> o .: "minor"
+            _ -> fail $ "Unknown type for SnapshotType: " ++ unpack t
+
+data CreateBundleV2 = CreateBundleV2
+    { cb2Plan :: BuildPlan
+    , cb2Type :: SnapshotType
+    , cb2DocsDir :: FilePath
+    , cb2Dest :: FilePath
+    }
+
+-- | Create a V2 bundle, which contains the build plan, metadata, docs, and doc
+-- map.
+createBundleV2 :: CreateBundleV2 -> IO ()
+createBundleV2 CreateBundleV2 {..} = do
+    docsDir <- canonicalizePath cb2DocsDir
+    docMap <- docsListing cb2Plan cb2DocsDir
+
+    Y.encodeFile (fpToString $ docsDir </> "build-plan.yaml") cb2Plan
+    Y.encodeFile (fpToString $ docsDir </> "build-type.yaml") cb2Type
+    Y.encodeFile (fpToString $ docsDir </> "docs-map.yaml") docMap
+    void $ writeIndexStyle Nothing cb2DocsDir
+
+    currentDir <- getWorkingDirectory
+    files <- listDirectory docsDir
+
+    let args = "cfJ"
+             : fpToString (currentDir </> cb2Dest)
+             : "--dereference"
+             : map (fpToString . filename) files
+        cp = (proc "tar" args) { cwd = Just $ fpToString docsDir }
+    withCheckedProcess cp $ \ClosedStream Inherited Inherited -> return ()

+writeIndexStyle :: Maybe Text -- ^ snapshot id
+                -> FilePath -- ^ docs dir
+                -> IO [String]
+writeIndexStyle msnapid dir = do
+    dirs <- fmap sort
+          $ runResourceT
+          $ sourceDirectory dir
+         $$ filterMC (liftIO . isDirectory)
+         =$ mapC (fpToString . filename)
+         =$ sinkList
+    writeFile (dir </> "index.html") $ mkIndex
+        (unpack <$> msnapid)
+        dirs
+    writeFile (dir </> "style.css") styleCss
+    return dirs
+
+mkIndex :: Maybe String -> [String] -> String
+mkIndex msnapid dirs = concat
+    [ "<!DOCTYPE html>\n<html lang='en'><head><title>Haddocks index</title>"
+    , "<link rel='stylesheet' href='https://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/css/bootstrap.min.css'>"
+    , "<link rel='stylesheet' href='style.css'>"
+    , "<link rel='shortcut icon' href='http://www.stackage.org/static/img/favicon.ico' />"
+    , "</head>"
+    , "<body><div class='container'>"
+    , "<div class='row'><div class='span12 col-md-12'>"
+    , "<h1>Haddock documentation index</h1>"
+    , flip foldMap msnapid $ \snapid -> concat
+        [ "<p class='return'><a href=\"http://www.stackage.org/stackage/"
+        , snapid
+        , "\">Return to snapshot</a></p>"
+        ]
+    , "<ul>"
+    , concatMap toLI dirs
+    , "</ul></div></div></div></body></html>"
+    ]
+  where
+    toLI name = concat
+        [ "<li><a href='"
+        , name
+        , "/index.html'>"
+        , name
+        , "</a></li>"
+        ]
+
+styleCss :: String
+styleCss = concat
+    [ "@media (min-width: 530px) {"
+    , "ul { -webkit-column-count: 2; -moz-column-count: 2; column-count: 2 }"
+    , "}"
+    , "@media (min-width: 760px) {"
+    , "ul { -webkit-column-count: 3; -moz-column-count: 3; column-count: 3 }"
+    , "}"
+    , "ul {"
+    , " margin-left: 0;"
+    , " padding-left: 0;"
+    , " list-style-type: none;"
+    , "}"
+    , "body {"
+    , " background: #f0f0f0;"
+    , " font-family: 'Lato', sans-serif;"
+    , " text-shadow: 1px 1px 1px #ffffff;"
+    , " font-size: 20px;"
+    , " line-height: 30px;"
+    , " padding-bottom: 5em;"
+    , "}"
+    , "h1 {"
+    , " font-weight: normal;"
+    , " color: #06537d;"
+    , " font-size: 45px;"
+    , "}"
+    , ".return a {"
+    , " color: #06537d;"
+    , " font-style: italic;"
+    , "}"
+    , ".return {"
+    , " margin-bottom: 1em;"
+    , "}"]
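`createBundleV2` above writes the snapshot type next to the build plan and doc map, serialised through the new `SnapshotType` JSON instances. A quick round-trip sketch of that encoding (the instance bodies mirror the diff; the standalone module around them is illustrative):

```haskell
{-# LANGUAGE OverloadedStrings #-}
import Data.Aeson

data SnapshotType = STNightly
                  | STLTS !Int !Int -- ^ major, minor
    deriving (Show, Eq)

instance ToJSON SnapshotType where
    toJSON STNightly = object ["type" .= ("nightly" :: String)]
    toJSON (STLTS major minor) =
        object ["type" .= ("lts" :: String), "major" .= major, "minor" .= minor]

instance FromJSON SnapshotType where
    parseJSON = withObject "SnapshotType" $ \o -> do
        t <- o .: "type"
        case t :: String of
            "nightly" -> return STNightly
            "lts"     -> STLTS <$> o .: "major" <*> o .: "minor"
            _         -> fail ("Unknown type for SnapshotType: " ++ t)

main :: IO ()
main = do
    print (decode (encode STNightly) :: Maybe SnapshotType)   -- Just STNightly
    print (decode (encode (STLTS 1 5)) :: Maybe SnapshotType) -- Just (STLTS 1 5)
```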
@@ -14,17 +14,26 @@ module Stackage.Upload
     , uploadHackageDistroNamed
     , UploadDocMap (..)
     , uploadDocMap
+    , uploadBundleV2
+    , UploadBundleV2 (..)
+    , def
+    , StackageServer
+    , unStackageServer
     ) where

 import Control.Monad.Writer.Strict (execWriter, tell)
 import Data.Default.Class (Default (..))
+import Data.Function (fix)
 import Filesystem (isDirectory, isFile)
 import Network.HTTP.Client
+import qualified Network.HTTP.Client.Conduit as HCC
 import Network.HTTP.Client.MultipartFormData
 import Stackage.BuildPlan (BuildPlan)
 import Stackage.Prelude
-import Stackage.ServerBundle (bpAllPackages, docsListing)
+import Stackage.ServerBundle (bpAllPackages, docsListing, writeIndexStyle)
 import System.IO.Temp (withSystemTempFile)
+import qualified System.IO as IO
+import qualified Data.Yaml as Y

 newtype StackageServer = StackageServer { unStackageServer :: Text }
     deriving (Show, Eq, Ord, Hashable, IsString)
@@ -107,17 +116,7 @@ uploadDocs (UploadDocs (StackageServer host) fp0 token ident) man = do
   where
     uploadDocsDir = withSystemTempFile "haddocks.tar.xz" $ \fp h -> do
         hClose h
-        dirs <- fmap sort
-              $ runResourceT
-              $ sourceDirectory fp0
-             $$ filterMC (liftIO . isDirectory)
-             =$ mapC (fpToString . filename)
-             =$ sinkList
-        writeFile (fp0 </> "index.html") $ mkIndex
-            (unpack $ unSnapshotIdent ident)
-            dirs
-        writeFile (fp0 </> "style.css") styleCss
-        -- FIXME write index.html, style.css
+        dirs <- writeIndexStyle (Just $ unSnapshotIdent ident) fp0
         let cp = (proc "tar" $ "cJf" : fp : "index.html" : "style.css" : dirs)
                 { cwd = Just $ fpToString fp0
                 }
@@ -201,7 +200,7 @@ uploadDocMap :: UploadDocMap -> Manager -> IO (Response LByteString)
 uploadDocMap UploadDocMap {..} man = do
     docmap <- docsListing udmPlan udmDocDir
     req1 <- parseUrl $ unpack $ unStackageServer udmServer ++ "/upload-doc-map"
-    req2 <- formDataBody (formData docmap) req1
+    req2 <- formDataBody (formData $ Y.encode docmap) req1
     let req3 = req2
             { method = "PUT"
             , requestHeaders =
@@ -219,61 +218,55 @@ uploadDocMap UploadDocMap {..} man = do
         , partFileRequestBody "docmap" "docmap" $ RequestBodyBS docmap
         ]

-mkIndex :: String -> [String] -> String
-mkIndex snapid dirs = concat
-    [ "<!DOCTYPE html>\n<html lang='en'><head><title>Haddocks index</title>"
-    , "<link rel='stylesheet' href='https://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/css/bootstrap.min.css'>"
-    , "<link rel='stylesheet' href='style.css'>"
-    , "<link rel='shortcut icon' href='http://www.stackage.org/static/img/favicon.ico' />"
-    , "</head>"
-    , "<body><div class='container'>"
-    , "<div class='row'><div class='span12 col-md-12'>"
-    , "<h1>Haddock documentation index</h1>"
-    , "<p class='return'><a href=\"http://www.stackage.org/stackage/"
-    , snapid
-    , "\">Return to snapshot</a></p><ul>"
-    , concatMap toLI dirs
-    , "</ul></div></div></div></body></html>"
-    ]
-  where
-    toLI name = concat
-        [ "<li><a href='"
-        , name
-        , "/index.html'>"
-        , name
-        , "</a></li>"
-        ]
+data UploadBundleV2 = UploadBundleV2
+    { ub2Server :: StackageServer
+    , ub2AuthToken :: Text
+    , ub2Bundle :: FilePath
+    }

-styleCss :: String
-styleCss = concat
-    [ "@media (min-width: 530px) {"
-    , "ul { -webkit-column-count: 2; -moz-column-count: 2; column-count: 2 }"
-    , "}"
-    , "@media (min-width: 760px) {"
-    , "ul { -webkit-column-count: 3; -moz-column-count: 3; column-count: 3 }"
-    , "}"
-    , "ul {"
-    , " margin-left: 0;"
-    , " padding-left: 0;"
-    , " list-style-type: none;"
-    , "}"
-    , "body {"
-    , " background: #f0f0f0;"
-    , " font-family: 'Lato', sans-serif;"
-    , " text-shadow: 1px 1px 1px #ffffff;"
-    , " font-size: 20px;"
-    , " line-height: 30px;"
-    , " padding-bottom: 5em;"
-    , "}"
-    , "h1 {"
-    , " font-weight: normal;"
-    , " color: #06537d;"
-    , " font-size: 45px;"
-    , "}"
-    , ".return a {"
-    , " color: #06537d;"
-    , " font-style: italic;"
-    , "}"
-    , ".return {"
-    , " margin-bottom: 1em;"
-    , "}"]
+uploadBundleV2 :: UploadBundleV2 -> Manager -> IO Text
+uploadBundleV2 UploadBundleV2 {..} man = IO.withBinaryFile (fpToString ub2Bundle) IO.ReadMode $ \h -> do
+    size <- IO.hFileSize h
+    putStrLn $ "Bundle size: " ++ tshow size
+    req1 <- parseUrl $ unpack $ unStackageServer ub2Server ++ "/upload2"
+    let req2 = req1
+            { method = "PUT"
+            , requestHeaders =
+                [ ("Authorization", encodeUtf8 ub2AuthToken)
+                , ("Accept", "application/json")
+                , ("Content-Type", "application/x-tar")
+                ]
+            , requestBody = HCC.requestBodySource (fromIntegral size)
+                          $ sourceHandle h $= printProgress size
+            }
+        sink = decodeUtf8C =$ fix (\loop -> do
+            mx <- peekC
+            case mx of
+                Nothing -> error $ "uploadBundleV2: premature end of stream"
+                Just _ -> do
+                    l <- lineC $ takeCE 4096 =$ foldC
+                    let (cmd, msg') = break (== ':') l
+                        msg = dropWhile (== ' ') $ dropWhile (== ':') msg'
+                    case cmd of
+                        "CONT" -> do
+                            putStrLn msg
+                            loop
+                        "FAILURE" -> error $ "uploadBundleV2 failed: " ++ unpack msg
+                        "SUCCESS" -> return msg
+                        _ -> error $ "uploadBundleV2: unknown command " ++ unpack cmd
+            )
+    withResponse req2 man $ \res -> HCC.bodyReaderSource (responseBody res) $$ sink
+  where
+    printProgress total =
+        loop 0 0
+      where
+        loop sent lastPercent =
+            await >>= maybe (putStrLn "Upload complete") go
+          where
+            go bs = do
+                yield bs
+                let sent' = sent + fromIntegral (length bs)
+                    percent = sent' * 100 `div` total
+                when (percent /= lastPercent)
+                    $ putStrLn $ "Upload progress: " ++ tshow percent ++ "%"
+                loop sent' percent
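The new `uploadBundleV2` streams the bundle with a PUT to `/upload2` and consumes the response line by line, where each line is a `COMMAND: message` pair: `CONT` lines report progress, `FAILURE` aborts, and `SUCCESS` carries the final message. A pure sketch of that per-line dispatch (the `Step` type and `interpretLine` name are illustrative; the command names and the `break`/`dropWhile` splitting come from the diff):

```haskell
data Step = Continue String | Failed String | Done String
    deriving Show

-- Split "COMMAND: message" and dispatch on the command.
interpretLine :: String -> Step
interpretLine l =
    case cmd of
        "CONT"    -> Continue msg
        "FAILURE" -> Failed msg
        "SUCCESS" -> Done msg
        _         -> Failed ("unknown command " ++ cmd)
  where
    (cmd, msg') = break (== ':') l
    msg = dropWhile (== ' ') (dropWhile (== ':') msg')

main :: IO ()
main = mapM_ (print . interpretLine)
    [ "CONT: unpacking bundle"
    , "SUCCESS: snapshot created"
    ]
```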
@ -10,7 +10,11 @@ import Options.Applicative
|
|||||||
import Filesystem.Path.CurrentOS (decodeString)
|
import Filesystem.Path.CurrentOS (decodeString)
|
||||||
import Paths_stackage (version)
|
import Paths_stackage (version)
|
||||||
import Stackage.CompleteBuild
|
import Stackage.CompleteBuild
|
||||||
|
import Stackage.Upload
|
||||||
import Stackage.InstallBuild
|
import Stackage.InstallBuild
|
||||||
|
import Network.HTTP.Client (withManager)
|
||||||
|
import Network.HTTP.Client.TLS (tlsManagerSettings)
|
||||||
|
import qualified Data.Text as T
|
||||||
|
|
||||||
main :: IO ()
|
main :: IO ()
|
||||||
main =
|
main =
|
||||||
@ -62,7 +66,13 @@ main =
|
|||||||
installBuild
|
installBuild
|
||||||
installFlags
|
installFlags
|
||||||
"install"
|
"install"
|
||||||
"Install a snapshot from an existing build plan"]
|
"Install a snapshot from an existing build plan"
|
||||||
|
, cmnd
|
||||||
|
uploadv2
|
||||||
|
uploadv2Flags
|
||||||
|
"upload2"
|
||||||
|
"Upload a pre-existing v2 bundle"
|
||||||
|
]
|
||||||
|
|
||||||
cmnd exec parse name desc =
|
cmnd exec parse name desc =
|
||||||
command name $
|
command name $
|
||||||
@ -77,6 +87,11 @@ main =
|
|||||||
(switch
|
(switch
|
||||||
(long "skip-tests" <>
|
(long "skip-tests" <>
|
||||||
help "Skip build and running the test suites")) <*>
|
help "Skip build and running the test suites")) <*>
|
||||||
|
fmap
|
||||||
|
not
|
||||||
|
(switch
|
||||||
|
(long "skip-haddock" <>
|
||||||
|
help "Skip generating haddock documentation")) <*>
|
||||||
fmap
|
fmap
|
||||||
not
|
not
|
||||||
(switch
|
(switch
|
||||||
@ -85,12 +100,23 @@ main =
     switch
         (long "enable-library-profiling" <>
          help "Enable profiling when building") <*>
+    switch
+        (long "enable-executable-dynamic" <>
+         help "Enable dynamic executables when building") <*>
     switch
         (long "verbose" <> short 'v' <>
          help "Output verbose detail about the build steps") <*>
     switch
         (long "skip-check" <>
-         help "Skip the check phase, and pass --allow-newer to cabal configure")
+         help "Skip the check phase, and pass --allow-newer to cabal configure") <*>
+    switch
+        (long "upload-v1" <>
+         help "Use the V1 upload code") <*>
+    (fmap fromString (strOption
+        (long "server-url" <>
+         metavar "SERVER-URL" <>
+         showDefault <> value (T.unpack $ unStackageServer def) <>
+         help "Server to upload bundle to")))

     nightlyUploadFlags = fromString <$> strArgument
         (metavar "DATE" <>
@ -136,12 +162,39 @@ main =
         (switch
             (long "skip-tests" <>
              help "Skip build and running the test suites")) <*>
+        fmap
+            not
+            (switch
+                (long "skip-haddock" <>
+                 help "Skip generating haddock documentation")) <*>
         switch
             (long "enable-library-profiling" <>
              help "Enable profiling when building") <*>
+        switch
+            (long "enable-executable-dynamic" <>
+             help "Enable dynamic executables when building") <*>
         switch
             (long "verbose" <> short 'v' <>
              help "Output verbose detail about the build steps") <*>
         switch
             (long "skip-check" <>
              help "Skip the check phase, and pass --allow-newer to cabal configure")
+
+    uploadv2 (path, url) = withManager tlsManagerSettings $ \man -> do
+        token <- getStackageAuthToken
+        res <- flip uploadBundleV2 man UploadBundleV2
+            { ub2AuthToken = token
+            , ub2Server = fromString url
+            , ub2Bundle = decodeString path
+            }
+        putStrLn $ "New URL: " ++ T.unpack res
+
+    uploadv2Flags = (,)
+        <$> (strArgument
+                (metavar "BUNDLE-PATH" <>
+                 help "Bundle path"))
+        <*> strOption
+                (long "server-url" <>
+                 metavar "SERVER-URL" <>
+                 showDefault <> value (T.unpack $ unStackageServer def) <>
+                 help "Server to upload bundle to")
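The new upload2 command follows the same pattern as the existing subcommands: a command name, an optparse-applicative flags parser, and an action run on the parsed flags. The following stand-alone sketch shows that wiring; the names Upload2Opts and runUpload2 are hypothetical, the default server URL is only an example, and the real executable goes through its cmnd helper and uploadBundleV2 as shown in the diff above.

    module Main (main) where

    import Options.Applicative

    -- Flags for a hypothetical "upload2"-style subcommand.
    data Upload2Opts = Upload2Opts
        { u2Bundle :: FilePath  -- ^ path to the pre-built v2 bundle
        , u2Server :: String    -- ^ server to upload it to
        } deriving Show

    upload2Opts :: Parser Upload2Opts
    upload2Opts = Upload2Opts
        <$> strArgument
                (metavar "BUNDLE-PATH" <> help "Bundle path")
        <*> strOption
                (long "server-url" <> metavar "SERVER-URL"
              <> value "https://www.stackage.org" <> showDefault  -- example default only
              <> help "Server to upload bundle to")

    -- Stand-in for the real upload action.
    runUpload2 :: Upload2Opts -> IO ()
    runUpload2 opts =
        putStrLn $ "Would upload " ++ u2Bundle opts ++ " to " ++ u2Server opts

    main :: IO ()
    main = do
        action <- execParser $
            info (helper <*> commands)
                 (fullDesc <> progDesc "Demo driver with an upload2-style subcommand")
        action
      where
        commands :: Parser (IO ())
        commands = subparser $
            command "upload2" $
                info (helper <*> (runUpload2 <$> upload2Opts))
                     (progDesc "Upload a pre-existing v2 bundle")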
@ -379,6 +379,7 @@ packages:
         - pipes
         - pipes-parse
         - pipes-concurrency
+        - pipes-safe

     "Chris Allen <cma@bitemyapp.com>":
         - bloodhound
@ -390,9 +391,12 @@ packages:
         - fay-jquery
         - fay-text
         - fay-uri
-        - fclabels
         - snaplet-fay

+    "Sebastiaan Visser <haskell@fvisser.nl>":
+        - clay
+        - fclabels
+
     "Rodrigo Setti <rodrigosetti@gmail.com>":
         - messagepack
         - messagepack-rpc
@ -473,7 +477,8 @@ packages:
         - openpgp-asciiarmor
         - MusicBrainz
         - DAV
-        - hopenpgp-tools
+        # https://github.com/fpco/stackage/issues/463
+        #- hopenpgp-tools

     # https://github.com/fpco/stackage/issues/160
     "Ketil Malde":
@ -503,9 +508,9 @@ packages:
         - code-builder
         - fay-builder
         - generic-aeson
+        - generic-xmlpickler
         - hxt-pickle-utils
         - imagesize-conduit
-        - imagesize-conduit
         - json-schema
         - multipart
         - regular-xmlpickler
@ -518,8 +523,6 @@ packages:
         - rest-types
         - rest-wai
         - tostring
-        - tostring
-        - uri-encode
         - uri-encode

     "Simon Michael <simon@joyful.com>":
|
|||||||
- io-manager
|
- io-manager
|
||||||
|
|
||||||
"Dimitri Sabadie <dimitri.sabadie@gmail.com":
|
"Dimitri Sabadie <dimitri.sabadie@gmail.com":
|
||||||
|
# https://github.com/fpco/stackage/pull/461#issuecomment-76914158
|
||||||
|
# - al
|
||||||
- monad-journal
|
- monad-journal
|
||||||
|
- smoothie
|
||||||
|
|
||||||
"Thomas Schilling <nominolo@googlemail.com>":
|
"Thomas Schilling <nominolo@googlemail.com>":
|
||||||
- ghc-syb-utils
|
- ghc-syb-utils
|
||||||
@ -574,6 +580,7 @@ packages:
|
|||||||
- haddock-api
|
- haddock-api
|
||||||
- here
|
- here
|
||||||
- hlibgit2
|
- hlibgit2
|
||||||
|
- gitlib-libgit2
|
||||||
- hostname-validate
|
- hostname-validate
|
||||||
- interpolatedstring-perl6
|
- interpolatedstring-perl6
|
||||||
- iproute
|
- iproute
|
||||||
@ -612,6 +619,9 @@ packages:
|
|||||||
- Spock
|
- Spock
|
||||||
- Spock-digestive
|
- Spock-digestive
|
||||||
- Spock-worker
|
- Spock-worker
|
||||||
|
- users
|
||||||
|
- users-test
|
||||||
|
- users-postgresql-simple
|
||||||
|
|
||||||
"Joey Eremondi <joey@eremondi.com>":
|
"Joey Eremondi <joey@eremondi.com>":
|
||||||
- aeson-pretty
|
- aeson-pretty
|
||||||
@ -641,6 +651,11 @@ packages:
|
|||||||
"Samplecount stefan@samplecount.com @kaoskorobase":
|
"Samplecount stefan@samplecount.com @kaoskorobase":
|
||||||
- shake-language-c
|
- shake-language-c
|
||||||
|
|
||||||
|
"Marcin Mrotek <marcin.jan.mrotek@gmail.com>":
|
||||||
|
- diagrams-hsqml
|
||||||
|
- type-list
|
||||||
|
- vinyl-utils
|
||||||
|
|
||||||
"Marcin Mrotek <marcin.jan.mrotek@gmail.com>":
|
"Marcin Mrotek <marcin.jan.mrotek@gmail.com>":
|
||||||
- type-list
|
- type-list
|
||||||
|
|
||||||
@ -703,19 +718,37 @@ packages:
|
|||||||
"Gabríel Arthúr Pétursson gabriel@system.is":
|
"Gabríel Arthúr Pétursson gabriel@system.is":
|
||||||
- sdl2
|
- sdl2
|
||||||
|
|
||||||
"Stackage upper bounds":
|
"Leon Mergen leon@solatis.com @solatis":
|
||||||
|
- network-attoparsec
|
||||||
|
- network-anonymous-i2p
|
||||||
|
|
||||||
# https://github.com/fpco/stackage/issues/291
|
"Timothy Jones git@zmthy.io @zmthy":
|
||||||
- random < 1.0.1.3
|
- cabal-test-quickcheck
|
||||||
|
- http-media
|
||||||
|
|
||||||
|
"Greg V greg@unrelenting.technology @myfreeweb":
|
||||||
|
- gitson
|
||||||
|
- pcre-heavy
|
||||||
|
|
||||||
|
"Francesco Mazzoli f@mazzo.li @bitonic":
|
||||||
|
- language-c-quote
|
||||||
|
|
||||||
|
"Sönke Hahn soenkehahn@gmail.com @soenkehahn":
|
||||||
|
- string-conversions
|
||||||
|
|
||||||
|
"Oleg Grenrus oleg.grenrus@iki.fi @phadej":
|
||||||
|
- waitra
|
||||||
|
|
||||||
|
"Adam C. Foltzer acfoltzer@galois.com @acfoltzer":
|
||||||
|
- gitrev
|
||||||
|
|
||||||
|
"Stackage upper bounds":
|
||||||
|
|
||||||
# https://github.com/fpco/stackage/issues/390
|
# https://github.com/fpco/stackage/issues/390
|
||||||
# NOTE: When this issue is resolved, remove the expected test failure
|
# NOTE: When this issue is resolved, remove the expected test failure
|
||||||
# for language-ecmascript as well.
|
# for language-ecmascript as well.
|
||||||
- language-ecmascript < 0.17
|
- language-ecmascript < 0.17
|
||||||
|
|
||||||
# https://github.com/fpco/stackage/issues/402
|
|
||||||
- vector-space < 0.9
|
|
||||||
|
|
||||||
# https://github.com/fpco/stackage/issues/407
|
# https://github.com/fpco/stackage/issues/407
|
||||||
- HStringTemplate < 0.8
|
- HStringTemplate < 0.8
|
||||||
|
|
||||||
@ -731,22 +764,32 @@ packages:
|
|||||||
# https://github.com/fpco/stackage/issues/426
|
# https://github.com/fpco/stackage/issues/426
|
||||||
- utf8-string < 1
|
- utf8-string < 1
|
||||||
|
|
||||||
# https://github.com/d12frosted/CanonicalPath/issues/3
|
|
||||||
- system-canonicalpath < 0.3
|
|
||||||
|
|
||||||
# https://github.com/fpco/stackage/issues/440
|
# https://github.com/fpco/stackage/issues/440
|
||||||
- th-orphans < 0.9
|
- th-orphans < 0.9
|
||||||
- file-location < 0.4.7
|
- file-location < 0.4.7
|
||||||
|
- th-desugar < 1.5.1
|
||||||
|
|
||||||
# https://github.com/fpco/stackage/issues/442
|
# https://github.com/fpco/stackage/issues/442
|
||||||
- blaze-builder < 0.4
|
- blaze-builder < 0.4
|
||||||
|
- blaze-markup < 0.7
|
||||||
|
- blaze-html < 0.8
|
||||||
|
|
||||||
# https://github.com/fpco/stackage/issues/443
|
# https://github.com/fpco/stackage/issues/443
|
||||||
- exceptions < 0.7
|
- exceptions < 0.7
|
||||||
- resourcet < 1.1.4
|
- rest-client < 0.5
|
||||||
|
- rest-types < 1.13
|
||||||
|
- rest-core < 0.35
|
||||||
|
- rest-gen < 0.17
|
||||||
|
- rest-wai < 0.1.0.7
|
||||||
|
|
||||||
# https://github.com/fpco/stackage/issues/445
|
# https://github.com/fpco/stackage/issues/467
|
||||||
- semigroupoids < 4.3
|
- lens < 4.8
|
||||||
|
|
||||||
|
# https://github.com/fpco/stackage/issues/476
|
||||||
|
- vector-space < 0.10
|
||||||
|
|
||||||
|
# https://github.com/ekmett/linear/issues/70
|
||||||
|
- linear < 1.18
|
||||||
|
|
||||||
# Package flags are applied to individual packages, and override the values of
|
# Package flags are applied to individual packages, and override the values of
|
||||||
# global-flags
|
# global-flags
|
||||||
@ -918,9 +961,6 @@ expected-test-failures:
|
|||||||
# https://github.com/BioHaskell/octree/issues/4
|
# https://github.com/BioHaskell/octree/issues/4
|
||||||
- Octree
|
- Octree
|
||||||
|
|
||||||
# https://github.com/goldfirere/th-desugar/issues/12
|
|
||||||
- th-desugar
|
|
||||||
|
|
||||||
# https://github.com/jmillikin/haskell-filesystem/issues/3
|
# https://github.com/jmillikin/haskell-filesystem/issues/3
|
||||||
- system-filepath
|
- system-filepath
|
||||||
|
|
||||||
@ -993,20 +1033,52 @@ expected-test-failures:
|
|||||||
# https://github.com/haskell-distributed/distributed-process-execution/issues/2
|
# https://github.com/haskell-distributed/distributed-process-execution/issues/2
|
||||||
- distributed-process-execution
|
- distributed-process-execution
|
||||||
|
|
||||||
|
# Seems to depend on mtl being installed in user package database, which
|
||||||
|
# isn't always the case (e.g., build server)
|
||||||
|
- happy
|
||||||
|
|
||||||
|
# https://github.com/jberryman/directory-tree/issues/4
|
||||||
|
- directory-tree
|
||||||
|
|
||||||
|
# https://github.com/zmthy/http-media/issues/11
|
||||||
|
- http-media
|
||||||
|
|
||||||
|
# https://github.com/ekmett/semigroupoids/issues/18
|
||||||
|
- semigroupoids
|
||||||
|
|
||||||
|
# https://github.com/ndmitchell/hoogle/issues/101
|
||||||
|
- hoogle
|
||||||
|
|
||||||
|
# https://github.com/myfreeweb/gitson/issues/1
|
||||||
|
- gitson
|
||||||
|
|
||||||
|
# https://github.com/jcristovao/enclosed-exceptions/issues/6
|
||||||
|
- enclosed-exceptions
|
||||||
|
|
||||||
|
# Expects a running PostgreSQL server
|
||||||
|
- users-postgresql-simple
|
||||||
|
|
||||||
|
# Problems with linking with system libraries on Ubuntu 12.04
|
||||||
|
- nettle
|
||||||
|
|
||||||
|
# Requires locally running services
|
||||||
|
- network-anonymous-i2p
|
||||||
|
|
||||||
# Haddocks which are expected to fail. Same concept as expected test failures.
|
# Haddocks which are expected to fail. Same concept as expected test failures.
|
||||||
expected-haddock-failures:
|
expected-haddock-failures:
|
||||||
# https://github.com/acw/bytestring-progress/issues/4
|
# https://github.com/acw/bytestring-progress/issues/4
|
||||||
- bytestring-progress
|
- bytestring-progress
|
||||||
|
|
||||||
# https://github.com/ekmett/gl/issues/4
|
|
||||||
- gl
|
|
||||||
|
|
||||||
# https://github.com/leventov/yarr/issues/5
|
# https://github.com/leventov/yarr/issues/5
|
||||||
- yarr
|
- yarr
|
||||||
|
|
||||||
# https://github.com/wereHamster/rethinkdb-client-driver/issues/1
|
# https://github.com/wereHamster/rethinkdb-client-driver/issues/1
|
||||||
- rethinkdb-client-driver
|
- rethinkdb-client-driver
|
||||||
|
|
||||||
|
# Requires build before haddock, which doesn't always happen in incremental
|
||||||
|
# builds. Could consider special-casing this requirement.
|
||||||
|
- gtk
|
||||||
|
|
||||||
# Benchmarks which should not be built. Note that Stackage does *not* generally
|
# Benchmarks which should not be built. Note that Stackage does *not* generally
|
||||||
# build benchmarks. The difference here will be whether dependencies for these
|
# build benchmarks. The difference here will be whether dependencies for these
|
||||||
# benchmarks are included or not.
|
# benchmarks are included or not.
|
||||||
|
|||||||
@ -18,6 +18,9 @@ apt-get install -y \
     build-essential \
     libncurses-dev \
     git \
+    wget \
+    m4 \
+    texlive-full \
     libgmp3c2 \
     libgmp3-dev \
     zlib1g-dev \
@ -34,11 +37,11 @@ apt-get install -y \
     llvm \
     libbz2-dev \
     libjudy-dev \
+    libsqlite3-dev \
     libmysqlclient-dev \
     libpq-dev \
     libicu-dev \
     libssl-dev \
-    nettle-dev \
     libgsl0-dev \
     libblas-dev \
     liblapack-dev \
@ -49,4 +52,20 @@ apt-get install -y \
     libyaml-dev \
     liblzma-dev \
     libsdl2-dev \
+    libxss-dev \
     libzmq3-dev
+
+mkdir /tmp/nettle-build
+(
+cd /tmp/nettle-build
+wget https://ftp.gnu.org/gnu/nettle/nettle-2.7.1.tar.gz
+tar zxf nettle-2.7.1.tar.gz
+cd nettle-2.7.1
+./configure --prefix=/usr
+make
+make install
+
+mkdir -p /usr/lib/x86_64-linux-gnu/
+ln -sfv /usr/lib/libnettle.so.4.7 /usr/lib/x86_64-linux-gnu/libnettle.so.4
+)
+rm -rf /tmp/nettle-build
@ -1,5 +1,5 @@
 name:        stackage
-version:     0.5.2
+version:     0.6.0.1
 synopsis:    "Stable Hackage," tools for creating a vetted set of packages from Hackage.
 description: Please see <http://www.stackage.org/package/stackage> for a description and documentation.
 homepage:    https://github.com/fpco/stackage
@ -23,6 +23,7 @@ library
     Stackage.BuildPlan
     Stackage.CheckBuildPlan
     Stackage.UpdateBuildPlan
+    Stackage.GhcPkg
     Stackage.GithubPings
     Stackage.InstallBuild
     Stackage.PackageDescription
@ -54,6 +55,7 @@ library
                  , yaml
                  , unix-compat
                  , http-client
+                 , http-conduit
                  , http-client-tls
                  , temporary
                  , data-default-class
@ -63,6 +65,7 @@ library
                  , streaming-commons >= 0.1.7.1
                  , semigroups
                  , xml-conduit
+                 , conduit

 executable stackage
     default-language: Haskell2010
@ -72,6 +75,9 @@ executable stackage
                  , stackage
                  , optparse-applicative >= 0.11
                  , system-filepath
+                 , http-client
+                 , http-client-tls
+                 , text
     ghc-options: -rtsopts -threaded -with-rtsopts=-N

 test-suite spec
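The executable's new build-depends entries (http-client, http-client-tls, text) are what the upload2 path needs to build a TLS-capable connection manager before handing control to Stackage.Upload. A minimal, self-contained sketch of that setup follows; the request URL is only an example and the real code uploads the bundle rather than issuing a plain GET.

    {-# LANGUAGE OverloadedStrings #-}
    module Main (main) where

    import           Network.HTTP.Client     (httpLbs, newManager, parseRequest,
                                              responseStatus)
    import           Network.HTTP.Client.TLS (tlsManagerSettings)
    import qualified Data.Text               as T

    main :: IO ()
    main = do
        -- A TLS-capable manager, required for talking to an https server.
        man <- newManager tlsManagerSettings
        -- Example request only; the real code POSTs the bundle via Stackage.Upload.
        let server = "https://www.stackage.org" :: T.Text
        req <- parseRequest (T.unpack server)
        res <- httpLbs req man
        print (responseStatus res)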