mirror of https://github.com/commercialhaskell/stackage-server.git
commit 89f8650151

 .travis.yml | 107
@@ -1,3 +1,10 @@
+# This is the simple Travis configuration, which is intended for use
+# on applications which do not require cross-platform and
+# multiple-GHC-version support. For more information and other
+# options, see:
+#
+# https://docs.haskellstack.org/en/stable/travis_ci/
+#
 # Copy these contents into the root directory of your Github project in a file
 # named .travis.yml
 
@@ -10,100 +17,26 @@ language: c
 # Caching so the next build will be fast too.
 cache:
   directories:
-  - $HOME/.ghc
-  - $HOME/.cabal
   - $HOME/.stack
 
-# The different configurations we want to test. We have BUILD=cabal which uses
-# cabal-install, and BUILD=stack which uses Stack. More documentation on each
-# of those below.
-#
-# We set the compiler values here to tell Travis to use a different
-# cache file per set of arguments.
-#
-# If you need to have different apt packages for each combination in the
-# matrix, you can use a line such as:
-# addons: {apt: {packages: [libfcgi-dev,libgmp-dev]}}
-matrix:
-  include:
-  # The Stack builds. We can pass in arbitrary Stack arguments via the ARGS
-  # variable, such as using --stack-yaml to point to a different file.
-  - env: BUILD=stack ARGS=""
-    compiler: ": #stack default"
-    addons: {apt: {packages: [ghc-7.10.3], sources: [hvr-ghc]}}
+# Ensure necessary system libraries are present
+addons:
+  apt:
+    packages:
+      - libgmp-dev
 
 before_install:
-# Using compiler above sets CC to an invalid value, so unset it
-- unset CC
-
-# We want to always allow newer versions of packages when building on GHC HEAD
-- CABALARGS=""
-- if [ "x$GHCVER" = "xhead" ]; then CABALARGS=--allow-newer; fi
-
 # Download and unpack the stack executable
-- export PATH=/opt/ghc/$GHCVER/bin:/opt/cabal/$CABALVER/bin:$HOME/.local/bin:/opt/alex/$ALEXVER/bin:/opt/happy/$HAPPYVER/bin:$HOME/.cabal/bin:$PATH
 - mkdir -p ~/.local/bin
-- |
-  if [ `uname` = "Darwin" ]
-  then
-    travis_retry curl --insecure -L https://www.stackage.org/stack/osx-x86_64 | tar xz --strip-components=1 --include '*/stack' -C ~/.local/bin
-  else
-    travis_retry curl -L https://www.stackage.org/stack/linux-x86_64 | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack'
-  fi
-
-  # Use the more reliable S3 mirror of Hackage
-  mkdir -p $HOME/.cabal
-  echo 'remote-repo: hackage.haskell.org:http://hackage.fpcomplete.com/' > $HOME/.cabal/config
-  echo 'remote-repo-cache: $HOME/.cabal/packages' >> $HOME/.cabal/config
-
-  if [ "$CABALVER" != "1.16" ]
-  then
-    echo 'jobs: $ncpus' >> $HOME/.cabal/config
-  fi
-
-# Get the list of packages from the stack.yaml file
-- PACKAGES=$(stack --install-ghc query locals | grep '^ *path' | sed 's@^ *path:@@')
+- export PATH=$HOME/.local/bin:$PATH
+- travis_retry curl -L https://www.stackage.org/stack/linux-x86_64 | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack'
 
 install:
-- echo "$(ghc --version) [$(ghc --print-project-git-commit-id 2> /dev/null || echo '?')]"
-- if [ -f configure.ac ]; then autoreconf -i; fi
-- |
-  set -ex
-  case "$BUILD" in
-    stack)
-      # Avoid an out-of-memory condition
-      stack --no-terminal --install-ghc $ARGS build haskell-src-exts
-
-      stack --no-terminal --install-ghc $ARGS test --bench --only-dependencies
-      ;;
-    cabal)
-      cabal --version
-      travis_retry cabal update
-      cabal install --only-dependencies --enable-tests --enable-benchmarks --force-reinstalls --ghc-options=-O0 --reorder-goals --max-backjumps=-1 $CABALARGS $PACKAGES
-      ;;
-  esac
-  set +ex
+# Build dependencies. Start with just haskell-src-exts since it requires a lot
+# of memory and we want it to build by itself.
+- stack --no-terminal --install-ghc build haskell-src-exts
+- stack --no-terminal --install-ghc test --only-dependencies
 
 script:
-- |
-  set -ex
-  case "$BUILD" in
-    stack)
-      stack --no-terminal $ARGS test --bench --no-run-benchmarks --haddock --no-haddock-deps
-      ;;
-    cabal)
-      cabal install --enable-tests --enable-benchmarks --force-reinstalls --ghc-options=-O0 --reorder-goals --max-backjumps=-1 $CABALARGS $PACKAGES
-
-      ORIGDIR=$(pwd)
-      for dir in $PACKAGES
-      do
-        cd $dir
-        cabal check || [ "$CABALVER" == "1.16" ]
-        cabal sdist
-        SRC_TGZ=$(cabal info . | awk '{print $2;exit}').tar.gz && \
-          (cd dist && cabal install --force-reinstalls "$SRC_TGZ")
-        cd $ORIGDIR
-      done
-      ;;
-  esac
-  set +ex
+# Build the package, its tests, and its docs and run the tests
+- stack --no-terminal test --haddock --no-haddock-deps
@@ -33,7 +33,7 @@ import Yesod.Default.Config2
 import Yesod.Default.Handlers
 import Yesod.GitRepo
 import System.Process (rawSystem)
-import Stackage.Database.Cron (loadFromS3)
+import Stackage.Database.Cron (loadFromS3, newHoogleLocker)
 import Control.AutoUpdate
 
 -- Import all relevant handler modules here.
@@ -136,6 +136,7 @@ makeFoundation appSettings = do
     appHoogleLock <- newMVar ()
 
     appMirrorStatus <- mkUpdateMirrorStatus
+    appHoogleLocker <- newHoogleLocker
 
     return App {..}
 
@@ -11,6 +11,7 @@ import Yesod.Core.Types (Logger)
 import Yesod.AtomFeed
 import Yesod.GitRepo
 import Stackage.Database
+import Stackage.Database.Cron (HoogleLocker)
 import qualified Yesod.Core.Unsafe as Unsafe
 
 -- | The site argument for your application. This can be a good place to
@@ -30,6 +31,7 @@ data App = App
     -- ^ Avoid concurrent Hoogle queries, see
     -- https://github.com/fpco/stackage-server/issues/172
     , appMirrorStatus :: IO (Status, WidgetT App IO ())
+    , appHoogleLocker :: HoogleLocker
     }
 
 instance HasHttpManager App where
@@ -15,7 +15,7 @@ import qualified Data.Text as T
 getHoogleDB :: SnapName -> Handler (Maybe FilePath)
 getHoogleDB name = track "Handler.Hoogle.getHoogleDB" $ do
     app <- getYesod
-    liftIO $ Cron.getHoogleDB True (appHttpManager app) name
+    liftIO $ Cron.getHoogleDB (appHoogleLocker app) True (appHttpManager app) name
 
 getHoogleR :: SnapName -> Handler Html
 getHoogleR name = track "Handler.Hoogle.getHoogleR" $ do
@@ -2,6 +2,8 @@ module Stackage.Database.Cron
     ( stackageServerCron
     , loadFromS3
     , getHoogleDB
+    , HoogleLocker
+    , newHoogleLocker
     ) where
 
 import ClassyPrelude.Conduit
@@ -23,7 +25,7 @@ import Network.AWS (Credentials (Discover),
 import Control.Monad.Trans.AWS (trying, _Error)
 import Network.AWS.Data.Body (toBody)
 import Network.AWS.S3 (ObjectCannedACL (OPublicRead),
-                       poACL, putObject,
+                       poACL, poContentType, putObject,
                        BucketName(BucketName),
                        ObjectKey(ObjectKey))
 import Control.Lens (set, view)
@@ -122,31 +124,65 @@ hoogleUrl n = concat
     , hoogleKey n
     ]
 
-getHoogleDB :: Bool -- ^ print exceptions?
+newtype HoogleLocker = HoogleLocker (TVar (Map FilePath (MVar ())))
+
+newHoogleLocker :: IO HoogleLocker
+newHoogleLocker = HoogleLocker <$> newTVarIO mempty
+
+data Finished a = Finished a | TryAgain
+
+getHoogleDB :: HoogleLocker
+            -> Bool -- ^ print exceptions?
             -> Manager -> SnapName -> IO (Maybe FilePath)
-getHoogleDB toPrint man name = do
+getHoogleDB (HoogleLocker locker) toPrint man name = do
     let fp = fromText $ hoogleKey name
         fptmp = encodeString fp <.> "tmp"
-    exists <- isFile fp
-    if exists
-        then return $ Just (encodeString fp)
-        else do
-            req' <- parseUrl $ unpack $ hoogleUrl name
-            let req = req'
-                    { checkStatus = \_ _ _ -> Nothing
-                    , decompress = const False
-                    }
-            withResponse req man $ \res -> if responseStatus res == status200
-                then do
-                    createTree $ parent (fromString fptmp)
-                    runResourceT $ bodyReaderSource (responseBody res)
-                        $= ungzip
-                        $$ sinkFile fptmp
-                    rename (fromString fptmp) fp
-                    return $ Just $ encodeString fp
-                else do
-                    when toPrint $ mapM brRead res >>= print
-                    return Nothing
+
+    baton <- newMVar ()
+
+    let go :: IO (Finished (Maybe FilePath))
+        go = withMVar baton $ \() -> bracket acquire fst snd
+
+        acquire :: IO (IO (), IO (Finished (Maybe FilePath)))
+        acquire = atomically $ do
+            m <- readTVar locker
+            case lookup (encodeString fp) m of
+                Just baton' -> return (return (), readMVar baton' $> TryAgain)
+                Nothing -> do
+                    modifyTVar locker $ insertMap (encodeString fp) baton
+                    let cleanup = modifyTVar locker $ deleteMap (encodeString fp)
+                    return (atomically $ cleanup, Finished <$> inner)
+
+        inner = do
+            exists <- isFile fp
+            if exists
+                then return $ Just (encodeString fp)
+                else do
+                    req' <- parseUrl $ unpack $ hoogleUrl name
+                    let req = req'
+                            { checkStatus = \_ _ _ -> Nothing
+                            , decompress = const False
+                            }
+                    withResponse req man $ \res -> if responseStatus res == status200
+                        then do
+                            createTree $ parent (fromString fptmp)
+                            runResourceT $ bodyReaderSource (responseBody res)
+                                $= ungzip
+                                $$ sinkFile fptmp
+                            rename (fromString fptmp) fp
+                            return $ Just $ encodeString fp
+                        else do
+                            when toPrint $ mapM brRead res >>= print
+                            return Nothing
+
+        loop :: IO (Maybe FilePath)
+        loop = do
+            mres <- go
+            case mres of
+                TryAgain -> loop
+                Finished res -> return res
+    loop
 
 stackageServerCron :: IO ()
 stackageServerCron = do
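The hunk above replaces the plain "download if missing" logic with a small per-file lock: a TVar holds a Map from database path to an MVar baton, the first caller for a path registers its baton and performs the download, and any concurrent caller for the same path waits on that baton and then retries, at which point the file is already on disk. Below is a standalone sketch of that per-key locking idea, written against base, stm and containers only (the commit itself uses ClassyPrelude); the names Locker, newLocker, withKeyLock and the demo key are illustrative and are not the project's API.

module Main (main) where

import           Control.Concurrent      (forkIO, threadDelay)
import           Control.Concurrent.MVar (MVar, newEmptyMVar, newMVar, putMVar,
                                          readMVar, takeMVar, withMVar)
import           Control.Concurrent.STM  (TVar, atomically, modifyTVar',
                                          newTVarIO, readTVar)
import           Control.Exception       (finally)
import qualified Data.Map.Strict         as Map

newtype Locker k = Locker (TVar (Map.Map k (MVar ())))

newLocker :: IO (Locker k)
newLocker = Locker <$> newTVarIO Map.empty

-- | Run an action for a key so that at most one caller works on that key
-- at a time. A caller that finds the key already taken waits for the
-- current holder to finish and then retries, which lets it pick up
-- whatever the first caller produced (e.g. a file now on disk).
withKeyLock :: Ord k => Locker k -> k -> IO a -> IO a
withKeyLock (Locker var) key action = loop
  where
    loop = do
        -- A fresh, full baton per attempt. Taking it with withMVar empties
        -- it, so anyone who finds it in the map blocks in readMVar below
        -- until we finish and withMVar refills it.
        baton <- newMVar ()
        res <- withMVar baton $ \() -> do
            owner <- atomically $ do
                m <- readTVar var
                case Map.lookup key m of
                    Just other -> return (Just other)
                    Nothing    -> do
                        modifyTVar' var (Map.insert key baton)
                        return Nothing
            case owner of
                -- Someone else holds the key: report their baton so we can
                -- wait outside our own withMVar and then retry.
                Just other -> return (Left other)
                -- We own the key: run the action, then drop the map entry
                -- (before withMVar refills our baton and wakes any waiters).
                Nothing ->
                    fmap Right action
                        `finally` atomically (modifyTVar' var (Map.delete key))
        case res of
            Right a    -> return a
            Left other -> readMVar other >> loop

-- Tiny demo: two callers ask for the same (made-up) key. The "download"
-- (a print plus a delay) never runs concurrently for that key; the second
-- caller runs only after the first has finished.
main :: IO ()
main = do
    locker <- newLocker
    doneA  <- newEmptyMVar
    let fetch who = withKeyLock locker "lts-7.14.hoo" $ do
            putStrLn (who ++ ": downloading")
            threadDelay 200000  -- stand-in for the real download
    _ <- forkIO $ fetch "thread A" >> putMVar doneA ()
    fetch "main"
    takeMVar doneA

In this sketch the waiting caller simply re-runs the action; in the commit the retry is expressed with the Finished/TryAgain loop, and the second attempt takes the isFile shortcut in getHoogleDB, so the database is only downloaded once.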
@@ -154,6 +190,8 @@ stackageServerCron = do
     void $ catchIO (bindPortTCP 17834 "127.0.0.1") $ \_ ->
         error $ "cabal loader process already running, exiting"
 
+    locker <- newHoogleLocker
+
     env <- newEnv NorthVirginia Discover
     let upload :: FilePath -> ObjectKey -> IO ()
         upload fp key = do
@@ -182,6 +220,7 @@ stackageServerCron = do
     let key = ObjectKey "snapshots.json"
         po =
             set poACL (Just OPublicRead)
+            $ set poContentType (Just "application/json")
             $ putObject (BucketName "haddock.stackage.org") key (toBody snapshots)
     putStrLn $ "Uploading: " ++ tshow key
     eres <- runResourceT $ runAWS env $ trying _Error $ send po
@@ -192,7 +231,7 @@ stackageServerCron = do
     names <- runReaderT last5Lts5Nightly db
     let manager = view envManager env
     forM_ names $ \name -> do
-        mfp <- getHoogleDB False manager name
+        mfp <- getHoogleDB locker False manager name
         case mfp of
             Just _ -> putStrLn $ "Hoogle database exists for: " ++ toPathPiece name
             Nothing -> do
|
|||||||
Loading…
Reference in New Issue
Block a user