STACKAGE_HOOGLE_LOADER

This commit is contained in:
Michael Snoyman 2015-01-04 21:38:57 +02:00
parent 7c94b008aa
commit 3b8e3f596b
3 changed files with 14 additions and 9 deletions

View File

@@ -198,6 +198,8 @@ makeFoundation useEcho conf = do
         loadWebsiteContent
 #endif
+    env <- getEnvironment
+
     let logger = Yesod.Core.Types.Logger loggerSet' getter
         mkFoundation du = App
             { settings = conf
@@ -219,13 +221,12 @@ makeFoundation useEcho conf = do
     let urlRender' = yesodRender (mkFoundation (error "docUnpacker forced")) (appRoot conf)
     docUnpacker <- newDocUnpacker
         haddockRootDir'
+        (lookup "STACKAGE_HOOGLE_LOADER" env /= Just "0")
         blobStore'
         (flip (Database.Persist.runPool dbconf) p)
         urlRender'
     let foundation = mkFoundation docUnpacker
-    env <- getEnvironment
     -- Perform database migration using our application's logging settings.
     when (lookup "STACKAGE_SKIP_MIGRATION" env /= Just "1") $
         runResourceT $

View File

@@ -29,11 +29,12 @@ import Crypto.Hash (Digest, SHA1)
 newDocUnpacker
     :: FilePath -- ^ haddock root
+    -> Bool -- ^ loadHoogleDBs
     -> BlobStore StoreKey
     -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
     -> (Route App -> [(Text, Text)] -> Text)
     -> IO DocUnpacker
-newDocUnpacker root store runDB urlRender = do
+newDocUnpacker root loadHoogleDBs store runDB urlRender = do
     createDirs dirs
     statusMapVar <- newTVarIO $ asMap mempty
@@ -46,7 +47,7 @@ newDocUnpacker root store runDB urlRender = do
             $ insertMap (stackageSlug $ entityVal ent) var
         writeTChan workChan (forceUnpack, ent, var)
-    forkForever $ unpackWorker dirs runDB store messageVar urlRender workChan
+    forkForever $ unpackWorker dirs loadHoogleDBs runDB store messageVar urlRender workChan
     return DocUnpacker
         { duRequestDocs = \ent -> do
@@ -91,13 +92,14 @@ forkForever inner = mask $ \restore ->
 unpackWorker
     :: Dirs
+    -> Bool -- ^ load Hoogle DBs?
     -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
     -> BlobStore StoreKey
     -> TVar Text
     -> (Route App -> [(Text, Text)] -> Text)
     -> TChan (Bool, Entity Stackage, TVar UnpackStatus)
     -> IO ()
-unpackWorker dirs runDB store messageVar urlRender workChan = do
+unpackWorker dirs loadHoogleDBs runDB store messageVar urlRender workChan = do
     atomically $ writeTVar messageVar "Waiting for new work item"
     (forceUnpack, ent, resVar) <- atomically $ readTChan workChan
     shouldUnpack <-
shouldUnpack <- shouldUnpack <-
@@ -111,7 +113,7 @@ unpackWorker dirs runDB store messageVar urlRender workChan = do
                 , msg
                 ]
         say "Beginning of processing"
-        eres <- tryAny $ unpacker dirs runDB store say urlRender ent
+        eres <- tryAny $ unpacker dirs loadHoogleDBs runDB store say urlRender ent
         atomically $ writeTVar resVar $ case eres of
             Left e -> USFailed $ tshow e
             Right () -> USReady
@@ -121,13 +123,14 @@ removeTreeIfExists fp = whenM (isDirectory fp) (removeTree fp)
 unpacker
     :: Dirs
+    -> Bool -- ^ load Hoogle DBs?
     -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
     -> BlobStore StoreKey
     -> (Text -> IO ())
     -> (Route App -> [(Text, Text)] -> Text)
     -> Entity Stackage
     -> IO ()
-unpacker dirs runDB store say urlRender stackageEnt@(Entity _ stackage@Stackage {..}) = do
+unpacker dirs loadHoogleDBs runDB store say urlRender stackageEnt@(Entity _ stackage@Stackage {..}) = do
     say "Removing old directories, if they exist"
     removeTreeIfExists $ dirRawIdent dirs stackageIdent
     removeTreeIfExists $ dirGzIdent dirs stackageIdent
@@ -183,8 +186,8 @@ unpacker dirs runDB store say urlRender stackageEnt@(Entity _ stackage@Stackage
                 say "Downloading compiled Hoogle database"
                 withBinaryFile (fpToString dstDefaultHoo) WriteMode
                     $ \h -> src $$ ungzip =$ sinkHandle h
-            Nothing ->
-                handleAny print
+            Nothing -> when loadHoogleDBs
+                $ handleAny print
                 $ withSystemTempDirectory "hoogle-database-gen"
                 $ \hoogletemp' -> do
                     let hoogletemp = fpFromString hoogletemp'

View File

@@ -5,5 +5,6 @@ stanzas:
     - production
 env:
     STACKAGE_CABAL_LOADER: "0"
+    STACKAGE_HOOGLE_LOADER: "0"
 host: www.stackage.org
 copy-to: fpuser@www.stackage.org:/var/opt/keter/incoming