Mirror of https://github.com/commercialhaskell/stackage-server.git
Show compressor status
Commit 5b9ace6425 (parent 7ad48a91dd)
Application.hs
@@ -56,6 +56,7 @@ import Handler.System
 import Handler.Haddock
 import Handler.Package
 import Handler.PackageList
+import Handler.CompressorStatus
 
 -- This line actually creates our YesodDispatch instance. It is the second half
 -- of the call to mkYesodData which occurs in Foundation.hs. Please see the
@@ -138,7 +139,7 @@ makeFoundation useEcho conf = do
         return $ cachedS3Store root creds bucket prefix manager
 
     let haddockRootDir' = "/tmp/stackage-server-haddocks2"
-    unpacker <- createHaddockUnpacker haddockRootDir' blobStore'
+    (statusRef, unpacker) <- createHaddockUnpacker haddockRootDir' blobStore'
     widgetCache' <- newIORef mempty
 
     let logger = Yesod.Core.Types.Logger loggerSet' getter
@@ -156,6 +157,7 @@ makeFoundation useEcho conf = do
         , haddockRootDir = haddockRootDir'
         , haddockUnpacker = unpacker
         , widgetCache = widgetCache'
+        , compressorStatus = statusRef
         }
 
     -- Perform database migration using our application's logging settings.
Foundation.hs
@@ -42,6 +42,7 @@ data App = App
     -- time, and (3) so that even if the client connection dies, we finish the
     -- unpack job.
     , widgetCache :: !(IORef (HashMap Text (UTCTime, GWData (Route App))))
+    , compressorStatus :: !(IORef Text)
     }
 
 type ForceUnpack = Bool
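
Note: taken together, the hunks above thread the new status cell through the application: createHaddockUnpacker (which itself calls createCompressor, see below) now returns an IORef Text alongside the unpack action, makeFoundation stores that ref in the new compressorStatus field, and any handler can read it later. A tiny standalone sketch of that wiring, with hypothetical names and plain String/IO in place of Text/Yesod:

import Data.IORef (IORef, newIORef, readIORef, writeIORef)

-- A cut-down stand-in for the App foundation record.
data App = App
    { appTitle         :: String
    , compressorStatus :: IORef String   -- mirrors the new field added above
    }

-- Stand-in for createHaddockUnpacker: hands back the status cell it writes to.
makeWorker :: IO (IORef String)
makeWorker = newIORef "Compressor is idle"

-- Stand-in for makeFoundation: captures the returned ref in the record.
makeApp :: IO App
makeApp = do
    statusRef <- makeWorker
    return App { appTitle = "stackage-server", compressorStatus = statusRef }

main :: IO ()
main = do
    app <- makeApp
    writeIORef (compressorStatus app) "Compressing..."   -- done by the worker thread in the real code
    readIORef (compressorStatus app) >>= putStrLn        -- what getCompressorStatusR does via getYesod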
Handler/CompressorStatus.hs (new file, 14 lines)
@@ -0,0 +1,14 @@
+module Handler.CompressorStatus where
+
+import Import
+
+getCompressorStatusR :: Handler Html
+getCompressorStatusR = do
+    status <- getYesod >>= readIORef . compressorStatus
+    defaultLayout $ do
+        setTitle "Compressor thread status"
+        [whamlet|
+            <div .container>
+                <h1>Compressor thread status
+                <p>#{status}
+        |]
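
Note: the handler needs no locking; readIORef simply snapshots whatever the compressor thread last wrote, and the #{status} interpolation renders that Text HTML-escaped inside the page.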
Handler/Haddock.hs
@@ -92,25 +92,30 @@ mkDirPair root ident =
 
 createCompressor
     :: Dirs
-    -> IO (IO ()) -- ^ action to kick off compressor again
+    -> IO (IORef Text, IO ()) -- ^ action to kick off compressor again
 createCompressor dirs = do
     baton <- newMVar ()
-    mask_ $ void $ forkIO $ forever $ do
+    status <- newIORef "Compressor is idle"
+    mask_ $ void $ forkIO $ (`finally` writeIORef status "Compressor thread exited") $ forever $ do
+        writeIORef status "Waiting for signal to start compressing"
         takeMVar baton
-        runResourceT $ goDir (dirRawRoot dirs)
-    return $ void $ tryPutMVar baton ()
+        writeIORef status "Received signal, traversing directories"
+        runResourceT $ goDir status (dirRawRoot dirs)
+    return (status, void $ tryPutMVar baton ())
   where
-    goDir dir = do
-        sourceDirectory dir $$ mapM_C goFP
+    goDir status dir = do
+        writeIORef status $ "Compressing directory: " ++ fpToText dir
+        sourceDirectory dir $$ mapM_C (goFP status)
         liftIO $ void $ tryIO $ removeDirectory dir
 
-    goFP fp = do
+    goFP status fp = do
         e <- liftIO $ isFile fp
         if e
-            then liftIO
-                $ handle (print . asSomeException)
+            then liftIO $ do
+                writeIORef status $ "Compressing file: " ++ fpToText fp
+                handle (print . asSomeException)
                     $ gzipHash dirs suffix
-            else goDir fp
+            else goDir status fp
       where
         Just suffix = F.stripPrefix (dirRawRoot dirs </> "") fp
 
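
Note: the reworked createCompressor is an instance of a reusable pattern: a long-lived worker thread that publishes its state through an IORef and is woken by a single-slot MVar baton, so kick requests arriving while a pass is running coalesce into at most one further pass (hence tryPutMVar). A stripped-down, self-contained sketch of that pattern under assumed names (createWorker, doWork), leaving out the conduit directory walk and gzipping that the real code performs:

import Control.Concurrent (forkIO, threadDelay)
import Control.Concurrent.MVar (newMVar, takeMVar, tryPutMVar)
import Control.Exception (finally, mask_)
import Control.Monad (forever, void)
import Data.IORef (IORef, newIORef, readIORef, writeIORef)

-- | Fork a worker; return its status cell and an action that wakes it up.
createWorker :: IO () -> IO (IORef String, IO ())
createWorker doWork = do
    baton  <- newMVar ()                 -- full at start, so the first pass runs immediately
    status <- newIORef "Worker is idle"
    mask_ $ void $ forkIO $
        (`finally` writeIORef status "Worker thread exited") $ forever $ do
            writeIORef status "Waiting for signal"
            takeMVar baton               -- block until someone kicks us
            writeIORef status "Working"
            doWork
    return (status, void $ tryPutMVar baton ())  -- extra kicks while busy simply fail

main :: IO ()
main = do
    (status, kick) <- createWorker (threadDelay 500000)  -- stand-in for a compression pass
    kick
    threadDelay 100000
    readIORef status >>= putStrLn        -- e.g. "Working"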
@@ -179,20 +184,20 @@ dirCacheFp dirs digest =
 -- demand.
 createHaddockUnpacker :: FilePath -- ^ haddock root
                       -> BlobStore StoreKey
-                      -> IO (ForceUnpack -> PackageSetIdent -> IO ())
+                      -> IO (IORef Text, ForceUnpack -> PackageSetIdent -> IO ())
 createHaddockUnpacker root store = do
     createTree $ dirCacheRoot dirs
     createTree $ dirRawRoot dirs
     createTree $ dirGzRoot dirs
 
     chan <- newChan
-    compressor <- createCompressor dirs
+    (statusRef, compressor) <- createCompressor dirs
 
     mask $ \restore -> void $ forkIO $ forever $ do
         (forceUnpack, ident, res) <- readChan chan
         try (restore $ go forceUnpack ident) >>= putMVar res
         compressor
-    return $ \forceUnpack ident -> do
+    return (statusRef, \forceUnpack ident -> do
         shouldAct <-
             if forceUnpack
                 then return True
@@ -202,7 +207,7 @@ createHaddockUnpacker root store = do
             res <- newEmptyMVar
             writeChan chan (forceUnpack, ident, res)
             takeMVar res >>= either (throwM . asSomeException) return
-        else return ()
+        else return ())
   where
     dirs = mkDirs root
 
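
Note: createHaddockUnpacker now returns the status ref alongside the enqueue action, but its queueing scheme is unchanged: each request goes onto a Chan paired with an MVar for its result, one forked worker serves requests in order, and the caller blocks on the MVar and rethrows any exception the job raised. A minimal sketch of that scheme with hypothetical names (createWorkQueue, job), omitting the compressor kick that the real worker issues after every unpack:

import Control.Concurrent (forkIO)
import Control.Concurrent.Chan (newChan, readChan, writeChan)
import Control.Concurrent.MVar (newEmptyMVar, putMVar, takeMVar)
import Control.Exception (SomeException, mask, throwIO, try)
import Control.Monad (forever, void)

-- | Fork one worker that serves requests sequentially; return the enqueue action.
createWorkQueue :: (Int -> IO ()) -> IO (Int -> IO ())
createWorkQueue job = do
    chan <- newChan
    mask $ \restore -> void $ forkIO $ forever $ do
        (arg, res) <- readChan chan
        -- Run the job with exceptions restored; try keeps the worker alive
        -- and ships any failure back to the caller instead.
        r <- try (restore (job arg)) :: IO (Either SomeException ())
        putMVar res r
    return $ \arg -> do
        res <- newEmptyMVar
        writeChan chan (arg, res)
        takeMVar res >>= either throwIO return

main :: IO ()
main = do
    enqueue <- createWorkQueue (\n -> putStrLn ("unpacking job " ++ show n))
    mapM_ enqueue [1 :: Int, 2, 3]   -- each call blocks until its own job has finished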
config/routes
@@ -26,3 +26,4 @@
 /haddock/#PackageSetIdent/*Texts HaddockR GET
 /package/#PackageName PackageR GET
 /package PackageListR GET
+/compressor-status CompressorStatusR GET
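
Note: with this route registered, a GET to /compressor-status dispatches to the getCompressorStatusR handler added above and reports the compressor thread's latest status line.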
stackage-server.cabal
@@ -44,6 +44,7 @@ library
                      Handler.Haddock
                      Handler.Package
                      Handler.PackageList
+                     Handler.CompressorStatus
 
     if flag(dev) || flag(library-only)
         cpp-options:   -DDEVELOPMENT