Better unpacking code

Michael Snoyman 2015-01-04 16:43:11 +02:00
parent 0b7d1e4bd0
commit 137453d9a2
10 changed files with 354 additions and 261 deletions

View File

@@ -13,6 +13,7 @@ import Control.Monad.Logger (runLoggingT, LoggingT, defaultLogStr)
import Data.BlobStore (fileStore, storeWrite, cachedS3Store)
import Data.Hackage
import Data.Hackage.Views
import Data.Unpacking (newDocUnpacker)
import Data.WebsiteContent
import Data.Slug (SnapSlug (..), safeMakeSlug, HasGenIO)
import Data.Time (diffUTCTime)
@@ -165,12 +166,6 @@ makeFoundation useEcho conf = do
blobStore' <- loadBlobStore manager conf
let haddockRootDir' = "/tmp/stackage-server-haddocks2"
urlRenderRef' <- newIORef (error "urlRenderRef not initialized")
(statusRef, unpacker) <- createHaddockUnpacker
haddockRootDir'
blobStore'
(flip (Database.Persist.runPool dbconf) p)
urlRenderRef'
widgetCache' <- newIORef mempty
#if MIN_VERSION_yesod_gitrepo(0,1,1)
@@ -204,7 +199,7 @@ makeFoundation useEcho conf = do
#endif
let logger = Yesod.Core.Types.Logger loggerSet' getter
foundation = App
mkFoundation du = App
{ settings = conf
, getStatic = s
, connPool = p
@@ -216,13 +211,18 @@ makeFoundation useEcho conf = do
, progressMap = progressMap'
, nextProgressKey = nextProgressKey'
, haddockRootDir = haddockRootDir'
, haddockUnpacker = unpacker
, appDocUnpacker = du
, widgetCache = widgetCache'
, compressorStatus = statusRef
, websiteContent = websiteContent'
}
writeIORef urlRenderRef' (yesodRender foundation (appRoot conf))
let urlRender' = yesodRender (mkFoundation (error "docUnpacker forced")) (appRoot conf)
docUnpacker <- newDocUnpacker
haddockRootDir'
blobStore'
(flip (Database.Persist.runPool dbconf) p)
urlRender'
let foundation = mkFoundation docUnpacker
env <- getEnvironment
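The notable trick above is how the commit breaks the circular dependency between the App and the doc unpacker: newDocUnpacker needs a URL renderer, and yesodRender needs an App. Instead of the old mutable urlRenderRef, mkFoundation is applied to an error thunk that yesodRender never forces (the "docUnpacker forced" placeholder). A minimal standalone sketch of the same knot-tying pattern, with hypothetical names (Foo, mkFoo, fooRender, fooWorker):

data Foo = Foo
    { fooRender :: Int -> String
    , fooWorker :: Int -> IO ()
    }

mkFoo :: (Int -> IO ()) -> Foo
mkFoo worker = Foo { fooRender = show, fooWorker = worker }

main :: IO ()
main = do
    -- Safe only because fooRender never touches fooWorker; forcing the
    -- placeholder would throw, exactly like "docUnpacker forced" above.
    let render = fooRender (mkFoo (error "worker forced"))
        worker n = putStrLn ("rendered: " ++ render n)
        foo = mkFoo worker
    fooWorker foo 42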

View File

@@ -18,7 +18,7 @@ import GHC.Prim (RealWorld)
import Text.Blaze (ToMarkup)
newtype Slug = Slug { unSlug :: Text }
deriving (Show, Read, Eq, Typeable, PersistField, ToMarkup)
deriving (Show, Read, Eq, Typeable, PersistField, ToMarkup, Ord, Hashable)
instance PersistFieldSql Slug where
sqlType = sqlType . liftM unSlug
@@ -101,6 +101,6 @@ slugField =
-- | Unique identifier for a snapshot.
newtype SnapSlug = SnapSlug { unSnapSlug :: Slug }
deriving (Show, Read, Eq, Typeable, PersistField, ToMarkup, PathPiece)
deriving (Show, Read, Eq, Typeable, PersistField, ToMarkup, PathPiece, Ord, Hashable)
instance PersistFieldSql SnapSlug where
sqlType = sqlType . liftM unSnapSlug
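The new Ord and Hashable derivations are what let slugs act as container keys: the unpacker introduced below keeps a per-snapshot status map keyed by SnapSlug, which needs Ord for a Data.Map, and Hashable covers HashMap-keyed uses of the same types. Both instances come through the newtype from the underlying Text. A small self-contained sketch of why the constraints are needed (Slug' is a hypothetical stand-in):

{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
import Data.Hashable (Hashable)
import Data.Text (Text)
import qualified Data.HashMap.Strict as HM
import qualified Data.Map.Strict as Map

newtype Slug' = Slug' Text
    deriving (Show, Eq, Ord, Hashable)

main :: IO ()
main = do
    -- Map lookup requires Ord on the key type
    print $ Map.lookup (Slug' "lts-1.0") $ Map.singleton (Slug' "lts-1.0") True
    -- HashMap lookup requires Hashable (and Eq) on the key type
    print $ HM.lookup (Slug' "lts-1.0") $ HM.singleton (Slug' "lts-1.0") True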

Data/Unpacking.hs (new file, 303 lines)
View File

@@ -0,0 +1,303 @@
-- | Code for unpacking documentation bundles, building the Hoogle databases,
-- and compressing/deduping contents.
module Data.Unpacking
( newDocUnpacker
) where
import Import hiding (runDB)
import Data.BlobStore
import Handler.Haddock
import Filesystem (createTree, isFile, removeTree, isDirectory, listDirectory)
import Control.Concurrent (forkIO)
import Control.Monad.Trans.Resource (allocate, resourceForkIO, release)
import Data.Char (isAlpha)
import qualified Hoogle
import qualified Data.Text as T
import qualified Data.Yaml as Y
import System.IO (IOMode (ReadMode, WriteMode), withBinaryFile, openBinaryFile)
import System.IO.Temp (withSystemTempFile, withTempFile, createTempDirectory, withSystemTempDirectory)
import System.Directory (getTemporaryDirectory)
import System.Exit (ExitCode (ExitSuccess))
import System.Process (createProcess, proc, cwd, waitForProcess)
newDocUnpacker
:: FilePath -- ^ haddock root
-> BlobStore StoreKey
-> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
-> (Route App -> [(Text, Text)] -> Text)
-> IO DocUnpacker
newDocUnpacker root store runDB urlRender = do
createDirs dirs
statusMapVar <- newTVarIO $ asMap mempty
messageVar <- newTVarIO "Inactive"
workChan <- atomically newTChan
let requestDocs forceUnpack ent = atomically $ do
var <- newTVar USBusy
modifyTVar statusMapVar
$ insertMap (stackageSlug $ entityVal ent) var
writeTChan workChan (forceUnpack, ent, var)
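-- A note on the queueing scheme: each request installs a fresh TVar
-- marked USBusy before the work item is enqueued, and the single worker
-- thread below drains the TChan one item at a time, so unpacks are
-- serialized and the same snapshot is never unpacked twice concurrently.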
forkForever $ unpackWorker dirs runDB store statusMapVar messageVar urlRender workChan
return DocUnpacker
{ duRequestDocs = \ent -> do
m <- readTVarIO statusMapVar
case lookup (stackageSlug $ entityVal ent) m of
Nothing -> do
b <- isUnpacked dirs ent
if b
then return USReady
else do
requestDocs False ent
return USBusy
Just us -> readTVarIO us
, duGetStatus = readTVarIO messageVar
, duForceReload = \ent -> do
atomically $ modifyTVar statusMapVar
$ deleteMap (stackageSlug $ entityVal ent)
requestDocs True ent
}
where
dirs = mkDirs root
createDirs :: Dirs -> IO ()
createDirs dirs = do
createTree $ dirCacheRoot dirs
createTree $ dirRawRoot dirs
createTree $ dirGzRoot dirs
createTree $ dirHoogleRoot dirs
-- | Check for the presence of file system artifacts indicating that the docs
-- have been unpacked.
isUnpacked :: Dirs -> Entity Stackage -> IO Bool
isUnpacked dirs (Entity _ stackage) =
isFile databasePath
where
databasePath = dirHoogleFp dirs (stackageIdent stackage) ["default.hoo"]
forkForever :: IO () -> IO ()
forkForever inner = mask $ \restore ->
void $ forkIO $ forever $ handleAny print $ restore $ forever inner
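-- The thread body runs masked except inside 'inner'; any exception that
-- escapes 'inner' is printed by handleAny and the outer 'forever' restarts
-- the loop, so one failed work item cannot kill the worker thread.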
unpackWorker dirs runDB store statusMapVar messageVar urlRender workChan = do
atomically $ writeTVar messageVar "Waiting for new work item"
(forceUnpack, ent, resVar) <- atomically $ readTChan workChan
shouldUnpack <-
if forceUnpack
then return True
else not <$> isUnpacked dirs ent
when shouldUnpack $ do
let say msg = atomically $ writeTVar messageVar $ concat
[ toPathPiece (stackageSlug $ entityVal ent)
, ": "
, msg
]
say "Beginning of processing"
eres <- tryAny $ unpacker dirs runDB store say urlRender ent
atomically $ writeTVar resVar $ case eres of
Left e -> USFailed $ tshow e
Right () -> USReady
removeTreeIfExists :: FilePath -> IO ()
removeTreeIfExists fp = whenM (isDirectory fp) (removeTree fp)
unpacker dirs runDB store say urlRender stackageEnt@(Entity _ Stackage {..}) = do
say "Removing old directories, if they exist"
removeTreeIfExists $ dirRawIdent dirs stackageIdent
removeTreeIfExists $ dirGzIdent dirs stackageIdent
removeTreeIfExists $ dirHoogleIdent dirs stackageIdent
withSystemTempFile "haddock-bundle.tar.xz" $ \tempfp temph -> do
say "Downloading raw tarball"
withAcquire (storeRead' store (HaddockBundle stackageIdent)) $ \msrc ->
case msrc of
Nothing -> error "No haddocks exist for that snapshot"
Just src -> src $$ sinkHandle temph
hClose temph
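-- Closing the handle flushes the downloaded bundle to disk so the
-- external tar process below sees the complete file.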
let destdir = dirRawIdent dirs stackageIdent
createTree destdir
say "Unpacking tarball"
(Nothing, Nothing, Nothing, ph) <- createProcess
(proc "tar" ["xf", tempfp])
{ cwd = Just $ fpToString destdir
}
ec <- waitForProcess ph
if ec == ExitSuccess then return () else throwM ec
createTree $ dirHoogleIdent dirs stackageIdent
tmp <- getTemporaryDirectory
withSystemTempDirectory "hoogle-database-gen" $ \hoogletemp' -> do
let hoogletemp = fpFromString hoogletemp'
logFp = fpToString (dirHoogleFp dirs stackageIdent ["error-log"])
withBinaryFile logFp WriteMode $ \errorLog -> do
say "Copying Hoogle text files to temp directory"
runResourceT $ copyHoogleTextFiles errorLog destdir hoogletemp
say "Creating Hoogle database"
createHoogleDb say dirs stackageEnt errorLog hoogletemp urlRender
-- Determine which packages have documentation and update the
-- database appropriately
say "Updating database for available documentation"
runResourceT $ runDB $ do
let sid = entityKey stackageEnt
updateWhere
[PackageStackage ==. sid]
[PackageHasHaddocks =. False]
sourceDirectory destdir $$ mapM_C (\fp -> do
let mnv = nameAndVersionFromPath fp
forM_ mnv $ \(name, version) -> updateWhere
[ PackageStackage ==. sid
, PackageName' ==. PackageName name
, PackageVersion ==. Version version
]
[PackageHasHaddocks =. True]
)
copyHoogleTextFiles :: Handle -- ^ error log handle
-> FilePath -- ^ raw unpacked Haddock files
-> FilePath -- ^ temporary work directory
-> ResourceT IO ()
copyHoogleTextFiles errorLog raw tmp = do
let tmptext = tmp </> "text"
liftIO $ createTree tmptext
sourceDirectory raw $$ mapM_C (\fp ->
forM_ (nameAndVersionFromPath fp) $ \(name, version) -> do
let src = fp </> fpFromText name <.> "txt"
dst = tmptext </> fpFromText (name ++ "-" ++ version)
exists <- liftIO $ isFile src
if exists
then sourceFile src $$ (sinkFile dst :: Sink ByteString (ResourceT IO) ())
else liftIO $ appendHoogleErrors errorLog $ HoogleErrors
{ packageName = name
, packageVersion = version
, errors = ["No textual Hoogle DB (use \"cabal haddock --hoogle\")"]
}
)
createHoogleDb :: (Text -> IO ())
-> Dirs
-> Entity Stackage
-> Handle -- ^ error log handle
-> FilePath -- ^ temp directory
-> (Route App -> [(Text, Text)] -> Text)
-> IO ()
createHoogleDb say dirs (Entity _ stackage) errorLog tmpdir urlRender = do
let ident = stackageIdent stackage
tmpbin = tmpdir </> "binary"
createTree tmpbin
eres <- tryAny $ runResourceT $ do
-- Create hoogle binary databases for each package.
sourceDirectory (tmpdir </> "text") $$ mapM_C
( \fp -> do
(releaseKey, srcH) <- allocate (openBinaryFile (fpToString fp) ReadMode) hClose
forM_ (nameAndVersionFromPath fp) $ \(name, version) -> liftIO $ do
say $ concat
[ "Creating Hoogle database for: "
, name
, "-"
, version
]
src <- unpack . decodeUtf8 . asLByteString <$> hGetContents srcH
let -- Preprocess the haddock-generated manifest file.
src' = unlines $ haddockHacks (Just (unpack docsUrl)) $ lines src
docsUrl = urlRender (HaddockR (stackageSlug stackage) urlPieces) []
urlPieces = [name <> "-" <> version, "index.html"]
-- Compute the filepath of the resulting hoogle
-- database.
out = fpToString $ tmpbin </> fpFromText base
base = name <> "-" <> version <> ".hoo"
errs <- Hoogle.createDatabase "" Hoogle.Haskell [] src' out
when (not $ null errs) $ do
-- TODO: remove this printing once errors are yielded
-- to the user.
putStrLn $ concat
[ base
, " Hoogle errors: "
, tshow errs
]
appendHoogleErrors errorLog $ HoogleErrors
{ packageName = name
, packageVersion = version
, errors = map show errs
}
release releaseKey
)
-- Merge the individual binary databases into one big database.
liftIO $ do
say "Merging all Hoogle databases"
dbs <- listDirectory tmpbin
Hoogle.mergeDatabase
(map fpToString dbs)
(fpToString (dirHoogleFp dirs ident ["default.hoo"]))
case eres of
Right () -> return ()
Left err -> liftIO $ appendHoogleErrors errorLog $ HoogleErrors
{ packageName = "Exception thrown while building hoogle DB"
, packageVersion = ""
, errors = [show err]
}
data HoogleErrors = HoogleErrors
{ packageName :: Text
, packageVersion :: Text
, errors :: [String]
} deriving (Generic)
instance ToJSON HoogleErrors where
instance FromJSON HoogleErrors where
-- Append Hoogle errors to a log file. Since each record is encoded as a
-- single-element list, the concatenated file still decodes as one
-- [HoogleErrors].
appendHoogleErrors :: Handle -> HoogleErrors -> IO ()
appendHoogleErrors h errs = hPut h (Y.encode [errs])
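-- Roughly, two successive appends produce a file like:
--   - packageName: foo
--     packageVersion: '1.0'
--     errors: []
--   - packageName: bar
--     packageVersion: '2.0'
--     errors: []
-- which YAML still parses as a single top-level list.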
nameAndVersionFromPath :: FilePath -> Maybe (Text, Text)
nameAndVersionFromPath fp =
(\name -> (name, version)) <$> stripSuffix "-" name'
where
(name', version) = T.breakOnEnd "-" $ fpToText $ filename fp
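-- For example, "conduit-1.2.3" breaks into ("conduit-", "1.2.3") and
-- stripSuffix yields Just ("conduit", "1.2.3"); "http-client-0.4" splits
-- on the last dash, giving ("http-client", "0.4"); a name containing no
-- dash produces Nothing.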
---------------------------------------------------------------------
-- HADDOCK HACKS
-- (Copied from hoogle-4.2.36/src/Recipe/Haddock.hs)
-- Modifications:
-- 1) Some name qualification
-- 2) Explicit type sig due to polymorphic elem
-- 3) Fixed an unused binding warning
-- Eliminate @version
-- Change :*: to (:*:), Haddock bug
-- Change !!Int to !Int, Haddock bug
-- Change instance [overlap ok] to instance, Haddock bug
-- Change instance [incoherent] to instance, Haddock bug
-- Change instance [safe] to instance, Haddock bug
-- Change !Int to Int, HSE bug
-- Drop {-# UNPACK #-}, Haddock bug
-- Drop everything after where, Haddock bug
haddockHacks :: Maybe Hoogle.URL -> [String] -> [String]
haddockHacks loc src = maybe id haddockPackageUrl loc (translate src)
where
translate :: [String] -> [String]
translate = map (unwords . g . map f . words) . filter (not . isPrefixOf "@version ")
f "::" = "::"
f (':':xs) = "(:" ++ xs ++ ")"
f ('!':'!':x:xs) | isAlpha x = xs
f ('!':x:xs) | isAlpha x || x `elem` ("[(" :: String) = x:xs
f x | x `elem` ["[overlap","ok]","[incoherent]","[safe]"] = ""
f x | x `elem` ["{-#","UNPACK","#-}"] = ""
f x = x
g ("where":_) = []
g (x:xs) = x : g xs
g [] = []
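-- Illustrative behavior of translate:
--   translate ["@version 1.2", "data T = A :*: B", "class C a where f :: a"]
--     == ["data T = A (:*:) B", "class C a"]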
haddockPackageUrl :: Hoogle.URL -> [String] -> [String]
haddockPackageUrl x = concatMap f
where f y | "@package " `isPrefixOf` y = ["@url " ++ x, y]
| otherwise = [y]

View File

@@ -39,17 +39,20 @@ data App = App
, progressMap :: !(IORef (IntMap Progress))
, nextProgressKey :: !(IORef Int)
, haddockRootDir :: !FilePath
, haddockUnpacker :: !(ForceUnpack -> Entity Stackage -> IO ())
, appDocUnpacker :: DocUnpacker
-- ^ We have a dedicated thread so that (1) we don't try to unpack too many
-- things at once, (2) we never unpack the same thing twice at the same
-- time, and (3) even if the client connection dies, we still finish the
-- unpack job.
, widgetCache :: !(IORef (HashMap Text (UTCTime, GWData (Route App))))
, compressorStatus :: !(IORef Text)
, websiteContent :: GitRepo WebsiteContent
}
type ForceUnpack = Bool
data DocUnpacker = DocUnpacker
{ duRequestDocs :: Entity Stackage -> IO UnpackStatus
, duGetStatus :: IO Text
, duForceReload :: Entity Stackage -> IO ()
}
data Progress = ProgressWorking !Text
| ProgressDone !Text !(Route App)
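Swapping the old bare function of type ForceUnpack -> Entity Stackage -> IO () for a record of three operations keeps the Foundation decoupled from Data.Unpacking and makes the unpacker replaceable. For illustration, a hypothetical no-op stub (e.g. for tests, assuming the codebase's OverloadedStrings) only has to fill the three fields:

stubDocUnpacker :: DocUnpacker
stubDocUnpacker = DocUnpacker
    { duRequestDocs = \_ -> return USReady  -- pretend docs are always ready
    , duGetStatus   = return "stub unpacker: idle"
    , duForceReload = \_ -> return ()
    }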

View File

@@ -4,7 +4,7 @@ import Import
getCompressorStatusR :: Handler Html
getCompressorStatusR = do
status <- getYesod >>= readIORef . compressorStatus
status <- getYesod >>= liftIO . duGetStatus . appDocUnpacker
defaultLayout $ do
setTitle "Compressor thread status"
[whamlet|

View File

@@ -4,9 +4,11 @@ module Handler.Haddock
, getHaddockR
, getUploadDocMapR
, putUploadDocMapR
, createHaddockUnpacker
-- Exported for use in Handler.Hoogle
, Dirs, getDirs, dirHoogleFp
, Dirs (..), getDirs, dirHoogleFp, mkDirs
, dirRawIdent
, dirGzIdent
, dirHoogleIdent
) where
import Import
@@ -59,7 +61,7 @@ getUploadHaddockR slug0 = do
fileSource fileInfo $$ storeWrite (HaddockBundle ident)
runDB $ update sid [StackageHasHaddocks =. True]
master <- getYesod
void $ liftIO $ forkIO $ haddockUnpacker master True stackageEnt
liftIO $ duForceReload (appDocUnpacker master) stackageEnt
setMessage "Haddocks uploaded"
redirect $ SnapshotR slug StackageHomeR
_ -> defaultLayout $ do
@@ -87,8 +89,7 @@ getHaddockR slug rest = do
redirectWith status301 $ HaddockR (stackageSlug stackage) rest
mapM_ sanitize rest
dirs <- getDirs
master <- getYesod
liftIO $ haddockUnpacker master False stackageEnt
requireDocs stackageEnt
let ident = stackageIdent (entityVal stackageEnt)
rawfp = dirRawFp dirs ident rest
@@ -237,107 +238,6 @@ dirCacheFp dirs digest =
name = decodeUtf8 $ B16.encode $ toBytes digest
(x, y) = splitAt 2 name
-- Should have two threads: one to unpack, one to convert. Never serve the
-- uncompressed files, only the compressed files. When serving, convert on
-- demand.
createHaddockUnpacker :: FilePath -- ^ haddock root
-> BlobStore StoreKey
-> (forall a m. (MonadIO m, MonadBaseControl IO m)
=> SqlPersistT m a -> m a)
-> IORef (Route App -> [(Text, Text)] -> Text)
-> IO (IORef Text, ForceUnpack -> Entity Stackage -> IO ())
createHaddockUnpacker root store runDB' urlRenderRef = do
createTree $ dirCacheRoot dirs
createTree $ dirRawRoot dirs
createTree $ dirGzRoot dirs
createTree $ dirHoogleRoot dirs
chan <- newChan
(statusRef, compressor) <- createCompressor dirs
mask $ \restore -> void $ forkIO $ forever $ do
(forceUnpack, ident, res) <- readChan chan
try (restore $ go forceUnpack ident) >>= putMVar res
compressor
return (statusRef, \forceUnpack stackageEnt -> do
let ident = stackageIdent (entityVal stackageEnt)
shouldAct <-
if forceUnpack
then return True
else not <$> doDirsExist ident
if shouldAct
then do
res <- newEmptyMVar
writeChan chan (forceUnpack, stackageEnt, res)
takeMVar res >>= either (throwM . asSomeException) return
else return ())
where
dirs = mkDirs root
removeTreeIfExists fp = whenM (isDirectory fp) (removeTree fp)
doDirsExist ident = do
e1 <- isDirectory $ dirGzIdent dirs ident
if e1
then return True
else isDirectory $ dirRawIdent dirs ident
go forceUnpack stackageEnt = do
let ident = stackageIdent (entityVal stackageEnt)
toRun <-
if forceUnpack
then do
removeTreeIfExists $ dirRawIdent dirs ident
removeTreeIfExists $ dirGzIdent dirs ident
removeTreeIfExists $ dirHoogleIdent dirs ident
return True
else not <$> doDirsExist ident
when toRun $ do
withSystemTempFile "haddock-bundle.tar.xz" $ \tempfp temph -> do
withAcquire (storeRead' store (HaddockBundle ident)) $ \msrc ->
case msrc of
Nothing -> error "No haddocks exist for that snapshot"
Just src -> src $$ sinkHandle temph
hClose temph
let destdir = dirRawIdent dirs ident
createTree destdir
(Nothing, Nothing, Nothing, ph) <- createProcess
(proc "tar" ["xf", tempfp])
{ cwd = Just $ fpToString destdir
}
ec <- waitForProcess ph
if ec == ExitSuccess then return () else throwM ec
urlRender <- readIORef urlRenderRef
runResourceT $ do
liftIO $ createTree $ dirHoogleIdent dirs ident
tmp <- liftIO getTemporaryDirectory
(_releasekey, hoogletemp) <- allocate
(fpFromString <$> createTempDirectory tmp "hoogle-database-gen")
removeTree
let logFp = fpToString (dirHoogleFp dirs ident ["error-log"])
(_, errorLog) <- allocate (openBinaryFile logFp WriteMode) hClose
copyHoogleTextFiles errorLog destdir hoogletemp
-- TODO: Have hoogle requests block on this finishing.
-- (Or display a "compiling DB" message to the user)
void $ resourceForkIO $ createHoogleDb dirs stackageEnt errorLog hoogletemp urlRender
-- Determine which packages have documentation and update the
-- database appropriately
runResourceT $ runDB' $ do
let sid = entityKey stackageEnt
updateWhere
[PackageStackage ==. sid]
[PackageHasHaddocks =. False]
sourceDirectory destdir $$ mapM_C (\fp -> do
let mnv = nameAndVersionFromPath fp
forM_ mnv $ \(name, version) -> updateWhere
[ PackageStackage ==. sid
, PackageName' ==. PackageName name
, PackageVersion ==. Version version
]
[PackageHasHaddocks =. True]
)
data DocInfo = DocInfo Version (Map Text [Text])
instance FromJSON DocInfo where
parseJSON = withObject "DocInfo" $ \o -> DocInfo
@@ -397,140 +297,3 @@ getUploadDocMapR = do
putUploadDocMapR :: Handler Html
putUploadDocMapR = getUploadDocMapR
copyHoogleTextFiles :: Handle -- ^ error log handle
-> FilePath -- ^ raw unpacked Haddock files
-> FilePath -- ^ temporary work directory
-> ResourceT IO ()
copyHoogleTextFiles errorLog raw tmp = do
let tmptext = tmp </> "text"
liftIO $ createTree tmptext
sourceDirectory raw $$ mapM_C (\fp ->
forM_ (nameAndVersionFromPath fp) $ \(name, version) -> do
let src = fp </> fpFromText name <.> "txt"
dst = tmptext </> fpFromText (name ++ "-" ++ version)
exists <- liftIO $ isFile src
if exists
then sourceFile src $$ (sinkFile dst :: Sink ByteString (ResourceT IO) ())
else liftIO $ appendHoogleErrors errorLog $ HoogleErrors
{ packageName = name
, packageVersion = version
, errors = ["No textual Hoogle DB (use \"cabal haddock --hoogle\")"]
}
)
createHoogleDb :: Dirs
-> Entity Stackage
-> Handle -- ^ error log handle
-> FilePath -- ^ temp directory
-> (Route App -> [(Text, Text)] -> Text)
-> ResourceT IO ()
createHoogleDb dirs (Entity _ stackage) errorLog tmpdir urlRender = do
let ident = stackageIdent stackage
tmpbin = tmpdir </> "binary"
liftIO $ createTree tmpbin
eres <- tryAny $ do
-- Create hoogle binary databases for each package.
sourceDirectory (tmpdir </> "text") $$ mapM_C
( \fp -> do
(releaseKey, srcH) <- allocate (openBinaryFile (fpToString fp) ReadMode) hClose
forM_ (nameAndVersionFromPath fp) $ \(name, version) -> liftIO $ do
src <- unpack . decodeUtf8 . asLByteString <$> hGetContents srcH
let -- Preprocess the haddock-generated manifest file.
src' = unlines $ haddockHacks (Just (unpack docsUrl)) $ lines src
docsUrl = urlRender (HaddockR (stackageSlug stackage) urlPieces) []
urlPieces = [name <> "-" <> version, "index.html"]
-- Compute the filepath of the resulting hoogle
-- database.
out = fpToString $ tmpbin </> fpFromText base
base = name <> "-" <> version <> ".hoo"
errs <- Hoogle.createDatabase "" Hoogle.Haskell [] src' out
when (not $ null errs) $ do
-- TODO: remove this printing once errors are yielded
-- to the user.
putStrLn $ concat
[ base
, " Hoogle errors: "
, tshow errs
]
appendHoogleErrors errorLog $ HoogleErrors
{ packageName = name
, packageVersion = version
, errors = map show errs
}
release releaseKey
)
-- Merge the individual binary databases into one big database.
liftIO $ do
dbs <- listDirectory tmpbin
Hoogle.mergeDatabase
(map fpToString dbs)
(fpToString (dirHoogleFp dirs ident ["default.hoo"]))
case eres of
Right () -> return ()
Left err -> liftIO $ appendHoogleErrors errorLog $ HoogleErrors
{ packageName = "Exception thrown while building hoogle DB"
, packageVersion = ""
, errors = [show err]
}
data HoogleErrors = HoogleErrors
{ packageName :: Text
, packageVersion :: Text
, errors :: [String]
} deriving (Generic)
instance ToJSON HoogleErrors where
instance FromJSON HoogleErrors where
-- Appends hoogle errors to a log file. By encoding within a single
-- list, the resulting file can be decoded as [HoogleErrors].
appendHoogleErrors :: Handle -> HoogleErrors -> IO ()
appendHoogleErrors h errs = hPut h (Y.encode [errs])
nameAndVersionFromPath :: FilePath -> Maybe (Text, Text)
nameAndVersionFromPath fp =
(\name -> (name, version)) <$> stripSuffix "-" name'
where
(name', version) = T.breakOnEnd "-" $ fpToText $ filename fp
---------------------------------------------------------------------
-- HADDOCK HACKS
-- (Copied from hoogle-4.2.36/src/Recipe/Haddock.hs)
-- Modifications:
-- 1) Some name qualification
-- 2) Explicit type sig due to polymorphic elem
-- 3) Fixed an unused binding warning
-- Eliminate @version
-- Change :*: to (:*:), Haddock bug
-- Change !!Int to !Int, Haddock bug
-- Change instance [overlap ok] to instance, Haddock bug
-- Change instance [incoherent] to instance, Haddock bug
-- Change instance [safe] to instance, Haddock bug
-- Change !Int to Int, HSE bug
-- Drop {-# UNPACK #-}, Haddock bug
-- Drop everything after where, Haddock bug
haddockHacks :: Maybe Hoogle.URL -> [String] -> [String]
haddockHacks loc src = maybe id haddockPackageUrl loc (translate src)
where
translate :: [String] -> [String]
translate = map (unwords . g . map f . words) . filter (not . isPrefixOf "@version ")
f "::" = "::"
f (':':xs) = "(:" ++ xs ++ ")"
f ('!':'!':x:xs) | isAlpha x = xs
f ('!':x:xs) | isAlpha x || x `elem` ("[(" :: String) = x:xs
f x | x `elem` ["[overlap","ok]","[incoherent]","[safe]"] = ""
f x | x `elem` ["{-#","UNPACK","#-}"] = ""
f x = x
g ("where":_) = []
g (x:xs) = x : g xs
g [] = []
haddockPackageUrl :: Hoogle.URL -> [String] -> [String]
haddockPackageUrl x = concatMap f
where f y | "@package " `isPrefixOf` y = ["@url " ++ x, y]
| otherwise = [y]

View File

@@ -30,8 +30,7 @@ getHoogleR slug = do
offset = (page - 1) * perPage
stackageEnt@(Entity _ stackage) <- runDB $ getBy404 $ UniqueSnapshot slug
-- Unpack haddocks and generate hoogle DB, if necessary.
master <- getYesod
liftIO $ haddockUnpacker master False stackageEnt
requireDocs stackageEnt
let databasePath = dirHoogleFp dirs (stackageIdent stackage) ["default.hoo"]
heDatabase = liftIO $ Hoogle.loadDatabase (fpToString databasePath)
-- If the hoogle DB isn't yet generated, yield 404.

View File

@@ -34,3 +34,23 @@ parseLtsPair t1 = do
t3 <- stripPrefix "." t2
(y, "") <- either (const Nothing) Just $ decimal t3
Just (x, y)
requireDocs :: Entity Stackage -> Handler ()
requireDocs stackageEnt = do
master <- getYesod
status <- liftIO $ duRequestDocs (appDocUnpacker master) stackageEnt
case status of
USReady -> return ()
USBusy -> (>>= sendResponse) $ defaultLayout $ do
setTitle "Docs unpacking, please wait"
addHeader "Refresh" "1"
msg <- liftIO $ duGetStatus $ appDocUnpacker master
[whamlet|
<div .container>
<p>Docs are currently being unpacked, please wait.
<p>This page will automatically reload every second.
<p>Current status: #{msg}
|]
USFailed e -> invalidArgs
[ "Docs not available: " ++ e
]

View File

@@ -113,3 +113,7 @@ class HasHackageRoot a where
getHackageRoot :: a -> HackageRoot
instance HasHackageRoot HackageRoot where
getHackageRoot = id
data UnpackStatus = USReady
| USBusy
| USFailed !Text

View File

@@ -28,6 +28,7 @@ library
Data.Hackage.DeprecationInfo
Data.Hackage.Views
Data.WebsiteContent
Data.Unpacking
Types
Handler.Home
Handler.Snapshots