Merge branch '47-hoogle'

Michael Snoyman 2015-01-04 12:06:22 +02:00
commit 86f3add286
13 changed files with 511 additions and 56 deletions

View File

@@ -68,6 +68,7 @@ import Handler.CompressorStatus
import Handler.Tag
import Handler.BannedTags
import Handler.RefreshDeprecated
import Handler.Hoogle
-- This line actually creates our YesodDispatch instance. It is the second half
-- of the call to mkYesodData which occurs in Foundation.hs. Please see the
@@ -164,8 +165,12 @@ makeFoundation useEcho conf = do
blobStore' <- loadBlobStore manager conf
let haddockRootDir' = "/tmp/stackage-server-haddocks2"
(statusRef, unpacker) <- createHaddockUnpacker haddockRootDir' blobStore'
urlRenderRef' <- newIORef (error "urlRenderRef not initialized")
(statusRef, unpacker) <- createHaddockUnpacker
haddockRootDir'
blobStore'
(flip (Database.Persist.runPool dbconf) p)
urlRenderRef'
widgetCache' <- newIORef mempty
#if MIN_VERSION_yesod_gitrepo(0,1,1)
@@ -217,6 +222,8 @@ makeFoundation useEcho conf = do
, websiteContent = websiteContent'
}
writeIORef urlRenderRef' (yesodRender foundation (appRoot conf))
env <- getEnvironment
-- Perform database migration using our application's logging settings.

View File

@@ -39,7 +39,7 @@ data App = App
, progressMap :: !(IORef (IntMap Progress))
, nextProgressKey :: !(IORef Int)
, haddockRootDir :: !FilePath
, haddockUnpacker :: !(ForceUnpack -> PackageSetIdent -> IO ())
, haddockUnpacker :: !(ForceUnpack -> Entity Stackage -> IO ())
-- ^ We have a dedicated thread so that (1) we don't try to unpack too many
-- things at once, (2) we never unpack the same thing twice at the same
-- time, and (3) so that even if the client connection dies, we finish the
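For context, a minimal sketch (illustrative names only, not the app's API) of the single-worker pattern this field refers to, matching the Chan/MVar wiring visible in createHaddockUnpacker further down: jobs are queued on a Chan, one forked thread services them in order, and each caller blocks on an MVar until its own job finishes or rethrows the worker's exception.

import Control.Concurrent (forkIO, newChan, readChan, writeChan,
                           newEmptyMVar, putMVar, takeMVar)
import Control.Exception (SomeException, throwIO, try)
import Control.Monad (forever)

-- Run jobs one at a time on a dedicated thread; callers wait for their result.
mkWorker :: (job -> IO ()) -> IO (job -> IO ())
mkWorker run = do
    chan <- newChan
    _ <- forkIO $ forever $ do
        (job, res) <- readChan chan
        r <- try (run job) :: IO (Either SomeException ())
        putMVar res r
    return $ \job -> do
        res <- newEmptyMVar
        writeChan chan (job, res)
        -- The job keeps running on the worker thread even if the caller
        -- (e.g. a dying client connection) never gets to this point.
        takeMVar res >>= either throwIO return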

View File

@@ -1,26 +1,38 @@
module Handler.Haddock where
module Handler.Haddock
( getUploadHaddockR
, putUploadHaddockR
, getHaddockR
, getUploadDocMapR
, putUploadDocMapR
, createHaddockUnpacker
-- Exported for use in Handler.Hoogle
, Dirs, getDirs, dirHoogleFp
) where
import Import
import Data.BlobStore
import Filesystem (removeTree, isDirectory, createTree, isFile, rename, removeFile, removeDirectory)
import Filesystem (removeTree, isDirectory, createTree, isFile, rename, removeFile, removeDirectory, listDirectory)
import System.Directory (getTemporaryDirectory)
import Control.Concurrent (forkIO)
import System.IO.Temp (withSystemTempFile, withTempFile)
import System.IO.Temp (withSystemTempFile, withTempFile, createTempDirectory)
import System.Process (createProcess, proc, cwd, waitForProcess)
import System.Exit (ExitCode (ExitSuccess))
import Network.Mime (defaultMimeLookup)
import Crypto.Hash.Conduit (sinkHash)
import System.IO (IOMode (ReadMode), withBinaryFile)
import System.IO (IOMode (ReadMode, WriteMode), withBinaryFile, openBinaryFile)
import Data.Conduit.Zlib (gzip)
import System.Posix.Files (createLink)
import qualified Data.ByteString.Base16 as B16
import Data.Byteable (toBytes)
import Crypto.Hash (Digest, SHA1)
import qualified Filesystem.Path.CurrentOS as F
import Data.Slug (SnapSlug)
import Data.Slug (SnapSlug, unSlug)
import qualified Data.Text as T
import Data.Slug (unSlug)
import qualified Data.Yaml as Y
import Data.Aeson (withObject)
import qualified Hoogle
import Data.Char (isAlpha)
import Control.Monad.Trans.Resource (allocate, resourceForkIO, release)
form :: Form FileInfo
form = renderDivs $ areq fileField "tarball containing docs"
@@ -30,7 +42,7 @@ form = renderDivs $ areq fileField "tarball containing docs"
getUploadHaddockR, putUploadHaddockR :: Text -> Handler Html
getUploadHaddockR slug0 = do
uid <- requireAuthIdOrToken
Entity sid Stackage {..} <- runDB $ do
stackageEnt@(Entity sid Stackage {..}) <- runDB $ do
-- Provide fallback for old URLs
ment <- getBy $ UniqueStackage $ PackageSetIdent slug0
case ment of
@@ -47,7 +59,7 @@ getUploadHaddockR slug0 = do
fileSource fileInfo $$ storeWrite (HaddockBundle ident)
runDB $ update sid [StackageHasHaddocks =. True]
master <- getYesod
void $ liftIO $ forkIO $ haddockUnpacker master True ident
void $ liftIO $ forkIO $ haddockUnpacker master True stackageEnt
setMessage "Haddocks uploaded"
redirect $ SnapshotR slug StackageHomeR
_ -> defaultLayout $ do
@@ -58,7 +70,7 @@ putUploadHaddockR = getUploadHaddockR
getHaddockR :: SnapSlug -> [Text] -> Handler ()
getHaddockR slug rest = do
ident <- runDB $ do
stackageEnt <- runDB $ do
ment <- getBy $ UniqueSnapshot slug
case ment of
Just ent -> do
@@ -66,7 +78,7 @@ getHaddockR slug rest = do
[pkgver] -> tryContentsRedirect ent pkgver
[pkgver, "index.html"] -> tryContentsRedirect ent pkgver
_ -> return ()
return $ stackageIdent $ entityVal ent
return ent
Nothing -> do
Entity _ stackage <- getBy404
$ UniqueStackage
@@ -74,11 +86,12 @@ getHaddockR slug rest = do
$ toPathPiece slug
redirectWith status301 $ HaddockR (stackageSlug stackage) rest
mapM_ sanitize rest
dirs <- getDirs -- (gzdir, rawdir) <- getHaddockDir ident
dirs <- getDirs
master <- getYesod
liftIO $ haddockUnpacker master False ident
liftIO $ haddockUnpacker master False stackageEnt
let rawfp = dirRawFp dirs ident rest
let ident = stackageIdent (entityVal stackageEnt)
rawfp = dirRawFp dirs ident rest
gzfp = dirGzFp dirs ident rest
mime = defaultMimeLookup $ fpToText $ filename rawfp
@@ -124,19 +137,6 @@ tryContentsRedirect (Entity sid Stackage {..}) pkgver = do
dropDash :: Text -> Text
dropDash t = fromMaybe t $ stripSuffix "-" t
getHaddockDir :: PackageSetIdent -> Handler (FilePath, FilePath)
getHaddockDir ident = do
master <- getYesod
return $ mkDirPair (haddockRootDir master) ident
mkDirPair :: FilePath -- ^ root
-> PackageSetIdent
-> (FilePath, FilePath) -- ^ compressed, uncompressed
mkDirPair root ident =
( root </> "idents-raw" </> fpFromText (toPathPiece ident)
, root </> "idents-gz" </> fpFromText (toPathPiece ident)
)
createCompressor
:: Dirs
-> IO (IORef Text, IO ()) -- ^ action to kick off compressor again
@@ -206,6 +206,7 @@ data Dirs = Dirs
{ dirRawRoot :: !FilePath
, dirGzRoot :: !FilePath
, dirCacheRoot :: !FilePath
, dirHoogleRoot :: !FilePath
}
getDirs :: Handler Dirs
@@ -216,15 +217,18 @@ mkDirs dir = Dirs
{ dirRawRoot = dir </> "idents-raw"
, dirGzRoot = dir </> "idents-gz"
, dirCacheRoot = dir </> "cachedir"
, dirHoogleRoot = dir </> "hoogle"
}
dirGzIdent, dirRawIdent :: Dirs -> PackageSetIdent -> FilePath
dirGzIdent, dirRawIdent, dirHoogleIdent :: Dirs -> PackageSetIdent -> FilePath
dirGzIdent dirs ident = dirGzRoot dirs </> fpFromText (toPathPiece ident)
dirRawIdent dirs ident = dirRawRoot dirs </> fpFromText (toPathPiece ident)
dirHoogleIdent dirs ident = dirHoogleRoot dirs </> fpFromText (toPathPiece ident)
dirGzFp, dirRawFp :: Dirs -> PackageSetIdent -> [Text] -> FilePath
dirGzFp, dirRawFp, dirHoogleFp :: Dirs -> PackageSetIdent -> [Text] -> FilePath
dirGzFp dirs ident rest = dirGzIdent dirs ident </> mconcat (map fpFromText rest)
dirRawFp dirs ident rest = dirRawIdent dirs ident </> mconcat (map fpFromText rest)
dirHoogleFp dirs ident rest = dirHoogleIdent dirs ident </> mconcat (map fpFromText rest)
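For orientation (illustrative, not part of the commit), the on-disk layout these helpers resolve to, writing <ident> for the rendered PackageSetIdent and <root> for the haddock root directory:

--   dirRawFp    dirs ident ["foo-1.0", "index.html"]  ~>  <root>/idents-raw/<ident>/foo-1.0/index.html
--   dirGzFp     dirs ident ["foo-1.0", "index.html"]  ~>  <root>/idents-gz/<ident>/foo-1.0/index.html
--   dirHoogleFp dirs ident ["default.hoo"]            ~>  <root>/hoogle/<ident>/default.hoo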
dirCacheFp :: Dirs -> Digest SHA1 -> FilePath
dirCacheFp dirs digest =
@@ -240,11 +244,13 @@ createHaddockUnpacker :: FilePath -- ^ haddock root
-> BlobStore StoreKey
-> (forall a m. (MonadIO m, MonadBaseControl IO m)
=> SqlPersistT m a -> m a)
-> IO (IORef Text, ForceUnpack -> PackageSetIdent -> IO ())
createHaddockUnpacker root store runDB' = do
-> IORef (Route App -> [(Text, Text)] -> Text)
-> IO (IORef Text, ForceUnpack -> Entity Stackage -> IO ())
createHaddockUnpacker root store runDB' urlRenderRef = do
createTree $ dirCacheRoot dirs
createTree $ dirRawRoot dirs
createTree $ dirGzRoot dirs
createTree $ dirHoogleRoot dirs
chan <- newChan
(statusRef, compressor) <- createCompressor dirs
@@ -253,7 +259,8 @@ createHaddockUnpacker root store runDB' = do
(forceUnpack, ident, res) <- readChan chan
try (restore $ go forceUnpack ident) >>= putMVar res
compressor
return (statusRef, \forceUnpack ident -> do
return (statusRef, \forceUnpack stackageEnt -> do
let ident = stackageIdent (entityVal stackageEnt)
shouldAct <-
if forceUnpack
then return True
@@ -261,7 +268,7 @@ createHaddockUnpacker root store runDB' = do
if shouldAct
then do
res <- newEmptyMVar
writeChan chan (forceUnpack, ident, res)
writeChan chan (forceUnpack, stackageEnt, res)
takeMVar res >>= either (throwM . asSomeException) return
else return ())
where
@@ -274,12 +281,14 @@ createHaddockUnpacker root store runDB' = do
if e1
then return True
else isDirectory $ dirRawIdent dirs ident
go forceUnpack ident = do
go forceUnpack stackageEnt = do
let ident = stackageIdent (entityVal stackageEnt)
toRun <-
if forceUnpack
then do
removeTreeIfExists $ dirRawIdent dirs ident
removeTreeIfExists $ dirGzIdent dirs ident
removeTreeIfExists $ dirHoogleIdent dirs ident
return True
else not <$> doDirsExist ident
when toRun $ do
@@ -289,8 +298,8 @@ createHaddockUnpacker root store runDB' = do
Nothing -> error "No haddocks exist for that snapshot"
Just src -> src $$ sinkHandle temph
hClose temph
createTree $ dirRawIdent dirs ident
let destdir = dirRawIdent dirs ident
createTree destdir
(Nothing, Nothing, Nothing, ph) <- createProcess
(proc "tar" ["xf", tempfp])
{ cwd = Just $ fpToString destdir
@@ -298,27 +307,36 @@ createHaddockUnpacker root store runDB' = do
ec <- waitForProcess ph
if ec == ExitSuccess then return () else throwM ec
urlRender <- readIORef urlRenderRef
runResourceT $ do
liftIO $ createTree $ dirHoogleIdent dirs ident
tmp <- liftIO getTemporaryDirectory
(_releasekey, hoogletemp) <- allocate
(fpFromString <$> createTempDirectory tmp "hoogle-database-gen")
removeTree
let logFp = fpToString (dirHoogleFp dirs ident ["error-log"])
(_, errorLog) <- allocate (openBinaryFile logFp WriteMode) hClose
copyHoogleTextFiles errorLog destdir hoogletemp
-- TODO: Have hoogle requests block on this finishing.
-- (Or display a "compiling DB" message to the user)
void $ resourceForkIO $ createHoogleDb dirs stackageEnt errorLog hoogletemp urlRender
-- Determine which packages have documentation and update the
-- database appropriately
runResourceT $ runDB' $ do
ment <- getBy $ UniqueStackage ident
forM_ ment $ \(Entity sid _) -> do
updateWhere
[PackageStackage ==. sid]
[PackageHasHaddocks =. False]
sourceDirectory destdir $$ mapM_C (\fp -> do
let (name', version) =
T.breakOnEnd "-"
$ fpToText
$ filename fp
mname = stripSuffix "-" name'
forM_ mname $ \name -> updateWhere
[ PackageStackage ==. sid
, PackageName' ==. PackageName name
, PackageVersion ==. Version version
]
[PackageHasHaddocks =. True]
)
let sid = entityKey stackageEnt
updateWhere
[PackageStackage ==. sid]
[PackageHasHaddocks =. False]
sourceDirectory destdir $$ mapM_C (\fp -> do
let mnv = nameAndVersionFromPath fp
forM_ mnv $ \(name, version) -> updateWhere
[ PackageStackage ==. sid
, PackageName' ==. PackageName name
, PackageVersion ==. Version version
]
[PackageHasHaddocks =. True]
)
data DocInfo = DocInfo Version (Map Text [Text])
instance FromJSON DocInfo where
@@ -379,3 +397,140 @@ getUploadDocMapR = do
putUploadDocMapR :: Handler Html
putUploadDocMapR = getUploadDocMapR
copyHoogleTextFiles :: Handle -- ^ error log handle
-> FilePath -- ^ raw unpacked Haddock files
-> FilePath -- ^ temporary work directory
-> ResourceT IO ()
copyHoogleTextFiles errorLog raw tmp = do
let tmptext = tmp </> "text"
liftIO $ createTree tmptext
sourceDirectory raw $$ mapM_C (\fp ->
forM_ (nameAndVersionFromPath fp) $ \(name, version) -> do
let src = fp </> fpFromText name <.> "txt"
dst = tmptext </> fpFromText (name ++ "-" ++ version)
exists <- liftIO $ isFile src
if exists
then sourceFile src $$ (sinkFile dst :: Sink ByteString (ResourceT IO) ())
else liftIO $ appendHoogleErrors errorLog $ HoogleErrors
{ packageName = name
, packageVersion = version
, errors = ["No textual Hoogle DB (use \"cabal haddock --hoogle\")"]
}
)
createHoogleDb :: Dirs
-> Entity Stackage
-> Handle -- ^ error log handle
-> FilePath -- ^ temp directory
-> (Route App -> [(Text, Text)] -> Text)
-> ResourceT IO ()
createHoogleDb dirs (Entity _ stackage) errorLog tmpdir urlRender = do
let ident = stackageIdent stackage
tmpbin = tmpdir </> "binary"
liftIO $ createTree tmpbin
eres <- tryAny $ do
-- Create hoogle binary databases for each package.
sourceDirectory (tmpdir </> "text") $$ mapM_C
( \fp -> do
(releaseKey, srcH) <- allocate (openBinaryFile (fpToString fp) ReadMode) hClose
forM_ (nameAndVersionFromPath fp) $ \(name, version) -> liftIO $ do
src <- unpack . decodeUtf8 . asLByteString <$> hGetContents srcH
let -- Preprocess the haddock-generated manifest file.
src' = unlines $ haddockHacks (Just (unpack docsUrl)) $ lines src
docsUrl = urlRender (HaddockR (stackageSlug stackage) urlPieces) []
urlPieces = [name <> "-" <> version, "index.html"]
-- Compute the filepath of the resulting hoogle
-- database.
out = fpToString $ tmpbin </> fpFromText base
base = name <> "-" <> version <> ".hoo"
errs <- Hoogle.createDatabase "" Hoogle.Haskell [] src' out
when (not $ null errs) $ do
-- TODO: remove this printing once errors are yielded
-- to the user.
putStrLn $ concat
[ base
, " Hoogle errors: "
, tshow errs
]
appendHoogleErrors errorLog $ HoogleErrors
{ packageName = name
, packageVersion = version
, errors = map show errs
}
release releaseKey
)
-- Merge the individual binary databases into one big database.
liftIO $ do
dbs <- listDirectory tmpbin
Hoogle.mergeDatabase
(map fpToString dbs)
(fpToString (dirHoogleFp dirs ident ["default.hoo"]))
case eres of
Right () -> return ()
Left err -> liftIO $ appendHoogleErrors errorLog $ HoogleErrors
{ packageName = "Exception thrown while building hoogle DB"
, packageVersion = ""
, errors = [show err]
}
data HoogleErrors = HoogleErrors
{ packageName :: Text
, packageVersion :: Text
, errors :: [String]
} deriving (Generic)
instance ToJSON HoogleErrors where
instance FromJSON HoogleErrors where
-- Appends hoogle errors to a log file. By encoding within a single
-- list, the resulting file can be decoded as [HoogleErrors].
appendHoogleErrors :: Handle -> HoogleErrors -> IO ()
appendHoogleErrors h errs = hPut h (Y.encode [errs])
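Because every append emits a one-element YAML list, the concatenated file is itself one valid YAML list and can be read back in a single decode. A sketch, assuming Data.Yaml's decodeFileEither (the helper name is hypothetical):

-- Read back the error log written by appendHoogleErrors.
readHoogleErrors :: String -> IO (Either Y.ParseException [HoogleErrors])
readHoogleErrors = Y.decodeFileEither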
nameAndVersionFromPath :: FilePath -> Maybe (Text, Text)
nameAndVersionFromPath fp =
(\name -> (name, version)) <$> stripSuffix "-" name'
where
(name', version) = T.breakOnEnd "-" $ fpToText $ F.dropExtension $ filename fp
---------------------------------------------------------------------
-- HADDOCK HACKS
-- (Copied from hoogle-4.2.36/src/Recipe/Haddock.hs)
-- Modifications:
-- 1) Some name qualification
-- 2) Explicit type sig due to polymorphic elem
-- 3) Fixed an unused binding warning
-- Eliminate @version
-- Change :*: to (:*:), Haddock bug
-- Change !!Int to !Int, Haddock bug
-- Change instance [overlap ok] to instance, Haddock bug
-- Change instance [incoherent] to instance, Haddock bug
-- Change instance [safe] to instance, Haddock bug
-- Change !Int to Int, HSE bug
-- Drop {-# UNPACK #-}, Haddock bug
-- Drop everything after where, Haddock bug
haddockHacks :: Maybe Hoogle.URL -> [String] -> [String]
haddockHacks loc src = maybe id haddockPackageUrl loc (translate src)
where
translate :: [String] -> [String]
translate = map (unwords . g . map f . words) . filter (not . isPrefixOf "@version ")
f "::" = "::"
f (':':xs) = "(:" ++ xs ++ ")"
f ('!':'!':x:xs) | isAlpha x = xs
f ('!':x:xs) | isAlpha x || x `elem` ("[(" :: String) = x:xs
f x | x `elem` ["[overlap","ok]","[incoherent]","[safe]"] = ""
f x | x `elem` ["{-#","UNPACK","#-}"] = ""
f x = x
g ("where":_) = []
g (x:xs) = x : g xs
g [] = []
haddockPackageUrl :: Hoogle.URL -> [String] -> [String]
haddockPackageUrl x = concatMap f
where f y | "@package " `isPrefixOf` y = ["@url " ++ x, y]
| otherwise = [y]
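To make the rewrites concrete, roughly what haddockHacks does to a few Haddock-generated lines (inputs are illustrative, <loc> stands for the docs URL passed as the first argument, and output spacing is approximate):

--   @version 1.0                           ~>  (the whole line is dropped)
--   @package foo                           ~>  an "@url <loc>" line is inserted before it
--   instance [overlap ok] Eq a => Eq [a]   ~>  instance Eq a => Eq [a]
--   bar :: {-# UNPACK #-} !Int -> Int      ~>  bar :: Int -> Int
--   class Foo a where                      ~>  class Foo a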

Handler/Hoogle.hs (new file, 153 lines)
View File

@@ -0,0 +1,153 @@
module Handler.Hoogle where
import Control.DeepSeq (NFData(..))
import Control.DeepSeq.Generics (genericRnf)
import Control.Spoon (spoon)
import Data.Data (Data (..))
import Data.Slug (SnapSlug)
import Data.Text.Read (decimal)
import Filesystem (isFile)
import Handler.Haddock (dirHoogleFp, getDirs)
import qualified Hoogle
import Import
import Text.Blaze.Html (preEscapedToHtml)
getHoogleR :: SnapSlug -> Handler Html
getHoogleR slug = do
dirs <- getDirs
mquery <- lookupGetParam "q"
mpage <- lookupGetParam "page"
exact <- maybe False (const True) <$> lookupGetParam "exact"
mresults' <- lookupGetParam "results"
let count' =
case decimal <$> mresults' of
Just (Right (i, "")) -> min perPage i
_ -> perPage
page =
case decimal <$> mpage of
Just (Right (i, "")) -> i
_ -> 1
offset = (page - 1) * perPage
stackageEnt@(Entity _ stackage) <- runDB $ getBy404 $ UniqueSnapshot slug
-- Unpack haddocks and generate hoogle DB, if necessary.
master <- getYesod
liftIO $ haddockUnpacker master False stackageEnt
let databasePath = dirHoogleFp dirs (stackageIdent stackage) ["default.hoo"]
heDatabase = liftIO $ Hoogle.loadDatabase (fpToString databasePath)
-- If the hoogle DB isn't yet generated, yield 404.
dbExists <- liftIO $ isFile databasePath
when (not dbExists) notFound
mresults <- case mquery of
Just query -> runHoogleQuery heDatabase HoogleQueryInput
{ hqiQueryInput = query
, hqiExactSearch = if exact then Just query else Nothing
, hqiLimitTo = count'
, hqiOffsetBy = offset
}
Nothing -> return $ HoogleQueryOutput "" [] Nothing
let queryText = fromMaybe "" mquery
pageLink p = (HoogleR slug
, (if exact then (("exact", "true"):) else id)
$ (maybe id (\q' -> (("q", q'):)) mquery)
[("page", tshow p)])
snapshotLink = SnapshotR slug StackageHomeR
hoogleForm = $(widgetFile "hoogle-form")
defaultLayout $ do
setTitle "Hoogle Search"
$(widgetFile "hoogle")
getPageCount :: Int -> Int
getPageCount totalCount = 1 + div totalCount perPage
perPage :: Int
perPage = 10
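Worked example of the paging arithmetic above, assuming nothing beyond the code shown: with perPage = 10, a request for page 3 uses offset (3 - 1) * 10 = 20, and a total count of 35 results renders as getPageCount 35 = 1 + 35 `div` 10 = 4 pages.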
data HoogleQueryInput = HoogleQueryInput
{ hqiQueryInput :: Text
, hqiExactSearch :: Maybe Text
, hqiLimitTo :: Int
, hqiOffsetBy :: Int
}
deriving (Eq, Read, Show, Data, Typeable, Ord, Generic)
data HoogleQueryOutput = HoogleQueryBad Text
| HoogleQueryOutput Text [HoogleResult] (Maybe Int) -- ^ Text == HTML version of query, Int == total count
deriving (Read, Typeable, Data, Show, Eq)
data HoogleResult = HoogleResult
{ hrURL :: String
, hrSources :: [(PackageLink, [ModuleLink])]
, hrTitle :: String -- ^ HTML
, hrBody :: String -- ^ plain text
}
deriving (Eq, Read, Show, Data, Typeable, Ord, Generic)
data PackageLink = PackageLink
{ plName :: String
, plURL :: String
}
deriving (Eq, Read, Show, Data, Typeable, Ord, Generic)
data ModuleLink = ModuleLink
{ mlName :: String
, mlURL :: String
}
deriving (Eq, Read, Show, Data, Typeable, Ord, Generic)
instance NFData HoogleResult where rnf = genericRnf
instance NFData PackageLink where rnf = genericRnf
instance NFData ModuleLink where rnf = genericRnf
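For context (not part of the diff): spoon from Control.Spoon forces a pure value to normal form and turns exceptions raised during evaluation into Nothing, which is why these result types carry NFData instances; Hoogle's lazily rendered results can hide exceptions. A tiny illustration:

demoSpoon :: Maybe Int
demoSpoon = spoon (1 `div` 0)   -- Nothing, rather than a divide-by-zero exception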
runHoogleQuery :: Monad m
=> m Hoogle.Database
-> HoogleQueryInput
-> m HoogleQueryOutput
runHoogleQuery heDatabase HoogleQueryInput {..} =
runQuery $ Hoogle.parseQuery Hoogle.Haskell query
where
query = unpack hqiQueryInput
runQuery (Left err) = return $ HoogleQueryBad (tshow err)
runQuery (Right query') = do
hoogledb <- heDatabase
let query'' = Hoogle.queryExact classifier query'
rawRes = concatMap fixResult
$ Hoogle.search hoogledb query''
mres = spoon
$ take (min 100 hqiLimitTo)
$ drop hqiOffsetBy rawRes
mcount = spoon $ limitedLength 0 rawRes
limitedLength x [] = Just x
limitedLength x (_:rest)
| x >= 100 = Nothing
| otherwise = limitedLength (x + 1) rest
rendered = pack $ Hoogle.showTagHTML $ Hoogle.renderQuery query''
return $ case (,) <$> mres <*> mcount of
Nothing ->
HoogleQueryOutput rendered [] (Just 0)
Just (results, mcount') ->
HoogleQueryOutput rendered (take hqiLimitTo results) mcount'
classifier = maybe Nothing
(const (Just Hoogle.UnclassifiedItem))
hqiExactSearch
fixResult (_, Hoogle.Result locs self docs) = do
(loc, _) <- take 1 locs
let sources' = unionsWith (++) $
mapMaybe (getPkgModPair . snd) locs
return HoogleResult
{ hrURL = loc
, hrSources = mapToList sources'
, hrTitle = Hoogle.showTagHTML self
, hrBody = fromMaybe "Problem loading documentation" $
spoon $ Hoogle.showTagText docs
}
getPkgModPair :: [(String, String)]
-> Maybe (Map PackageLink [ModuleLink])
getPkgModPair [(pkg, pkgname), (modu, moduname)] = do
let pkg' = PackageLink pkgname pkg
modu' = ModuleLink moduname modu
return $ asMap $ singletonMap pkg' [modu']
getPkgModPair _ = Nothing
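A usage sketch (hypothetical helper, assuming this module's scope): load the merged default.hoo produced by createHoogleDb and run one query outside of a Handler.

searchOnce :: String -> Text -> IO HoogleQueryOutput
searchOnce dbPath q =
    runHoogleQuery (Hoogle.loadDatabase dbPath) HoogleQueryInput
        { hqiQueryInput = q
        , hqiExactSearch = Nothing
        , hqiLimitTo = perPage
        , hqiOffsetBy = 0
        }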

View File

@@ -21,6 +21,10 @@ getStackageHomeR slug = do
then Just False
else Nothing
base = maybe 0 (const 1) minclusive :: Int
hoogleForm =
let queryText = "" :: Text
exact = False
in $(widgetFile "hoogle-form")
Entity sid _stackage <- runDB $ getBy404 $ UniqueSnapshot slug
defaultLayout $ do
setTitle $ toHtml $ stackageTitle stackage

View File

@@ -33,6 +33,7 @@
/progress/#Int ProgressR GET
/system SystemR GET
/haddock/#SnapSlug/*Texts HaddockR GET
/hoogle/#SnapSlug HoogleR GET
/package/#PackageName PackageR GET
/package/#PackageName/snapshots PackageSnapshotsR GET
/package PackageListR GET
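For illustration (the slug is a placeholder; q, page, exact, and results are the query parameters getHoogleR reads), a search against the new route looks like GET /hoogle/lts-1.0?q=foldr&page=2&exact=true.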

View File

@@ -45,6 +45,7 @@ library
Handler.Progress
Handler.System
Handler.Haddock
Handler.Hoogle
Handler.Package
Handler.PackageList
Handler.CompressorStatus
@@ -82,6 +83,7 @@ library
RecordWildCards
ScopedTypeVariables
BangPatterns
DeriveGeneric
build-depends:
base >= 4
@@ -150,6 +152,10 @@ library
, blaze-html
, haddock-library
, yesod-gitrepo
, hoogle
, spoon
, deepseq
, deepseq-generics
executable stackage-server
if flag(library-only)

View File

@@ -0,0 +1,6 @@
<form .hoogle action=@{HoogleR slug}>
<input type=search autofocus name=q value=#{queryText} placeholder="Hoogle Search Phrase" .search>
<input .btn type="submit" value="Search">
<label .checkbox .exact-lookup for=exact title="Only find identifiers matching your search term precisely">
<input type=checkbox name=exact :exact:checked #exact>
Exact lookup

View File

@@ -0,0 +1,14 @@
form.hoogle {
margin-bottom: 20px;
.search {
width: 25em;
}
input {
margin-bottom: 0;
}
}
.exact-lookup {
display: inline-block;
margin-left: 1em;
}

templates/hoogle.hamlet (new file, 39 lines)
View File

@@ -0,0 +1,39 @@
<div .container>
<div .content>
<h1>Hoogle Search (experimental)
<p>Within <a href=@{snapshotLink}>#{stackageTitle stackage}</a>
^{hoogleForm}
$case mresults
$of HoogleQueryBad err
<p>#{err}
<p>For information on what queries should look like, see the <a href="http://www.haskell.org/haskellwiki/Hoogle">hoogle user manual</a>.
$of HoogleQueryOutput _query results mtotalCount
$if null results
<p>Your search produced no results.
$else
<ol .search-results>
$forall HoogleResult url sources self docs <- results
<li>
<p .self>
<a href=#{url}>#{preEscapedToHtml self}
<table .sources>
$forall (pkg, modus) <- sources
<tr>
<th>
<a href=#{plURL pkg}>#{plName pkg}
<td>
$forall ModuleLink name url' <- modus
<a href=#{url'}>#{name}
$if null docs
<p .nodocs>No documentation available.
$else
<p .docs>#{docs}
<p .pagination>
$with mpageCount <- fmap getPageCount mtotalCount
Page #{page} of #{maybe "many" show mpageCount} #
$if page > 1
|
<a href=@?{pageLink $ page - 1}>Previous
$if maybe True ((<) page) mpageCount
|
<a href=@?{pageLink $ page + 1}>Next

templates/hoogle.julius (new file, 5 lines)
View File

@@ -0,0 +1,5 @@
$(function() {
var input = $(".hoogle .search").get(0);
var len = input.value.length;
input.setSelectionRange(len, len);
})

templates/hoogle.lucius (new file, 63 lines)
View File

@@ -0,0 +1,63 @@
ol.search-results {
margin: 0;
padding: 0;
list-style: none;
}
.self {
margin-bottom: 0;
/* Use bold instead of italics to indicate matching part of search */
a {
b {
font-weight: normal;
}
i {
font-weight: bold;
font-style: normal;
}
}
}
table.sources {
margin: 0;
padding: 0;
font-size: 0.8em;
th {
padding-right: 0.5em;
}
a, a:visited {
color: #060;
}
}
.docs {
white-space: pre-wrap;
background: #e8e8e8;
}
.docs, .nodocs {
margin-left: 1em;
padding: 0.5em;
}
.nodocs {
font-style: italic;
}
.haddocks {
font-weight: bold;
margin-bottom: 1em;
ul {
display: inline;
padding: 0;
margin: 0;
}
li {
display: inline-block;
font-weight: normal;
margin-left: 1em;
}
}

View File

@@ -38,8 +38,10 @@ $newline never
<a href=@{SnapshotR slug StackageCabalConfigR}?global=true>
the global configuration instructions
<h3>
Packages
<h3>Hoogle (experimental)
^{hoogleForm}
<h3>Packages
<p>
<a href=@{SnapshotR slug DocsR}>View documentation by modules