Repository: https://github.com/commercialhaskell/stackage-server.git
Commit: f4a0d6d61e

Merge branch 'master' into new-upload

Conflicts:
	Application.hs
	Handler/Haddock.hs
	Handler/StackageHome.hs
	Import.hs
	cabal.config
	config/routes
	stackage-server.cabal
	templates/doc-list.hamlet
@@ -13,6 +13,7 @@ import Control.Monad.Logger (runLoggingT, LoggingT, defaultLogStr)
import Data.BlobStore (fileStore, storeWrite, cachedS3Store)
import Data.Hackage
import Data.Hackage.Views
import Data.Unpacking (newDocUnpacker, createHoogleDatabases)
import Data.WebsiteContent
import Data.Slug (SnapSlug (..), safeMakeSlug, HasGenIO)
import Data.Time (diffUTCTime)
@@ -69,6 +70,9 @@ import Handler.Tag
import Handler.BannedTags
import Handler.RefreshDeprecated
import Handler.UploadV2
import Handler.Hoogle
import Handler.BuildVersion
import Handler.PackageCounts

-- This line actually creates our YesodDispatch instance. It is the second half
-- of the call to mkYesodData which occurs in Foundation.hs. Please see the
@@ -148,17 +152,12 @@ makeFoundation useEcho conf = do
    (getter, _) <- clockDateCacher

    gen <- MWC.createSystemRandom
    progressMap' <- newIORef mempty
    nextProgressKey' <- newIORef 0

    blobStore' <- loadBlobStore manager conf

    let haddockRootDir' = "/tmp/stackage-server-haddocks2"
    (statusRef, unpacker) <- createHaddockUnpacker haddockRootDir' blobStore'
        (flip (Database.Persist.runPool dbconf) p)
    widgetCache' <- newIORef mempty

#if MIN_VERSION_yesod_gitrepo(0,1,1)
    websiteContent' <- if development
        then do
            void $ rawSystem "git"
@@ -170,23 +169,12 @@ makeFoundation useEcho conf = do
            "https://github.com/fpco/stackage-content.git"
            "master"
            loadWebsiteContent
#else
    websiteContent' <- if development
        then do
            void $ rawSystem "git"
                [ "clone"
                , "https://github.com/fpco/stackage-content.git"
                ]
            tmp <- gitRepo "stackage-content" "master" loadWebsiteContent
            return tmp
                { grRefresh = return ()
                , grContent = loadWebsiteContent "stackage-content"
                }
        else gitRepo
            "https://github.com/fpco/stackage-content.git"
            "master"
            loadWebsiteContent
#endif

    env <- getEnvironment

    let runDB' :: (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a
        runDB' = flip (Database.Persist.runPool dbconf) p
    docUnpacker <- newDocUnpacker haddockRootDir' blobStore' runDB'

    snapshotInfoCache' <- newIORef mempty

@@ -200,17 +188,14 @@ makeFoundation useEcho conf = do
        , appLogger = logger
        , genIO = gen
        , blobStore = blobStore'
        , progressMap = progressMap'
        , nextProgressKey = nextProgressKey'
        , haddockRootDir = haddockRootDir'
        , haddockUnpacker = unpacker
        , appDocUnpacker = docUnpacker
        , widgetCache = widgetCache'
        , compressorStatus = statusRef
        , websiteContent = websiteContent'
        , snapshotInfoCache = snapshotInfoCache'
        }

    env <- getEnvironment
    let urlRender' = yesodRender foundation (appRoot conf)

    -- Perform database migration using our application's logging settings.
    when (lookup "STACKAGE_SKIP_MIGRATION" env /= Just "1") $
@@ -224,6 +209,7 @@ makeFoundation useEcho conf = do


    let updateDB = lookup "STACKAGE_CABAL_LOADER" env /= Just "0"
        hoogleGen = lookup "STACKAGE_HOOGLE_GEN" env /= Just "0"
        forceUpdate = lookup "STACKAGE_FORCE_UPDATE" env == Just "1"
        loadCabalFiles' = appLoadCabalFiles updateDB forceUpdate foundation dbconf p

@@ -236,6 +222,8 @@ makeFoundation useEcho conf = do

        loadCabalFiles'

        when hoogleGen $ liftIO $ createHoogleDatabases blobStore' runDB' putStrLn urlRender'

        liftIO $ threadDelay $ 30 * 60 * 1000000
    return foundation
  where ifRunCabalLoader m =
@@ -276,6 +264,26 @@ cabalLoaderMain = do
        }
        dbconf
        pool

    let foundation = App
            { settings = conf
            , getStatic = error "getStatic"
            , connPool = pool
            , httpManager = manager
            , persistConfig = dbconf
            , appLogger = error "appLogger"
            , genIO = error "genIO"
            , blobStore = bs
            , haddockRootDir = error "haddockRootDir"
            , appDocUnpacker = error "appDocUnpacker"
            , widgetCache = error "widgetCache"
            , websiteContent = error "websiteContent"
            }
    createHoogleDatabases
        bs
        (flip (Database.Persist.runPool dbconf) pool)
        putStrLn
        (yesodRender foundation (appRoot conf))
  where
    logFunc loc src level str
        | level > LevelDebug = S.hPutStr stdout $ fromLogStr $ defaultLogStr loc src level str

@@ -23,8 +23,8 @@ instance FromJSON HackageDeprecationInfo where
    }

data DeprecationRecord = DeprecationRecord {
    deprecatedPackage :: PackageName,
    deprecatedInFavourOf :: [PackageName]
    _deprecatedPackage :: PackageName,
    _deprecatedInFavourOf :: [PackageName]
}

instance FromJSON DeprecationRecord where

@@ -18,7 +18,7 @@ import GHC.Prim (RealWorld)
import Text.Blaze (ToMarkup)

newtype Slug = Slug { unSlug :: Text }
    deriving (Show, Read, Eq, Typeable, PersistField, ToMarkup)
    deriving (Show, Read, Eq, Typeable, PersistField, ToMarkup, Ord, Hashable)
instance PersistFieldSql Slug where
    sqlType = sqlType . liftM unSlug

@@ -101,6 +101,6 @@ slugField =

-- | Unique identifier for a snapshot.
newtype SnapSlug = SnapSlug { unSnapSlug :: Slug }
    deriving (Show, Read, Eq, Typeable, PersistField, ToMarkup, PathPiece)
    deriving (Show, Read, Eq, Typeable, PersistField, ToMarkup, PathPiece, Ord, Hashable)
instance PersistFieldSql SnapSlug where
    sqlType = sqlType . liftM unSnapSlug

Data/Unpacking.hs (new file, 494 lines)
@@ -0,0 +1,494 @@
-- | Code for unpacking documentation bundles, building the Hoogle databases,
-- and compressing/deduping contents.
module Data.Unpacking
    ( newDocUnpacker
    , getHoogleDB
    , makeHoogle
    , createHoogleDatabases
    ) where

import Import hiding (runDB)
import Data.BlobStore
import Handler.Haddock
import Filesystem (createTree, isFile, removeTree, isDirectory, listDirectory, removeDirectory, removeFile, rename)
import System.Posix.Files (createLink)
import Crypto.Hash.Conduit (sinkHash)
import Control.Concurrent (forkIO)
import Control.Monad.Trans.Resource (allocate, release)
import Data.Char (isAlpha)
import qualified Hoogle
import qualified Data.Text as T
import qualified Data.Yaml as Y
import System.IO (IOMode (ReadMode), withBinaryFile, openBinaryFile)
import System.IO.Temp (withSystemTempFile, withTempFile, withSystemTempDirectory)
import System.Exit (ExitCode (ExitSuccess))
import System.Process (createProcess, proc, cwd, waitForProcess)
import qualified Filesystem.Path.CurrentOS as F
import Data.Conduit.Zlib (gzip, ungzip)
import qualified Data.ByteString.Base16 as B16
import Data.Byteable (toBytes)
import Crypto.Hash (Digest, SHA1)

newDocUnpacker
    :: FilePath -- ^ haddock root
    -> BlobStore StoreKey
    -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
    -> IO DocUnpacker
newDocUnpacker root store runDB = do
    createDirs dirs

    statusMapVar <- newTVarIO $ asMap mempty
    messageVar <- newTVarIO "Inactive"
    workChan <- atomically newTChan

    let requestDocs forceUnpack ent = atomically $ do
            var <- newTVar USBusy
            modifyTVar statusMapVar
                $ insertMap (stackageSlug $ entityVal ent) var
            writeTChan workChan (forceUnpack, ent, var)

    forkForever $ unpackWorker dirs runDB store messageVar workChan

    return DocUnpacker
        { duRequestDocs = \ent -> do
            m <- readTVarIO statusMapVar
            case lookup (stackageSlug $ entityVal ent) m of
                Nothing -> do
                    b <- isUnpacked dirs (entityVal ent)
                    if b
                        then return USReady
                        else do
                            requestDocs False ent
                            return USBusy
                Just us -> readTVarIO us
        , duGetStatus = readTVarIO messageVar
        , duForceReload = \ent -> do
            atomically $ modifyTVar statusMapVar
                $ deleteMap (stackageSlug $ entityVal ent)
            requestDocs True ent
        }
  where
    dirs = mkDirs root

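For orientation, a caller interacts with the returned DocUnpacker by polling duRequestDocs until the docs are ready; requireDocs in Import.hs (later in this diff) is the real consumer. A minimal usage sketch, assuming an Entity Stackage named ent and the unpacker du are in scope:

-- Usage sketch (hypothetical caller, not part of this commit):
--
--   status <- duRequestDocs du ent
--   case status of
--       USReady    -> serveDocs            -- files are on disk
--       USBusy     -> tellClientToRetry    -- worker thread still unpacking
--       USFailed e -> reportError e
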
createDirs :: Dirs -> IO ()
createDirs dirs = do
    createTree $ dirCacheRoot dirs
    createTree $ dirRawRoot dirs
    createTree $ dirGzRoot dirs
    createTree $ dirHoogleRoot dirs

-- | Check for the presence of file system artifacts indicating that the docs
-- have been unpacked.
isUnpacked :: Dirs -> Stackage -> IO Bool
isUnpacked dirs stackage = isFile $ completeUnpackFile dirs stackage

defaultHooDest :: Dirs -> Stackage -> FilePath
defaultHooDest dirs stackage = dirHoogleFp dirs (stackageIdent stackage)
    ["default-" ++ VERSION_hoogle ++ ".hoo"]

forkForever :: IO () -> IO ()
forkForever inner = mask $ \restore ->
    void $ forkIO $ forever $ handleAny print $ restore $ forever inner

unpackWorker
    :: Dirs
    -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
    -> BlobStore StoreKey
    -> TVar Text
    -> TChan (Bool, Entity Stackage, TVar UnpackStatus)
    -> IO ()
unpackWorker dirs runDB store messageVar workChan = do
    let say' = atomically . writeTVar messageVar
    say' "Running the compressor"
    let shouldStop = fmap not $ atomically $ isEmptyTChan workChan
    handleAny print $ runCompressor shouldStop say' dirs

    say' "Waiting for new work item"
    (forceUnpack, ent, resVar) <- atomically $ readTChan workChan
    shouldUnpack <-
        if forceUnpack
            then return True
            else not <$> isUnpacked dirs (entityVal ent)

    let say msg = atomically $ writeTVar messageVar $ concat
            [ toPathPiece (stackageSlug $ entityVal ent)
            , ": "
            , msg
            ]

    when shouldUnpack $ do
        say "Beginning of processing"

        -- As soon as the raw unpack is complete, start serving docs
        let onRawComplete = atomically $ writeTVar resVar USReady

        eres <- tryAny $ unpacker dirs runDB store say onRawComplete ent
        atomically $ writeTVar resVar $ case eres of
            Left e -> USFailed $ tshow e
            Right () -> USReady

removeTreeIfExists :: FilePath -> IO ()
removeTreeIfExists fp = whenM (isDirectory fp) (removeTree fp)

unpackRawDocsTo
    :: BlobStore StoreKey
    -> PackageSetIdent
    -> (Text -> IO ())
    -> FilePath
    -> IO ()
unpackRawDocsTo store ident say destdir =
    withSystemTempFile "haddock-bundle.tar.xz" $ \tempfp temph -> do
        say "Downloading raw doc tarball"
        withAcquire (storeRead' store (HaddockBundle ident)) $ \msrc ->
            case msrc of
                Nothing -> error "No haddocks exist for that snapshot"
                Just src -> src $$ sinkHandle temph
        hClose temph

        createTree destdir
        say "Unpacking tarball"
        (Nothing, Nothing, Nothing, ph) <- createProcess
            (proc "tar" ["xf", tempfp])
                { cwd = Just $ fpToString destdir
                }
        ec <- waitForProcess ph
        if ec == ExitSuccess then return () else throwM ec


unpacker
    :: Dirs
    -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
    -> BlobStore StoreKey
    -> (Text -> IO ())
    -> IO () -- ^ onRawComplete
    -> Entity Stackage
    -> IO ()
unpacker dirs runDB store say onRawComplete (Entity sid stackage@Stackage {..}) = do
    say "Removing old directories, if they exist"
    removeTreeIfExists $ dirRawIdent dirs stackageIdent
    removeTreeIfExists $ dirGzIdent dirs stackageIdent
    removeTreeIfExists $ dirHoogleIdent dirs stackageIdent

    let destdir = dirRawIdent dirs stackageIdent
    unpackRawDocsTo store stackageIdent say destdir
    onRawComplete

    createTree $ dirHoogleIdent dirs stackageIdent

    -- Determine which packages have documentation and update the
    -- database appropriately
    say "Updating database for available documentation"
    runResourceT $ runDB $ do
        updateWhere
            [PackageStackage ==. sid]
            [PackageHasHaddocks =. False]
        sourceDirectory destdir $$ mapM_C (\fp -> do
            let mnv = nameAndVersionFromPath fp
            forM_ mnv $ \(name, version) -> updateWhere
                [ PackageStackage ==. sid
                , PackageName' ==. PackageName name
                , PackageVersion ==. Version version
                ]
                [PackageHasHaddocks =. True]
            )

    say "Unpack complete"
    let completeFP = completeUnpackFile dirs stackage
    liftIO $ do
        createTree $ F.parent completeFP
        writeFile completeFP ("Complete" :: ByteString)

completeUnpackFile :: Dirs -> Stackage -> FilePath
completeUnpackFile dirs stackage =
    dirGzIdent dirs (stackageIdent stackage) </> "unpack-complete"

-- | Get the path to the Hoogle database, downloading from persistent storage
-- if necessary. This function will /not/ generate a new database, and
-- therefore is safe to run on a live web server.
getHoogleDB :: Dirs
            -> Stackage
            -> Handler (Maybe FilePath)
getHoogleDB dirs stackage = do
    exists <- liftIO $ isFile fp
    if exists
        then return $ Just fp
        else do
            msrc <- storeRead key
            case msrc of
                Nothing -> return Nothing
                Just src -> do
                    liftIO $ createTree $ F.parent fp
                    let tmpfp = fp <.> "tmp" -- FIXME add something random
                    src $$ ungzip =$ sinkFile tmpfp
                    liftIO $ rename tmpfp fp
                    return $ Just fp
  where
    fp = defaultHooDest dirs stackage
    key = HoogleDB (stackageIdent stackage) $ HoogleVersion VERSION_hoogle

-- | Make sure that the last 5 LTS and last 5 Nightly releases all have Hoogle
-- databases available.
createHoogleDatabases
    :: BlobStore StoreKey
    -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
    -> (Text -> IO ())
    -> (Route App -> [(Text, Text)] -> Text)
    -> IO ()
createHoogleDatabases store runDB say urlRender = do
    stackages <- runDB $ do
        sids <- (++)
            <$> fmap (map $ ltsStackage . entityVal)
                (selectList [] [Desc LtsMajor, Desc LtsMinor, LimitTo 5])
            <*> fmap (map $ nightlyStackage . entityVal)
                (selectList [] [Desc NightlyDay, LimitTo 5])
        catMaybes <$> mapM get sids
    forM_ stackages $ \stackage -> do
        let say' x = say $ concat
                [ toPathPiece $ stackageSlug stackage
                , ": "
                , x
                ]
        handleAny (say' . tshow) $ makeHoogle store say' urlRender stackage

-- | Either download the Hoogle database from persistent storage, or create it.
makeHoogle
    :: BlobStore StoreKey
    -> (Text -> IO ())
    -> (Route App -> [(Text, Text)] -> Text)
    -> Stackage
    -> IO ()
makeHoogle store say urlRender stackage = do
    say "Making hoogle database"
    exists <- storeExists' store hoogleKey
    if exists
        then say "Hoogle database already exists, skipping"
        else do
            say "Generating Hoogle database"
            generate
  where
    ident = stackageIdent stackage
    hoogleKey = HoogleDB ident $ HoogleVersion VERSION_hoogle

    generate = withSystemTempDirectory "hoogle-database-gen" $ \hoogletemp' -> do
        let hoogletemp = fpFromString hoogletemp'
            rawdocs = hoogletemp </> "rawdocs"

        unpackRawDocsTo store ident say rawdocs

        say "Copying Hoogle text files to temp directory"
        runResourceT $ copyHoogleTextFiles say rawdocs hoogletemp
        say "Creating Hoogle database"
        withSystemTempFile "default.hoo" $ \dstFP' dstH -> do
            let dstFP = fpFromString dstFP'
            hClose dstH
            createHoogleDb say dstFP stackage hoogletemp urlRender
            say "Uploading database to persistent storage"
            withAcquire (storeWrite' store hoogleKey) $ \sink ->
                runResourceT $ sourceFile dstFP $$ gzip =$ sink

runCompressor :: IO Bool -- ^ should stop early?
              -> (Text -> IO ()) -> Dirs -> IO ()
runCompressor shouldStop say dirs =
    handle (\EarlyStop -> return ()) $ runResourceT $ goDir $ dirRawRoot dirs
  where
    goDir dir = do
        liftIO $ whenM shouldStop $ do
            say "Stopping compressor early"
            throwIO EarlyStop
        liftIO $ say $ "Compressing directory: " ++ fpToText dir
        sourceDirectory dir $$ mapM_C goFP
        liftIO $ void $ tryIO $ removeDirectory dir

    goFP fp = do
        e <- liftIO $ isFile fp
        if e
            then liftIO $ do
                liftIO $ say $ "Compressing file: " ++ fpToText fp
                handle (print . asSomeException)
                    $ gzipHash dirs suffix
            else goDir fp
      where
        Just suffix = F.stripPrefix (dirRawRoot dirs </> "") fp

data EarlyStop = EarlyStop
    deriving (Show, Typeable)
instance Exception EarlyStop

-- Procedure is to:
--
-- * Gzip the src file to a temp file, and get a hash of the gzipped contents
-- * If that hash doesn't exist in the cache, move the new file to the cache
-- * Create a hard link from dst to the file in the cache
-- * Delete src
gzipHash :: Dirs
         -> FilePath -- ^ suffix
         -> IO ()
gzipHash dirs suffix = do
    withTempFile (fpToString $ dirCacheRoot dirs) "haddock-file.gz" $ \tempfp temph -> do
        digest <- withBinaryFile (fpToString src) ReadMode $ \inh ->
            sourceHandle inh
                $= gzip
                $$ (getZipSink $
                        ZipSink (sinkHandle temph) *>
                        ZipSink sinkHash)
        hClose temph
        let fpcache = dirCacheFp dirs digest
        unlessM (isFile fpcache) $ do
            createTree $ F.parent fpcache
            rename (fpFromString tempfp) fpcache
        createTree $ F.parent dst
        createLink (fpToString fpcache) (fpToString dst)
        removeFile src
  where
    src = dirRawRoot dirs </> suffix
    dst = dirGzRoot dirs </> suffix

dirCacheFp :: Dirs -> Digest SHA1 -> FilePath
dirCacheFp dirs digest =
    dirCacheRoot dirs </> fpFromText x </> fpFromText y <.> "gz"
  where
    name = decodeUtf8 $ B16.encode $ toBytes digest
    (x, y) = splitAt 2 name

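To make the dedupe scheme concrete, here is a worked example of the cache layout; the digest value is invented for illustration:

-- If the hex-encoded SHA1 of the gzipped contents is
-- "3f786850e387550fdab836ed7e6dc881de23001b", then
--
--   dirCacheFp dirs digest
--     == dirCacheRoot dirs </> "3f" </> "786850e387550fdab836ed7e6dc881de23001b" <.> "gz"
--
-- Every raw file whose gzipped bytes hash to the same digest is hard-linked to
-- that single cache entry, so identical Haddock assets are stored only once.
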
copyHoogleTextFiles :: (Text -> IO ()) -- ^ log
                    -> FilePath -- ^ raw unpacked Haddock files
                    -> FilePath -- ^ temporary work directory
                    -> ResourceT IO ()
copyHoogleTextFiles say raw tmp = do
    let tmptext = tmp </> "text"
    liftIO $ createTree tmptext
    sourceDirectory raw $$ mapM_C (\fp ->
        forM_ (nameAndVersionFromPath fp) $ \(name, version) -> do
            let src = fp </> fpFromText name <.> "txt"
                dst = tmptext </> fpFromText (name ++ "-" ++ version)
            exists <- liftIO $ isFile src
            if exists
                then sourceFile src $$ (sinkFile dst :: Sink ByteString (ResourceT IO) ())
                else liftIO $ appendHoogleErrors say $ HoogleErrors
                    { packageName = name
                    , packageVersion = version
                    , errors = ["No textual Hoogle DB (use \"cabal haddock --hoogle\")"]
                    }
        )

createHoogleDb :: (Text -> IO ())
               -> FilePath -- ^ default.hoo output location
               -> Stackage
               -> FilePath -- ^ temp directory
               -> (Route App -> [(Text, Text)] -> Text)
               -> IO ()
createHoogleDb say dstDefaultHoo stackage tmpdir urlRender = do
    let tmpbin = tmpdir </> "binary"
    createTree tmpbin
    eres <- tryAny $ runResourceT $ do
        -- Create hoogle binary databases for each package.
        sourceDirectory (tmpdir </> "text") $$ mapM_C
            ( \fp -> do
                (releaseKey, srcH) <- allocate (openBinaryFile (fpToString fp) ReadMode) hClose
                forM_ (nameAndVersionFromPath fp) $ \(name, version) -> liftIO $ do
                    say $ concat
                        [ "Creating Hoogle database for: "
                        , name
                        , "-"
                        , version
                        ]
                    src <- unpack . decodeUtf8 . asLByteString <$> hGetContents srcH
                    let -- Preprocess the haddock-generated manifest file.
                        src' = unlines $ haddockHacks (Just (unpack docsUrl)) $ lines src
                        docsUrl = urlRender (HaddockR (stackageSlug stackage) urlPieces) []
                        urlPieces = [name <> "-" <> version, "index.html"]
                        -- Compute the filepath of the resulting hoogle
                        -- database.
                        out = fpToString $ tmpbin </> fpFromText base
                        base = name <> "-" <> version <> ".hoo"
                    errs <- Hoogle.createDatabase "" Hoogle.Haskell [] src' out
                    when (not $ null errs) $ do
                        -- TODO: remove this printing once errors are yielded
                        -- to the user.
                        putStrLn $ concat
                            [ base
                            , " Hoogle errors: "
                            , tshow errs
                            ]
                        appendHoogleErrors say $ HoogleErrors
                            { packageName = name
                            , packageVersion = version
                            , errors = map show errs
                            }
                release releaseKey
            )
        -- Merge the individual binary databases into one big database.
        liftIO $ do
            say "Merging all Hoogle databases"
            dbs <- listDirectory tmpbin
            Hoogle.mergeDatabase
                (map fpToString dbs)
                (fpToString dstDefaultHoo)
    case eres of
        Right () -> return ()
        Left err -> liftIO $ appendHoogleErrors say $ HoogleErrors
            { packageName = "Exception thrown while building hoogle DB"
            , packageVersion = ""
            , errors = [show err]
            }

data HoogleErrors = HoogleErrors
    { packageName :: Text
    , packageVersion :: Text
    , errors :: [String]
    } deriving (Generic)

instance ToJSON HoogleErrors where
instance FromJSON HoogleErrors where

-- Appends hoogle errors to a log file. By encoding within a single
-- list, the resulting file can be decoded as [HoogleErrors].
appendHoogleErrors :: (Text -> IO ()) -> HoogleErrors -> IO ()
appendHoogleErrors say errs = say $ decodeUtf8 $ Y.encode [errs]

nameAndVersionFromPath :: FilePath -> Maybe (Text, Text)
nameAndVersionFromPath fp =
    (\name -> (name, version)) <$> stripSuffix "-" name'
  where
    (name', version) = T.breakOnEnd "-" $ fpToText $ filename fp

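The trailing-dash handling above is easy to misread, so a worked example (directory name invented):

--   T.breakOnEnd "-" "yesod-core-1.4.6" == ("yesod-core-", "1.4.6")
--   stripSuffix "-" "yesod-core-"       == Just "yesod-core"
-- so nameAndVersionFromPath on a directory named "yesod-core-1.4.6" yields
-- Just ("yesod-core", "1.4.6"); a name containing no dash at all yields
-- Nothing, because breakOnEnd leaves an empty prefix and stripSuffix fails.
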
---------------------------------------------------------------------
-- HADDOCK HACKS
-- (Copied from hoogle-4.2.36/src/Recipe/Haddock.hs)
-- Modifications:
-- 1) Some name qualification
-- 2) Explicit type sig due to polymorphic elem
-- 3) Fixed an unused binding warning

-- Eliminate @version
-- Change :*: to (:*:), Haddock bug
-- Change !!Int to !Int, Haddock bug
-- Change instance [overlap ok] to instance, Haddock bug
-- Change instance [incoherent] to instance, Haddock bug
-- Change instance [safe] to instance, Haddock bug
-- Change !Int to Int, HSE bug
-- Drop {-# UNPACK #-}, Haddock bug
-- Drop everything after where, Haddock bug

haddockHacks :: Maybe Hoogle.URL -> [String] -> [String]
haddockHacks loc src = maybe id haddockPackageUrl loc (translate src)
    where
        translate :: [String] -> [String]
        translate = map (unwords . g . map f . words) . filter (not . isPrefixOf "@version ")

        f "::" = "::"
        f (':':xs) = "(:" ++ xs ++ ")"
        f ('!':'!':x:xs) | isAlpha x = xs
        f ('!':x:xs) | isAlpha x || x `elem` ("[(" :: String) = x:xs
        f x | x `elem` ["[overlap","ok]","[incoherent]","[safe]"] = ""
        f x | x `elem` ["{-#","UNPACK","#-}"] = ""
        f x = x

        g ("where":_) = []
        g (x:xs) = x : g xs
        g [] = []

haddockPackageUrl :: Hoogle.URL -> [String] -> [String]
haddockPackageUrl x = concatMap f
    where f y | "@package " `isPrefixOf` y = ["@url " ++ x, y]
              | otherwise = [y]
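A rough before/after of these rewrites on invented manifest lines, to show the intent:

--   "@version 1.2.3"                -> (line dropped entirely)
--   "instance [overlap ok] Eq Foo"  -> "instance Eq Foo" (markers blanked,
--                                      which leaves extra spaces behind)
--   "data X where ..."              -> "data X" (everything after where dropped)
-- When a base URL is supplied, haddockPackageUrl additionally emits
-- "@url <base>" immediately before each "@package" line.
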
@@ -5,6 +5,7 @@ import Data.BlobStore
import Data.Slug (safeMakeSlug, HasGenIO (getGenIO), randomSlug, Slug, SnapSlug)
import Data.WebsiteContent
import qualified Database.Persist
import Database.Persist.Sql (PersistentSqlException (Couldn'tGetSQLConnection))
import Model
import qualified Settings
import Settings (widgetFile, Extra (..), GoogleAuth (..))
@@ -36,18 +37,15 @@ data App = App
    , httpManager :: Manager
    , persistConfig :: Settings.PersistConf
    , appLogger :: Logger
    , genIO :: !MWC.GenIO
    , blobStore :: !(BlobStore StoreKey)
    , progressMap :: !(IORef (IntMap Progress))
    , nextProgressKey :: !(IORef Int)
    , haddockRootDir :: !FilePath
    , haddockUnpacker :: !(ForceUnpack -> PackageSetIdent -> IO ())
    , genIO :: MWC.GenIO
    , blobStore :: BlobStore StoreKey
    , haddockRootDir :: FilePath
    , appDocUnpacker :: DocUnpacker
      -- ^ We have a dedicated thread so that (1) we don't try to unpack too many
      -- things at once, (2) we never unpack the same thing twice at the same
      -- time, and (3) so that even if the client connection dies, we finish the
      -- unpack job.
    , widgetCache :: !(IORef (HashMap Text (UTCTime, GWData (Route App))))
    , compressorStatus :: !(IORef Text)
    , widgetCache :: IORef (HashMap Text (UTCTime, GWData (Route App)))
    , websiteContent :: GitRepo WebsiteContent
    , snapshotInfoCache :: !(IORef (HashMap PackageSetIdent SnapshotInfo))
    }
@@ -58,7 +56,11 @@ data SnapshotInfo = SnapshotInfo
    , siDocMap :: !DocMap
    }

type ForceUnpack = Bool
data DocUnpacker = DocUnpacker
    { duRequestDocs :: Entity Stackage -> IO UnpackStatus
    , duGetStatus :: IO Text
    , duForceReload :: Entity Stackage -> IO ()
    }

data Progress = ProgressWorking !Text
              | ProgressDone !Text !(Route App)
@@ -101,7 +103,9 @@ instance Yesod App where

    defaultLayout widget = do
        mmsg <- getMessage
        muser <- maybeAuth
        muser <- catch maybeAuth $ \e -> case e of
            Couldn'tGetSQLConnection -> return Nothing
            _ -> throwM e

        -- We break up the default layout into two components:
        -- default-layout is the contents of the body tag, and
@@ -136,6 +140,7 @@ instance Yesod App where
    -- The page to be redirected to when authentication is required.
    authRoute _ = Just $ AuthR LoginR

    {- Temporarily disable to allow for horizontal scaling
    -- This function creates static content files in the static folder
    -- and names them based on a hash of their content. This allows
    -- expiration dates to be set far in the future without worry of
@@ -147,6 +152,7 @@ instance Yesod App where
        genFileName lbs
            | development = "autogen-" ++ base64md5 lbs
            | otherwise = base64md5 lbs
    -}

    -- Place Javascript at bottom of the body tag so the rest of the page loads first
    jsLoader _ = BottomOfBody

@@ -9,6 +9,7 @@ import Data.Slug (Slug)
import Handler.StackageHome (getStackageHomeR, getStackageMetadataR, getStackageCabalConfigR, getSnapshotPackagesR, getDocsR)
import Handler.StackageIndex (getStackageIndexR, getStackageBundleR)
import Handler.StackageSdist (getStackageSdistR)
import Handler.Hoogle (getHoogleR)

handleAliasR :: Slug -> Slug -> [Text] -> Handler ()
handleAliasR user name pieces = do
@@ -77,4 +78,5 @@ goSid sid pieces = do
        StackageSdistR pnv -> getStackageSdistR slug pnv >>= sendResponse
        SnapshotPackagesR -> getSnapshotPackagesR slug >>= sendResponse
        DocsR -> getDocsR slug >>= sendResponse
        HoogleR -> getHoogleR slug >>= sendResponse
        _ -> notFound


Handler/BuildVersion.hs (new file, 29 lines)
@@ -0,0 +1,29 @@
module Handler.BuildVersion where

import Import hiding (lift)
import Language.Haskell.TH.Syntax
import System.Process (rawSystem)
import System.Exit

getBuildVersionR :: Handler Text
getBuildVersionR = return $ pack $(do
    let headFile = ".git/HEAD"
    qAddDependentFile headFile
    ehead <- qRunIO $ tryIO $ readFile $ fpFromString headFile
    case decodeUtf8 <$> ehead of
        Left e -> lift $ ".git/HEAD not read: " ++ show e
        Right raw ->
            case takeWhile (/= '\n') <$> stripPrefix "ref: " raw of
                Nothing -> lift $ ".git/HEAD not in expected format: " ++ show raw
                Just fp' -> do
                    let fp = ".git" </> fpFromText fp'
                    qAddDependentFile $ fpToString fp
                    bs <- qRunIO $ readFile fp
                    isDirty <- qRunIO
                        $ (/= ExitSuccess)
                        <$> rawSystem "git" ["diff-files", "--quiet"]
                    lift $ unpack $ unlines
                        [ "Most recent commit: " ++ asText (decodeUtf8 bs)
                        , "Working tree is " ++ (if isDirty then "dirty" else "clean")
                        ]
    )
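Everything inside the splice runs at compile time: qAddDependentFile registers .git/HEAD (and the ref file it points to) as dependencies, so the module is recompiled whenever the checkout moves. The baked-in response then looks roughly like this (commit hash invented):

--   Most recent commit: 1f9a0c2e8d...
--   Working tree is clean
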
@@ -4,7 +4,7 @@ import Import

getCompressorStatusR :: Handler Html
getCompressorStatusR = do
    status <- getYesod >>= readIORef . compressorStatus
    status <- getYesod >>= liftIO . duGetStatus . appDocUnpacker
    defaultLayout $ do
        setTitle "Compressor thread status"
        [whamlet|

@@ -1,26 +1,35 @@
module Handler.Haddock where
module Handler.Haddock
    ( getUploadHaddockR
    , putUploadHaddockR
    , getHaddockR
    , getUploadDocMapR
    , putUploadDocMapR
      -- Exported for use in Handler.Hoogle
    , Dirs (..), getDirs, dirHoogleFp, mkDirs
    , dirRawIdent
    , dirGzIdent
    , dirHoogleIdent
    , createCompressor
    ) where

import Import
import Data.BlobStore
import Filesystem (removeTree, isDirectory, createTree, isFile, rename, removeFile, removeDirectory)
import Control.Concurrent (forkIO)
import System.IO.Temp (withSystemTempFile, withTempFile)
import System.Process (createProcess, proc, cwd, waitForProcess)
import System.Exit (ExitCode (ExitSuccess))
import Network.Mime (defaultMimeLookup)
import Crypto.Hash.Conduit (sinkHash)
import System.IO (IOMode (ReadMode), withBinaryFile)
import Data.Conduit.Zlib (gzip)
import System.Posix.Files (createLink)
import Control.Concurrent (forkIO)
import Crypto.Hash (Digest, SHA1)
import Crypto.Hash.Conduit (sinkHash)
import Data.Aeson (withObject)
import Data.BlobStore
import qualified Data.ByteString.Base16 as B16
import Data.Byteable (toBytes)
import Crypto.Hash (Digest, SHA1)
import qualified Filesystem.Path.CurrentOS as F
import Data.Slug (SnapSlug)
import Data.Byteable (toBytes)
import Data.Conduit.Zlib (gzip)
import Data.Slug (SnapSlug, unSlug)
import qualified Data.Text as T
import Data.Slug (unSlug)
import qualified Data.Yaml as Y
import Data.Aeson (withObject)
import Filesystem (isDirectory, createTree, isFile, rename, removeFile, removeDirectory)
import qualified Filesystem.Path.CurrentOS as F
import Import
import Network.Mime (defaultMimeLookup)
import System.IO (IOMode (ReadMode), withBinaryFile)
import System.IO.Temp (withTempFile)
import System.Posix.Files (createLink)

form :: Form FileInfo
form = renderDivs $ areq fileField "tarball containing docs"
@@ -30,7 +39,7 @@ form = renderDivs $ areq fileField "tarball containing docs"
getUploadHaddockR, putUploadHaddockR :: Text -> Handler Html
getUploadHaddockR slug0 = do
    uid <- requireAuthIdOrToken
    Entity sid Stackage {..} <- runDB $ do
    stackageEnt@(Entity sid Stackage {..}) <- runDB $ do
        -- Provide fallback for old URLs
        ment <- getBy $ UniqueStackage $ PackageSetIdent slug0
        case ment of
@@ -47,7 +56,7 @@ getUploadHaddockR slug0 = do
            fileSource fileInfo $$ storeWrite (HaddockBundle ident)
            runDB $ update sid [StackageHasHaddocks =. True]
            master <- getYesod
            void $ liftIO $ forkIO $ haddockUnpacker master True ident
            liftIO $ duForceReload (appDocUnpacker master) stackageEnt
            setMessage "Haddocks uploaded"
            redirect $ SnapshotR slug StackageHomeR
        _ -> defaultLayout $ do
@@ -58,7 +67,7 @@ putUploadHaddockR = getUploadHaddockR

getHaddockR :: SnapSlug -> [Text] -> Handler ()
getHaddockR slug rest = do
    ident <- runDB $ do
    stackageEnt <- runDB $ do
        ment <- getBy $ UniqueSnapshot slug
        case ment of
            Just ent -> do
@@ -66,7 +75,7 @@ getHaddockR slug rest = do
                    [pkgver] -> tryContentsRedirect ent pkgver
                    [pkgver, "index.html"] -> tryContentsRedirect ent pkgver
                    _ -> return ()
                return $ stackageIdent $ entityVal ent
                return ent
            Nothing -> do
                Entity _ stackage <- getBy404
                    $ UniqueStackage
@@ -74,11 +83,11 @@ getHaddockR slug rest = do
                    $ toPathPiece slug
                redirectWith status301 $ HaddockR (stackageSlug stackage) rest
    mapM_ sanitize rest
    dirs <- getDirs -- (gzdir, rawdir) <- getHaddockDir ident
    master <- getYesod
    liftIO $ haddockUnpacker master False ident
    dirs <- getDirs
    requireDocs stackageEnt

    let rawfp = dirRawFp dirs ident rest
    let ident = stackageIdent (entityVal stackageEnt)
        rawfp = dirRawFp dirs ident rest
        gzfp = dirGzFp dirs ident rest
        mime = defaultMimeLookup $ fpToText $ filename rawfp

@@ -124,19 +133,6 @@ tryContentsRedirect (Entity sid Stackage {..}) pkgver = do
dropDash :: Text -> Text
dropDash t = fromMaybe t $ stripSuffix "-" t

getHaddockDir :: PackageSetIdent -> Handler (FilePath, FilePath)
getHaddockDir ident = do
    master <- getYesod
    return $ mkDirPair (haddockRootDir master) ident

mkDirPair :: FilePath -- ^ root
          -> PackageSetIdent
          -> (FilePath, FilePath) -- ^ compressed, uncompressed
mkDirPair root ident =
    ( root </> "idents-raw" </> fpFromText (toPathPiece ident)
    , root </> "idents-gz" </> fpFromText (toPathPiece ident)
    )

createCompressor
    :: Dirs
    -> IO (IORef Text, IO ()) -- ^ action to kick off compressor again
@@ -209,93 +205,6 @@ dirCacheFp dirs digest =
    name = decodeUtf8 $ B16.encode $ toBytes digest
    (x, y) = splitAt 2 name

-- Should have two threads: one to unpack, one to convert. Never serve the
-- uncompressed files, only the compressed files. When serving, convert on
-- demand.
createHaddockUnpacker :: FilePath -- ^ haddock root
                      -> BlobStore StoreKey
                      -> (forall a m. (MonadIO m, MonadBaseControl IO m)
                             => SqlPersistT m a -> m a)
                      -> IO (IORef Text, ForceUnpack -> PackageSetIdent -> IO ())
createHaddockUnpacker root store runDB' = do
    createTree $ dirCacheRoot dirs
    createTree $ dirRawRoot dirs
    createTree $ dirGzRoot dirs

    chan <- newChan
    (statusRef, compressor) <- createCompressor dirs

    mask $ \restore -> void $ forkIO $ forever $ do
        (forceUnpack, ident, res) <- readChan chan
        try (restore $ go forceUnpack ident) >>= putMVar res
        compressor
    return (statusRef, \forceUnpack ident -> do
        shouldAct <-
            if forceUnpack
                then return True
                else not <$> doDirsExist ident
        if shouldAct
            then do
                res <- newEmptyMVar
                writeChan chan (forceUnpack, ident, res)
                takeMVar res >>= either (throwM . asSomeException) return
            else return ())
  where
    dirs = mkDirs root

    removeTreeIfExists fp = whenM (isDirectory fp) (removeTree fp)

    doDirsExist ident = do
        e1 <- isDirectory $ dirGzIdent dirs ident
        if e1
            then return True
            else isDirectory $ dirRawIdent dirs ident
    go forceUnpack ident = do
        toRun <-
            if forceUnpack
                then do
                    removeTreeIfExists $ dirRawIdent dirs ident
                    removeTreeIfExists $ dirGzIdent dirs ident
                    return True
                else not <$> doDirsExist ident
        when toRun $ do
            withSystemTempFile "haddock-bundle.tar.xz" $ \tempfp temph -> do
                withAcquire (storeRead' store (HaddockBundle ident)) $ \msrc ->
                    case msrc of
                        Nothing -> error "No haddocks exist for that snapshot"
                        Just src -> src $$ sinkHandle temph
                hClose temph
                createTree $ dirRawIdent dirs ident
                let destdir = dirRawIdent dirs ident
                (Nothing, Nothing, Nothing, ph) <- createProcess
                    (proc "tar" ["xf", tempfp])
                        { cwd = Just $ fpToString destdir
                        }
                ec <- waitForProcess ph
                if ec == ExitSuccess then return () else throwM ec

            -- Determine which packages have documentation and update the
            -- database appropriately
            runResourceT $ runDB' $ do
                ment <- getBy $ UniqueStackage ident
                forM_ ment $ \(Entity sid _) -> do
                    updateWhere
                        [PackageStackage ==. sid]
                        [PackageHasHaddocks =. False]
                    sourceDirectory destdir $$ mapM_C (\fp -> do
                        let (name', version) =
                                T.breakOnEnd "-"
                                    $ fpToText
                                    $ filename fp
                            mname = stripSuffix "-" name'
                        forM_ mname $ \name -> updateWhere
                            [ PackageStackage ==. sid
                            , PackageName' ==. PackageName name
                            , PackageVersion ==. Version version
                            ]
                            [PackageHasHaddocks =. True]
                        )

data DocInfo = DocInfo Version (Map Text [Text])
instance FromJSON DocInfo where
    parseJSON = withObject "DocInfo" $ \o -> DocInfo


Handler/Hoogle.hs (new file, 157 lines)
@@ -0,0 +1,157 @@
module Handler.Hoogle where

import Control.DeepSeq (NFData(..))
import Control.DeepSeq.Generics (genericRnf)
import Control.Spoon (spoon)
import Data.Data (Data (..))
import Data.Slug (SnapSlug)
import Data.Text.Read (decimal)
import Data.Unpacking (getHoogleDB)
import Handler.Haddock (getDirs)
import qualified Hoogle
import Import
import Text.Blaze.Html (preEscapedToHtml)

getHoogleR :: SnapSlug -> Handler Html
getHoogleR slug = do
    dirs <- getDirs
    mquery <- lookupGetParam "q"
    mpage <- lookupGetParam "page"
    exact <- maybe False (const True) <$> lookupGetParam "exact"
    mresults' <- lookupGetParam "results"
    let count' =
            case decimal <$> mresults' of
                Just (Right (i, "")) -> min perPage i
                _ -> perPage
        page =
            case decimal <$> mpage of
                Just (Right (i, "")) -> i
                _ -> 1
        offset = (page - 1) * perPage
    Entity _ stackage <- runDB $ getBy404 $ UniqueSnapshot slug
    mdatabasePath <- getHoogleDB dirs stackage
    heDatabase <- case mdatabasePath of
        Just x -> return $ liftIO $ Hoogle.loadDatabase $ fpToString x
        Nothing -> (>>= sendResponse) $ defaultLayout $ do
            setTitle "Hoogle database not available"
            [whamlet|
                <div .container>
                    <p>The given Hoogle database is not available.
                    <p>
                        <a href=@{SnapshotR slug StackageHomeR}>Return to snapshot homepage
            |]

    mresults <- case mquery of
        Just query -> runHoogleQuery heDatabase HoogleQueryInput
            { hqiQueryInput = query
            , hqiExactSearch = if exact then Just query else Nothing
            , hqiLimitTo = count'
            , hqiOffsetBy = offset
            }
        Nothing -> return $ HoogleQueryOutput "" [] Nothing
    let queryText = fromMaybe "" mquery
        pageLink p = (SnapshotR slug HoogleR
                     , (if exact then (("exact", "true"):) else id)
                     $ (maybe id (\q' -> (("q", q'):)) mquery)
                       [("page", tshow p)])
        snapshotLink = SnapshotR slug StackageHomeR
        hoogleForm = $(widgetFile "hoogle-form")
    defaultLayout $ do
        setTitle "Hoogle Search"
        $(widgetFile "hoogle")

getPageCount :: Int -> Int
getPageCount totalCount = 1 + div totalCount perPage

perPage :: Int
perPage = 10

data HoogleQueryInput = HoogleQueryInput
    { hqiQueryInput :: Text
    , hqiExactSearch :: Maybe Text
    , hqiLimitTo :: Int
    , hqiOffsetBy :: Int
    }
    deriving (Eq, Read, Show, Data, Typeable, Ord, Generic)

data HoogleQueryOutput = HoogleQueryBad Text
                       | HoogleQueryOutput Text [HoogleResult] (Maybe Int) -- ^ Text == HTML version of query, Int == total count
    deriving (Read, Typeable, Data, Show, Eq)

data HoogleResult = HoogleResult
    { hrURL :: String
    , hrSources :: [(PackageLink, [ModuleLink])]
    , hrTitle :: String -- ^ HTML
    , hrBody :: String -- ^ plain text
    }
    deriving (Eq, Read, Show, Data, Typeable, Ord, Generic)

data PackageLink = PackageLink
    { plName :: String
    , plURL :: String
    }
    deriving (Eq, Read, Show, Data, Typeable, Ord, Generic)

data ModuleLink = ModuleLink
    { mlName :: String
    , mlURL :: String
    }
    deriving (Eq, Read, Show, Data, Typeable, Ord, Generic)

instance NFData HoogleResult where rnf = genericRnf
instance NFData PackageLink where rnf = genericRnf
instance NFData ModuleLink where rnf = genericRnf

runHoogleQuery :: Monad m
               => m Hoogle.Database
               -> HoogleQueryInput
               -> m HoogleQueryOutput
runHoogleQuery heDatabase HoogleQueryInput {..} =
    runQuery $ Hoogle.parseQuery Hoogle.Haskell query
  where
    query = unpack hqiQueryInput

    runQuery (Left err) = return $ HoogleQueryBad (tshow err)
    runQuery (Right query') = do
        hoogledb <- heDatabase
        let query'' = Hoogle.queryExact classifier query'
            rawRes = concatMap fixResult
                   $ Hoogle.search hoogledb query''
            mres = spoon
                 $ take (min 100 hqiLimitTo)
                 $ drop hqiOffsetBy rawRes
            mcount = spoon $ limitedLength 0 rawRes
            limitedLength x [] = Just x
            limitedLength x (_:rest)
                | x >= 100 = Nothing
                | otherwise = limitedLength (x + 1) rest
            rendered = pack $ Hoogle.showTagHTML $ Hoogle.renderQuery query''
        return $ case (,) <$> mres <*> mcount of
            Nothing ->
                HoogleQueryOutput rendered [] (Just 0)
            Just (results, mcount') ->
                HoogleQueryOutput rendered (take hqiLimitTo results) mcount'

    classifier = maybe Nothing
        (const (Just Hoogle.UnclassifiedItem))
        hqiExactSearch

    fixResult (_, Hoogle.Result locs self docs) = do
        (loc, _) <- take 1 locs
        let sources' = unionsWith (++) $
                mapMaybe (getPkgModPair . snd) locs
        return HoogleResult
            { hrURL = loc
            , hrSources = mapToList sources'
            , hrTitle = Hoogle.showTagHTML self
            , hrBody = fromMaybe "Problem loading documentation" $
                spoon $ Hoogle.showTagText docs
            }

getPkgModPair :: [(String, String)]
              -> Maybe (Map PackageLink [ModuleLink])
getPkgModPair [(pkg, pkgname), (modu, moduname)] = do
    let pkg' = PackageLink pkgname pkg
        modu' = ModuleLink moduname modu
    return $ asMap $ singletonMap pkg' [modu']
getPkgModPair _ = Nothing
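One non-obvious choice in runHoogleQuery is Control.Spoon: spoon forces a pure value and maps any exception raised during evaluation to Nothing, which guards against bottoms hiding inside Hoogle's result lists. Roughly:

--   spoon :: NFData a => a -> Maybe a
--   spoon (1 + 2)        == Just 3
--   spoon (error "boom") == Nothing
-- Here a corrupt result entry makes the query degrade gracefully to
-- HoogleQueryOutput rendered [] (Just 0) rather than crashing the handler.
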

Handler/PackageCounts.hs (new file, 39 lines)
@@ -0,0 +1,39 @@
module Handler.PackageCounts
    ( getPackageCountsR
    ) where

import Import hiding (Value (..), groupBy, (==.))
import Data.Slug (mkSlug)
import Database.Esqueleto

data Count = Count
    { name :: Text
    , date :: Day
    , packages :: Int
    }

toCount :: (Value Text, Value UTCTime, Value Int) -> Count
toCount (Value x, Value y, Value z) =
    Count x (utctDay y) z

getPackageCountsR :: Handler Html
getPackageCountsR = do
    admins <- adminUsers <$> getExtra
    counts <- runDB $ do
        let slugs = mapMaybe mkSlug $ setToList admins
        adminUids <- selectKeysList [UserHandle <-. slugs] []
        fmap (map toCount) $ select $ from $ \(s, p) -> do
            where_ $
                (not_ $ s ^. StackageTitle `like` val "%inclusive") &&.
                (s ^. StackageId ==. p ^. PackageStackage) &&.
                (s ^. StackageUser `in_` valList adminUids)
            groupBy (s ^. StackageTitle, s ^. StackageUploaded)
            orderBy [desc $ s ^. StackageUploaded]
            return
                ( s ^. StackageTitle
                , s ^. StackageUploaded
                , countRows
                )
    defaultLayout $ do
        setTitle "Package counts"
        $(widgetFile "package-counts")
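The Esqueleto query above compiles to SQL roughly like the following (a sketch; persistent generates the real table and column names):

--   SELECT s.title, s.uploaded, COUNT(*)
--   FROM stackage AS s, package AS p
--   WHERE s.title NOT LIKE '%inclusive'
--     AND s.id = p.stackage
--     AND s.user IN (<admin user ids>)
--   GROUP BY s.title, s.uploaded
--   ORDER BY s.uploaded DESC
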
@@ -1,10 +1,10 @@
module Handler.PackageList where

import qualified Data.HashMap.Strict as M
import Data.Time (NominalDiffTime, addUTCTime)
import Data.Time (NominalDiffTime)
import qualified Database.Esqueleto as E
import Import
import Yesod.Core.Types (WidgetT (WidgetT), unWidgetT)


-- FIXME maybe just redirect to the LTS or nightly package list
getPackageListR :: Handler Html
@@ -29,7 +29,10 @@ getPackageListR = defaultLayout $ do

-- FIXME move somewhere else, maybe even yesod-core
cachedWidget :: NominalDiffTime -> Text -> Widget -> Widget
cachedWidget diff key widget = do
cachedWidget _diff _key widget = do
    -- Temporarily disabled, seems to be eating up too much memory
    widget
    {-
    ref <- widgetCache <$> getYesod
    now <- liftIO getCurrentTime
    mpair <- lookup key <$> readIORef ref
@@ -44,3 +47,4 @@ cachedWidget diff key widget = do
    -- FIXME render the builders in gw for more efficiency
    atomicModifyIORef' ref $ \m -> (insertMap key (addUTCTime diff now, gw) m, ())
    return ((), gw)
    -}

@@ -2,16 +2,14 @@ module Handler.Progress where

import Import

getProgressR :: Int -> Handler Html
getProgressR :: UploadProgressId -> Handler Html
getProgressR key = do
    app <- getYesod
    m <- readIORef $ progressMap app
    case lookup key m of
        Nothing -> notFound
        Just (ProgressWorking text) -> defaultLayout $ do
    UploadProgress text mdest <- runDB $ get404 key
    case mdest of
        Nothing -> defaultLayout $ do
            addHeader "Refresh" "1"
            setTitle "Working..."
            [whamlet|<p>#{text}|]
        Just (ProgressDone text url) -> do
        Just url -> do
            setMessage $ toHtml text
            redirect url

@@ -31,6 +31,12 @@ getStackageHomeR slug = do
            else Nothing
        base = maybe 0 (const 1) minclusive :: Int

        hoogleForm =
            let queryText = "" :: Text
                exact = False
             in $(widgetFile "hoogle-form")
    Entity sid _stackage <- runDB $ getBy404 $ UniqueSnapshot slug

    defaultLayout $ do
        setTitle $ toHtml $ stackageTitle stackage
        cachedWidget (20 * 60) ("package-list-" ++ toPathPiece slug) $ do

@@ -20,6 +20,7 @@ import System.Directory (removeFile, getTemporaryDirectory)
import System.Process (runProcess, waitForProcess)
import System.Exit (ExitCode (ExitSuccess))
import Data.Slug (mkSlug, SnapSlug (..), safeMakeSlug, unSlug)
import Control.Debounce

fileKey :: Text
fileKey = "stackage"
@@ -78,12 +79,28 @@ putUploadStackageR = do
    when (isJust mstackage) $ invalidArgs ["Stackage already exists"]

    app <- getYesod
    key <- atomicModifyIORef (nextProgressKey app) $ \i -> (i + 1, i + 1)
    let updateHelper :: MonadBase IO m => Progress -> m ()
        updateHelper p = atomicModifyIORef (progressMap app) $ \m -> (insertMap key p m, ())
    let initProgress = UploadProgress "Upload starting" Nothing
    key <- runDB $ insert initProgress

    -- We don't want to be writing progress updates to the database too
    -- frequently, so let's just do it once per second at most.
    -- Debounce to the rescue!
    statusRef <- newIORef initProgress
    writeToDB <- liftIO $ mkDebounce defaultDebounceSettings
        { debounceAction = do
            up <- readIORef statusRef
            runPool (persistConfig app) (replace key up) (connPool app)
        }

    let updateHelper :: MonadBase IO m => UploadProgress -> m ()
        updateHelper p = do
            writeIORef statusRef p
            liftBase writeToDB
        update :: MonadBase IO m => Text -> m ()
        update msg = updateHelper (ProgressWorking msg)
        done msg url = updateHelper (ProgressDone msg url)
        update msg = updateHelper (UploadProgress msg Nothing)
        done msg route = do
            render <- getUrlRender
            updateHelper (UploadProgress msg $ Just $ render route)
        onExc e = done ("Exception occurred: " ++ tshow e) ProfileR
        setAlias = do
            forM_ (malias >>= mkSlug) $ \alias -> do
@@ -167,8 +184,7 @@ putUploadStackageR = do
            return slug

            done "Stackage created" $ SnapshotR slug StackageHomeR
        else do
            done "Error creating index file" ProfileR
        else done "Error creating index file" ProfileR

    addHeader "X-Stackage-Ident" $ toPathPiece ident
    redirect $ ProgressR key


Import.hs (29 lines changed)
@@ -86,11 +86,11 @@ getSnapshotInfoByIdent ident = withCache $ do
    atomicModifyIORef' cacheRef $ \m ->
        (insertMap ident x m, x)


data Dirs = Dirs
    { dirRawRoot :: !FilePath
    , dirGzRoot :: !FilePath
    , dirCacheRoot :: !FilePath
    , dirHoogleRoot :: !FilePath
    }

getDirs :: Handler Dirs
@@ -101,12 +101,35 @@ mkDirs dir = Dirs
    { dirRawRoot = dir </> "idents-raw"
    , dirGzRoot = dir </> "idents-gz"
    , dirCacheRoot = dir </> "cachedir"
    , dirHoogleRoot = dir </> "hoogle"
    }

dirGzIdent, dirRawIdent :: Dirs -> PackageSetIdent -> FilePath
dirGzIdent, dirRawIdent, dirHoogleIdent :: Dirs -> PackageSetIdent -> FilePath
dirGzIdent dirs ident = dirGzRoot dirs </> fpFromText (toPathPiece ident)
dirRawIdent dirs ident = dirRawRoot dirs </> fpFromText (toPathPiece ident)
dirHoogleIdent dirs ident = dirHoogleRoot dirs </> fpFromText (toPathPiece ident)

dirGzFp, dirRawFp :: Dirs -> PackageSetIdent -> [Text] -> FilePath
dirGzFp, dirRawFp, dirHoogleFp :: Dirs -> PackageSetIdent -> [Text] -> FilePath
dirGzFp dirs ident rest = dirGzIdent dirs ident </> mconcat (map fpFromText rest)
dirRawFp dirs ident rest = dirRawIdent dirs ident </> mconcat (map fpFromText rest)
dirHoogleFp dirs ident rest = dirHoogleIdent dirs ident </> mconcat (map fpFromText rest)

requireDocs :: Entity Stackage -> Handler ()
requireDocs stackageEnt = do
    master <- getYesod
    status <- liftIO $ duRequestDocs (appDocUnpacker master) stackageEnt
    case status of
        USReady -> return ()
        USBusy -> (>>= sendResponse) $ defaultLayout $ do
            setTitle "Docs unpacking, please wait"
            addHeader "Refresh" "1"
            msg <- liftIO $ duGetStatus $ appDocUnpacker master
            [whamlet|
                <div .container>
                    <p>Docs are currently being unpacked, please wait.
                    <p>This page will automatically reload every second.
                    <p>Current status: #{msg}
            |]
        USFailed e -> invalidArgs
            [ "Docs not available: " ++ e
            ]


Types.hs (16 lines changed)
@@ -58,8 +58,14 @@ data StoreKey = HackageCabal !PackageName !Version
              | HackageViewIndex !HackageView
              | SnapshotBundle !PackageSetIdent
              | HaddockBundle !PackageSetIdent
              | HoogleDB !PackageSetIdent !HoogleVersion
    deriving (Show, Eq, Ord, Typeable)

newtype HoogleVersion = HoogleVersion Text
    deriving (Show, Eq, Ord, Typeable, PathPiece)
currentHoogleVersion :: HoogleVersion
currentHoogleVersion = HoogleVersion VERSION_hoogle

instance ToPath StoreKey where
    toPath (HackageCabal name version) = ["hackage", toPathPiece name, toPathPiece version ++ ".cabal"]
    toPath (HackageSdist name version) = ["hackage", toPathPiece name, toPathPiece version ++ ".tar.gz"]
@@ -95,6 +101,11 @@ instance ToPath StoreKey where
        [ "haddock"
        , toPathPiece ident ++ ".tar.xz"
        ]
    toPath (HoogleDB ident ver) =
        [ "hoogle"
        , toPathPiece ver
        , toPathPiece ident ++ ".hoo.gz"
        ]
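Concretely, a database built against hoogle 4.2.36 for a snapshot whose identifier renders as "abc123" lands at a path like this (identifier invented for the example):

--   toPath (HoogleDB ident (HoogleVersion "4.2.36"))
--     == ["hoogle", "4.2.36", "abc123.hoo.gz"]
-- Namespacing by Hoogle version means an upgraded server never loads a stale
-- binary database; it simply regenerates under the new version prefix.
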
instance BackupToS3 StoreKey where
    shouldBackup HackageCabal{} = False
    shouldBackup HackageSdist{} = False
@@ -105,6 +116,7 @@ instance BackupToS3 StoreKey where
    shouldBackup HackageViewIndex{} = False
    shouldBackup SnapshotBundle{} = True
    shouldBackup HaddockBundle{} = True
    shouldBackup HoogleDB{} = True

newtype HackageRoot = HackageRoot { unHackageRoot :: Text }
    deriving (Show, Read, Typeable, Eq, Ord, Hashable, PathPiece, ToMarkup)
@@ -113,3 +125,7 @@ class HasHackageRoot a where
    getHackageRoot :: a -> HackageRoot
instance HasHackageRoot HackageRoot where
    getHackageRoot = id

data UnpackStatus = USReady
                  | USBusy
                  | USFailed !Text


cabal.config (194 lines changed)
@@ -1,7 +1,7 @@
-- Stackage snapshot from: http://www.stackage.org/snapshot/nightly-2014-12-22
-- Stackage snapshot from: http://www.stackage.org/snapshot/lts-1.0
-- Please place this file next to your .cabal file as cabal.config
-- To only use tested packages, uncomment the following line:
-- remote-repo: stackage-nightly-2014-12-22:http://www.stackage.org/snapshot/nightly-2014-12-22
-- remote-repo: stackage-lts-1.0:http://www.stackage.org/snapshot/lts-1.0
constraints: abstract-deque ==0.3,
             abstract-par ==0.3.3,
             accelerate ==0.15.0.0,
@@ -15,6 +15,7 @@ constraints: abstract-deque ==0.3,
             aeson-pretty ==0.7.2,
             aeson-qq ==0.7.4,
             aeson-utils ==0.2.2.1,
             alarmclock ==0.2.0.5,
             alex ==3.1.3,
             amqp ==0.10.1,
             ansi-terminal ==0.6.2.1,
@@ -29,7 +30,7 @@ constraints: abstract-deque ==0.3,
             asn1-encoding ==0.9.0,
             asn1-parse ==0.9.0,
             asn1-types ==0.3.0,
             async ==2.0.1.6,
             async ==2.0.2,
             atto-lisp ==0.2.2,
             attoparsec ==0.12.1.2,
             attoparsec-conduit ==1.1.0,
@@ -39,16 +40,18 @@ constraints: abstract-deque ==0.3,
             auto-update ==0.1.2.1,
             aws ==0.11,
             bake ==0.2,
             bank-holidays-england ==0.1.0.2,
             barecheck ==0.2.0.6,
             base installed,
             base16-bytestring ==0.1.1.6,
             base64-bytestring ==1.0.0.1,
             base-compat ==0.5.0,
             base-prelude ==0.1.8,
             base-prelude ==0.1.11,
             base-unicode-symbols ==0.2.2.4,
             basic-prelude ==0.3.10,
             bifunctors ==4.2,
             binary installed,
             binary-conduit ==1.2.3,
             binary-list ==1.0.1.0,
             bindings-DSL ==1.0.21,
             bioace ==0.0.1,
@@ -65,7 +68,7 @@ constraints: abstract-deque ==0.3,
             blaze-builder ==0.3.3.4,
             blaze-builder-enumerator ==0.2.0.6,
             blaze-html ==0.7.0.3,
             blaze-markup ==0.6.1.1,
             blaze-markup ==0.6.2.0,
             blaze-svg ==0.3.4,
             blaze-textual ==0.2.0.9,
             BlogLiterately ==0.7.1.7,
@@ -76,6 +79,7 @@ constraints: abstract-deque ==0.3,
             bool-extras ==0.4.0,
             bound ==1.0.4,
             BoundedChan ==1.0.3.0,
             broadcast-chan ==0.1.0,
             bson ==0.3.1,
             bumper ==0.6.0.2,
             byteable ==0.1.1,
@@ -87,17 +91,18 @@ constraints: abstract-deque ==0.3,
             bytestring-lexing ==0.4.3.2,
             bytestring-mmap ==0.2.2,
             bytestring-progress ==1.0.3,
             bytestring-show ==0.3.5.6,
             bytestring-trie ==0.2.4,
             bzlib ==0.5.0.4,
             bzlib-conduit ==0.2.1.3,
             c2hs ==0.20.1,
             Cabal installed,
             cabal-install ==1.18.0.6,
             cabal-install ==1.18.0.7,
             cabal-src ==0.2.5,
             cairo ==0.13.0.5,
             cairo ==0.13.0.6,
             case-insensitive ==1.2.0.3,
             cases ==0.1.2,
             cassava ==0.4.2.0,
             cassava ==0.4.2.1,
             cautious-file ==1.0.2,
             cereal ==0.4.1.0,
             cereal-conduit ==0.7.2.3,
@@ -105,7 +110,7 @@ constraints: abstract-deque ==0.3,
             charset ==0.3.7,
             Chart ==1.3.2,
             Chart-diagrams ==1.3.2,
             ChasingBottoms ==1.3.0.8,
             ChasingBottoms ==1.3.0.9,
             check-email ==1.0,
             checkers ==0.4.1,
             chell ==0.4,
@@ -116,7 +121,7 @@ constraints: abstract-deque ==0.3,
             cipher-camellia ==0.0.2,
             cipher-des ==0.0.6,
             cipher-rc4 ==0.1.4,
             circle-packing ==0.1.0.3,
             circle-packing ==0.1.0.4,
             classy-prelude ==0.10.2,
             classy-prelude-conduit ==0.10.2,
             classy-prelude-yesod ==0.10.2,
@@ -137,14 +142,14 @@ constraints: abstract-deque ==0.3,
             concurrent-supply ==0.1.7,
             cond ==0.4.1.1,
             conduit ==1.2.3.1,
             conduit-combinators ==0.3.0.4,
             conduit-extra ==1.1.5.1,
             conduit-combinators ==0.3.0.5,
             conduit-extra ==1.1.6,
             configurator ==0.3.0.0,
             connection ==0.2.3,
             constraints ==0.4.1.1,
             constraints ==0.4.1.2,
             containers installed,
             containers-unicode-symbols ==0.3.1.1,
             contravariant ==1.2,
             contravariant ==1.2.0.1,
             control-monad-free ==0.5.3,
             control-monad-loop ==0.1,
             convertible ==1.1.0.0,
@@ -161,8 +166,8 @@ constraints: abstract-deque ==0.3,
             cryptohash ==0.11.6,
             cryptohash-conduit ==0.1.1,
             cryptohash-cryptoapi ==0.1.3,
             crypto-numbers ==0.2.3,
             crypto-pubkey ==0.2.6,
             crypto-numbers ==0.2.7,
             crypto-pubkey ==0.2.7,
             crypto-pubkey-types ==0.4.2.3,
             crypto-random ==0.0.8,
             crypto-random-api ==0.2.0,
@@ -184,12 +189,13 @@ constraints: abstract-deque ==0.3,
             data-memocombinators ==0.5.1,
             data-reify ==0.6,
             DAV ==1.0.3,
             Decimal ==0.4.2,
             deepseq installed,
             deepseq-generics ==0.1.1.2,
             derive ==2.5.18,
             diagrams ==1.2,
             diagrams-builder ==0.6.0.2,
             diagrams-cairo ==1.2.0.4,
             diagrams-cairo ==1.2.0.5,
             diagrams-contrib ==1.1.2.4,
             diagrams-core ==1.2.0.4,
             diagrams-haddock ==0.2.2.12,
@@ -198,13 +204,19 @@ constraints: abstract-deque ==0.3,
             diagrams-svg ==1.1.0.3,
             Diff ==0.3.0,
             digest ==0.0.1.2,
             digestive-functors ==0.7.1.1,
             digestive-functors ==0.7.1.3,
             dimensional ==0.13.0.1,
             directory installed,
             directory-tree ==0.12.0,
             direct-sqlite ==2.3.14,
             distributed-process ==0.5.3,
             distributed-process-async ==0.2.1,
             distributed-process-client-server ==0.1.2,
|
||||
distributed-process-execution ==0.1.1,
|
||||
distributed-process-extras ==0.2.0,
|
||||
distributed-process-simplelocalnet ==0.2.2.0,
|
||||
distributed-process-supervisor ==0.1.2,
|
||||
distributed-process-task ==0.1.1,
|
||||
distributed-static ==0.3.1.0,
|
||||
distributive ==0.4.4,
|
||||
djinn-ghc ==0.0.2.2,
|
||||
@ -219,6 +231,7 @@ constraints: abstract-deque ==0.3,
|
||||
elm-build-lib ==0.14.0.0,
|
||||
elm-compiler ==0.14,
|
||||
elm-core-sources ==1.0.0,
|
||||
elm-package ==0.2.2,
|
||||
email-validate ==2.0.1,
|
||||
enclosed-exceptions ==1.0.1,
|
||||
entropy ==0.3.4.1,
|
||||
@ -232,7 +245,6 @@ constraints: abstract-deque ==0.3,
|
||||
exceptions ==0.6.1,
|
||||
exception-transformers ==0.3.0.4,
|
||||
executable-path ==0.0.3,
|
||||
ex-pool ==0.2,
|
||||
extensible-exceptions ==0.1.1.4,
|
||||
extra ==1.0,
|
||||
failure ==0.2.0.3,
|
||||
@ -247,7 +259,7 @@ constraints: abstract-deque ==0.3,
|
||||
fb ==1.0.7,
|
||||
fb-persistent ==0.3.4,
|
||||
fclabels ==2.0.2,
|
||||
FenwickTree ==0.1.1,
|
||||
FenwickTree ==0.1.2,
|
||||
fgl ==5.5.0.1,
|
||||
file-embed ==0.0.7,
|
||||
file-location ==0.4.5.3,
|
||||
@ -259,6 +271,7 @@ constraints: abstract-deque ==0.3,
|
||||
flexible-defaults ==0.0.1.1,
|
||||
focus ==0.1.3,
|
||||
foldl ==1.0.7,
|
||||
FontyFruity ==0.4,
|
||||
force-layout ==0.3.0.8,
|
||||
foreign-store ==0.1,
|
||||
formatting ==6.0.0,
|
||||
@ -271,24 +284,24 @@ constraints: abstract-deque ==0.3,
|
||||
gd ==3000.7.3,
|
||||
generic-aeson ==0.2.0.2,
|
||||
generic-deriving ==1.6.3,
|
||||
GenericPretty ==1.2.1,
|
||||
generics-sop ==0.1.0.4,
|
||||
ghc-heap-view ==0.5.3,
|
||||
ghcid ==0.3.3,
|
||||
ghc-mod ==5.2.1.1,
|
||||
ghcid ==0.3.4,
|
||||
ghc-mod ==5.2.1.2,
|
||||
ghc-mtl ==1.2.1.0,
|
||||
ghc-paths ==0.1.0.9,
|
||||
ghc-prim installed,
|
||||
ghc-syb-utils ==0.2.2,
|
||||
gio ==0.13.0.3,
|
||||
gio ==0.13.0.4,
|
||||
git-embed ==0.1.0,
|
||||
gl ==0.6.2,
|
||||
glib ==0.13.0.6,
|
||||
glib ==0.13.0.7,
|
||||
Glob ==0.7.5,
|
||||
GLURaw ==1.4.0.1,
|
||||
GLUT ==2.5.1.1,
|
||||
graph-core ==0.2.1.0,
|
||||
graphs ==0.5.0.1,
|
||||
graphviz ==2999.17.0.1,
|
||||
gravatar ==0.6,
|
||||
groundhog ==0.7.0.1,
|
||||
groundhog-mysql ==0.7.0.1,
|
||||
@ -297,15 +310,15 @@ constraints: abstract-deque ==0.3,
|
||||
groundhog-th ==0.7.0,
|
||||
groupoids ==4.0,
|
||||
groups ==0.4.0.0,
|
||||
gtk ==0.13.3,
|
||||
gtk ==0.13.4,
|
||||
gtk2hs-buildtools ==0.13.0.3,
|
||||
haddock-api ==2.15.0,
|
||||
haddock-api ==2.15.0.2,
|
||||
haddock-library ==1.1.1,
|
||||
half ==0.2.0.1,
|
||||
HandsomeSoup ==0.3.5,
|
||||
happstack-server ==7.3.9,
|
||||
happy ==1.19.4,
|
||||
hashable ==1.2.3.0,
|
||||
hashable ==1.2.3.1,
|
||||
hashable-extras ==0.2.0.1,
|
||||
hashmap ==1.3.0.1,
|
||||
hashtables ==1.2.0.1,
|
||||
@ -318,12 +331,12 @@ constraints: abstract-deque ==0.3,
|
||||
haskell-src ==1.0.1.6,
|
||||
haskell-src-exts ==1.16.0.1,
|
||||
haskell-src-meta ==0.6.0.8,
|
||||
hasql ==0.4.1,
|
||||
hasql-backend ==0.2.2,
|
||||
hasql-postgres ==0.9.0,
|
||||
hastache ==0.6.0,
|
||||
hasql ==0.7.1,
|
||||
hasql-backend ==0.4.0,
|
||||
hasql-postgres ==0.10.1,
|
||||
hastache ==0.6.1,
|
||||
HaTeX ==3.16.0.0,
|
||||
HaXml ==1.24.1,
|
||||
HaXml ==1.25,
|
||||
haxr ==3000.10.3.1,
|
||||
HCodecs ==0.5,
|
||||
hdaemonize ==0.5.0.0,
|
||||
@ -333,29 +346,30 @@ constraints: abstract-deque ==0.3,
|
||||
heist ==0.14.0.1,
|
||||
here ==1.2.6,
|
||||
heredoc ==0.2.0.0,
|
||||
hflags ==0.4,
|
||||
highlighting-kate ==0.5.11.1,
|
||||
hinotify ==0.3.7,
|
||||
hint ==0.4.2.1,
|
||||
histogram-fill ==0.8.3.0,
|
||||
hit ==0.6.2,
|
||||
hjsmin ==0.1.4.7,
|
||||
hledger ==0.23.3,
|
||||
hledger-lib ==0.23.3,
|
||||
hledger ==0.24,
|
||||
hledger-lib ==0.24,
|
||||
hlibgit2 ==0.18.0.13,
|
||||
hlint ==1.9.13,
|
||||
hmatrix ==0.16.1.2,
|
||||
hlint ==1.9.14,
|
||||
hmatrix ==0.16.1.3,
|
||||
hmatrix-gsl ==0.16.0.2,
|
||||
hoauth2 ==0.4.3,
|
||||
holy-project ==0.1.1.1,
|
||||
hoogle ==4.2.36,
|
||||
hoopl installed,
|
||||
hOpenPGP ==1.11,
|
||||
hopenpgp-tools ==0.13,
|
||||
hostname ==1.0,
|
||||
hostname-validate ==1.0.0,
|
||||
hourglass ==0.2.6,
|
||||
hpc installed,
|
||||
hPDB ==1.2.0,
|
||||
hPDB-examples ==1.1.2,
|
||||
hPDB ==1.2.0.2,
|
||||
hPDB-examples ==1.2.0.1,
|
||||
hs-bibutils ==5.5,
|
||||
hscolour ==1.20.3,
|
||||
hse-cpp ==0.1,
|
||||
@ -365,7 +379,7 @@ constraints: abstract-deque ==0.3,
|
||||
hspec2 ==0.6.1,
|
||||
hspec-core ==2.1.2,
|
||||
hspec-discover ==2.1.2,
|
||||
hspec-expectations ==0.6.1,
|
||||
hspec-expectations ==0.6.1.1,
|
||||
hspec-meta ==2.0.0,
|
||||
hspec-wai ==0.6.2,
|
||||
hspec-wai-json ==0.6.0,
|
||||
@ -383,7 +397,7 @@ constraints: abstract-deque ==0.3,
|
||||
http-types ==0.8.5,
|
||||
HUnit ==1.2.5.2,
|
||||
hweblib ==0.6.3,
|
||||
hxt ==9.3.1.7,
|
||||
hxt ==9.3.1.10,
|
||||
hxt-charproperties ==9.2.0.0,
|
||||
hxt-http ==9.1.5,
|
||||
hxt-pickle-utils ==0.1.0.2,
|
||||
@ -394,11 +408,12 @@ constraints: abstract-deque ==0.3,
|
||||
hyphenation ==0.4,
|
||||
idna ==0.3.0,
|
||||
ieee754 ==0.7.4,
|
||||
IfElse ==0.85,
|
||||
imagesize-conduit ==1.0.0.4,
|
||||
immortal ==0.2,
|
||||
incremental-parser ==0.2.3.3,
|
||||
indents ==0.3.3,
|
||||
ini ==0.2.2,
|
||||
ini ==0.3.0,
|
||||
integer-gmp installed,
|
||||
integration ==0.2.0.1,
|
||||
interpolate ==0.1.0,
|
||||
@ -411,15 +426,16 @@ constraints: abstract-deque ==0.3,
|
||||
iterable ==3.0,
|
||||
ixset ==1.0.6,
|
||||
js-flot ==0.8.3,
|
||||
js-jquery ==1.11.1,
|
||||
js-jquery ==1.11.2,
|
||||
json-autotype ==0.2.5.4,
|
||||
json-schema ==0.7.3.0,
|
||||
JuicyPixels ==3.1.7.1,
|
||||
JuicyPixels ==3.2.1,
|
||||
JuicyPixels-repa ==0.7,
|
||||
kan-extensions ==4.1.1,
|
||||
kan-extensions ==4.2,
|
||||
kdt ==0.2.2,
|
||||
keter ==1.3.7.1,
|
||||
keys ==3.10.1,
|
||||
kure ==2.4.10,
|
||||
kure ==2.16.4,
|
||||
language-c ==0.4.7,
|
||||
language-ecmascript ==0.16.2,
|
||||
language-glsl ==0.1.1,
|
||||
@ -438,13 +454,14 @@ constraints: abstract-deque ==0.3,
|
||||
lifted-base ==0.2.3.3,
|
||||
linear ==1.15.5,
|
||||
linear-accelerate ==0.2,
|
||||
list-t ==0.3.1,
|
||||
list-t ==0.4.2,
|
||||
loch-th ==0.2.1,
|
||||
log-domain ==0.9.3,
|
||||
logfloat ==0.12.1,
|
||||
logict ==0.6.0.2,
|
||||
loop ==0.2.0,
|
||||
lucid ==2.5,
|
||||
lzma-conduit ==1.1.1,
|
||||
machines ==0.4.1,
|
||||
mandrill ==0.1.1.0,
|
||||
map-syntax ==0.2,
|
||||
@ -464,13 +481,13 @@ constraints: abstract-deque ==0.3,
|
||||
MissingH ==1.3.0.1,
|
||||
mmap ==0.5.9,
|
||||
mmorph ==1.0.4,
|
||||
MonadCatchIO-transformers ==0.3.1.2,
|
||||
MonadCatchIO-transformers ==0.3.1.3,
|
||||
monad-control ==0.3.3.0,
|
||||
monad-coroutine ==0.8.0.1,
|
||||
monadcryptorandom ==0.6.1,
|
||||
monad-extras ==0.5.9,
|
||||
monadic-arrays ==0.2.1.3,
|
||||
monad-journal ==0.6.0.1,
|
||||
monad-journal ==0.6.0.2,
|
||||
monad-logger ==0.3.11.1,
|
||||
monad-loops ==0.4.2.1,
|
||||
monad-par ==0.3.4.7,
|
||||
@ -488,7 +505,7 @@ constraints: abstract-deque ==0.3,
|
||||
mono-traversable ==0.7.0,
|
||||
mtl ==2.1.3.1,
|
||||
mtlparse ==0.1.2,
|
||||
mtl-prelude ==1.0.1,
|
||||
mtl-prelude ==1.0.2,
|
||||
multimap ==1.2.1,
|
||||
multipart ==0.1.2,
|
||||
MusicBrainz ==0.2.2,
|
||||
@ -506,7 +523,7 @@ constraints: abstract-deque ==0.3,
|
||||
network-simple ==0.4.0.2,
|
||||
network-transport ==0.4.1.0,
|
||||
network-transport-tcp ==0.4.1,
|
||||
network-transport-tests ==0.2.1.0,
|
||||
network-transport-tests ==0.2.2.0,
|
||||
network-uri ==2.6.0.1,
|
||||
newtype ==0.2,
|
||||
nsis ==0.2.4,
|
||||
@ -514,7 +531,7 @@ constraints: abstract-deque ==0.3,
|
||||
numeric-extras ==0.0.3,
|
||||
NumInstances ==1.4,
|
||||
numtype ==1.1,
|
||||
Octree ==0.5.3,
|
||||
Octree ==0.5.4.2,
|
||||
old-locale installed,
|
||||
old-time installed,
|
||||
OneTuple ==0.2.1,
|
||||
@ -529,8 +546,8 @@ constraints: abstract-deque ==0.3,
|
||||
pandoc ==1.13.2,
|
||||
pandoc-citeproc ==0.6,
|
||||
pandoc-types ==1.12.4.1,
|
||||
pango ==0.13.0.4,
|
||||
parallel ==3.2.0.5,
|
||||
pango ==0.13.0.5,
|
||||
parallel ==3.2.0.6,
|
||||
parallel-io ==0.3.3,
|
||||
parseargs ==0.1.5.2,
|
||||
parsec ==3.1.7,
|
||||
@ -541,23 +558,23 @@ constraints: abstract-deque ==0.3,
|
||||
pcre-light ==0.4.0.3,
|
||||
pdfinfo ==1.5.1,
|
||||
pem ==0.2.2,
|
||||
persistent ==2.1.1.2,
|
||||
persistent ==2.1.1.3,
|
||||
persistent-mongoDB ==2.1.2,
|
||||
persistent-mysql ==2.1.2,
|
||||
persistent-postgresql ==2.1.2,
|
||||
persistent-sqlite ==2.1.1.1,
|
||||
persistent-sqlite ==2.1.1.2,
|
||||
persistent-template ==2.1.0.1,
|
||||
phantom-state ==0.2.0.2,
|
||||
pipes ==4.1.4,
|
||||
pipes-concurrency ==2.0.2,
|
||||
pipes-parse ==3.0.2,
|
||||
placeholders ==0.1,
|
||||
pointed ==4.1.1,
|
||||
polyparse ==1.9,
|
||||
pointed ==4.2,
|
||||
polyparse ==1.10,
|
||||
pool-conduit ==0.1.2.3,
|
||||
postgresql-binary ==0.5.0,
|
||||
postgresql-libpq ==0.9.0.1,
|
||||
postgresql-simple ==0.4.8.0,
|
||||
postgresql-simple ==0.4.9.0,
|
||||
pqueue ==1.2.1,
|
||||
prefix-units ==0.1.0.2,
|
||||
prelude-extras ==0.4,
|
||||
@ -584,8 +601,9 @@ constraints: abstract-deque ==0.3,
|
||||
QuasiText ==0.1.2.5,
|
||||
QuickCheck ==2.7.6,
|
||||
quickcheck-assertions ==0.1.1,
|
||||
quickcheck-instances ==0.3.9,
|
||||
quickcheck-instances ==0.3.10,
|
||||
quickcheck-io ==0.1.1,
|
||||
quickcheck-unicode ==1.0.0.0,
|
||||
quickpull ==0.4.0.0,
|
||||
rainbow ==0.20.0.4,
|
||||
rainbow-tests ==0.20.0.4,
|
||||
@ -594,6 +612,7 @@ constraints: abstract-deque ==0.3,
|
||||
random-shuffle ==0.0.4,
|
||||
random-source ==0.3.0.6,
|
||||
rank1dynamic ==0.2.0.1,
|
||||
Rasterific ==0.4,
|
||||
raw-strings-qq ==1.0.2,
|
||||
ReadArgs ==1.2.2,
|
||||
reducers ==3.10.3,
|
||||
@ -606,8 +625,9 @@ constraints: abstract-deque ==0.3,
|
||||
regexpr ==0.5.4,
|
||||
regex-tdfa ==1.2.0,
|
||||
regex-tdfa-rc ==1.1.8.3,
|
||||
regular ==0.3.4.3,
|
||||
regular ==0.3.4.4,
|
||||
regular-xmlpickler ==0.2,
|
||||
rematch ==0.2.0.0,
|
||||
repa ==3.3.1.2,
|
||||
repa-algorithms ==3.3.1.2,
|
||||
repa-devil ==0.3.2.2,
|
||||
@ -615,7 +635,7 @@ constraints: abstract-deque ==0.3,
|
||||
reroute ==0.2.2.1,
|
||||
resource-pool ==0.2.3.2,
|
||||
resourcet ==1.1.3.3,
|
||||
rest-client ==0.4.0.1,
|
||||
rest-client ==0.4.0.2,
|
||||
rest-core ==0.33.1.2,
|
||||
rest-gen ==0.16.1.3,
|
||||
rest-happstack ==0.2.10.3,
|
||||
@ -641,12 +661,12 @@ constraints: abstract-deque ==0.3,
|
||||
setenv ==0.1.1.1,
|
||||
SHA ==1.6.4.1,
|
||||
shake ==0.14.2,
|
||||
shake-language-c ==0.6.2,
|
||||
shake-language-c ==0.6.3,
|
||||
shakespeare ==2.0.2.1,
|
||||
shakespeare-i18n ==1.1.0,
|
||||
shakespeare-text ==1.1.0,
|
||||
shell-conduit ==4.5,
|
||||
shelly ==1.5.6,
|
||||
shelly ==1.5.7,
|
||||
silently ==1.2.4.1,
|
||||
simple-reflect ==0.3.2,
|
||||
simple-sendfile ==0.2.18,
|
||||
@ -657,16 +677,18 @@ constraints: abstract-deque ==0.3,
|
||||
smallcheck ==1.1.1,
|
||||
smtLib ==1.0.7,
|
||||
snap ==0.13.3.2,
|
||||
snap-core ==0.9.6.3,
|
||||
snap-core ==0.9.6.4,
|
||||
snaplet-fay ==0.3.3.8,
|
||||
snap-server ==0.9.4.5,
|
||||
snap-server ==0.9.4.6,
|
||||
socks ==0.5.4,
|
||||
sodium ==0.11.0.2,
|
||||
sodium ==0.11.0.3,
|
||||
sourcemap ==0.1.3.0,
|
||||
speculation ==1.5.0.1,
|
||||
sphinx ==0.6.0.1,
|
||||
split ==0.2.2,
|
||||
Spock ==0.7.5.1,
|
||||
Spock ==0.7.6.0,
|
||||
Spock-digestive ==0.1.0.0,
|
||||
Spock-worker ==0.2.1.3,
|
||||
spoon ==0.3.1,
|
||||
sqlite-simple ==0.4.8.0,
|
||||
stateref ==0.3,
|
||||
@ -675,7 +697,7 @@ constraints: abstract-deque ==0.3,
|
||||
statistics-linreg ==0.3,
|
||||
stm ==2.4.4,
|
||||
stm-chans ==3.0.0.2,
|
||||
stm-conduit ==2.5.2,
|
||||
stm-conduit ==2.5.3,
|
||||
stm-containers ==0.2.7,
|
||||
stm-stats ==0.2.0.0,
|
||||
storable-complex ==0.2.1,
|
||||
@ -689,11 +711,11 @@ constraints: abstract-deque ==0.3,
|
||||
stringsearch ==0.3.6.5,
|
||||
stylish-haskell ==0.5.11.0,
|
||||
SVGFonts ==1.4.0.3,
|
||||
syb ==0.4.2,
|
||||
syb ==0.4.3,
|
||||
syb-with-class ==0.6.1.5,
|
||||
system-canonicalpath ==0.2.0.0,
|
||||
system-fileio ==0.3.16,
|
||||
system-filepath ==0.4.13,
|
||||
system-filepath ==0.4.13.1,
|
||||
system-posix-redirect ==1.1.0.1,
|
||||
tabular ==0.2.2.5,
|
||||
tagged ==0.7.3,
|
||||
@ -722,7 +744,7 @@ constraints: abstract-deque ==0.3,
|
||||
testing-feat ==0.4.0.2,
|
||||
testpack ==2.1.3.0,
|
||||
texmath ==0.8.0.1,
|
||||
text ==1.1.1.3,
|
||||
text ==1.2.0.3,
|
||||
text-binary ==0.1.0,
|
||||
text-format ==0.3.1.1,
|
||||
text-icu ==0.7.0.0,
|
||||
@ -731,7 +753,7 @@ constraints: abstract-deque ==0.3,
|
||||
th-expand-syns ==0.3.0.4,
|
||||
th-extras ==0.0.0.2,
|
||||
th-lift ==0.7,
|
||||
th-orphans ==0.8.2,
|
||||
th-orphans ==0.8.3,
|
||||
threads ==0.5.1.2,
|
||||
th-reify-many ==0.1.2,
|
||||
thyme ==0.3.5.5,
|
||||
@ -752,7 +774,7 @@ constraints: abstract-deque ==0.3,
|
||||
type-eq ==0.4.2,
|
||||
type-list ==0.0.0.0,
|
||||
udbus ==0.2.1,
|
||||
unbounded-delays ==0.1.0.8,
|
||||
unbounded-delays ==0.1.0.9,
|
||||
union-find ==0.2,
|
||||
uniplate ==1.6.12,
|
||||
unix installed,
|
||||
@ -763,7 +785,7 @@ constraints: abstract-deque ==0.3,
|
||||
url ==2.1.3,
|
||||
utf8-light ==0.4.2,
|
||||
utf8-string ==0.3.8,
|
||||
uuid ==1.3.7,
|
||||
uuid ==1.3.8,
|
||||
vault ==0.3.0.4,
|
||||
vector ==0.10.12.2,
|
||||
vector-algorithms ==0.6.0.3,
|
||||
@ -778,21 +800,22 @@ constraints: abstract-deque ==0.3,
|
||||
wai-app-static ==3.0.0.5,
|
||||
wai-conduit ==3.0.0.2,
|
||||
wai-eventsource ==3.0.0,
|
||||
wai-extra ==3.0.3.1,
|
||||
wai-extra ==3.0.3.2,
|
||||
wai-logger ==2.2.3,
|
||||
wai-middleware-static ==0.6.0.1,
|
||||
wai-websockets ==3.0.0.3,
|
||||
warp ==3.0.4.1,
|
||||
warp ==3.0.5,
|
||||
warp-tls ==3.0.1.1,
|
||||
webdriver ==0.6.0.3,
|
||||
web-fpco ==0.1.1.0,
|
||||
websockets ==0.9.2.1,
|
||||
websockets ==0.9.2.2,
|
||||
wizards ==1.0.1,
|
||||
wl-pprint ==1.1,
|
||||
wl-pprint-extras ==3.5.0.3,
|
||||
wl-pprint-terminfo ==3.7.1.3,
|
||||
wl-pprint-text ==1.1.0.2,
|
||||
wl-pprint-text ==1.1.0.3,
|
||||
word8 ==0.1.1,
|
||||
wordpass ==1.0.0.2,
|
||||
X11 ==1.6.1.2,
|
||||
x509 ==1.5.0.1,
|
||||
x509-store ==1.5.0,
|
||||
@ -804,7 +827,7 @@ constraints: abstract-deque ==0.3,
|
||||
xml-conduit ==1.2.3.1,
|
||||
xmlgen ==0.6.2.1,
|
||||
xml-hamlet ==0.4.0.9,
|
||||
xmlhtml ==0.2.3.3,
|
||||
xmlhtml ==0.2.3.4,
|
||||
xml-types ==0.3.4,
|
||||
xss-sanitize ==0.3.5.4,
|
||||
yackage ==0.7.0.6,
|
||||
@ -812,11 +835,12 @@ constraints: abstract-deque ==0.3,
|
||||
Yampa ==0.9.6,
|
||||
YampaSynth ==0.2,
|
||||
yesod ==1.4.1.3,
|
||||
yesod-auth ==1.4.1.1,
|
||||
yesod-auth ==1.4.1.2,
|
||||
yesod-auth-deskcom ==1.4.0,
|
||||
yesod-auth-fb ==1.6.6,
|
||||
yesod-auth-hashdb ==1.4.1.1,
|
||||
yesod-bin ==1.4.3.1,
|
||||
yesod-auth-hashdb ==1.4.1.2,
|
||||
yesod-auth-oauth2 ==0.0.11,
|
||||
yesod-bin ==1.4.3.2,
|
||||
yesod-core ==1.4.7.1,
|
||||
yesod-eventsource ==1.4.0.1,
|
||||
yesod-fay ==0.7.0,
|
||||
@ -827,7 +851,7 @@ constraints: abstract-deque ==0.3,
|
||||
yesod-persistent ==1.4.0.2,
|
||||
yesod-sitemap ==1.4.0.1,
|
||||
yesod-static ==1.4.0.4,
|
||||
yesod-test ==1.4.2.1,
|
||||
yesod-test ==1.4.2.2,
|
||||
yesod-text-markdown ==0.1.7,
|
||||
yesod-websockets ==0.2.1.1,
|
||||
zeromq4-haskell ==0.6.2,
|
||||
|
||||
@ -5,5 +5,5 @@ stanzas:
- production
env:
  STACKAGE_CABAL_LOADER: "0"
  STACKAGE_HOOGLE_GEN: "0"
host: www.stackage.org
copy-to: fpuser@www.stackage.org:/var/opt/keter/incoming
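
The two entries added to the env stanza are plain environment variables, so gating the loaders in production reduces to a lookup at startup. A rough sketch (the helper name is assumed, not from this patch):

-- Illustration only: 'hoogleGenEnabled' is an assumed helper name.
import System.Environment (getEnvironment)

hoogleGenEnabled :: IO Bool
hoogleGenEnabled = do
    env <- getEnvironment
    return $ lookup "STACKAGE_HOOGLE_GEN" env /= Just "0"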
@ -134,3 +134,7 @@ Suggested
    package PackageName
    insteadOf PackageName
    UniqueSuggested package insteadOf

UploadProgress
    message Text
    dest Text Maybe
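
The new UploadProgress entity backs the reworked /progress route below. With persistent's standard insert/get operations, writing and polling a record would look roughly like this (the helper names are assumed, not from this patch):

-- Illustration only: assumed helper names around persistent's insert/get.
recordProgress :: Text -> Maybe Text -> SqlPersistT IO UploadProgressId
recordProgress msg mdest = insert (UploadProgress msg mdest)

lookupProgress :: UploadProgressId -> SqlPersistT IO (Maybe UploadProgress)
lookupProgress = get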
@ -1 +1,2 @@
User-agent: *
Disallow: /haddock/
@ -25,12 +25,13 @@
/package/#PackageNameVersion StackageSdistR GET
/packages SnapshotPackagesR GET
/docs DocsR GET
/hoogle HoogleR GET

/hackage-view/#HackageView/00-index.tar.gz HackageViewIndexR GET
/hackage-view/#HackageView/package/#PackageNameVersion HackageViewSdistR GET
/aliases AliasesR PUT
/alias/#Slug/#Slug/*Texts AliasR
/progress/#Int ProgressR GET
/progress/#UploadProgressId ProgressR GET
/system SystemR GET
/haddock/#SnapSlug/*Texts HaddockR GET
/package/#PackageName PackageR GET
@ -54,3 +55,5 @@

/refresh-deprecated RefreshDeprecatedR GET
/upload2 UploadV2R PUT
/build-version BuildVersionR GET
/package-counts PackageCountsR GET
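
Each new route pairs with a handler module added to the cabal file below. By Yesod convention a GET route named BuildVersionR dispatches to getBuildVersionR, so the minimal shape of such a handler is as follows (the body is a placeholder, not the real implementation from Handler.BuildVersion):

-- Illustration only: minimal handler shape; the real bodies live in
-- Handler.BuildVersion and Handler.PackageCounts.
getBuildVersionR :: Handler Text
getBuildVersionR = return "build version placeholder"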
fpbuild.config (new file)
@ -0,0 +1,8 @@
docker:
  repo-suffix: "_ghc-7.8.4.20141229_stackage-lts-1.0"
  image-tag: "20150101"
  # For fpbuild <= 0.1.0
  registry-username: "dummy"
  registry-password: "no-auth-required"
packages:
- "."
@ -27,6 +27,7 @@ library
    Data.Hackage.DeprecationInfo
    Data.Hackage.Views
    Data.WebsiteContent
    Data.Unpacking
    Types
    Handler.Home
    Handler.Snapshots
@ -44,6 +45,7 @@ library
    Handler.Progress
    Handler.System
    Handler.Haddock
    Handler.Hoogle
    Handler.Package
    Handler.PackageList
    Handler.CompressorStatus
@ -51,6 +53,8 @@ library
    Handler.BannedTags
    Handler.RefreshDeprecated
    Handler.UploadV2
    Handler.BuildVersion
    Handler.PackageCounts

    if flag(dev) || flag(library-only)
        cpp-options: -DDEVELOPMENT
@ -83,6 +87,7 @@ library
        ScopedTypeVariables
        BangPatterns
        TupleSections
        DeriveGeneric

    build-depends:
        base >= 4
@ -150,9 +155,14 @@ library
        , formatting
        , blaze-html
        , haddock-library
        , yesod-gitrepo
        , async
        , stackage >= 0.4
        , yesod-gitrepo >= 0.1.1
        , hoogle
        , spoon
        , deepseq
        , deepseq-generics
        , auto-update

executable stackage-server
    if flag(library-only)
@ -1,8 +1,9 @@
<h1>Module listing for #{toPathPiece slug}
<p>
    <a href=@{SnapshotR slug StackageHomeR}>Return to snapshot
<ul>
    $forall (name, url, package, version) <- modules
        <li>
            <a href=#{url}>#{name}
            (#{package}-#{version})
<div .container>
    <h1>Module listing for #{toPathPiece slug}
    <p>
        <a href=@{SnapshotR slug StackageHomeR}>Return to snapshot
    <ul>
        $forall (name, url, package, version) <- modules
            <li>
                <a href=#{url}>#{name}
                (#{package}-#{version})
templates/hoogle-form.hamlet (new file)
@ -0,0 +1,6 @@
<form .hoogle action=@{SnapshotR slug HoogleR}>
    <input type=search autofocus name=q value=#{queryText} placeholder="Hoogle Search Phrase" .search>
    <input .btn type="submit" value="Search">
    <label .checkbox .exact-lookup for=exact title="Only find identifiers matching your search term precisely">
        <input type=checkbox name=exact :exact:checked #exact>
        Exact lookup
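
Pages embed this form via ^{hoogleForm} (see hoogle.hamlet and the snapshot page below). One plausible wrapper widget, assuming the scaffold's widgetFile splice and the slug/queryText/exact bindings the template expects:

-- Illustration only: one way the embedding widget could be defined;
-- the actual definition is not shown in this diff.
hoogleForm :: SnapSlug -> Text -> Bool -> Widget
hoogleForm slug queryText exact = $(widgetFile "hoogle-form")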
templates/hoogle-form.lucius (new file)
@ -0,0 +1,14 @@
form.hoogle {
    margin-bottom: 20px;
    .search {
        width: 25em;
    }
    input {
        margin-bottom: 0;
    }
}

.exact-lookup {
    display: inline-block;
    margin-left: 1em;
}
templates/hoogle.hamlet (new file)
@ -0,0 +1,39 @@
<div .container>
    <div .content>
        <h1>Hoogle Search (experimental)
        <p>Within <a href=@{snapshotLink}>#{stackageTitle stackage}</a>
        ^{hoogleForm}
        $case mresults
            $of HoogleQueryBad err
                <p>#{err}
                <p>For information on what queries should look like, see the <a href="http://www.haskell.org/haskellwiki/Hoogle">hoogle user manual</a>.
            $of HoogleQueryOutput _query results mtotalCount
                $if null results
                    <p>Your search produced no results.
                $else
                    <ol .search-results>
                        $forall HoogleResult url sources self docs <- results
                            <li>
                                <p .self>
                                    <a href=#{url}>#{preEscapedToHtml self}
                                <table .sources>
                                    $forall (pkg, modus) <- sources
                                        <tr>
                                            <th>
                                                <a href=#{plURL pkg}>#{plName pkg}
                                            <td>
                                                $forall ModuleLink name url' <- modus
                                                    <a href=#{url'}>#{name}
                                $if null docs
                                    <p .nodocs>No documentation available.
                                $else
                                    <p .docs>#{docs}
                    <p .pagination>
                        $with mpageCount <- fmap getPageCount mtotalCount
                            Page #{page} of #{maybe "many" show mpageCount} #
                            $if page > 1
                                |
                                <a href=@?{pageLink $ page - 1}>Previous
                            $if maybe True ((<) page) mpageCount
                                |
                                <a href=@?{pageLink $ page + 1}>Next
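
The pagination in this template is the usual ceiling division of the total hit count by the page size, so getPageCount is presumably along these lines (a sketch; the page-size constant is an assumption, not taken from this diff):

-- Illustration only: assumed page size; the real constant may differ.
resultsPerPage :: Int
resultsPerPage = 25

getPageCount :: Int -> Int
getPageCount totalCount = (totalCount + resultsPerPage - 1) `div` resultsPerPage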
templates/hoogle.julius (new file)
@ -0,0 +1,5 @@
$(function() {
    var input = $(".hoogle .search").get(0);
    var len = input.value.length;
    input.setSelectionRange(len, len);
})
templates/hoogle.lucius (new file)
@ -0,0 +1,63 @@
ol.search-results {
    margin: 0;
    padding: 0;
    list-style: none;
}

.self {
    margin-bottom: 0;
    /* Use bold instead of italics to indicate matching part of search */
    a {
        b {
            font-weight: normal;
        }
        i {
            font-weight: bold;
            font-style: normal;
        }
    }
}

table.sources {
    margin: 0;
    padding: 0;
    font-size: 0.8em;

    th {
        padding-right: 0.5em;
    }

    a, a:visited {
        color: #060;
    }
}

.docs {
    white-space: pre-wrap;
    background: #e8e8e8;
}

.docs, .nodocs {
    margin-left: 1em;
    padding: 0.5em;
}

.nodocs {
    font-style: italic;
}

.haddocks {
    font-weight: bold;
    margin-bottom: 1em;

    ul {
        display: inline;
        padding: 0;
        margin: 0;
    }
    li {
        display: inline-block;
        font-weight: normal;
        margin-left: 1em;
    }
}
templates/package-counts.hamlet (new file)
@ -0,0 +1,20 @@
<div .container>
    <h1>Package counts

    <p>
        This page provides historical information on the number of packages included
        in Stackage Nightly and LTS Haskell snapshots, purely for the sake of
        curiosity.

    <table>
        <thead>
            <tr>
                <th .name>Title
                <th .count>Count
                <th .date>Date
        <tbody>
            $forall c <- counts
                <tr>
                    <td .name>#{name c}
                    <td .count>#{packages c}
                    <td .date>#{show $ date c}
templates/package-counts.lucius (new file)
@ -0,0 +1,12 @@
th {
    font-size: 1.2em;
}

td, th {
    padding: 0.5em;
}

.name {
    text-align: right;
    font-weight: bold;
}
@ -57,8 +57,6 @@ h3 {
    border-top: 1px solid #ddd;
    padding-top: 0.5em;
    ul {
        max-height: 20ex;
        overflow: auto;
        list-style-type: none;
        margin-left: 0;
        padding-left: 0;
@ -37,8 +37,13 @@ $newline never
<a href=@{SnapshotR slug StackageCabalConfigR}?global=true>
    the global configuration instructions

<h3>
    Packages
<h3>Hoogle (experimental)
^{hoogleForm}

<h3>Packages

<p>
    <a href=@{SnapshotR slug DocsR}>View documentation by modules

<div .container .content>
    <div .packages>