diff --git a/Application.hs b/Application.hs
index dd07e0e..67c62ed 100644
--- a/Application.hs
+++ b/Application.hs
@@ -11,9 +11,6 @@ import Control.Concurrent (forkIO, threadDelay)
 import Control.Exception (catch)
 import Control.Monad.Logger (runLoggingT, LoggingT, defaultLogStr)
 import Data.BlobStore (fileStore, cachedS3Store)
-import Data.Hackage
-import Data.Hackage.DeprecationInfo
-import Data.Unpacking (newDocUnpacker, createHoogleDatabases)
 import Data.WebsiteContent
 import Data.Slug (SnapSlug (..), safeMakeSlug, HasGenIO)
 import Data.Streaming.Network (bindPortTCP)
@@ -53,28 +50,21 @@ import Handler.Snapshots
 import Handler.Profile
 import Handler.Email
 import Handler.ResetToken
-import Handler.UploadStackage
 import Handler.StackageHome
 import Handler.StackageIndex
 import Handler.StackageSdist
-import Handler.Aliases
-import Handler.Alias
-import Handler.Progress
 import Handler.System
 import Handler.Haddock
 import Handler.Package
 import Handler.PackageList
-import Handler.CompressorStatus
 import Handler.Tag
 import Handler.BannedTags
-import Handler.RefreshDeprecated
-import Handler.UploadV2
 import Handler.Hoogle
 import Handler.BuildVersion
-import Handler.PackageCounts
 import Handler.Sitemap
 import Handler.BuildPlan
 import Handler.Download
+import Handler.OldLinks

 -- This line actually creates our YesodDispatch instance. It is the second half
 -- of the call to mkYesodData which occurs in Foundation.hs. Please see the
@@ -157,9 +147,6 @@ makeFoundation useEcho conf = do

     blobStore' <- loadBlobStore manager conf

-    let haddockRootDir' = "/tmp/stackage-server-haddocks2"
-    widgetCache' <- newIORef mempty
-
     websiteContent' <- if development
         then do
             void $ rawSystem "git"
@@ -182,7 +169,6 @@ makeFoundation useEcho conf = do
     let runDB' :: (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a
         runDB' = flip (Database.Persist.runPool dbconf) p

-    docUnpacker <- newDocUnpacker haddockRootDir' blobStore' runDB'

     let logger = Yesod.Core.Types.Logger loggerSet' getter
         foundation = App
@@ -194,9 +180,6 @@ makeFoundation useEcho conf = do
             , appLogger = logger
             , genIO = gen
             , blobStore = blobStore'
-            , haddockRootDir = haddockRootDir'
-            , appDocUnpacker = docUnpacker
-            , widgetCache = widgetCache'
             , websiteContent = websiteContent'
             }

@@ -209,27 +192,16 @@ makeFoundation useEcho conf = do
     flip runLoggingT (messageLoggerSource foundation logger) $
         flip (Database.Persist.runPool dbconf) p $ do
             runMigration migrateAll
+            {-
             checkMigration 1 fixSnapSlugs
             checkMigration 2 setCorePackages
+            -}

     let updateDB = lookup "STACKAGE_CABAL_LOADER" env /= Just "0"
         hoogleGen = lookup "STACKAGE_HOOGLE_GEN" env /= Just "0"
         forceUpdate = lookup "STACKAGE_FORCE_UPDATE" env == Just "1"
-        loadCabalFiles' = appLoadCabalFiles updateDB forceUpdate foundation dbconf p
-
-    -- Start the cabal file loader
-    ifRunCabalLoader $ forkIO $ forever $ flip runLoggingT (messageLoggerSource foundation logger) $ do
-        $logInfoS "CLEANUP" "Cleaning up /tmp"
-        now <- liftIO getCurrentTime
-        runResourceT $ sourceDirectory "/tmp" $$ mapM_C (cleanupTemp now)
-        $logInfoS "CLEANUP" "Cleaning up complete"
-
-        loadCabalFiles'
-
-        when hoogleGen $ liftIO $ createHoogleDatabases blobStore' runDB' putStrLn urlRender'
-
-        liftIO $ threadDelay $ 30 * 60 * 1000000

     return foundation
   where
     ifRunCabalLoader m =
         if cabalFileLoader
@@ -255,6 +227,8 @@ cabalLoaderMain = do
     void $ catchIO (bindPortTCP 17834 "127.0.0.1")
         $ \_ -> error $ "cabal loader process already running, exiting"
+    error "cabalLoaderMain"
+    {- FIXME
     conf <- fromArgs parseExtra
     dbconf <- getDbConf conf
     pool <- Database.Persist.createPoolConfig dbconf
@@ -297,77 +271,7 @@ cabalLoaderMain = do
     logFunc loc src level str
         | level > LevelDebug = S.hPutStr stdout $ fromLogStr $ defaultLogStr loc src level str
         | otherwise = return ()
-
-appLoadCabalFiles :: ( PersistConfig c
-                     , PersistConfigBackend c ~ SqlPersistT
-                     , HasHackageRoot env
-                     , HasBlobStore env StoreKey
-                     , HasHttpManager env
-                     )
-                  => Bool -- ^ update database?
-                  -> Bool -- ^ force update?
-                  -> env
-                  -> c
-                  -> PersistConfigPool c
-                  -> LoggingT IO ()
-appLoadCabalFiles updateDB forceUpdate env dbconf p = do
-    eres <- tryAny $ flip runReaderT env $ do
-        let runDB' :: SqlPersistT (ResourceT (ReaderT env (LoggingT IO))) a
-                   -> ReaderT env (LoggingT IO) a
-            runDB' = runResourceT . flip (Database.Persist.runPool dbconf) p
-
-        $logInfo "Updating deprecation tags"
-        loadDeprecationInfo >>= \ei -> case ei of
-            Left e -> $logError (pack e)
-            Right info -> runDB' $ do
-                deleteWhere ([] :: [Filter Deprecated])
-                insertMany_ (deprecations info)
-                deleteWhere ([] :: [Filter Suggested])
-                insertMany_ (suggestions info)
-        $logInfo "Finished updating deprecation tags"
-
-        let toMDPair (E.Value name, E.Value version, E.Value hash') =
-                (name, (version, hash'))
-        metadata0 <- fmap (mapFromList . map toMDPair)
-                   $ runDB' $ E.select $ E.from $ \m -> return
-            ( m E.^. MetadataName
-            , m E.^. MetadataVersion
-            , m E.^. MetadataHash
-            )
-        UploadState _ newMD <- loadCabalFiles updateDB forceUpdate metadata0
-        $logInfo $ "Updating metadatas: " ++ tshow (length newMD)
-        runDB' $ do
-            let newMD' = toList newMD
-            deleteWhere [MetadataName <-. map metadataName newMD']
-            insertMany_ newMD'
-            forM_ newMD' $ \md -> do
-                deleteWhere [DependencyUser ==. metadataName md]
-                insertMany_ $ flip map (metadataDeps md) $ \dep ->
-                    Dependency (PackageName dep) (metadataName md)
-
-    case eres of
-        Left e -> $logError $ tshow e
-        Right () -> return ()
-
-cleanupTemp :: UTCTime -> FilePath -> ResourceT (LoggingT IO) ()
-cleanupTemp now fp
-    | any (`isPrefixOf` name) prefixes = handleAny ($logError . tshow) $ do
-        modified <- liftIO $ getModified fp
-        if (diffUTCTime now modified > 60 * 60)
-            then do
-                $logInfoS "CLEANUP" $ "Removing temp directory: " ++ fpToText fp
-                liftIO $ removeTree fp
-                $logInfoS "CLEANUP" $ "Temp directory deleted: " ++ fpToText fp
-            else $logInfoS "CLEANUP" $ "Ignoring recent entry: " ++ fpToText fp
-    | otherwise = $logInfoS "CLEANUP" $ "Ignoring unmatched path: " ++ fpToText fp
-  where
-    name = fpToText $ filename fp
-    prefixes = asVector $ pack
-        [ "hackage-index"
-        , "createview"
-        , "build00index."
-        , "newindex"
-        ]
+    -}

 -- for yesod devel
 getApplicationDev :: Bool -> IO (Int, Application)
@@ -387,38 +291,3 @@ checkMigration num f = do
     case eres of
         Left _ -> return ()
         Right _ -> f
-
-fixSnapSlugs :: (MonadResource m, HasGenIO env, MonadReader env m)
-             => ReaderT SqlBackend m ()
-fixSnapSlugs =
-    selectSource [] [Asc StackageUploaded] $$ mapM_C go
-  where
-    go (Entity sid Stackage {..}) =
-        loop (1 :: Int)
-      where
-        base = T.replace "haskell platform" "hp"
-             $ T.replace "stackage build for " ""
-             $ toLower stackageTitle
-        loop 50 = error "fixSnapSlugs can't find a good slug"
-        loop i = do
-            slug' <- lift $ safeMakeSlug base $ if i == 1 then False else True
-            let slug = SnapSlug slug'
-            ms <- getBy $ UniqueSnapshot slug
-            case ms of
-                Nothing -> update sid [StackageSlug =. slug]
-                Just _ -> loop (i + 1)
-
-setCorePackages :: MonadIO m => ReaderT SqlBackend m ()
-setCorePackages =
-    updateWhere
-        [ PackageName' <-. defaultCorePackages
-        , PackageCore ==. Nothing
-        ]
-        [PackageCore =. Just True]
-  where
-    defaultCorePackages = map PackageName $ words =<<
-        [ "ghc hoopl bytestring unix haskeline Cabal base time xhtml"
-        , "haskell98 hpc filepath process array integer-gmp bin-package-db"
-        , "containers haskell2010 binary ghc-prim old-time old-locale rts"
-        , "terminfo transformers deepseq pretty template-haskell directory"
-        ]
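Note: even with the loader body stubbed out behind error "cabalLoaderMain", the hunk above keeps the single-instance guard. A minimal standalone sketch of that idiom (the module and main wrapper are illustrative, not part of the patch):

    {-# LANGUAGE OverloadedStrings #-}
    {-# LANGUAGE ScopedTypeVariables #-}
    import Control.Exception (IOException, catch)
    import Control.Monad (void)
    import Data.Streaming.Network (bindPortTCP)

    main :: IO ()
    main = do
        -- bindPortTCP throws if another process already holds the port, so a
        -- fixed localhost port doubles as a process-wide lock; the socket is
        -- deliberately never closed, holding the lock for the process's life.
        void (bindPortTCP 17834 "127.0.0.1") `catch` \(_ :: IOException) ->
            error "cabal loader process already running, exiting"
        putStrLn "acquired single-instance lock"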
diff --git a/Data/Hackage.hs b/Data/Hackage.hs
deleted file mode 100644
index 358a732..0000000
--- a/Data/Hackage.hs
+++ /dev/null
@@ -1,379 +0,0 @@
-module Data.Hackage
-    ( loadCabalFiles
-    , sourceHackageSdist
-    , UploadState (..)
-    ) where
-
-import ClassyPrelude.Yesod hiding (get)
-import Types
-import Data.BlobStore
-import Data.Conduit.Lazy (MonadActive (..), lazyConsume)
-import qualified Codec.Archive.Tar as Tar
-import Control.Monad.Logger (runNoLoggingT)
-import qualified Data.Text as T
-import Data.Conduit.Zlib (ungzip)
-import System.IO.Temp (withSystemTempFile)
-import System.IO (IOMode (ReadMode), openBinaryFile)
-import Model (Metadata (..))
-import Distribution.PackageDescription.Parse (parsePackageDescription, ParseResult (ParseOk))
-import qualified Distribution.PackageDescription as PD
-import qualified Distribution.Package as PD
-import Control.Monad.State.Strict (put, get, execStateT, MonadState)
-import Crypto.Hash.Conduit (sinkHash)
-import Crypto.Hash (Digest, SHA256)
-import Data.Byteable (toBytes)
-import Distribution.Text (display)
-import Text.Markdown (Markdown (Markdown))
-import qualified Data.Traversable as T
-import qualified Data.Version
-import Text.ParserCombinators.ReadP (readP_to_S)
-import Text.Blaze.Html.Renderer.Utf8 (renderHtml)
-import Text.Blaze.Html (unsafeByteString)
-import qualified Text.Blaze.Html5 as H
-import qualified Text.Blaze.Html5.Attributes as A
-import qualified Documentation.Haddock.Parser as Haddock
-import Documentation.Haddock.Types (DocH (..), Hyperlink (..), Picture (..), Header (..), Example (..))
-import qualified Data.HashMap.Lazy as HM
-
-loadCabalFiles :: ( MonadActive m
-                  , MonadBaseControl IO m
-                  , MonadThrow m
-                  , MonadIO m
-                  , MonadReader env m
-                  , HasHttpManager env
-                  , HasBlobStore env StoreKey
-                  , HasHackageRoot env
-                  , MonadLogger m
-                  , MonadMask m
-                  )
-               => Bool -- ^ do the database updating
-               -> Bool -- ^ force updates regardless of hash value?
-               -> HashMap PackageName (Version, ByteString)
-               -> m (UploadState Metadata)
-loadCabalFiles dbUpdates forceUpdate metadata0 = (>>= T.mapM liftIO) $ flip execStateT (UploadState metadata1 mempty) $ do
-    HackageRoot root <- liftM getHackageRoot ask
-    $logDebug $ "Entering loadCabalFiles, root == " ++ root
-    req <- parseUrl $ unpack $ root ++ "/00-index.tar.gz"
-    withSystemTempFile "hackage-index" $ \tempIndex handleOut -> do
-        $logDebug $ "Requesting: " ++ tshow req
-        withResponse req $ \res -> responseBody res $$ sinkHandle handleOut
-        liftIO $ hClose handleOut
-        withBinaryFile tempIndex ReadMode $ \handleIn -> do
-            bss <- lazyConsume $ sourceHandle handleIn $= ungzip
-            tarSource (Tar.read $ fromChunks bss)
-                $$ parMapMC 32 go
-                =$ scanlC (\x _ -> x + 1) (0 :: Int)
-                =$ filterC ((== 0) . (`mod` 1000))
-                =$ mapM_C (\i -> $logInfo $ "Processing cabal file #" ++ tshow i)
-    $logInfo "Finished processing cabal files"
-  where
-    metadata1 = flip fmap metadata0 $ \(v, h) -> MetaSig
-        v
-        (fromMaybe (pack [0, 0, 0]) $ readVersion v)
-        h
-    withBinaryFile fp mode = bracket (liftIO $ openBinaryFile fp mode) (liftIO . hClose)
-
-    go entry = do
-        case Tar.entryContent entry of
-            Tar.NormalFile lbs _
-                | Just (name, version) <- parseFilePath (Tar.entryPath entry) -> do
-                    let key = HackageCabal name version
-                    -- It's no longer sufficient to simply check if the cabal
-                    -- file exists, since Hackage now allows updating in place.
-                    -- Instead, we have to check if it matches what we have
-                    -- and, if not, update it.
-                    store <- liftM getBlobStore ask
-                    newDigest :: Digest SHA256 <- sourceLazy lbs $$ sinkHash
-                    toStore <- withAcquire (storeRead' store key) $ \mcurr ->
-                        case mcurr of
-                            Nothing -> return True
-                            Just curr -> do
-                                -- Check if it matches. This is cheaper than
-                                -- always writing, since it can take advantage
-                                -- of the local filesystem cache and not go to
-                                -- S3 each time.
-                                currDigest <- curr $$ sinkHash
-                                return $! currDigest /= newDigest
-                    when toStore $ withAcquire (storeWrite' store key) $ \sink ->
-                        sourceLazy lbs $$ sink
-                    when dbUpdates $ do
-                        case readVersion version of
-                            Nothing -> return ()
-                            Just dataVersion -> setMetadata
-                                forceUpdate
-                                name
-                                version
-                                dataVersion
-                                (toBytes newDigest)
-                                (parsePackageDescription $ unpack $ decodeUtf8 lbs)
-            _ -> return ()
-
-readVersion :: Version -> Maybe (UVector Int)
-readVersion v =
-    case filter (null . snd) $ readP_to_S Data.Version.parseVersion . unpack . unVersion $ v of
-        (dv, _):_ -> Just $ pack $ Data.Version.versionBranch dv
-        [] -> Nothing
-
-tarSource :: (Exception e, MonadThrow m)
-          => Tar.Entries e
-          -> Producer m Tar.Entry
-tarSource Tar.Done = return ()
-tarSource (Tar.Fail e) = throwM e
-tarSource (Tar.Next e es) = yield e >> tarSource es
-
-data UploadState md = UploadState
-    { usMetadata :: !(HashMap PackageName MetaSig)
-    , usMetaChanges :: (HashMap PackageName md)
-    }
-    deriving (Functor, Foldable, Traversable)
-
-data MetaSig = MetaSig
-    {-# UNPACK #-} !Version
-    {-# UNPACK #-} !(UVector Int) -- versionBranch
-    {-# UNPACK #-} !ByteString -- hash
-
-setMetadata :: ( MonadBaseControl IO m
-               , MonadThrow m
-               , MonadIO m
-               , MonadReader env m
-               , MonadState (UploadState (IO Metadata)) m
-               , HasHttpManager env
-               , MonadLogger m
-               , MonadActive m
-               , HasBlobStore env StoreKey
-               , HasHackageRoot env
-               )
-            => Bool -- ^ force update?
-            -> PackageName
-            -> Version
-            -> UVector Int -- ^ versionBranch
-            -> ByteString
-            -> ParseResult PD.GenericPackageDescription
-            -> m ()
-setMetadata forceUpdate name version dataVersion hash' gpdRes = do
-    UploadState mdMap mdChanges <- get
-    let toUpdate =
-            case lookup name mdMap of
-                Just (MetaSig _currVersion currDataVersion currHash) ->
-                    case compare currDataVersion dataVersion of
-                        LT -> True
-                        GT -> False
-                        EQ -> currHash /= hash' || forceUpdate
-                Nothing -> True
-    if toUpdate
-        then case gpdRes of
-            ParseOk _ gpd -> do
-                !md <- getMetadata name version hash' gpd
-                put $! UploadState
-                    (insertMap name (MetaSig version dataVersion hash') mdMap)
-                    (HM.insert name md mdChanges)
-            _ -> return ()
-        else return ()
-
-getMetadata :: ( MonadActive m
-               , MonadIO m
-               , MonadBaseControl IO m
-               , MonadThrow m
-               , MonadReader env m
-               , HasBlobStore env StoreKey
-               , HasHackageRoot env
-               , HasHttpManager env
-               , MonadLogger m
-               )
-            => PackageName
-            -> Version
-            -> ByteString
-            -> PD.GenericPackageDescription
-            -> m (IO Metadata)
-getMetadata name version hash' gpd = do
-    let pd = PD.packageDescription gpd
-    env <- ask
-    return $ liftIO $ runNoLoggingT $ flip runReaderT env $ do
-        (mreadme, mchangelog, mlicenseContent) <-
-            grabExtraFiles name version
-#if MIN_VERSION_Cabal(1, 20, 0)
-                $ PD.licenseFiles pd
-#else
-                [PD.licenseFile pd]
-#endif
-        let collapseHtml = unsafeByteString . toStrict . renderHtml
-        return Metadata
-            { metadataName = name
-            , metadataVersion = version
-            , metadataHash = hash'
-            , metadataDeps = setToList
-                $ asSet
-                $ concat
-                [ foldMap goTree $ PD.condLibrary gpd
-                , foldMap (goTree . snd) $ PD.condExecutables gpd
-                ]
-            , metadataAuthor = pack $ PD.author pd
-            , metadataMaintainer = pack $ PD.maintainer pd
-            , metadataLicenseName = pack $ display $ PD.license pd
-            , metadataHomepage = pack $ PD.homepage pd
-            , metadataBugReports = pack $ PD.bugReports pd
-            , metadataSynopsis = pack $ PD.synopsis pd
-            , metadataSourceRepo = mapMaybe showSourceRepo $ PD.sourceRepos pd
-            , metadataCategory = pack $ PD.category pd
-            , metadataLibrary = isJust $ PD.library pd
-            , metadataExes = length $ PD.executables pd
-            , metadataTestSuites = length $ PD.testSuites pd
-            , metadataBenchmarks = length $ PD.benchmarks pd
-            , metadataReadme = collapseHtml $ fromMaybe
-                (hToHtml . Haddock.toRegular . Haddock.parseParas $ PD.description pd)
-                mreadme
-            , metadataChangelog = collapseHtml <$> mchangelog
-            , metadataLicenseContent = collapseHtml <$> mlicenseContent
-            }
-  where
-    goTree (PD.CondNode _ deps comps) = concatMap goDep deps ++ concatMap goComp comps
-    goDep (PD.Dependency (PD.PackageName n) _) = singletonSet $ pack n
-    goComp (_, tree, mtree) = goTree tree ++ maybe mempty goTree mtree
-
--- | Convert a Haddock doc to HTML.
-hToHtml :: DocH String String -> Html
-hToHtml =
-    go
-  where
-    go :: DocH String String -> Html
-    go DocEmpty = mempty
-    go (DocAppend x y) = go x ++ go y
-    go (DocString x) = toHtml x
-    go (DocParagraph x) = H.p $ go x
-    go (DocIdentifier s) = H.code $ toHtml s
-    go (DocIdentifierUnchecked s) = H.code $ toHtml s
-    go (DocModule s) = H.code $ toHtml s
-    go (DocWarning x) = H.span H.! A.class_ "warning" $ go x
-    go (DocEmphasis x) = H.em $ go x
-    go (DocMonospaced x) = H.code $ go x
-    go (DocBold x) = H.strong $ go x
-    go (DocUnorderedList xs) = H.ul $ foldMap (H.li . go) xs
-    go (DocOrderedList xs) = H.ol $ foldMap (H.li . go) xs
-    go (DocDefList xs) = H.dl $ flip foldMap xs $ \(x, y) ->
-        H.dt (go x) ++ H.dd (go y)
-    go (DocCodeBlock x) = H.pre $ H.code $ go x
-    go (DocHyperlink (Hyperlink url mlabel)) =
-        H.a H.! A.href (H.toValue url) $ toHtml label
-      where
-        label = fromMaybe url mlabel
-    go (DocPic (Picture url mtitle)) =
-        H.img H.! A.src (H.toValue url) H.! A.title (H.toValue $ fromMaybe mempty mtitle)
-    go (DocAName s) = H.div H.! A.id (H.toValue s) $ mempty
-    go (DocProperty s) = H.pre $ H.code $ toHtml s
-    go (DocExamples es) = flip foldMap es $ \(Example exp' ress) ->
-        H.div H.! A.class_ "example" $ do
-            H.pre H.! A.class_ "expression" $ H.code $ toHtml exp'
-            flip foldMap ress $ \res ->
-                H.pre H.! A.class_ "result" $ H.code $ toHtml res
-    go (DocHeader (Header level content)) =
-        wrapper level $ go content
-      where
-        wrapper 1 = H.h1
-        wrapper 2 = H.h2
-        wrapper 3 = H.h3
-        wrapper 4 = H.h4
-        wrapper 5 = H.h5
-        wrapper _ = H.h6
-
-showSourceRepo :: PD.SourceRepo -> Maybe Text
-showSourceRepo = fmap pack . PD.repoLocation
-
-grabExtraFiles :: ( MonadActive m
-                  , MonadIO m
-                  , MonadBaseControl IO m
-                  , MonadThrow m
-                  , MonadReader env m
-                  , HasBlobStore env StoreKey
-                  , HasHackageRoot env
-                  , HasHttpManager env
-                  , MonadLogger m
-                  )
-               => PackageName
-               -> Version
-               -> [String] -- ^ license files
-               -> m (Maybe Html, Maybe Html, Maybe Html) -- ^ README, changelog, license
-grabExtraFiles name version lfiles = runResourceT $ do
-    msrc <- sourceHackageSdist name version
-    handle (\(_ :: Tar.FormatError) -> return (Nothing,Nothing,Nothing)) $
-        case msrc of
-            Nothing -> return mempty
-            Just src -> do
-                bss <- lazyConsume $ src $= ungzip
-                tarSource (Tar.read $ fromChunks bss) $$ foldlC go mempty
-  where
-    go trip@(mreadme, mchangelog, mlicense) entry =
-        case Tar.entryContent entry of
-            Tar.NormalFile lbs _ ->
-                let name' = drop 1 $ dropWhile (/= '/') $ Tar.entryPath entry in
-                case toLower name' of
-                    "readme.md"     -> (md lbs, mchangelog, mlicense)
-                    "readme"        -> (txt lbs, mchangelog, mlicense)
-                    "readme.txt"    -> (txt lbs, mchangelog, mlicense)
-                    "changelog.md"  -> (mreadme, md lbs, mlicense)
-                    "changelog"     -> (mreadme, txt lbs, mlicense)
-                    "changelog.txt" -> (mreadme, txt lbs, mlicense)
-                    "changes.md"    -> (mreadme, md lbs, mlicense)
-                    "changes"       -> (mreadme, txt lbs, mlicense)
-                    "changes.txt"   -> (mreadme, txt lbs, mlicense)
-                    _ | name' `elem` lfiles -> (mreadme, mchangelog, txt lbs)
-                    _ -> trip
-            _ -> trip
-
-    md = wrapClass "markdown" . Markdown . decodeUtf8
-    txt = wrapClass "plain-text" . Textarea . toStrict . decodeUtf8
-
-    wrapClass clazz inner = Just $ H.div H.! A.class_ clazz $ toHtml inner
-
-parseFilePath :: String -> Maybe (PackageName, Version)
-parseFilePath s =
-    case filter (not . null) $ T.split (== '/') $ pack s of
-        (name:version:_) -> Just (PackageName name, Version version)
-        _ -> Nothing
-
-sourceHackageSdist :: ( MonadIO m
-                      , MonadThrow m
-                      , MonadBaseControl IO m
-                      , MonadResource m
-                      , MonadReader env m
-                      , HasHttpManager env
-                      , HasHackageRoot env
-                      , HasBlobStore env StoreKey
-                      , MonadLogger m
-                      )
-                   => PackageName
-                   -> Version
-                   -> m (Maybe (Source m ByteString))
-sourceHackageSdist name version = do
-    let key = HackageSdist name version
-    msrc1 <- storeRead key
-    case msrc1 of
-        Just src -> return $ Just src
-        Nothing -> do
-            HackageRoot root <- liftM getHackageRoot ask
-            let url = concat
-                    [ root
-                    , "/package/"
-                    , toPathPiece name
-                    , "-"
-                    , toPathPiece version
-                    , ".tar.gz"
-                    ]
-            req' <- parseUrl $ unpack url
-            let req = req' { checkStatus = \_ _ _ -> Nothing }
-            $logDebug $ "Requesting: " ++ tshow req
-            exists <- withResponse req $ \res ->
-                if responseStatus res == status200
-                    then do
-                        responseBody res $$ storeWrite key
-                        return True
-                    else return False
-            if exists
-                then storeRead key
-                else return Nothing
-
--- FIXME put in conduit-combinators
-parMapMC :: (MonadIO m, MonadBaseControl IO m)
-         => Int
-         -> (i -> m o)
-         -> Conduit i m o
-parMapMC _ = mapMC
diff --git a/Data/Hackage/DeprecationInfo.hs b/Data/Hackage/DeprecationInfo.hs
deleted file mode 100644
index d79be1d..0000000
--- a/Data/Hackage/DeprecationInfo.hs
+++ /dev/null
@@ -1,58 +0,0 @@
--- | Transforms http://hackage.haskell.org/packages/deprecated.json
--- into model data to be stored in the database.
-module Data.Hackage.DeprecationInfo
-    ( HackageDeprecationInfo(..)
-    , loadDeprecationInfo
-    ) where
-
-import ClassyPrelude.Yesod
-import Data.Aeson as Aeson
-import Model
-import Types
-
-data HackageDeprecationInfo = HackageDeprecationInfo {
-    deprecations :: [Deprecated],
-    suggestions :: [Suggested]
-}
-
-instance FromJSON HackageDeprecationInfo where
-    parseJSON j = do
-        deprecationRecords <- parseJSON j
-        return $ HackageDeprecationInfo {
-            deprecations = map toDeprecated deprecationRecords,
-            suggestions = concatMap toSuggestions deprecationRecords
-        }
-
-data DeprecationRecord = DeprecationRecord {
-    _deprecatedPackage :: PackageName,
-    _deprecatedInFavourOf :: [PackageName]
-}
-
-instance FromJSON DeprecationRecord where
-    parseJSON = withObject "DeprecationRecord" $ \obj -> do
-        package <- PackageName <$> (obj .: "deprecated-package")
-        inFavourOf <- map PackageName <$> (obj .: "in-favour-of")
-        return $ DeprecationRecord package inFavourOf
-
-toDeprecated :: DeprecationRecord -> Deprecated
-toDeprecated (DeprecationRecord deprecated _) = Deprecated deprecated
-
-toSuggestions :: DeprecationRecord -> [Suggested]
-toSuggestions (DeprecationRecord deprecated inFavourOf) =
-    map toSuggestion inFavourOf
-  where
-    toSuggestion favoured = Suggested {
-        suggestedPackage = favoured,
-        suggestedInsteadOf = deprecated
-    }
-
-loadDeprecationInfo ::
-    ( HasHttpManager env
-    , MonadReader env m
-    , MonadThrow m
-    , MonadIO m)
-    => m (Either String HackageDeprecationInfo)
-loadDeprecationInfo = do
-    req <- parseUrl "http://hackage.haskell.org/packages/deprecated.json"
-    res <- httpLbs req
-    return $! Aeson.eitherDecode (responseBody res)
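For reference, each entry of the deprecated.json feed consumed by the deleted loadDeprecationInfo has the shape {"deprecated-package": ..., "in-favour-of": [...]}. A self-contained version of the record parser, using plain Text in place of the app's PackageName wrapper:

    {-# LANGUAGE OverloadedStrings #-}
    import Data.Aeson (FromJSON (..), withObject, (.:))
    import Data.Text (Text)

    data DeprecationRecord = DeprecationRecord
        { deprecatedPackage :: Text
        , inFavourOf        :: [Text]
        } deriving Show

    instance FromJSON DeprecationRecord where
        parseJSON = withObject "DeprecationRecord" $ \o ->
            DeprecationRecord
                <$> o .: "deprecated-package"
                <*> o .: "in-favour-of"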
diff --git a/Data/Unpacking.hs b/Data/Unpacking.hs
deleted file mode 100644
index 43a70df..0000000
--- a/Data/Unpacking.hs
+++ /dev/null
@@ -1,504 +0,0 @@
--- | Code for unpacking documentation bundles, building the Hoogle databases,
--- and compressing/deduping contents.
-module Data.Unpacking
-    ( newDocUnpacker
-    , getHoogleDB
-    , makeHoogle
-    , createHoogleDatabases
-    ) where
-
-import Control.Concurrent.Async
-import Data.Conduit.Process
-import Import hiding (runDB)
-import Data.BlobStore
-import Handler.Haddock
-import Filesystem (createTree, isFile, removeTree, isDirectory, listDirectory, removeDirectory, removeFile, rename)
-import System.Posix.Files (createLink)
-import Crypto.Hash.Conduit (sinkHash)
-import Control.Concurrent (forkIO)
-import Control.Monad.Trans.Resource (allocate, release)
-import Data.Char (isAlpha)
-import qualified Hoogle
-import qualified Data.Text as T
-import qualified Data.Yaml as Y
-import System.IO (IOMode (ReadMode), withBinaryFile, openBinaryFile)
-import System.IO.Temp (withSystemTempFile, withTempFile, withSystemTempDirectory)
-import System.Exit (ExitCode (ExitSuccess))
-import qualified Filesystem.Path.CurrentOS as F
-import Data.Conduit.Zlib (gzip, ungzip)
-import qualified Data.ByteString.Base16 as B16
-import Data.Byteable (toBytes)
-import Crypto.Hash (Digest, SHA1)
-
-newDocUnpacker
-    :: FilePath -- ^ haddock root
-    -> BlobStore StoreKey
-    -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
-    -> IO DocUnpacker
-newDocUnpacker root store runDB = do
-    createDirs dirs
-
-    statusMapVar <- newTVarIO $ asMap mempty
-    messageVar <- newTVarIO "Inactive"
-    workChan <- atomically newTChan
-
-    let requestDocs forceUnpack ent = atomically $ do
-            var <- newTVar USBusy
-            modifyTVar statusMapVar
-                $ insertMap (stackageSlug $ entityVal ent) var
-            writeTChan workChan (forceUnpack, ent, var)
-
-    forkForever $ unpackWorker dirs runDB store messageVar workChan
-
-    return DocUnpacker
-        { duRequestDocs = \ent -> do
-            m <- readTVarIO statusMapVar
-            case lookup (stackageSlug $ entityVal ent) m of
-                Nothing -> do
-                    b <- isUnpacked dirs (entityVal ent)
-                    if b
-                        then return USReady
-                        else do
-                            requestDocs False ent
-                            return USBusy
-                Just us -> readTVarIO us
-        , duGetStatus = readTVarIO messageVar
-        , duForceReload = \ent -> do
-            atomically $ modifyTVar statusMapVar
-                $ deleteMap (stackageSlug $ entityVal ent)
-            requestDocs True ent
-        }
-  where
-    dirs = mkDirs root
-
-createDirs :: Dirs -> IO ()
-createDirs dirs = do
-    createTree $ dirCacheRoot dirs
-    createTree $ dirRawRoot dirs
-    createTree $ dirGzRoot dirs
-    createTree $ dirHoogleRoot dirs
-
--- | Check for the presence of file system artifacts indicating that the docs
--- have been unpacked.
-isUnpacked :: Dirs -> Stackage -> IO Bool
-isUnpacked dirs stackage = isFile $ completeUnpackFile dirs stackage
-
-defaultHooDest :: Dirs -> Stackage -> FilePath
-defaultHooDest dirs stackage = dirHoogleFp dirs (stackageIdent stackage)
-    ["default-" ++ VERSION_hoogle ++ ".hoo"]
-
-forkForever :: IO () -> IO ()
-forkForever inner = mask $ \restore ->
-    void $ forkIO $ forever $ handleAny print $ restore $ forever inner
-
-unpackWorker
-    :: Dirs
-    -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
-    -> BlobStore StoreKey
-    -> TVar Text
-    -> TChan (Bool, Entity Stackage, TVar UnpackStatus)
-    -> IO ()
-unpackWorker dirs runDB store messageVar workChan = do
-    let say' = atomically . writeTVar messageVar
-    say' "Running the compressor"
-    let shouldStop = fmap not $ atomically $ isEmptyTChan workChan
-    handleAny print $ runCompressor shouldStop say' dirs
-
-    say' "Waiting for new work item"
-    (forceUnpack, ent, resVar) <- atomically $ readTChan workChan
-    shouldUnpack <-
-        if forceUnpack
-            then return True
-            else not <$> isUnpacked dirs (entityVal ent)
-
-    let say msg = atomically $ writeTVar messageVar $ concat
-            [ toPathPiece (stackageSlug $ entityVal ent)
-            , ": "
-            , msg
-            ]
-
-    when shouldUnpack $ do
-        say "Beginning of processing"
-
-        -- As soon as the raw unpack is complete, start serving docs
-        let onRawComplete = atomically $ writeTVar resVar USReady
-
-        eres <- tryAny $ unpacker dirs runDB store say onRawComplete ent
-        atomically $ writeTVar resVar $ case eres of
-            Left e -> USFailed $ tshow e
-            Right () -> USReady
-
-removeTreeIfExists :: FilePath -> IO ()
-removeTreeIfExists fp = whenM (isDirectory fp) (removeTree fp)
-
-unpackRawDocsTo
-    :: BlobStore StoreKey
-    -> PackageSetIdent
-    -> (Text -> IO ())
-    -> FilePath
-    -> IO ()
-unpackRawDocsTo store ident say destdir =
-    withSystemTempFile "haddock-bundle.tar.xz" $ \tempfp temph -> do
-        say "Downloading raw doc tarball"
-        withAcquire (storeRead' store (HaddockBundle ident)) $ \msrc ->
-            case msrc of
-                Nothing -> error "No haddocks exist for that snapshot"
-                Just src -> src $$ sinkHandle temph
-        hClose temph
-
-        createTree destdir
-        say "Unpacking tarball"
-        (ClosedStream, out, err, cph) <- streamingProcess (proc "tar" ["xf", tempfp])
-            { cwd = Just $ fpToString destdir
-            }
-        (ec, out', err') <- liftIO $ runConcurrently $ (,,)
-            <$> Concurrently (waitForStreamingProcess cph)
-            <*> Concurrently (out $$ foldC)
-            <*> Concurrently (err $$ foldC)
-        unless (ec == ExitSuccess) $ throwM
-            $ HaddockBundleUnpackException ec out' err'
-
-data HaddockBundleUnpackException = HaddockBundleUnpackException
-    !ExitCode
-    !ByteString
-    !ByteString
-    deriving (Show, Typeable)
-instance Exception HaddockBundleUnpackException
-
-unpacker
-    :: Dirs
-    -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
-    -> BlobStore StoreKey
-    -> (Text -> IO ())
-    -> IO () -- ^ onRawComplete
-    -> Entity Stackage
-    -> IO ()
-unpacker dirs runDB store say onRawComplete (Entity sid stackage@Stackage {..}) = do
-    say "Removing old directories, if they exist"
-    removeTreeIfExists $ dirRawIdent dirs stackageIdent
-    removeTreeIfExists $ dirGzIdent dirs stackageIdent
-    removeTreeIfExists $ dirHoogleIdent dirs stackageIdent
-
-    let destdir = dirRawIdent dirs stackageIdent
-    unpackRawDocsTo store stackageIdent say destdir
-    onRawComplete
-
-    createTree $ dirHoogleIdent dirs stackageIdent
-
-    -- Determine which packages have documentation and update the
-    -- database appropriately
-    say "Updating database for available documentation"
-    runResourceT $ runDB $ do
-        updateWhere
-            [PackageStackage ==. sid]
-            [PackageHasHaddocks =. False]
-        sourceDirectory destdir $$ mapM_C (\fp -> do
-            let mnv = nameAndVersionFromPath fp
-            forM_ mnv $ \(name, version) -> updateWhere
-                [ PackageStackage ==. sid
-                , PackageName' ==. PackageName name
-                , PackageVersion ==. Version version
-                ]
-                [PackageHasHaddocks =. True]
-            )
-
-    say "Unpack complete"
-    let completeFP = completeUnpackFile dirs stackage
-    liftIO $ do
-        createTree $ F.parent completeFP
-        writeFile completeFP ("Complete" :: ByteString)
-
-completeUnpackFile :: Dirs -> Stackage -> FilePath
-completeUnpackFile dirs stackage =
-    dirGzIdent dirs (stackageIdent stackage) </> "unpack-complete"
-
--- | Get the path to the Hoogle database, downloading from persistent storage
--- if necessary. This function will /not/ generate a new database, and
--- therefore is safe to run on a live web server.
-getHoogleDB :: Dirs
-            -> Stackage
-            -> Handler (Maybe FilePath)
-getHoogleDB dirs stackage = do
-    exists <- liftIO $ isFile fp
-    if exists
-        then return $ Just fp
-        else do
-            msrc <- storeRead key
-            case msrc of
-                Nothing -> return Nothing
-                Just src -> do
-                    liftIO $ createTree $ F.parent fp
-                    let tmpfp = fp <.> "tmp" -- FIXME add something random
-                    src $$ ungzip =$ sinkFile tmpfp
-                    liftIO $ rename tmpfp fp
-                    return $ Just fp
-  where
-    fp = defaultHooDest dirs stackage
-    key = HoogleDB (stackageIdent stackage) $ HoogleVersion VERSION_hoogle
-
--- | Make sure that the last 5 LTS and last 5 Nightly releases all have Hoogle
--- databases available.
-createHoogleDatabases
-    :: BlobStore StoreKey
-    -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
-    -> (Text -> IO ())
-    -> (Route App -> [(Text, Text)] -> Text)
-    -> IO ()
-createHoogleDatabases store runDB say urlRender = do
-    stackages <- runDB $ do
-        sids <- (++)
-            <$> fmap (map $ ltsStackage . entityVal)
-                     (selectList [] [Desc LtsMajor, Desc LtsMinor, LimitTo 5])
-            <*> fmap (map $ nightlyStackage . entityVal)
-                     (selectList [] [Desc NightlyDay, LimitTo 5])
-        catMaybes <$> mapM get sids
-    forM_ stackages $ \stackage -> do
-        let say' x = say $ concat
                [ toPathPiece $ stackageSlug stackage
                , ": "
                , x
                ]
-        handleAny (say' . tshow) $ makeHoogle store say' urlRender stackage
-
--- | Either download the Hoogle database from persistent storage, or create it.
-makeHoogle
-    :: BlobStore StoreKey
-    -> (Text -> IO ())
-    -> (Route App -> [(Text, Text)] -> Text)
-    -> Stackage
-    -> IO ()
-makeHoogle store say urlRender stackage = do
-    say "Making hoogle database"
-    exists <- storeExists' store hoogleKey
-    if exists
-        then say "Hoogle database already exists, skipping"
-        else do
-            say "Generating Hoogle database"
-            generate
-  where
-    ident = stackageIdent stackage
-    hoogleKey = HoogleDB ident $ HoogleVersion VERSION_hoogle
-
-    generate = withSystemTempDirectory "hoogle-database-gen" $ \hoogletemp' -> do
-        let hoogletemp = fpFromString hoogletemp'
-            rawdocs = hoogletemp </> "rawdocs"
-
-        unpackRawDocsTo store ident say rawdocs
-
-        say "Copying Hoogle text files to temp directory"
-        runResourceT $ copyHoogleTextFiles say rawdocs hoogletemp
-        say "Creating Hoogle database"
-        withSystemTempFile "default.hoo" $ \dstFP' dstH -> do
-            let dstFP = fpFromString dstFP'
-            hClose dstH
-            createHoogleDb say dstFP stackage hoogletemp urlRender
-            say "Uploading database to persistent storage"
-            withAcquire (storeWrite' store hoogleKey) $ \sink ->
-                runResourceT $ sourceFile dstFP $$ gzip =$ sink
-
-runCompressor :: IO Bool -- ^ should stop early?
-              -> (Text -> IO ()) -> Dirs -> IO ()
-runCompressor shouldStop say dirs =
-    handle (\EarlyStop -> return ()) $ runResourceT $ goDir $ dirRawRoot dirs
-  where
-    goDir dir = do
-        liftIO $ whenM shouldStop $ do
-            say "Stopping compressor early"
-            throwIO EarlyStop
-        liftIO $ say $ "Compressing directory: " ++ fpToText dir
-        sourceDirectory dir $$ mapM_C goFP
-        liftIO $ void $ tryIO $ removeDirectory dir
-
-    goFP fp = do
-        e <- liftIO $ isFile fp
-        if e
-            then liftIO $ do
-                liftIO $ say $ "Compressing file: " ++ fpToText fp
-                handle (print . asSomeException)
-                    $ gzipHash dirs suffix
-            else goDir fp
-      where
-        Just suffix = F.stripPrefix (dirRawRoot dirs </> "") fp
-
-data EarlyStop = EarlyStop
-    deriving (Show, Typeable)
-instance Exception EarlyStop
-
--- Procedure is to:
---
--- * Gzip the src file to a temp file, and get a hash of the gzipped contents
--- * If that hash doesn't exist in the cache, move the new file to the cache
--- * Create a hard link from dst to the file in the cache
--- * Delete src
-gzipHash :: Dirs
-         -> FilePath -- ^ suffix
-         -> IO ()
-gzipHash dirs suffix = do
-    withTempFile (fpToString $ dirCacheRoot dirs) "haddock-file.gz" $ \tempfp temph -> do
-        digest <- withBinaryFile (fpToString src) ReadMode $ \inh ->
-            sourceHandle inh
-                $= gzip
-                $$ (getZipSink $
-                    ZipSink (sinkHandle temph) *>
-                    ZipSink sinkHash)
-        hClose temph
-        let fpcache = dirCacheFp dirs digest
-        unlessM (isFile fpcache) $ do
-            createTree $ F.parent fpcache
-            rename (fpFromString tempfp) fpcache
-        createTree $ F.parent dst
-        createLink (fpToString fpcache) (fpToString dst)
-        removeFile src
-  where
-    src = dirRawRoot dirs </> suffix
-    dst = dirGzRoot dirs </> suffix
-
-dirCacheFp :: Dirs -> Digest SHA1 -> FilePath
-dirCacheFp dirs digest =
-    dirCacheRoot dirs </> fpFromText x </> fpFromText y <.> "gz"
-  where
-    name = decodeUtf8 $ B16.encode $ toBytes digest
-    (x, y) = splitAt 2 name
-
-copyHoogleTextFiles :: (Text -> IO ()) -- ^ log
-                    -> FilePath -- ^ raw unpacked Haddock files
-                    -> FilePath -- ^ temporary work directory
-                    -> ResourceT IO ()
-copyHoogleTextFiles say raw tmp = do
-    let tmptext = tmp </> "text"
-    liftIO $ createTree tmptext
-    sourceDirectory raw $$ mapM_C (\fp ->
-        forM_ (nameAndVersionFromPath fp) $ \(name, version) -> do
-            let src = fp </> fpFromText name <.> "txt"
-                dst = tmptext </> fpFromText (name ++ "-" ++ version)
-            exists <- liftIO $ isFile src
-            if exists
-                then sourceFile src $$ (sinkFile dst :: Sink ByteString (ResourceT IO) ())
-                else liftIO $ appendHoogleErrors say $ HoogleErrors
-                    { packageName = name
-                    , packageVersion = version
-                    , errors = ["No textual Hoogle DB (use \"cabal haddock --hoogle\")"]
-                    }
-        )
-
-createHoogleDb :: (Text -> IO ())
-               -> FilePath -- ^ default.hoo output location
-               -> Stackage
-               -> FilePath -- ^ temp directory
-               -> (Route App -> [(Text, Text)] -> Text)
-               -> IO ()
-createHoogleDb say dstDefaultHoo stackage tmpdir urlRender = do
-    let tmpbin = tmpdir </> "binary"
-    createTree tmpbin
-    eres <- tryAny $ runResourceT $ do
-        -- Create hoogle binary databases for each package.
-        sourceDirectory (tmpdir </> "text") $$ mapM_C
-            ( \fp -> do
-                (releaseKey, srcH) <- allocate (openBinaryFile (fpToString fp) ReadMode) hClose
-                forM_ (nameAndVersionFromPath fp) $ \(name, version) -> liftIO $ do
-                    say $ concat
-                        [ "Creating Hoogle database for: "
-                        , name
-                        , "-"
-                        , version
-                        ]
-                    src <- unpack . decodeUtf8 . asLByteString <$> hGetContents srcH
-                    let -- Preprocess the haddock-generated manifest file.
-                        src' = unlines $ haddockHacks (Just (unpack docsUrl)) $ lines src
-                        docsUrl = urlRender (HaddockR (stackageSlug stackage) urlPieces) []
-                        urlPieces = [name <> "-" <> version, "index.html"]
-                        -- Compute the filepath of the resulting hoogle
-                        -- database.
-                        out = fpToString $ tmpbin </> fpFromText base
-                        base = name <> "-" <> version <> ".hoo"
-                    errs <- Hoogle.createDatabase "" Hoogle.Haskell [] src' out
-                    when (not $ null errs) $ do
-                        -- TODO: remove this printing once errors are yielded
-                        -- to the user.
-                        putStrLn $ concat
-                            [ base
-                            , " Hoogle errors: "
-                            , tshow errs
-                            ]
-                        appendHoogleErrors say $ HoogleErrors
-                            { packageName = name
-                            , packageVersion = version
-                            , errors = map show errs
-                            }
-                release releaseKey
-            )
-        -- Merge the individual binary databases into one big database.
-        liftIO $ do
-            say "Merging all Hoogle databases"
-            dbs <- listDirectory tmpbin
-            Hoogle.mergeDatabase
-                (map fpToString dbs)
-                (fpToString dstDefaultHoo)
-    case eres of
-        Right () -> return ()
-        Left err -> liftIO $ appendHoogleErrors say $ HoogleErrors
-            { packageName = "Exception thrown while building hoogle DB"
-            , packageVersion = ""
-            , errors = [show err]
-            }
-
-data HoogleErrors = HoogleErrors
-    { packageName :: Text
-    , packageVersion :: Text
-    , errors :: [String]
-    } deriving (Generic)
-
-instance ToJSON HoogleErrors where
-instance FromJSON HoogleErrors where
-
--- Appends hoogle errors to a log file. By encoding within a single
--- list, the resulting file can be decoded as [HoogleErrors].
-appendHoogleErrors :: (Text -> IO ()) -> HoogleErrors -> IO ()
-appendHoogleErrors say errs = say $ decodeUtf8 $ Y.encode [errs]
-
-nameAndVersionFromPath :: FilePath -> Maybe (Text, Text)
-nameAndVersionFromPath fp =
-    (\name -> (name, version)) <$> stripSuffix "-" name'
-  where
-    (name', version) = T.breakOnEnd "-" $ fpToText $ filename fp
-
----------------------------------------------------------------------
--- HADDOCK HACKS
--- (Copied from hoogle-4.2.36/src/Recipe/Haddock.hs)
--- Modifications:
--- 1) Some name qualification
--- 2) Explicit type sig due to polymorphic elem
--- 3) Fixed an unused binding warning
-
--- Eliminate @version
--- Change :*: to (:*:), Haddock bug
--- Change !!Int to !Int, Haddock bug
--- Change instance [overlap ok] to instance, Haddock bug
--- Change instance [incoherent] to instance, Haddock bug
--- Change instance [safe] to instance, Haddock bug
--- Change !Int to Int, HSE bug
--- Drop {-# UNPACK #-}, Haddock bug
--- Drop everything after where, Haddock bug
-
-haddockHacks :: Maybe Hoogle.URL -> [String] -> [String]
-haddockHacks loc src = maybe id haddockPackageUrl loc (translate src)
-  where
-    translate :: [String] -> [String]
-    translate = map (unwords . g . map f . words) . filter (not . isPrefixOf "@version ")
-
-    f "::" = "::"
-    f (':':xs) = "(:" ++ xs ++ ")"
-    f ('!':'!':x:xs) | isAlpha x = xs
-    f ('!':x:xs) | isAlpha x || x `elem` ("[(" :: String) = x:xs
-    f x | x `elem` ["[overlap","ok]","[incoherent]","[safe]"] = ""
-    f x | x `elem` ["{-#","UNPACK","#-}"] = ""
-    f x = x
-
-    g ("where":_) = []
-    g (x:xs) = x : g xs
-    g [] = []
-
-haddockPackageUrl :: Hoogle.URL -> [String] -> [String]
-haddockPackageUrl x = concatMap f
-  where f y | "@package " `isPrefixOf` y = ["@url " ++ x, y]
-            | otherwise = [y]
diff --git a/Foundation.hs b/Foundation.hs
index 896118c..1cf0100 100644
--- a/Foundation.hs
+++ b/Foundation.hs
@@ -18,7 +18,7 @@ import Types
 import Yesod.Auth
 import Yesod.Auth.BrowserId
 import Yesod.Auth.GoogleEmail2 (authGoogleEmail)
-import Yesod.Core.Types (Logger, GWData)
+import Yesod.Core.Types (Logger)
 import Yesod.Default.Config
 import Yesod.GitRepo

@@ -35,25 +35,9 @@ data App = App
     , appLogger :: Logger
     , genIO :: MWC.GenIO
     , blobStore :: BlobStore StoreKey
-    , haddockRootDir :: FilePath
-    , appDocUnpacker :: DocUnpacker
-    -- ^ We have a dedicated thread so that (1) we don't try to unpack too many
-    -- things at once, (2) we never unpack the same thing twice at the same
-    -- time, and (3) so that even if the client connection dies, we finish the
-    -- unpack job.
-    , widgetCache :: IORef (HashMap Text (UTCTime, GWData (Route App)))
     , websiteContent :: GitRepo WebsiteContent
     }

-data DocUnpacker = DocUnpacker
-    { duRequestDocs :: Entity Stackage -> IO UnpackStatus
-    , duGetStatus :: IO Text
-    , duForceReload :: Entity Stackage -> IO ()
-    }
-
-data Progress = ProgressWorking !Text
-              | ProgressDone !Text !(Route App)
-
 instance HasBlobStore App StoreKey where
     getBlobStore = blobStore

@@ -75,8 +59,6 @@ instance HasHackageRoot App where
 -- explanation for this split.
 mkYesodData "App" $(parseRoutesFile "config/routes")

-deriving instance Show Progress
-
 type Form x = Html -> MForm (HandlerT App IO) (FormResult x, Widget)

 defaultLayoutNoContainer :: Widget -> Handler Html
@@ -167,16 +149,12 @@ instance Yesod App where

     makeLogger = return . appLogger

-    maximumContentLength _ (Just UploadStackageR) = Just 50000000
-    maximumContentLength _ (Just UploadHaddockR{}) = Just 100000000
-    maximumContentLength _ (Just UploadV2R) = Just 100000000
     maximumContentLength _ _ = Just 2000000

 instance ToMarkup (Route App) where
     toMarkup c =
         case c of
             AllSnapshotsR{} -> "Snapshots"
-            UploadStackageR{} -> "Upload"
             AuthR (LoginR{}) -> "Login"
             _ -> ""
diff --git a/Handler/Alias.hs b/Handler/Alias.hs
deleted file mode 100644
index 235492c..0000000
--- a/Handler/Alias.hs
+++ /dev/null
@@ -1,86 +0,0 @@
-module Handler.Alias
-    ( handleAliasR
-    , getLtsR
-    , getNightlyR
-    ) where
-
-import Import
-import Data.Slug (Slug)
-import Handler.StackageHome (getStackageHomeR, getStackageMetadataR, getStackageCabalConfigR, getSnapshotPackagesR, getDocsR)
-import Handler.StackageIndex (getStackageIndexR)
-import Handler.StackageSdist (getStackageSdistR)
-import Handler.Hoogle (getHoogleR, getHoogleDatabaseR)
-import Handler.BuildPlan (getBuildPlanR)
-import Handler.Download (getGhcMajorVersionR)
-
-handleAliasR :: Slug -> Slug -> [Text] -> Handler ()
-handleAliasR user name pieces = do
-    $logDebug $ tshow (user, name, pieces)
-    Entity _ (Alias _ _ setid) <- runDB $ do
-        Entity uid _ <- getBy404 $ UniqueHandle user
-        getBy404 $ UniqueAlias uid name
-    $logDebug $ "setid: " ++ tshow (setid, pieces)
-    case parseRoute ("stackage" : toPathPiece setid : pieces, []) of
-        Nothing -> notFound
-        Just route -> redirect (route :: Route App)
-
-getLtsR :: [Text] -> Handler ()
-getLtsR pieces0 =
-    case pieces0 of
-        [] -> go []
-        piece:pieces'
-            | Just (x, y) <- parseLtsPair piece -> goXY x y pieces'
-            | Just x <- fromPathPiece piece -> goX x pieces'
-            | otherwise -> go pieces0
-  where
-    go pieces = do
-        mlts <- runDB $ selectFirst [] [Desc LtsMajor, Desc LtsMinor]
-        case mlts of
-            Nothing -> notFound
-            Just (Entity _ (Lts _ _ sid)) -> goSid sid pieces
-
-    goX x pieces = do
-        mlts <- runDB $ selectFirst [LtsMajor ==. x] [Desc LtsMinor]
-        case mlts of
-            Nothing -> notFound
-            Just (Entity _ (Lts _ _ sid)) -> goSid sid pieces
-
-    goXY x y pieces = do
-        Entity _ (Lts _ _ sid) <- runDB $ getBy404 $ UniqueLts x y
-        goSid sid pieces
-
-getNightlyR :: [Text] -> Handler ()
-getNightlyR pieces0 =
-    case pieces0 of
-        [] -> go []
-        piece:pieces'
-            | Just day <- fromPathPiece piece -> goDay day pieces'
-            | otherwise -> go pieces0
-  where
-    go pieces = do
-        mn <- runDB $ selectFirst [] [Desc NightlyDay]
-        case mn of
-            Nothing -> notFound
-            Just (Entity _ (Nightly _ _ sid)) -> goSid sid pieces
-    goDay day pieces = do
-        Entity _ (Nightly _ _ sid) <- runDB $ getBy404 $ UniqueNightly day
-        goSid sid pieces
-
-goSid :: StackageId -> [Text] -> Handler ()
-goSid sid pieces = do
-    s <- runDB $ get404 sid
-    case parseRoute ("snapshot" : toPathPiece (stackageSlug s) : pieces, []) of
-        Just (SnapshotR slug sr) ->
-            case sr of
-                StackageHomeR -> getStackageHomeR slug >>= sendResponse
-                StackageMetadataR -> getStackageMetadataR slug >>= sendResponse
-                StackageCabalConfigR -> getStackageCabalConfigR slug >>= sendResponse
-                StackageIndexR -> getStackageIndexR slug >>= sendResponse
-                StackageSdistR pnv -> getStackageSdistR slug pnv >>= sendResponse
-                SnapshotPackagesR -> getSnapshotPackagesR slug >>= sendResponse
-                DocsR -> getDocsR slug >>= sendResponse
-                HoogleR -> getHoogleR slug >>= sendResponse
-                HoogleDatabaseR -> getHoogleDatabaseR slug >>= sendResponse
-                BuildPlanR -> getBuildPlanR slug >>= sendResponse
-                GhcMajorVersionR -> getGhcMajorVersionR slug >>= sendResponse
-        _ -> notFound
diff --git a/Handler/Aliases.hs b/Handler/Aliases.hs
deleted file mode 100644
index bf4a9f3..0000000
--- a/Handler/Aliases.hs
+++ /dev/null
@@ -1,23 +0,0 @@
-module Handler.Aliases where
-
-import Import
-import Data.Text (strip)
-
-putAliasesR :: Handler ()
-putAliasesR = do
-    uid <- requireAuthId
-    aliasesText <- runInputPost $ ireq textField "aliases"
-    aliases <- mapM (parseAlias uid) $ lines aliasesText
-    runDB $ do
-        deleteWhere [AliasUser ==. uid]
-        mapM_ insert_ aliases
-    setMessage "Aliases updated"
-    redirect ProfileR
-
-parseAlias :: UserId -> Text -> Handler Alias
-parseAlias uid t = maybe (invalidArgs ["Invalid alias: " ++ t]) return $ do
-    name <- fromPathPiece x
-    setid <- fromPathPiece y
-    return $ Alias uid name setid
-  where
-    (strip -> x, (strip . drop 1) -> y) = break (== ':') t
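The deleted parseAlias splits each submitted line on the first colon and trims both halves; the view patterns in the original obscure that slightly. The same split as a plain function (name is illustrative):

    import Data.Text (Text)
    import qualified Data.Text as T

    -- "lts : some-snapshot-id" -> ("lts", "some-snapshot-id")
    splitAlias :: Text -> (Text, Text)
    splitAlias t = (T.strip x, T.strip $ T.drop 1 y)
      where
        (x, y) = T.break (== ':') t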
diff --git a/Handler/CompressorStatus.hs b/Handler/CompressorStatus.hs
deleted file mode 100644
index c26a25a..0000000
--- a/Handler/CompressorStatus.hs
+++ /dev/null
@@ -1,14 +0,0 @@
-module Handler.CompressorStatus where
-
-import Import
-
-getCompressorStatusR :: Handler Html
-getCompressorStatusR = do
-    status <- getYesod >>= liftIO . duGetStatus . appDocUnpacker
-    defaultLayout $ do
-        setTitle "Compressor thread status"
-        [whamlet|
-            #{status}
-        |]
diff --git a/Handler/Download.hs b/Handler/Download.hs
index 1f341cc..7b8a06f 100644
--- a/Handler/Download.hs
+++ b/Handler/Download.hs
@@ -36,6 +36,7 @@ getDownloadR = defaultLayout $ do
     setTitle "Download"
     $(widgetFile "download")

+{- FIXME
 ltsMajorVersions :: YesodDB App [Lts]
 ltsMajorVersions =
     (dropOldMinors . map entityVal)
@@ -47,12 +48,15 @@ dropOldMinors (l@(Lts x _ _):rest) =
     l : dropOldMinors (dropWhile sameMinor rest)
   where
     sameMinor (Lts y _ _) = x == y
+-}

 getDownloadSnapshotsJsonR :: Handler Value
 getDownloadSnapshotsJsonR = getDownloadLtsSnapshotsJsonR

 getDownloadLtsSnapshotsJsonR :: Handler Value
 getDownloadLtsSnapshotsJsonR = do
+    error "getDownloadLtsSnapshotsJsonR"
+    {-
     (mlatestNightly, ltses) <- runDB $ (,)
         <$> getLatestNightly
         <*> ltsMajorVersions
@@ -82,11 +86,15 @@ ghcMajorVersionText snapshot =
     ghcMajorVersionToText $
     fromMaybe (GhcMajorVersion 7 8) $
     stackageGhcMajorVersion snapshot
+    -}

 getGhcMajorVersionR :: SnapSlug -> Handler Text
-getGhcMajorVersionR slug = do
+getGhcMajorVersionR _slug = do
+    error "getGhcMajorVersionR"
+    {-
     snapshot <- runDB $ getBy404 $ UniqueSnapshot slug
     return $ ghcMajorVersionText $ entityVal snapshot
+    -}

 getDownloadGhcLinksR :: SupportedArch -> Text -> Handler TypedContent
 getDownloadGhcLinksR arch fileName = do
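dropOldMinors (now parked inside the {- FIXME -} block above) relies on the query's descending (major, minor) sort order and keeps only the first, i.e. newest, entry per major version. The same recursion over bare pairs, for reference:

    -- Input must be sorted descending by (major, minor).
    dropOldMinors :: [(Int, Int)] -> [(Int, Int)]
    dropOldMinors [] = []
    dropOldMinors (l@(x, _) : rest) =
        l : dropOldMinors (dropWhile ((== x) . fst) rest)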
diff --git a/Handler/Haddock.hs b/Handler/Haddock.hs
index 05dd0f0..100aa4a 100644
--- a/Handler/Haddock.hs
+++ b/Handler/Haddock.hs
@@ -1,15 +1,5 @@
 module Handler.Haddock
-    ( getUploadHaddockR
-    , putUploadHaddockR
-    , getHaddockR
-    , getUploadDocMapR
-    , putUploadDocMapR
-      -- Exported for use in Handler.Hoogle
-    , Dirs (..), getDirs, dirHoogleFp, mkDirs
-    , dirRawIdent
-    , dirGzIdent
-    , dirHoogleIdent
-    , createCompressor
+    ( getHaddockR
     ) where

 import Control.Concurrent (forkIO)
@@ -31,269 +21,8 @@ import System.IO (IOMode (ReadMode), withBinaryFile)
 import System.IO.Temp (withTempFile)
 import System.Posix.Files (createLink)

-form :: Form FileInfo
-form = renderDivs $ areq fileField "tarball containing docs"
-    { fsName = Just "tarball"
-    } Nothing
-
-getUploadHaddockR, putUploadHaddockR :: Text -> Handler Html
-getUploadHaddockR slug0 = do
-    uid <- requireAuthIdOrToken
-    stackageEnt@(Entity sid Stackage {..}) <- runDB $ do
-        -- Provide fallback for old URLs
-        ment <- getBy $ UniqueStackage $ PackageSetIdent slug0
-        case ment of
-            Just ent -> return ent
-            Nothing -> do
-                slug <- maybe notFound return $ fromPathPiece slug0
-                getBy404 $ UniqueSnapshot slug
-    let ident = stackageIdent
-        slug = stackageSlug
-    unless (uid == stackageUser) $ permissionDenied "You do not control this snapshot"
-    ((res, widget), enctype) <- runFormPostNoToken form
-    case res of
-        FormSuccess fileInfo -> do
-            fileSource fileInfo $$ storeWrite (HaddockBundle ident)
-            runDB $ update sid [StackageHasHaddocks =. True]
-            master <- getYesod
-            liftIO $ duForceReload (appDocUnpacker master) stackageEnt
-            setMessage "Haddocks uploaded"
-            redirect $ SnapshotR slug StackageHomeR
-        _ -> defaultLayout $ do
-            setTitle "Upload Haddocks"
-            $(widgetFile "upload-haddock")
-
-putUploadHaddockR = getUploadHaddockR
-
 getHaddockR :: SnapSlug -> [Text] -> Handler ()
 getHaddockR slug rest = do
-    stackageEnt <- runDB $ do
-        onS3 <- fmap isJust $ getBy $ UniqueDocsOnS3 slug
-        when onS3 $ redirect $ concat
-            $ "http://haddock.stackage.org/"
-            : toPathPiece slug
-            : map (cons '/') rest
-        ment <- getBy $ UniqueSnapshot slug
-        case ment of
-            Just ent -> do
-                case rest of
-                    [pkgver] -> tryContentsRedirect ent pkgver
-                    [pkgver, "index.html"] -> tryContentsRedirect ent pkgver
-                    _ -> return ()
-                return ent
-            Nothing -> do
-                Entity _ stackage <- getBy404
-                    $ UniqueStackage
-                    $ PackageSetIdent
-                    $ toPathPiece slug
-                redirectWith status301 $ HaddockR (stackageSlug stackage) rest
-    mapM_ sanitize rest
-    dirs <- getDirs
-    requireDocs stackageEnt
-
-    let ident = stackageIdent (entityVal stackageEnt)
-        rawfp = dirRawFp dirs ident rest
-        gzfp = dirGzFp dirs ident rest
-        mime = defaultMimeLookup $ fpToText $ filename rawfp
-
-    whenM (liftIO $ isDirectory rawfp)
-        $ redirect $ HaddockR slug $ rest ++ ["index.html"]
-    whenM (liftIO $ isDirectory gzfp)
-        $ redirect $ HaddockR slug $ rest ++ ["index.html"]
-
-    whenM (liftIO $ isFile gzfp) $ do
-        addHeader "Content-Encoding" "gzip"
-        sendFile mime $ fpToString gzfp
-
-    -- Note: There's a small race window here, where the compressor thread
-    -- could pull the rug out from under us. We can work around this by opening
-    -- the file and, if that fails, try the compressed version again.
-    whenM (liftIO $ isFile rawfp) $ sendFile mime $ fpToString rawfp
-
-    notFound
-  where
-    sanitize p
-        | ("/" `isInfixOf` p) || p `member` (asHashSet $ setFromList ["", ".", ".."]) =
-            permissionDenied "Invalid request"
-        | otherwise = return ()
-
--- | Try to redirect to the snapshot's package page instead of the
--- Haddock-generated HTML.
-tryContentsRedirect :: Entity Stackage -> Text -> YesodDB App ()
-tryContentsRedirect (Entity sid Stackage {..}) pkgver = do
-    mdocs <- selectFirst
-        [ DocsName ==. name
-        , DocsVersion ==. version
-        , DocsSnapshot ==. Just sid
-        ]
-        []
-    forM_ mdocs $ const
-        $ redirect
-        $ SnapshotR stackageSlug
-        $ StackageSdistR
-        $ PNVNameVersion name version
-  where
-    (PackageName . dropDash -> name, Version -> version) = T.breakOnEnd "-" pkgver
-
-dropDash :: Text -> Text
-dropDash t = fromMaybe t $ stripSuffix "-" t
-
-createCompressor
-    :: Dirs
-    -> IO (IORef Text, IO ()) -- ^ action to kick off compressor again
-createCompressor dirs = do
-    baton <- newMVar ()
-    status <- newIORef "Compressor is idle"
-    mask_ $ void $ forkIO $ (finallyE $ \e -> writeIORef status $ "Compressor thread exited: " ++ tshow e) $ forever $ do
-        writeIORef status "Waiting for signal to start compressing"
-        takeMVar baton
-        writeIORef status "Received signal, traversing directories"
-        let rawRoot = dirRawRoot dirs
-        whenM (isDirectory rawRoot) $ runResourceT $ goDir status rawRoot
-    return (status, void $ tryPutMVar baton ())
-  where
-    finallyE f g = mask $ \restore -> do
-        restore g `catch` \e -> do
-            () <- f $ Just (e :: SomeException)
-            () <- throwIO e
-            return ()
-        f Nothing
-    goDir status dir = do
-        writeIORef status $ "Compressing directory: " ++ fpToText dir
-        sourceDirectory dir $$ mapM_C (goFP status)
-        liftIO $ void $ tryIO $ removeDirectory dir
-
-    goFP status fp = do
-        e <- liftIO $ isFile fp
-        if e
-            then liftIO $ do
-                writeIORef status $ "Compressing file: " ++ fpToText fp
-                handle (print . asSomeException)
-                    $ gzipHash dirs suffix
-            else goDir status fp
-      where
-        Just suffix = F.stripPrefix (dirRawRoot dirs </> "") fp
-
--- Procedure is to:
---
--- * Gzip the src file to a temp file, and get a hash of the gzipped contents
--- * If that hash doesn't exist in the cache, move the new file to the cache
--- * Create a hard link from dst to the file in the cache
--- * Delete src
-gzipHash :: Dirs
-         -> FilePath -- ^ suffix
-         -> IO ()
-gzipHash dirs suffix = do
-    withTempFile (fpToString $ dirCacheRoot dirs) "haddock-file.gz" $ \tempfp temph -> do
-        digest <- withBinaryFile (fpToString src) ReadMode $ \inh ->
-            sourceHandle inh
-                $= gzip
-                $$ (getZipSink $
-                    ZipSink (sinkHandle temph) *>
-                    ZipSink sinkHash)
-        hClose temph
-        let fpcache = dirCacheFp dirs digest
-        unlessM (isFile fpcache) $ do
-            createTree $ F.parent fpcache
-            rename (fpFromString tempfp) fpcache
-        createTree $ F.parent dst
-        createLink (fpToString fpcache) (fpToString dst)
-        removeFile src
-  where
-    src = dirRawRoot dirs </> suffix
-    dst = dirGzRoot dirs </> suffix
-
-data Dirs = Dirs
-    { dirRawRoot :: !FilePath
-    , dirGzRoot :: !FilePath
-    , dirCacheRoot :: !FilePath
-    , dirHoogleRoot :: !FilePath
-    }
-
-getDirs :: Handler Dirs
-getDirs = mkDirs . haddockRootDir <$> getYesod
-
-mkDirs :: FilePath -> Dirs
-mkDirs dir = Dirs
-    { dirRawRoot = dir </> "idents-raw"
-    , dirGzRoot = dir </> "idents-gz"
-    , dirCacheRoot = dir </> "cachedir"
-    , dirHoogleRoot = dir </> "hoogle"
-    }
-
-dirGzIdent, dirRawIdent, dirHoogleIdent :: Dirs -> PackageSetIdent -> FilePath
-dirGzIdent dirs ident = dirGzRoot dirs </> fpFromText (toPathPiece ident)
-dirRawIdent dirs ident = dirRawRoot dirs </> fpFromText (toPathPiece ident)
-dirHoogleIdent dirs ident = dirHoogleRoot dirs </> fpFromText (toPathPiece ident)
-
-dirGzFp, dirRawFp, dirHoogleFp :: Dirs -> PackageSetIdent -> [Text] -> FilePath
-dirGzFp dirs ident rest = dirGzIdent dirs ident </> mconcat (map fpFromText rest)
-dirRawFp dirs ident rest = dirRawIdent dirs ident </> mconcat (map fpFromText rest)
-dirHoogleFp dirs ident rest = dirHoogleIdent dirs ident </> mconcat (map fpFromText rest)
-
-dirCacheFp :: Dirs -> Digest SHA1 -> FilePath
-dirCacheFp dirs digest =
-    dirCacheRoot dirs </> fpFromText x </> fpFromText y <.> "gz"
-  where
-    name = decodeUtf8 $ B16.encode $ toBytes digest
-    (x, y) = splitAt 2 name
-
-data DocInfo = DocInfo Version (Map Text [Text])
-instance FromJSON DocInfo where
-    parseJSON = withObject "DocInfo" $ \o -> DocInfo
-        <$> (Version <$> o .: "version")
-        <*> o .: "modules"
-
-getUploadDocMapR :: Handler Html
-getUploadDocMapR = do
-    uid <- requireAuthIdOrToken
-    user <- runDB $ get404 uid
-    extra <- getExtra
-    when (unSlug (userHandle user) `notMember` adminUsers extra)
-        $ permissionDenied "Must be an administrator"
-
-    ((res, widget), enctype) <- runFormPostNoToken $ renderDivs $ (,)
-        <$> areq
-            fileField
-            "YAML file with map" { fsName = Just "docmap" }
-            Nothing
-        <*> areq textField "Stackage ID" { fsName = Just "snapshot" } Nothing
-    case res of
-        FormSuccess (fi, snapshot) -> do
-            Entity sid stackage <- runDB $ do
-                ment <- getBy $ UniqueStackage $ PackageSetIdent snapshot
-                case ment of
-                    Just ent -> return ent
-                    Nothing -> do
-                        slug <- maybe notFound return $ fromPathPiece snapshot
-                        getBy404 $ UniqueSnapshot slug
-            unless (stackageHasHaddocks stackage) $ invalidArgs $ return
-                "Cannot use a snapshot without docs for a docmap"
-            bs <- fileSource fi $$ foldC
-            case Y.decodeEither bs of
-                Left e -> invalidArgs [pack e]
-                Right m0 -> do
-                    now <- liftIO getCurrentTime
-                    render <- getUrlRender
-                    runDB $ forM_ (mapToList $ asMap m0) $ \(package, DocInfo version ms) -> do
-                        did <- insert Docs
-                            { docsName = PackageName package
-                            , docsVersion = version
-                            , docsUploaded = now
-                            , docsSnapshot = Just sid
-                            }
-                        forM_ (mapToList ms) $ \(name, pieces) -> do
-                            let url = render $ HaddockR (stackageSlug stackage) pieces
-                            insert_ $ Module did name url
-                    setMessage "Doc map complete"
-                    redirect UploadDocMapR
-        _ -> defaultLayout $ do
-            setTitle "Upload doc map"
-            [whamlet|