mirror of https://github.com/commercialhaskell/stackage-server.git
synced 2026-01-11 19:58:28 +01:00

Delete a whole bunch of stuff, nothing works

This commit is contained in:
parent 06c5059392
commit d956b074c0

Application.hs | 143
@@ -11,9 +11,6 @@ import Control.Concurrent (forkIO, threadDelay)
import Control.Exception (catch)
import Control.Monad.Logger (runLoggingT, LoggingT, defaultLogStr)
import Data.BlobStore (fileStore, cachedS3Store)
import Data.Hackage
import Data.Hackage.DeprecationInfo
import Data.Unpacking (newDocUnpacker, createHoogleDatabases)
import Data.WebsiteContent
import Data.Slug (SnapSlug (..), safeMakeSlug, HasGenIO)
import Data.Streaming.Network (bindPortTCP)
@@ -53,28 +50,21 @@ import Handler.Snapshots
import Handler.Profile
import Handler.Email
import Handler.ResetToken
import Handler.UploadStackage
import Handler.StackageHome
import Handler.StackageIndex
import Handler.StackageSdist
import Handler.Aliases
import Handler.Alias
import Handler.Progress
import Handler.System
import Handler.Haddock
import Handler.Package
import Handler.PackageList
import Handler.CompressorStatus
import Handler.Tag
import Handler.BannedTags
import Handler.RefreshDeprecated
import Handler.UploadV2
import Handler.Hoogle
import Handler.BuildVersion
import Handler.PackageCounts
import Handler.Sitemap
import Handler.BuildPlan
import Handler.Download
import Handler.OldLinks

-- This line actually creates our YesodDispatch instance. It is the second half
-- of the call to mkYesodData which occurs in Foundation.hs. Please see the
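The split referred to here is the standard Yesod pattern; a minimal sketch, assuming the usual scaffolded names (mkYesodDispatch and resourcesApp are not shown in this diff):

-- In Foundation.hs: generates the route datatype and type-safe URL values.
mkYesodData "App" $(parseRoutesFile "config/routes")

-- In Application.hs: generates the YesodDispatch instance for those routes,
-- once all the handler modules imported above are in scope.
mkYesodDispatch "App" resourcesApp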
@@ -157,9 +147,6 @@ makeFoundation useEcho conf = do

    blobStore' <- loadBlobStore manager conf

    let haddockRootDir' = "/tmp/stackage-server-haddocks2"
    widgetCache' <- newIORef mempty

    websiteContent' <- if development
        then do
            void $ rawSystem "git"
@@ -182,7 +169,6 @@ makeFoundation useEcho conf = do

    let runDB' :: (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a
        runDB' = flip (Database.Persist.runPool dbconf) p
    docUnpacker <- newDocUnpacker haddockRootDir' blobStore' runDB'

    let logger = Yesod.Core.Types.Logger loggerSet' getter
        foundation = App
@@ -194,9 +180,6 @@ makeFoundation useEcho conf = do
            , appLogger = logger
            , genIO = gen
            , blobStore = blobStore'
            , haddockRootDir = haddockRootDir'
            , appDocUnpacker = docUnpacker
            , widgetCache = widgetCache'
            , websiteContent = websiteContent'
            }

@@ -209,27 +192,16 @@ makeFoundation useEcho conf = do
    flip runLoggingT (messageLoggerSource foundation logger) $
        flip (Database.Persist.runPool dbconf) p $ do
            runMigration migrateAll
            {-
            checkMigration 1 fixSnapSlugs
            checkMigration 2 setCorePackages
            -}


    let updateDB = lookup "STACKAGE_CABAL_LOADER" env /= Just "0"
        hoogleGen = lookup "STACKAGE_HOOGLE_GEN" env /= Just "0"
        forceUpdate = lookup "STACKAGE_FORCE_UPDATE" env == Just "1"
        loadCabalFiles' = appLoadCabalFiles updateDB forceUpdate foundation dbconf p
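In other words (inferred from the lookups above, not part of the diff): setting STACKAGE_CABAL_LOADER=0 or STACKAGE_HOOGLE_GEN=0 in the environment disables the corresponding background job, while STACKAGE_FORCE_UPDATE=1 opts in to a forced refresh.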
    -- Start the cabal file loader
    ifRunCabalLoader $ forkIO $ forever $ flip runLoggingT (messageLoggerSource foundation logger) $ do
        $logInfoS "CLEANUP" "Cleaning up /tmp"
        now <- liftIO getCurrentTime
        runResourceT $ sourceDirectory "/tmp" $$ mapM_C (cleanupTemp now)
        $logInfoS "CLEANUP" "Cleaning up complete"

        loadCabalFiles'

        when hoogleGen $ liftIO $ createHoogleDatabases blobStore' runDB' putStrLn urlRender'

        liftIO $ threadDelay $ 30 * 60 * 1000000
    return foundation
  where ifRunCabalLoader m =
            if cabalFileLoader
@@ -255,6 +227,8 @@ cabalLoaderMain = do
    void $ catchIO (bindPortTCP 17834 "127.0.0.1") $ \_ ->
        error $ "cabal loader process already running, exiting"

    error "cabalLoaderMain"
    {- FIXME
    conf <- fromArgs parseExtra
    dbconf <- getDbConf conf
    pool <- Database.Persist.createPoolConfig dbconf
@@ -297,77 +271,7 @@ cabalLoaderMain = do
    logFunc loc src level str
        | level > LevelDebug = S.hPutStr stdout $ fromLogStr $ defaultLogStr loc src level str
        | otherwise = return ()

appLoadCabalFiles :: ( PersistConfig c
                     , PersistConfigBackend c ~ SqlPersistT
                     , HasHackageRoot env
                     , HasBlobStore env StoreKey
                     , HasHttpManager env
                     )
                  => Bool -- ^ update database?
                  -> Bool -- ^ force update?
                  -> env
                  -> c
                  -> PersistConfigPool c
                  -> LoggingT IO ()
appLoadCabalFiles updateDB forceUpdate env dbconf p = do
    eres <- tryAny $ flip runReaderT env $ do
        let runDB' :: SqlPersistT (ResourceT (ReaderT env (LoggingT IO))) a
                   -> ReaderT env (LoggingT IO) a
            runDB' = runResourceT . flip (Database.Persist.runPool dbconf) p

        $logInfo "Updating deprecation tags"
        loadDeprecationInfo >>= \ei -> case ei of
            Left e -> $logError (pack e)
            Right info -> runDB' $ do
                deleteWhere ([] :: [Filter Deprecated])
                insertMany_ (deprecations info)
                deleteWhere ([] :: [Filter Suggested])
                insertMany_ (suggestions info)
        $logInfo "Finished updating deprecation tags"

        let toMDPair (E.Value name, E.Value version, E.Value hash') =
                (name, (version, hash'))
        metadata0 <- fmap (mapFromList . map toMDPair)
                   $ runDB' $ E.select $ E.from $ \m -> return
            ( m E.^. MetadataName
            , m E.^. MetadataVersion
            , m E.^. MetadataHash
            )
        UploadState _ newMD <- loadCabalFiles updateDB forceUpdate metadata0
        $logInfo $ "Updating metadatas: " ++ tshow (length newMD)
        runDB' $ do
            let newMD' = toList newMD
            deleteWhere [MetadataName <-. map metadataName newMD']
            insertMany_ newMD'
            forM_ newMD' $ \md -> do
                deleteWhere [DependencyUser ==. metadataName md]
                insertMany_ $ flip map (metadataDeps md) $ \dep ->
                    Dependency (PackageName dep) (metadataName md)

    case eres of
        Left e -> $logError $ tshow e
        Right () -> return ()

cleanupTemp :: UTCTime -> FilePath -> ResourceT (LoggingT IO) ()
cleanupTemp now fp
    | any (`isPrefixOf` name) prefixes = handleAny ($logError . tshow) $ do
        modified <- liftIO $ getModified fp
        if (diffUTCTime now modified > 60 * 60)
            then do
                $logInfoS "CLEANUP" $ "Removing temp directory: " ++ fpToText fp
                liftIO $ removeTree fp
                $logInfoS "CLEANUP" $ "Temp directory deleted: " ++ fpToText fp
            else $logInfoS "CLEANUP" $ "Ignoring recent entry: " ++ fpToText fp
    | otherwise = $logInfoS "CLEANUP" $ "Ignoring unmatched path: " ++ fpToText fp
  where
    name = fpToText $ filename fp
    prefixes = asVector $ pack
        [ "hackage-index"
        , "createview"
        , "build00index."
        , "newindex"
        ]
-}

-- for yesod devel
getApplicationDev :: Bool -> IO (Int, Application)
@@ -387,38 +291,3 @@ checkMigration num f = do
    case eres of
        Left _ -> return ()
        Right _ -> f

fixSnapSlugs :: (MonadResource m, HasGenIO env, MonadReader env m)
             => ReaderT SqlBackend m ()
fixSnapSlugs =
    selectSource [] [Asc StackageUploaded] $$ mapM_C go
  where
    go (Entity sid Stackage {..}) =
        loop (1 :: Int)
      where
        base = T.replace "haskell platform" "hp"
             $ T.replace "stackage build for " ""
             $ toLower stackageTitle
        loop 50 = error "fixSnapSlugs can't find a good slug"
        loop i = do
            slug' <- lift $ safeMakeSlug base $ if i == 1 then False else True
            let slug = SnapSlug slug'
            ms <- getBy $ UniqueSnapshot slug
            case ms of
                Nothing -> update sid [StackageSlug =. slug]
                Just _ -> loop (i + 1)

setCorePackages :: MonadIO m => ReaderT SqlBackend m ()
setCorePackages =
    updateWhere
        [ PackageName' <-. defaultCorePackages
        , PackageCore ==. Nothing
        ]
        [PackageCore =. Just True]
  where
    defaultCorePackages = map PackageName $ words =<<
        [ "ghc hoopl bytestring unix haskeline Cabal base time xhtml"
        , "haskell98 hpc filepath process array integer-gmp bin-package-db"
        , "containers haskell2010 binary ghc-prim old-time old-locale rts"
        , "terminfo transformers deepseq pretty template-haskell directory"
        ]

Data/Hackage.hs | 379
@@ -1,379 +0,0 @@
module Data.Hackage
    ( loadCabalFiles
    , sourceHackageSdist
    , UploadState (..)
    ) where

import ClassyPrelude.Yesod hiding (get)
import Types
import Data.BlobStore
import Data.Conduit.Lazy (MonadActive (..), lazyConsume)
import qualified Codec.Archive.Tar as Tar
import Control.Monad.Logger (runNoLoggingT)
import qualified Data.Text as T
import Data.Conduit.Zlib (ungzip)
import System.IO.Temp (withSystemTempFile)
import System.IO (IOMode (ReadMode), openBinaryFile)
import Model (Metadata (..))
import Distribution.PackageDescription.Parse (parsePackageDescription, ParseResult (ParseOk))
import qualified Distribution.PackageDescription as PD
import qualified Distribution.Package as PD
import Control.Monad.State.Strict (put, get, execStateT, MonadState)
import Crypto.Hash.Conduit (sinkHash)
import Crypto.Hash (Digest, SHA256)
import Data.Byteable (toBytes)
import Distribution.Text (display)
import Text.Markdown (Markdown (Markdown))
import qualified Data.Traversable as T
import qualified Data.Version
import Text.ParserCombinators.ReadP (readP_to_S)
import Text.Blaze.Html.Renderer.Utf8 (renderHtml)
import Text.Blaze.Html (unsafeByteString)
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import qualified Documentation.Haddock.Parser as Haddock
import Documentation.Haddock.Types (DocH (..), Hyperlink (..), Picture (..), Header (..), Example (..))
import qualified Data.HashMap.Lazy as HM

loadCabalFiles :: ( MonadActive m
                  , MonadBaseControl IO m
                  , MonadThrow m
                  , MonadIO m
                  , MonadReader env m
                  , HasHttpManager env
                  , HasBlobStore env StoreKey
                  , HasHackageRoot env
                  , MonadLogger m
                  , MonadMask m
                  )
               => Bool -- ^ do the database updating
               -> Bool -- ^ force updates regardless of hash value?
               -> HashMap PackageName (Version, ByteString)
               -> m (UploadState Metadata)
loadCabalFiles dbUpdates forceUpdate metadata0 = (>>= T.mapM liftIO) $ flip execStateT (UploadState metadata1 mempty) $ do
    HackageRoot root <- liftM getHackageRoot ask
    $logDebug $ "Entering loadCabalFiles, root == " ++ root
    req <- parseUrl $ unpack $ root ++ "/00-index.tar.gz"
    withSystemTempFile "hackage-index" $ \tempIndex handleOut -> do
        $logDebug $ "Requesting: " ++ tshow req
        withResponse req $ \res -> responseBody res $$ sinkHandle handleOut
        liftIO $ hClose handleOut
        withBinaryFile tempIndex ReadMode $ \handleIn -> do
            bss <- lazyConsume $ sourceHandle handleIn $= ungzip
            tarSource (Tar.read $ fromChunks bss)
                $$ parMapMC 32 go
                =$ scanlC (\x _ -> x + 1) (0 :: Int)
                =$ filterC ((== 0) . (`mod` 1000))
                =$ mapM_C (\i -> $logInfo $ "Processing cabal file #" ++ tshow i)
    $logInfo "Finished processing cabal files"
  where
    metadata1 = flip fmap metadata0 $ \(v, h) -> MetaSig
        v
        (fromMaybe (pack [0, 0, 0]) $ readVersion v)
        h
    withBinaryFile fp mode = bracket (liftIO $ openBinaryFile fp mode) (liftIO . hClose)

    go entry = do
        case Tar.entryContent entry of
            Tar.NormalFile lbs _
                | Just (name, version) <- parseFilePath (Tar.entryPath entry) -> do
                    let key = HackageCabal name version
                    -- It's no longer sufficient to simply check if the cabal
                    -- file exists, since Hackage now allows updating in place.
                    -- Instead, we have to check if it matches what we have
                    -- and, if not, update it.
                    store <- liftM getBlobStore ask
                    newDigest :: Digest SHA256 <- sourceLazy lbs $$ sinkHash
                    toStore <- withAcquire (storeRead' store key) $ \mcurr ->
                        case mcurr of
                            Nothing -> return True
                            Just curr -> do
                                -- Check if it matches. This is cheaper than
                                -- always writing, since it can take advantage
                                -- of the local filesystem cache and not go to
                                -- S3 each time.
                                currDigest <- curr $$ sinkHash
                                return $! currDigest /= newDigest
                    when toStore $ withAcquire (storeWrite' store key) $ \sink ->
                        sourceLazy lbs $$ sink
                    when dbUpdates $ do
                        case readVersion version of
                            Nothing -> return ()
                            Just dataVersion -> setMetadata
                                forceUpdate
                                name
                                version
                                dataVersion
                                (toBytes newDigest)
                                (parsePackageDescription $ unpack $ decodeUtf8 lbs)
            _ -> return ()

readVersion :: Version -> Maybe (UVector Int)
readVersion v =
    case filter (null . snd) $ readP_to_S Data.Version.parseVersion . unpack . unVersion $ v of
        (dv, _):_ -> Just $ pack $ Data.Version.versionBranch dv
        [] -> Nothing

tarSource :: (Exception e, MonadThrow m)
          => Tar.Entries e
          -> Producer m Tar.Entry
tarSource Tar.Done = return ()
tarSource (Tar.Fail e) = throwM e
tarSource (Tar.Next e es) = yield e >> tarSource es

data UploadState md = UploadState
    { usMetadata :: !(HashMap PackageName MetaSig)
    , usMetaChanges :: (HashMap PackageName md)
    }
    deriving (Functor, Foldable, Traversable)

data MetaSig = MetaSig
    {-# UNPACK #-} !Version
    {-# UNPACK #-} !(UVector Int) -- versionBranch
    {-# UNPACK #-} !ByteString -- hash

setMetadata :: ( MonadBaseControl IO m
               , MonadThrow m
               , MonadIO m
               , MonadReader env m
               , MonadState (UploadState (IO Metadata)) m
               , HasHttpManager env
               , MonadLogger m
               , MonadActive m
               , HasBlobStore env StoreKey
               , HasHackageRoot env
               )
            => Bool -- ^ force update?
            -> PackageName
            -> Version
            -> UVector Int -- ^ versionBranch
            -> ByteString
            -> ParseResult PD.GenericPackageDescription
            -> m ()
setMetadata forceUpdate name version dataVersion hash' gpdRes = do
    UploadState mdMap mdChanges <- get
    let toUpdate =
            case lookup name mdMap of
                Just (MetaSig _currVersion currDataVersion currHash) ->
                    case compare currDataVersion dataVersion of
                        LT -> True
                        GT -> False
                        EQ -> currHash /= hash' || forceUpdate
                Nothing -> True
    if toUpdate
        then case gpdRes of
            ParseOk _ gpd -> do
                !md <- getMetadata name version hash' gpd
                put $! UploadState
                    (insertMap name (MetaSig version dataVersion hash') mdMap)
                    (HM.insert name md mdChanges)
            _ -> return ()
        else return ()

getMetadata :: ( MonadActive m
               , MonadIO m
               , MonadBaseControl IO m
               , MonadThrow m
               , MonadReader env m
               , HasBlobStore env StoreKey
               , HasHackageRoot env
               , HasHttpManager env
               , MonadLogger m
               )
            => PackageName
            -> Version
            -> ByteString
            -> PD.GenericPackageDescription
            -> m (IO Metadata)
getMetadata name version hash' gpd = do
    let pd = PD.packageDescription gpd
    env <- ask
    return $ liftIO $ runNoLoggingT $ flip runReaderT env $ do
        (mreadme, mchangelog, mlicenseContent) <-
            grabExtraFiles name version
#if MIN_VERSION_Cabal(1, 20, 0)
                $ PD.licenseFiles pd
#else
                [PD.licenseFile pd]
#endif
        let collapseHtml = unsafeByteString . toStrict . renderHtml
        return Metadata
            { metadataName = name
            , metadataVersion = version
            , metadataHash = hash'
            , metadataDeps = setToList
                $ asSet
                $ concat
                [ foldMap goTree $ PD.condLibrary gpd
                , foldMap (goTree . snd) $ PD.condExecutables gpd
                ]
            , metadataAuthor = pack $ PD.author pd
            , metadataMaintainer = pack $ PD.maintainer pd
            , metadataLicenseName = pack $ display $ PD.license pd
            , metadataHomepage = pack $ PD.homepage pd
            , metadataBugReports = pack $ PD.bugReports pd
            , metadataSynopsis = pack $ PD.synopsis pd
            , metadataSourceRepo = mapMaybe showSourceRepo $ PD.sourceRepos pd
            , metadataCategory = pack $ PD.category pd
            , metadataLibrary = isJust $ PD.library pd
            , metadataExes = length $ PD.executables pd
            , metadataTestSuites = length $ PD.testSuites pd
            , metadataBenchmarks = length $ PD.benchmarks pd
            , metadataReadme = collapseHtml $ fromMaybe
                (hToHtml . Haddock.toRegular . Haddock.parseParas $ PD.description pd)
                mreadme
            , metadataChangelog = collapseHtml <$> mchangelog
            , metadataLicenseContent = collapseHtml <$> mlicenseContent
            }
  where
    goTree (PD.CondNode _ deps comps) = concatMap goDep deps ++ concatMap goComp comps
    goDep (PD.Dependency (PD.PackageName n) _) = singletonSet $ pack n
    goComp (_, tree, mtree) = goTree tree ++ maybe mempty goTree mtree

-- | Convert a Haddock doc to HTML.
hToHtml :: DocH String String -> Html
hToHtml =
    go
  where
    go :: DocH String String -> Html
    go DocEmpty = mempty
    go (DocAppend x y) = go x ++ go y
    go (DocString x) = toHtml x
    go (DocParagraph x) = H.p $ go x
    go (DocIdentifier s) = H.code $ toHtml s
    go (DocIdentifierUnchecked s) = H.code $ toHtml s
    go (DocModule s) = H.code $ toHtml s
    go (DocWarning x) = H.span H.! A.class_ "warning" $ go x
    go (DocEmphasis x) = H.em $ go x
    go (DocMonospaced x) = H.code $ go x
    go (DocBold x) = H.strong $ go x
    go (DocUnorderedList xs) = H.ul $ foldMap (H.li . go) xs
    go (DocOrderedList xs) = H.ol $ foldMap (H.li . go) xs
    go (DocDefList xs) = H.dl $ flip foldMap xs $ \(x, y) ->
        H.dt (go x) ++ H.dd (go y)
    go (DocCodeBlock x) = H.pre $ H.code $ go x
    go (DocHyperlink (Hyperlink url mlabel)) =
        H.a H.! A.href (H.toValue url) $ toHtml label
      where
        label = fromMaybe url mlabel
    go (DocPic (Picture url mtitle)) =
        H.img H.! A.src (H.toValue url) H.! A.title (H.toValue $ fromMaybe mempty mtitle)
    go (DocAName s) = H.div H.! A.id (H.toValue s) $ mempty
    go (DocProperty s) = H.pre $ H.code $ toHtml s
    go (DocExamples es) = flip foldMap es $ \(Example exp' ress) ->
        H.div H.! A.class_ "example" $ do
            H.pre H.! A.class_ "expression" $ H.code $ toHtml exp'
            flip foldMap ress $ \res ->
                H.pre H.! A.class_ "result" $ H.code $ toHtml res
    go (DocHeader (Header level content)) =
        wrapper level $ go content
      where
        wrapper 1 = H.h1
        wrapper 2 = H.h2
        wrapper 3 = H.h3
        wrapper 4 = H.h4
        wrapper 5 = H.h5
        wrapper _ = H.h6

showSourceRepo :: PD.SourceRepo -> Maybe Text
showSourceRepo = fmap pack . PD.repoLocation

grabExtraFiles :: ( MonadActive m
                  , MonadIO m
                  , MonadBaseControl IO m
                  , MonadThrow m
                  , MonadReader env m
                  , HasBlobStore env StoreKey
                  , HasHackageRoot env
                  , HasHttpManager env
                  , MonadLogger m
                  )
               => PackageName
               -> Version
               -> [String] -- ^ license files
               -> m (Maybe Html, Maybe Html, Maybe Html) -- ^ README, changelog, license
grabExtraFiles name version lfiles = runResourceT $ do
    msrc <- sourceHackageSdist name version
    handle (\(_ :: Tar.FormatError) -> return (Nothing,Nothing,Nothing)) $
        case msrc of
            Nothing -> return mempty
            Just src -> do
                bss <- lazyConsume $ src $= ungzip
                tarSource (Tar.read $ fromChunks bss) $$ foldlC go mempty
  where
    go trip@(mreadme, mchangelog, mlicense) entry =
        case Tar.entryContent entry of
            Tar.NormalFile lbs _ ->
                let name' = drop 1 $ dropWhile (/= '/') $ Tar.entryPath entry in
                case toLower name' of
                    "readme.md" -> (md lbs, mchangelog, mlicense)
                    "readme" -> (txt lbs, mchangelog, mlicense)
                    "readme.txt" -> (txt lbs, mchangelog, mlicense)
                    "changelog.md" -> (mreadme, md lbs, mlicense)
                    "changelog" -> (mreadme, txt lbs, mlicense)
                    "changelog.txt" -> (mreadme, txt lbs, mlicense)
                    "changes.md" -> (mreadme, md lbs, mlicense)
                    "changes" -> (mreadme, txt lbs, mlicense)
                    "changes.txt" -> (mreadme, txt lbs, mlicense)
                    _ | name' `elem` lfiles -> (mreadme, mchangelog, txt lbs)
                    _ -> trip
            _ -> trip

    md = wrapClass "markdown" . Markdown . decodeUtf8
    txt = wrapClass "plain-text" . Textarea . toStrict . decodeUtf8

    wrapClass clazz inner = Just $ H.div H.! A.class_ clazz $ toHtml inner

parseFilePath :: String -> Maybe (PackageName, Version)
parseFilePath s =
    case filter (not . null) $ T.split (== '/') $ pack s of
        (name:version:_) -> Just (PackageName name, Version version)
        _ -> Nothing
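An illustrative example of the parse above (package name and version hypothetical):

-- parseFilePath "conduit/1.2.0/conduit.cabal"
--     == Just (PackageName "conduit", Version "1.2.0")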
sourceHackageSdist :: ( MonadIO m
                      , MonadThrow m
                      , MonadBaseControl IO m
                      , MonadResource m
                      , MonadReader env m
                      , HasHttpManager env
                      , HasHackageRoot env
                      , HasBlobStore env StoreKey
                      , MonadLogger m
                      )
                   => PackageName
                   -> Version
                   -> m (Maybe (Source m ByteString))
sourceHackageSdist name version = do
    let key = HackageSdist name version
    msrc1 <- storeRead key
    case msrc1 of
        Just src -> return $ Just src
        Nothing -> do
            HackageRoot root <- liftM getHackageRoot ask
            let url = concat
                    [ root
                    , "/package/"
                    , toPathPiece name
                    , "-"
                    , toPathPiece version
                    , ".tar.gz"
                    ]
            req' <- parseUrl $ unpack url
            let req = req' { checkStatus = \_ _ _ -> Nothing }
            $logDebug $ "Requesting: " ++ tshow req
            exists <- withResponse req $ \res ->
                if responseStatus res == status200
                    then do
                        responseBody res $$ storeWrite key
                        return True
                    else return False
            if exists
                then storeRead key
                else return Nothing

-- FIXME put in conduit-combinators
parMapMC :: (MonadIO m, MonadBaseControl IO m)
         => Int
         -> (i -> m o)
         -> Conduit i m o
parMapMC _ = mapMC

Data/Hackage/DeprecationInfo.hs | 58
@@ -1,58 +0,0 @@
-- | Transforms http://hackage.haskell.org/packages/deprecated.json
-- into model data to be stored in the database.
module Data.Hackage.DeprecationInfo
    ( HackageDeprecationInfo(..)
    , loadDeprecationInfo
    ) where

import ClassyPrelude.Yesod
import Data.Aeson as Aeson
import Model
import Types

data HackageDeprecationInfo = HackageDeprecationInfo {
    deprecations :: [Deprecated],
    suggestions :: [Suggested]
}

instance FromJSON HackageDeprecationInfo where
    parseJSON j = do
        deprecationRecords <- parseJSON j
        return $ HackageDeprecationInfo {
            deprecations = map toDeprecated deprecationRecords,
            suggestions = concatMap toSuggestions deprecationRecords
        }

data DeprecationRecord = DeprecationRecord {
    _deprecatedPackage :: PackageName,
    _deprecatedInFavourOf :: [PackageName]
}

instance FromJSON DeprecationRecord where
    parseJSON = withObject "DeprecationRecord" $ \obj -> do
        package <- PackageName <$> (obj .: "deprecated-package")
        inFavourOf <- map PackageName <$> (obj .: "in-favour-of")
        return $ DeprecationRecord package inFavourOf

toDeprecated :: DeprecationRecord -> Deprecated
toDeprecated (DeprecationRecord deprecated _) = Deprecated deprecated

toSuggestions :: DeprecationRecord -> [Suggested]
toSuggestions (DeprecationRecord deprecated inFavourOf) =
    map toSuggestion inFavourOf
  where
    toSuggestion favoured = Suggested {
        suggestedPackage = favoured,
        suggestedInsteadOf = deprecated
    }

loadDeprecationInfo ::
    ( HasHttpManager env
    , MonadReader env m
    , MonadThrow m
    , MonadIO m)
    => m (Either String HackageDeprecationInfo)
loadDeprecationInfo = do
    req <- parseUrl "http://hackage.haskell.org/packages/deprecated.json"
    res <- httpLbs req
    return $! Aeson.eitherDecode (responseBody res)
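The JSON shape these instances expect, sketched as a comment (the field names come from the .: keys above; package names are illustrative):

-- [ { "deprecated-package": "foo"
--   , "in-favour-of": ["bar", "baz"]
--   }
-- ]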

Data/Unpacking.hs | 504
@@ -1,504 +0,0 @@
-- | Code for unpacking documentation bundles, building the Hoogle databases,
-- and compressing/deduping contents.
module Data.Unpacking
    ( newDocUnpacker
    , getHoogleDB
    , makeHoogle
    , createHoogleDatabases
    ) where

import Control.Concurrent.Async
import Data.Conduit.Process
import Import hiding (runDB)
import Data.BlobStore
import Handler.Haddock
import Filesystem (createTree, isFile, removeTree, isDirectory, listDirectory, removeDirectory, removeFile, rename)
import System.Posix.Files (createLink)
import Crypto.Hash.Conduit (sinkHash)
import Control.Concurrent (forkIO)
import Control.Monad.Trans.Resource (allocate, release)
import Data.Char (isAlpha)
import qualified Hoogle
import qualified Data.Text as T
import qualified Data.Yaml as Y
import System.IO (IOMode (ReadMode), withBinaryFile, openBinaryFile)
import System.IO.Temp (withSystemTempFile, withTempFile, withSystemTempDirectory)
import System.Exit (ExitCode (ExitSuccess))
import qualified Filesystem.Path.CurrentOS as F
import Data.Conduit.Zlib (gzip, ungzip)
import qualified Data.ByteString.Base16 as B16
import Data.Byteable (toBytes)
import Crypto.Hash (Digest, SHA1)

newDocUnpacker
    :: FilePath -- ^ haddock root
    -> BlobStore StoreKey
    -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
    -> IO DocUnpacker
newDocUnpacker root store runDB = do
    createDirs dirs

    statusMapVar <- newTVarIO $ asMap mempty
    messageVar <- newTVarIO "Inactive"
    workChan <- atomically newTChan

    let requestDocs forceUnpack ent = atomically $ do
            var <- newTVar USBusy
            modifyTVar statusMapVar
                $ insertMap (stackageSlug $ entityVal ent) var
            writeTChan workChan (forceUnpack, ent, var)

    forkForever $ unpackWorker dirs runDB store messageVar workChan

    return DocUnpacker
        { duRequestDocs = \ent -> do
            m <- readTVarIO statusMapVar
            case lookup (stackageSlug $ entityVal ent) m of
                Nothing -> do
                    b <- isUnpacked dirs (entityVal ent)
                    if b
                        then return USReady
                        else do
                            requestDocs False ent
                            return USBusy
                Just us -> readTVarIO us
        , duGetStatus = readTVarIO messageVar
        , duForceReload = \ent -> do
            atomically $ modifyTVar statusMapVar
                $ deleteMap (stackageSlug $ entityVal ent)
            requestDocs True ent
        }
  where
    dirs = mkDirs root

createDirs :: Dirs -> IO ()
createDirs dirs = do
    createTree $ dirCacheRoot dirs
    createTree $ dirRawRoot dirs
    createTree $ dirGzRoot dirs
    createTree $ dirHoogleRoot dirs

-- | Check for the presence of file system artifacts indicating that the docs
-- have been unpacked.
isUnpacked :: Dirs -> Stackage -> IO Bool
isUnpacked dirs stackage = isFile $ completeUnpackFile dirs stackage

defaultHooDest :: Dirs -> Stackage -> FilePath
defaultHooDest dirs stackage = dirHoogleFp dirs (stackageIdent stackage)
    ["default-" ++ VERSION_hoogle ++ ".hoo"]

forkForever :: IO () -> IO ()
forkForever inner = mask $ \restore ->
    void $ forkIO $ forever $ handleAny print $ restore $ forever inner

unpackWorker
    :: Dirs
    -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
    -> BlobStore StoreKey
    -> TVar Text
    -> TChan (Bool, Entity Stackage, TVar UnpackStatus)
    -> IO ()
unpackWorker dirs runDB store messageVar workChan = do
    let say' = atomically . writeTVar messageVar
    say' "Running the compressor"
    let shouldStop = fmap not $ atomically $ isEmptyTChan workChan
    handleAny print $ runCompressor shouldStop say' dirs

    say' "Waiting for new work item"
    (forceUnpack, ent, resVar) <- atomically $ readTChan workChan
    shouldUnpack <-
        if forceUnpack
            then return True
            else not <$> isUnpacked dirs (entityVal ent)

    let say msg = atomically $ writeTVar messageVar $ concat
            [ toPathPiece (stackageSlug $ entityVal ent)
            , ": "
            , msg
            ]

    when shouldUnpack $ do
        say "Beginning of processing"

        -- As soon as the raw unpack is complete, start serving docs
        let onRawComplete = atomically $ writeTVar resVar USReady

        eres <- tryAny $ unpacker dirs runDB store say onRawComplete ent
        atomically $ writeTVar resVar $ case eres of
            Left e -> USFailed $ tshow e
            Right () -> USReady

removeTreeIfExists :: FilePath -> IO ()
removeTreeIfExists fp = whenM (isDirectory fp) (removeTree fp)

unpackRawDocsTo
    :: BlobStore StoreKey
    -> PackageSetIdent
    -> (Text -> IO ())
    -> FilePath
    -> IO ()
unpackRawDocsTo store ident say destdir =
    withSystemTempFile "haddock-bundle.tar.xz" $ \tempfp temph -> do
        say "Downloading raw doc tarball"
        withAcquire (storeRead' store (HaddockBundle ident)) $ \msrc ->
            case msrc of
                Nothing -> error "No haddocks exist for that snapshot"
                Just src -> src $$ sinkHandle temph
        hClose temph

        createTree destdir
        say "Unpacking tarball"
        (ClosedStream, out, err, cph) <- streamingProcess (proc "tar" ["xf", tempfp])
            { cwd = Just $ fpToString destdir
            }
        (ec, out', err') <- liftIO $ runConcurrently $ (,,)
            <$> Concurrently (waitForStreamingProcess cph)
            <*> Concurrently (out $$ foldC)
            <*> Concurrently (err $$ foldC)
        unless (ec == ExitSuccess) $ throwM
            $ HaddockBundleUnpackException ec out' err'

data HaddockBundleUnpackException = HaddockBundleUnpackException
    !ExitCode
    !ByteString
    !ByteString
    deriving (Show, Typeable)
instance Exception HaddockBundleUnpackException

unpacker
    :: Dirs
    -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
    -> BlobStore StoreKey
    -> (Text -> IO ())
    -> IO () -- ^ onRawComplete
    -> Entity Stackage
    -> IO ()
unpacker dirs runDB store say onRawComplete (Entity sid stackage@Stackage {..}) = do
    say "Removing old directories, if they exist"
    removeTreeIfExists $ dirRawIdent dirs stackageIdent
    removeTreeIfExists $ dirGzIdent dirs stackageIdent
    removeTreeIfExists $ dirHoogleIdent dirs stackageIdent

    let destdir = dirRawIdent dirs stackageIdent
    unpackRawDocsTo store stackageIdent say destdir
    onRawComplete

    createTree $ dirHoogleIdent dirs stackageIdent

    -- Determine which packages have documentation and update the
    -- database appropriately
    say "Updating database for available documentation"
    runResourceT $ runDB $ do
        updateWhere
            [PackageStackage ==. sid]
            [PackageHasHaddocks =. False]
        sourceDirectory destdir $$ mapM_C (\fp -> do
            let mnv = nameAndVersionFromPath fp
            forM_ mnv $ \(name, version) -> updateWhere
                [ PackageStackage ==. sid
                , PackageName' ==. PackageName name
                , PackageVersion ==. Version version
                ]
                [PackageHasHaddocks =. True]
            )

    say "Unpack complete"
    let completeFP = completeUnpackFile dirs stackage
    liftIO $ do
        createTree $ F.parent completeFP
        writeFile completeFP ("Complete" :: ByteString)

completeUnpackFile :: Dirs -> Stackage -> FilePath
completeUnpackFile dirs stackage =
    dirGzIdent dirs (stackageIdent stackage) </> "unpack-complete"

-- | Get the path to the Hoogle database, downloading from persistent storage
-- if necessary. This function will /not/ generate a new database, and
-- therefore is safe to run on a live web server.
getHoogleDB :: Dirs
            -> Stackage
            -> Handler (Maybe FilePath)
getHoogleDB dirs stackage = do
    exists <- liftIO $ isFile fp
    if exists
        then return $ Just fp
        else do
            msrc <- storeRead key
            case msrc of
                Nothing -> return Nothing
                Just src -> do
                    liftIO $ createTree $ F.parent fp
                    let tmpfp = fp <.> "tmp" -- FIXME add something random
                    src $$ ungzip =$ sinkFile tmpfp
                    liftIO $ rename tmpfp fp
                    return $ Just fp
  where
    fp = defaultHooDest dirs stackage
    key = HoogleDB (stackageIdent stackage) $ HoogleVersion VERSION_hoogle

-- | Make sure that the last 5 LTS and last 5 Nightly releases all have Hoogle
-- databases available.
createHoogleDatabases
    :: BlobStore StoreKey
    -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
    -> (Text -> IO ())
    -> (Route App -> [(Text, Text)] -> Text)
    -> IO ()
createHoogleDatabases store runDB say urlRender = do
    stackages <- runDB $ do
        sids <- (++)
            <$> fmap (map $ ltsStackage . entityVal)
                (selectList [] [Desc LtsMajor, Desc LtsMinor, LimitTo 5])
            <*> fmap (map $ nightlyStackage . entityVal)
                (selectList [] [Desc NightlyDay, LimitTo 5])
        catMaybes <$> mapM get sids
    forM_ stackages $ \stackage -> do
        let say' x = say $ concat
                [ toPathPiece $ stackageSlug stackage
                , ": "
                , x
                ]
        handleAny (say' . tshow) $ makeHoogle store say' urlRender stackage

-- | Either download the Hoogle database from persistent storage, or create it.
makeHoogle
    :: BlobStore StoreKey
    -> (Text -> IO ())
    -> (Route App -> [(Text, Text)] -> Text)
    -> Stackage
    -> IO ()
makeHoogle store say urlRender stackage = do
    say "Making hoogle database"
    exists <- storeExists' store hoogleKey
    if exists
        then say "Hoogle database already exists, skipping"
        else do
            say "Generating Hoogle database"
            generate
  where
    ident = stackageIdent stackage
    hoogleKey = HoogleDB ident $ HoogleVersion VERSION_hoogle

    generate = withSystemTempDirectory "hoogle-database-gen" $ \hoogletemp' -> do
        let hoogletemp = fpFromString hoogletemp'
            rawdocs = hoogletemp </> "rawdocs"

        unpackRawDocsTo store ident say rawdocs

        say "Copying Hoogle text files to temp directory"
        runResourceT $ copyHoogleTextFiles say rawdocs hoogletemp
        say "Creating Hoogle database"
        withSystemTempFile "default.hoo" $ \dstFP' dstH -> do
            let dstFP = fpFromString dstFP'
            hClose dstH
            createHoogleDb say dstFP stackage hoogletemp urlRender
            say "Uploading database to persistent storage"
            withAcquire (storeWrite' store hoogleKey) $ \sink ->
                runResourceT $ sourceFile dstFP $$ gzip =$ sink

runCompressor :: IO Bool -- ^ should stop early?
              -> (Text -> IO ()) -> Dirs -> IO ()
runCompressor shouldStop say dirs =
    handle (\EarlyStop -> return ()) $ runResourceT $ goDir $ dirRawRoot dirs
  where
    goDir dir = do
        liftIO $ whenM shouldStop $ do
            say "Stopping compressor early"
            throwIO EarlyStop
        liftIO $ say $ "Compressing directory: " ++ fpToText dir
        sourceDirectory dir $$ mapM_C goFP
        liftIO $ void $ tryIO $ removeDirectory dir

    goFP fp = do
        e <- liftIO $ isFile fp
        if e
            then liftIO $ do
                liftIO $ say $ "Compressing file: " ++ fpToText fp
                handle (print . asSomeException)
                    $ gzipHash dirs suffix
            else goDir fp
      where
        Just suffix = F.stripPrefix (dirRawRoot dirs </> "") fp

data EarlyStop = EarlyStop
    deriving (Show, Typeable)
instance Exception EarlyStop

-- Procedure is to:
--
-- * Gzip the src file to a temp file, and get a hash of the gzipped contents
-- * If that hash doesn't exist in the cache, move the new file to the cache
-- * Create a hard link from dst to the file in the cache
-- * Delete src
gzipHash :: Dirs
         -> FilePath -- ^ suffix
         -> IO ()
gzipHash dirs suffix = do
    withTempFile (fpToString $ dirCacheRoot dirs) "haddock-file.gz" $ \tempfp temph -> do
        digest <- withBinaryFile (fpToString src) ReadMode $ \inh ->
            sourceHandle inh
                $= gzip
                $$ (getZipSink $
                        ZipSink (sinkHandle temph) *>
                        ZipSink sinkHash)
        hClose temph
        let fpcache = dirCacheFp dirs digest
        unlessM (isFile fpcache) $ do
            createTree $ F.parent fpcache
            rename (fpFromString tempfp) fpcache
        createTree $ F.parent dst
        createLink (fpToString fpcache) (fpToString dst)
        removeFile src
  where
    src = dirRawRoot dirs </> suffix
    dst = dirGzRoot dirs </> suffix

dirCacheFp :: Dirs -> Digest SHA1 -> FilePath
dirCacheFp dirs digest =
    dirCacheRoot dirs </> fpFromText x </> fpFromText y <.> "gz"
  where
    name = decodeUtf8 $ B16.encode $ toBytes digest
    (x, y) = splitAt 2 name
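For example (illustrative digest): a file whose gzipped contents hash to the SHA1 da39a3ee... would be cached at dirCacheRoot dirs </> "da" </> "39a3ee..." <.> "gz", so identical gzipped payloads are stored once and hard-linked from every destination path.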
copyHoogleTextFiles :: (Text -> IO ()) -- ^ log
                    -> FilePath -- ^ raw unpacked Haddock files
                    -> FilePath -- ^ temporary work directory
                    -> ResourceT IO ()
copyHoogleTextFiles say raw tmp = do
    let tmptext = tmp </> "text"
    liftIO $ createTree tmptext
    sourceDirectory raw $$ mapM_C (\fp ->
        forM_ (nameAndVersionFromPath fp) $ \(name, version) -> do
            let src = fp </> fpFromText name <.> "txt"
                dst = tmptext </> fpFromText (name ++ "-" ++ version)
            exists <- liftIO $ isFile src
            if exists
                then sourceFile src $$ (sinkFile dst :: Sink ByteString (ResourceT IO) ())
                else liftIO $ appendHoogleErrors say $ HoogleErrors
                    { packageName = name
                    , packageVersion = version
                    , errors = ["No textual Hoogle DB (use \"cabal haddock --hoogle\")"]
                    }
        )

createHoogleDb :: (Text -> IO ())
               -> FilePath -- ^ default.hoo output location
               -> Stackage
               -> FilePath -- ^ temp directory
               -> (Route App -> [(Text, Text)] -> Text)
               -> IO ()
createHoogleDb say dstDefaultHoo stackage tmpdir urlRender = do
    let tmpbin = tmpdir </> "binary"
    createTree tmpbin
    eres <- tryAny $ runResourceT $ do
        -- Create hoogle binary databases for each package.
        sourceDirectory (tmpdir </> "text") $$ mapM_C
            ( \fp -> do
                (releaseKey, srcH) <- allocate (openBinaryFile (fpToString fp) ReadMode) hClose
                forM_ (nameAndVersionFromPath fp) $ \(name, version) -> liftIO $ do
                    say $ concat
                        [ "Creating Hoogle database for: "
                        , name
                        , "-"
                        , version
                        ]
                    src <- unpack . decodeUtf8 . asLByteString <$> hGetContents srcH
                    let -- Preprocess the haddock-generated manifest file.
                        src' = unlines $ haddockHacks (Just (unpack docsUrl)) $ lines src
                        docsUrl = urlRender (HaddockR (stackageSlug stackage) urlPieces) []
                        urlPieces = [name <> "-" <> version, "index.html"]
                        -- Compute the filepath of the resulting hoogle
                        -- database.
                        out = fpToString $ tmpbin </> fpFromText base
                        base = name <> "-" <> version <> ".hoo"
                    errs <- Hoogle.createDatabase "" Hoogle.Haskell [] src' out
                    when (not $ null errs) $ do
                        -- TODO: remove this printing once errors are yielded
                        -- to the user.
                        putStrLn $ concat
                            [ base
                            , " Hoogle errors: "
                            , tshow errs
                            ]
                        appendHoogleErrors say $ HoogleErrors
                            { packageName = name
                            , packageVersion = version
                            , errors = map show errs
                            }
                release releaseKey
            )
        -- Merge the individual binary databases into one big database.
        liftIO $ do
            say "Merging all Hoogle databases"
            dbs <- listDirectory tmpbin
            Hoogle.mergeDatabase
                (map fpToString dbs)
                (fpToString dstDefaultHoo)
    case eres of
        Right () -> return ()
        Left err -> liftIO $ appendHoogleErrors say $ HoogleErrors
            { packageName = "Exception thrown while building hoogle DB"
            , packageVersion = ""
            , errors = [show err]
            }

data HoogleErrors = HoogleErrors
    { packageName :: Text
    , packageVersion :: Text
    , errors :: [String]
    } deriving (Generic)

instance ToJSON HoogleErrors where
instance FromJSON HoogleErrors where

-- Appends hoogle errors to a log file. By encoding within a single
-- list, the resulting file can be decoded as [HoogleErrors].
appendHoogleErrors :: (Text -> IO ()) -> HoogleErrors -> IO ()
appendHoogleErrors say errs = say $ decodeUtf8 $ Y.encode [errs]
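Because each write is a single-element list, successive appends concatenate into one well-formed YAML list, which is what lets the whole log decode as [HoogleErrors]. An illustrative log after two appends (values hypothetical):

-- - packageName: foo
--   packageVersion: '1.0'
--   errors:
--   - parse failure
-- - packageName: bar
--   packageVersion: '2.1'
--   errors: []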
nameAndVersionFromPath :: FilePath -> Maybe (Text, Text)
nameAndVersionFromPath fp =
    (\name -> (name, version)) <$> stripSuffix "-" name'
  where
    (name', version) = T.breakOnEnd "-" $ fpToText $ filename fp
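An illustrative example (path hypothetical):

-- nameAndVersionFromPath "/tmp/rawdocs/yesod-1.4.0"
--     == Just ("yesod", "1.4.0")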
---------------------------------------------------------------------
-- HADDOCK HACKS
-- (Copied from hoogle-4.2.36/src/Recipe/Haddock.hs)
-- Modifications:
-- 1) Some name qualification
-- 2) Explicit type sig due to polymorphic elem
-- 3) Fixed an unused binding warning

-- Eliminate @version
-- Change :*: to (:*:), Haddock bug
-- Change !!Int to !Int, Haddock bug
-- Change instance [overlap ok] to instance, Haddock bug
-- Change instance [incoherent] to instance, Haddock bug
-- Change instance [safe] to instance, Haddock bug
-- Change !Int to Int, HSE bug
-- Drop {-# UNPACK #-}, Haddock bug
-- Drop everything after where, Haddock bug

haddockHacks :: Maybe Hoogle.URL -> [String] -> [String]
haddockHacks loc src = maybe id haddockPackageUrl loc (translate src)
  where
    translate :: [String] -> [String]
    translate = map (unwords . g . map f . words) . filter (not . isPrefixOf "@version ")

    f "::" = "::"
    f (':':xs) = "(:" ++ xs ++ ")"
    f ('!':'!':x:xs) | isAlpha x = xs
    f ('!':x:xs) | isAlpha x || x `elem` ("[(" :: String) = x:xs
    f x | x `elem` ["[overlap","ok]","[incoherent]","[safe]"] = ""
    f x | x `elem` ["{-#","UNPACK","#-}"] = ""
    f x = x

    g ("where":_) = []
    g (x:xs) = x : g xs
    g [] = []

haddockPackageUrl :: Hoogle.URL -> [String] -> [String]
haddockPackageUrl x = concatMap f
  where f y | "@package " `isPrefixOf` y = ["@url " ++ x, y]
            | otherwise = [y]
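Illustrative before/after for the rewrite rules above (input lines hypothetical):

-- ":*:"                  becomes "(:*:)"
-- "!Int"                 becomes "Int"
-- "@version 1.0"         is dropped entirely
-- "class Eq a where x"   becomes "class Eq a" (everything from "where" on is cut)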

Foundation.hs
@@ -18,7 +18,7 @@ import Types
import Yesod.Auth
import Yesod.Auth.BrowserId
import Yesod.Auth.GoogleEmail2 (authGoogleEmail)
import Yesod.Core.Types (Logger, GWData)
import Yesod.Core.Types (Logger)
import Yesod.Default.Config
import Yesod.GitRepo

@@ -35,25 +35,9 @@ data App = App
    , appLogger :: Logger
    , genIO :: MWC.GenIO
    , blobStore :: BlobStore StoreKey
    , haddockRootDir :: FilePath
    , appDocUnpacker :: DocUnpacker
    -- ^ We have a dedicated thread so that (1) we don't try to unpack too many
    -- things at once, (2) we never unpack the same thing twice at the same
    -- time, and (3) so that even if the client connection dies, we finish the
    -- unpack job.
    , widgetCache :: IORef (HashMap Text (UTCTime, GWData (Route App)))
    , websiteContent :: GitRepo WebsiteContent
    }

data DocUnpacker = DocUnpacker
    { duRequestDocs :: Entity Stackage -> IO UnpackStatus
    , duGetStatus :: IO Text
    , duForceReload :: Entity Stackage -> IO ()
    }

data Progress = ProgressWorking !Text
              | ProgressDone !Text !(Route App)

instance HasBlobStore App StoreKey where
    getBlobStore = blobStore

@@ -75,8 +59,6 @@ instance HasHackageRoot App where
-- explanation for this split.
mkYesodData "App" $(parseRoutesFile "config/routes")

deriving instance Show Progress

type Form x = Html -> MForm (HandlerT App IO) (FormResult x, Widget)

defaultLayoutNoContainer :: Widget -> Handler Html
@@ -167,16 +149,12 @@ instance Yesod App where

    makeLogger = return . appLogger

    maximumContentLength _ (Just UploadStackageR) = Just 50000000
    maximumContentLength _ (Just UploadHaddockR{}) = Just 100000000
    maximumContentLength _ (Just UploadV2R) = Just 100000000
    maximumContentLength _ _ = Just 2000000

instance ToMarkup (Route App) where
    toMarkup c =
        case c of
            AllSnapshotsR{} -> "Snapshots"
            UploadStackageR{} -> "Upload"
            AuthR (LoginR{}) -> "Login"
            _ -> ""

Handler/Alias.hs | 86
@@ -1,86 +0,0 @@
module Handler.Alias
    ( handleAliasR
    , getLtsR
    , getNightlyR
    ) where

import Import
import Data.Slug (Slug)
import Handler.StackageHome (getStackageHomeR, getStackageMetadataR, getStackageCabalConfigR, getSnapshotPackagesR, getDocsR)
import Handler.StackageIndex (getStackageIndexR)
import Handler.StackageSdist (getStackageSdistR)
import Handler.Hoogle (getHoogleR, getHoogleDatabaseR)
import Handler.BuildPlan (getBuildPlanR)
import Handler.Download (getGhcMajorVersionR)

handleAliasR :: Slug -> Slug -> [Text] -> Handler ()
handleAliasR user name pieces = do
    $logDebug $ tshow (user, name, pieces)
    Entity _ (Alias _ _ setid) <- runDB $ do
        Entity uid _ <- getBy404 $ UniqueHandle user
        getBy404 $ UniqueAlias uid name
    $logDebug $ "setid: " ++ tshow (setid, pieces)
    case parseRoute ("stackage" : toPathPiece setid : pieces, []) of
        Nothing -> notFound
        Just route -> redirect (route :: Route App)

getLtsR :: [Text] -> Handler ()
getLtsR pieces0 =
    case pieces0 of
        [] -> go []
        piece:pieces'
            | Just (x, y) <- parseLtsPair piece -> goXY x y pieces'
            | Just x <- fromPathPiece piece -> goX x pieces'
            | otherwise -> go pieces0
  where
    go pieces = do
        mlts <- runDB $ selectFirst [] [Desc LtsMajor, Desc LtsMinor]
        case mlts of
            Nothing -> notFound
            Just (Entity _ (Lts _ _ sid)) -> goSid sid pieces

    goX x pieces = do
        mlts <- runDB $ selectFirst [LtsMajor ==. x] [Desc LtsMinor]
        case mlts of
            Nothing -> notFound
            Just (Entity _ (Lts _ _ sid)) -> goSid sid pieces

    goXY x y pieces = do
        Entity _ (Lts _ _ sid) <- runDB $ getBy404 $ UniqueLts x y
        goSid sid pieces

getNightlyR :: [Text] -> Handler ()
getNightlyR pieces0 =
    case pieces0 of
        [] -> go []
        piece:pieces'
            | Just day <- fromPathPiece piece -> goDay day pieces'
            | otherwise -> go pieces0
  where
    go pieces = do
        mn <- runDB $ selectFirst [] [Desc NightlyDay]
        case mn of
            Nothing -> notFound
            Just (Entity _ (Nightly _ _ sid)) -> goSid sid pieces
    goDay day pieces = do
        Entity _ (Nightly _ _ sid) <- runDB $ getBy404 $ UniqueNightly day
        goSid sid pieces

goSid :: StackageId -> [Text] -> Handler ()
goSid sid pieces = do
    s <- runDB $ get404 sid
    case parseRoute ("snapshot" : toPathPiece (stackageSlug s) : pieces, []) of
        Just (SnapshotR slug sr) ->
            case sr of
                StackageHomeR -> getStackageHomeR slug >>= sendResponse
                StackageMetadataR -> getStackageMetadataR slug >>= sendResponse
                StackageCabalConfigR -> getStackageCabalConfigR slug >>= sendResponse
                StackageIndexR -> getStackageIndexR slug >>= sendResponse
                StackageSdistR pnv -> getStackageSdistR slug pnv >>= sendResponse
                SnapshotPackagesR -> getSnapshotPackagesR slug >>= sendResponse
                DocsR -> getDocsR slug >>= sendResponse
                HoogleR -> getHoogleR slug >>= sendResponse
                HoogleDatabaseR -> getHoogleDatabaseR slug >>= sendResponse
                BuildPlanR -> getBuildPlanR slug >>= sendResponse
                GhcMajorVersionR -> getGhcMajorVersionR slug >>= sendResponse
        _ -> notFound

Handler/Aliases.hs | 23
@@ -1,23 +0,0 @@
module Handler.Aliases where

import Import
import Data.Text (strip)

putAliasesR :: Handler ()
putAliasesR = do
    uid <- requireAuthId
    aliasesText <- runInputPost $ ireq textField "aliases"
    aliases <- mapM (parseAlias uid) $ lines aliasesText
    runDB $ do
        deleteWhere [AliasUser ==. uid]
        mapM_ insert_ aliases
    setMessage "Aliases updated"
    redirect ProfileR

parseAlias :: UserId -> Text -> Handler Alias
parseAlias uid t = maybe (invalidArgs ["Invalid alias: " ++ t]) return $ do
    name <- fromPathPiece x
    setid <- fromPathPiece y
    return $ Alias uid name setid
  where
    (strip -> x, (strip . drop 1) -> y) = break (== ':') t

Handler/CompressorStatus.hs | 14
@@ -1,14 +0,0 @@
module Handler.CompressorStatus where

import Import

getCompressorStatusR :: Handler Html
getCompressorStatusR = do
    status <- getYesod >>= liftIO . duGetStatus . appDocUnpacker
    defaultLayout $ do
        setTitle "Compressor thread status"
        [whamlet|
            <div .container>
                <h1>Compressor thread status
                <p>#{status}
        |]

Handler/Download.hs
@@ -36,6 +36,7 @@ getDownloadR = defaultLayout $ do
    setTitle "Download"
    $(widgetFile "download")

{- FIXME
ltsMajorVersions :: YesodDB App [Lts]
ltsMajorVersions =
    (dropOldMinors . map entityVal)
@@ -47,12 +48,15 @@ dropOldMinors (l@(Lts x _ _):rest) =
    l : dropOldMinors (dropWhile sameMinor rest)
  where
    sameMinor (Lts y _ _) = x == y
-}

getDownloadSnapshotsJsonR :: Handler Value
getDownloadSnapshotsJsonR = getDownloadLtsSnapshotsJsonR

getDownloadLtsSnapshotsJsonR :: Handler Value
getDownloadLtsSnapshotsJsonR = do
    error "getDownloadLtsSnapshotsJsonR"
    {-
    (mlatestNightly, ltses) <- runDB $ (,)
        <$> getLatestNightly
        <*> ltsMajorVersions
@@ -82,11 +86,15 @@ ghcMajorVersionText snapshot
    = ghcMajorVersionToText
    $ fromMaybe (GhcMajorVersion 7 8)
    $ stackageGhcMajorVersion snapshot
    -}

getGhcMajorVersionR :: SnapSlug -> Handler Text
getGhcMajorVersionR slug = do
getGhcMajorVersionR _slug = do
    error "getGhcMajorVersionR"
    {-
    snapshot <- runDB $ getBy404 $ UniqueSnapshot slug
    return $ ghcMajorVersionText $ entityVal snapshot
    -}

getDownloadGhcLinksR :: SupportedArch -> Text -> Handler TypedContent
getDownloadGhcLinksR arch fileName = do
@ -1,15 +1,5 @@
|
||||
module Handler.Haddock
|
||||
( getUploadHaddockR
|
||||
, putUploadHaddockR
|
||||
, getHaddockR
|
||||
, getUploadDocMapR
|
||||
, putUploadDocMapR
|
||||
-- Exported for use in Handler.Hoogle
|
||||
, Dirs (..), getDirs, dirHoogleFp, mkDirs
|
||||
, dirRawIdent
|
||||
, dirGzIdent
, dirHoogleIdent
, createCompressor
( getHaddockR
) where

import Control.Concurrent (forkIO)
@ -31,269 +21,8 @@ import System.IO (IOMode (ReadMode), withBinaryFile)
import System.IO.Temp (withTempFile)
import System.Posix.Files (createLink)

form :: Form FileInfo
form = renderDivs $ areq fileField "tarball containing docs"
{ fsName = Just "tarball"
} Nothing

getUploadHaddockR, putUploadHaddockR :: Text -> Handler Html
getUploadHaddockR slug0 = do
uid <- requireAuthIdOrToken
stackageEnt@(Entity sid Stackage {..}) <- runDB $ do
-- Provide fallback for old URLs
ment <- getBy $ UniqueStackage $ PackageSetIdent slug0
case ment of
Just ent -> return ent
Nothing -> do
slug <- maybe notFound return $ fromPathPiece slug0
getBy404 $ UniqueSnapshot slug
let ident = stackageIdent
slug = stackageSlug
unless (uid == stackageUser) $ permissionDenied "You do not control this snapshot"
((res, widget), enctype) <- runFormPostNoToken form
case res of
FormSuccess fileInfo -> do
fileSource fileInfo $$ storeWrite (HaddockBundle ident)
runDB $ update sid [StackageHasHaddocks =. True]
master <- getYesod
liftIO $ duForceReload (appDocUnpacker master) stackageEnt
setMessage "Haddocks uploaded"
redirect $ SnapshotR slug StackageHomeR
_ -> defaultLayout $ do
setTitle "Upload Haddocks"
$(widgetFile "upload-haddock")

putUploadHaddockR = getUploadHaddockR

getHaddockR :: SnapSlug -> [Text] -> Handler ()
getHaddockR slug rest = do
stackageEnt <- runDB $ do
onS3 <- fmap isJust $ getBy $ UniqueDocsOnS3 slug
when onS3 $ redirect $ concat
$ "http://haddock.stackage.org/"
: toPathPiece slug
: map (cons '/') rest
ment <- getBy $ UniqueSnapshot slug
case ment of
Just ent -> do
case rest of
[pkgver] -> tryContentsRedirect ent pkgver
[pkgver, "index.html"] -> tryContentsRedirect ent pkgver
_ -> return ()
return ent
Nothing -> do
Entity _ stackage <- getBy404
$ UniqueStackage
$ PackageSetIdent
$ toPathPiece slug
redirectWith status301 $ HaddockR (stackageSlug stackage) rest
mapM_ sanitize rest
dirs <- getDirs
requireDocs stackageEnt

let ident = stackageIdent (entityVal stackageEnt)
rawfp = dirRawFp dirs ident rest
gzfp = dirGzFp dirs ident rest
mime = defaultMimeLookup $ fpToText $ filename rawfp

whenM (liftIO $ isDirectory rawfp)
$ redirect $ HaddockR slug $ rest ++ ["index.html"]
whenM (liftIO $ isDirectory gzfp)
$ redirect $ HaddockR slug $ rest ++ ["index.html"]

whenM (liftIO $ isFile gzfp) $ do
addHeader "Content-Encoding" "gzip"
sendFile mime $ fpToString gzfp

-- Note: There's a small race window here, where the compressor thread
-- could pull the rug out from under us. We can work around this by opening
-- the file and, if that fails, try the compressed version again.
whenM (liftIO $ isFile rawfp) $ sendFile mime $ fpToString rawfp

notFound
where
sanitize p
| ("/" `isInfixOf` p) || p `member` (asHashSet $ setFromList ["", ".", ".."]) =
permissionDenied "Invalid request"
| otherwise = return ()

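-- A sketch of the workaround mentioned in the note above (hypothetical,
-- not wired in; it only narrows the race, and error handling is elided):
-- open the raw file first, and fall back to the gzipped copy when the open
-- fails because the compressor already replaced the file.
--
-- eh <- liftIO $ tryIO $ withBinaryFile (fpToString rawfp) ReadMode hFileSize
-- case eh of
-- Right _ -> sendFile mime $ fpToString rawfp
-- Left _ -> do
-- addHeader "Content-Encoding" "gzip"
-- sendFile mime $ fpToString gzfp
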
-- | Try to redirect to the snapshot's package page instead of the
-- Haddock-generated HTML.
tryContentsRedirect :: Entity Stackage -> Text -> YesodDB App ()
tryContentsRedirect (Entity sid Stackage {..}) pkgver = do
mdocs <- selectFirst
[ DocsName ==. name
, DocsVersion ==. version
, DocsSnapshot ==. Just sid
]
[]
forM_ mdocs $ const
$ redirect
$ SnapshotR stackageSlug
$ StackageSdistR
$ PNVNameVersion name version
where
(PackageName . dropDash -> name, Version -> version) = T.breakOnEnd "-" pkgver

dropDash :: Text -> Text
dropDash t = fromMaybe t $ stripSuffix "-" t
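
-- For example, with pkgver "text-1.1.0.0", T.breakOnEnd "-" yields
-- ("text-", "1.1.0.0"); dropDash strips the trailing dash, giving
-- PackageName "text" and Version "1.1.0.0".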

createCompressor
:: Dirs
-> IO (IORef Text, IO ()) -- ^ action to kick off compressor again
createCompressor dirs = do
baton <- newMVar ()
status <- newIORef "Compressor is idle"
mask_ $ void $ forkIO $ (finallyE $ \e -> writeIORef status $ "Compressor thread exited: " ++ tshow e) $ forever $ do
writeIORef status "Waiting for signal to start compressing"
takeMVar baton
writeIORef status "Received signal, traversing directories"
let rawRoot = dirRawRoot dirs
whenM (isDirectory rawRoot) $ runResourceT $ goDir status rawRoot
return (status, void $ tryPutMVar baton ())
where
finallyE f g = mask $ \restore -> do
restore g `catch` \e -> do
() <- f $ Just (e :: SomeException)
() <- throwIO e
return ()
f Nothing
goDir status dir = do
writeIORef status $ "Compressing directory: " ++ fpToText dir
sourceDirectory dir $$ mapM_C (goFP status)
liftIO $ void $ tryIO $ removeDirectory dir

goFP status fp = do
e <- liftIO $ isFile fp
if e
then liftIO $ do
writeIORef status $ "Compressing file: " ++ fpToText fp
handle (print . asSomeException)
$ gzipHash dirs suffix
else goDir status fp
where
Just suffix = F.stripPrefix (dirRawRoot dirs </> "") fp

-- Procedure is to:
--
-- * Gzip the src file to a temp file, and get a hash of the gzipped contents
-- * If that hash doesn't exist in the cache, move the new file to the cache
-- * Create a hard link from dst to the file in the cache
-- * Delete src
gzipHash :: Dirs
-> FilePath -- ^ suffix
-> IO ()
gzipHash dirs suffix = do
withTempFile (fpToString $ dirCacheRoot dirs) "haddock-file.gz" $ \tempfp temph -> do
digest <- withBinaryFile (fpToString src) ReadMode $ \inh ->
sourceHandle inh
$= gzip
$$ (getZipSink $
ZipSink (sinkHandle temph) *>
ZipSink sinkHash)
hClose temph
let fpcache = dirCacheFp dirs digest
unlessM (isFile fpcache) $ do
createTree $ F.parent fpcache
rename (fpFromString tempfp) fpcache
createTree $ F.parent dst
createLink (fpToString fpcache) (fpToString dst)
removeFile src
where
src = dirRawRoot dirs </> suffix
dst = dirGzRoot dirs </> suffix
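
-- Because dst is a hard link into the cache, identical gzipped files
-- across snapshots share a single inode; removing dst later leaves the
-- cached copy intact for any other links to it.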

data Dirs = Dirs
{ dirRawRoot :: !FilePath
, dirGzRoot :: !FilePath
, dirCacheRoot :: !FilePath
, dirHoogleRoot :: !FilePath
}

getDirs :: Handler Dirs
getDirs = mkDirs . haddockRootDir <$> getYesod

mkDirs :: FilePath -> Dirs
mkDirs dir = Dirs
{ dirRawRoot = dir </> "idents-raw"
, dirGzRoot = dir </> "idents-gz"
, dirCacheRoot = dir </> "cachedir"
, dirHoogleRoot = dir </> "hoogle"
}

dirGzIdent, dirRawIdent, dirHoogleIdent :: Dirs -> PackageSetIdent -> FilePath
dirGzIdent dirs ident = dirGzRoot dirs </> fpFromText (toPathPiece ident)
dirRawIdent dirs ident = dirRawRoot dirs </> fpFromText (toPathPiece ident)
dirHoogleIdent dirs ident = dirHoogleRoot dirs </> fpFromText (toPathPiece ident)

dirGzFp, dirRawFp, dirHoogleFp :: Dirs -> PackageSetIdent -> [Text] -> FilePath
dirGzFp dirs ident rest = dirGzIdent dirs ident </> mconcat (map fpFromText rest)
dirRawFp dirs ident rest = dirRawIdent dirs ident </> mconcat (map fpFromText rest)
dirHoogleFp dirs ident rest = dirHoogleIdent dirs ident </> mconcat (map fpFromText rest)

dirCacheFp :: Dirs -> Digest SHA1 -> FilePath
dirCacheFp dirs digest =
dirCacheRoot dirs </> fpFromText x </> fpFromText y <.> "gz"
where
name = decodeUtf8 $ B16.encode $ toBytes digest
(x, y) = splitAt 2 name
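
-- For a digest whose hex encoding begins "deadbeef...", the cached file
-- lands at cachedir/de/<remaining hex>.gz: a two-character fan-out that
-- keeps any one cache directory from accumulating too many entries.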

data DocInfo = DocInfo Version (Map Text [Text])
instance FromJSON DocInfo where
parseJSON = withObject "DocInfo" $ \o -> DocInfo
<$> (Version <$> o .: "version")
<*> o .: "modules"

getUploadDocMapR :: Handler Html
getUploadDocMapR = do
uid <- requireAuthIdOrToken
user <- runDB $ get404 uid
extra <- getExtra
when (unSlug (userHandle user) `notMember` adminUsers extra)
$ permissionDenied "Must be an administrator"

((res, widget), enctype) <- runFormPostNoToken $ renderDivs $ (,)
<$> areq
fileField
"YAML file with map" { fsName = Just "docmap" }
Nothing
<*> areq textField "Stackage ID" { fsName = Just "snapshot" } Nothing
case res of
FormSuccess (fi, snapshot) -> do
Entity sid stackage <- runDB $ do
ment <- getBy $ UniqueStackage $ PackageSetIdent snapshot
case ment of
Just ent -> return ent
Nothing -> do
slug <- maybe notFound return $ fromPathPiece snapshot
getBy404 $ UniqueSnapshot slug
unless (stackageHasHaddocks stackage) $ invalidArgs $ return
"Cannot use a snapshot without docs for a docmap"
bs <- fileSource fi $$ foldC
case Y.decodeEither bs of
Left e -> invalidArgs [pack e]
Right m0 -> do
now <- liftIO getCurrentTime
render <- getUrlRender
runDB $ forM_ (mapToList $ asMap m0) $ \(package, DocInfo version ms) -> do
did <- insert Docs
{ docsName = PackageName package
, docsVersion = version
, docsUploaded = now
, docsSnapshot = Just sid
}
forM_ (mapToList ms) $ \(name, pieces) -> do
let url = render $ HaddockR (stackageSlug stackage) pieces
insert_ $ Module did name url
setMessage "Doc map complete"
redirect UploadDocMapR
_ -> defaultLayout $ do
setTitle "Upload doc map"
[whamlet|
<form method=post action=?_method=PUT enctype=#{enctype}>
^{widget}
<input type=submit .btn value="Set document map">
|]

putUploadDocMapR :: Handler Html
putUploadDocMapR = getUploadDocMapR
getHaddockR slug rest = redirect $ concat
$ "http://haddock.stackage.org/"
: toPathPiece slug
: map (cons '/') rest

@ -1,5 +1,10 @@
{-# LANGUAGE TupleSections, OverloadedStrings #-}
module Handler.Home where
module Handler.Home
( getHomeR
, getAuthorsR
, getInstallR
, getOlderReleasesR
) where

import Data.Slug
import Database.Esqueleto as E hiding (isNothing)
@ -31,51 +36,3 @@ contentHelper title accessor = do
defaultLayout $ do
setTitle title
toWidget homepage

-- FIXME remove this and switch to above getHomeR' when new homepage is ready
getHomeR' :: Handler Html
getHomeR' = do
windowsLatest <- linkFor "unstable-ghc78hp-inclusive"
restLatest <- linkFor "unstable-ghc78-inclusive"
defaultLayout $ do
setTitle "Stackage Server"
$(combineStylesheets 'StaticR
[ css_bootstrap_modified_css
, css_bootstrap_responsive_modified_css
])
$(widgetFile "homepage")
where
linkFor name =
do slug <- mkSlug name
fpcomplete <- mkSlug "fpcomplete"
selecting (\(alias, user, stackage) ->
do where_ $
alias ^. AliasName ==. val slug &&.
alias ^. AliasUser ==. user ^. UserId &&.
user ^. UserHandle ==. val fpcomplete &&.
alias ^. AliasTarget ==. stackage ^. StackageIdent
return (stackage ^. StackageSlug))
where selecting =
fmap (fmap unValue . listToMaybe) .
runDB .
select .
from

addSnapshot title short = do
mex <- handlerToWidget $ linkFor $ name "exclusive"
min' <- handlerToWidget $ linkFor $ name "inclusive"
when (isJust mex || isJust min')
[whamlet|
<tr>
<td>
#{asHtml title}
<td>
$maybe ex <- mex
<a href=@{SnapshotR ex StackageHomeR}>exclusive
$if isJust mex && isJust min'
<td>
$maybe in <- min'
<a href=@{SnapshotR in StackageHomeR}>inclusive
|]
where
name suffix = concat ["unstable-", short, "-", suffix]

@ -6,14 +6,14 @@ import Control.Spoon (spoon)
import Data.Data (Data (..))
import Data.Slug (SnapSlug)
import Data.Text.Read (decimal)
import Data.Unpacking (getHoogleDB)
import Handler.Haddock (getDirs)
import qualified Hoogle
import Import
import Text.Blaze.Html (preEscapedToHtml)

getHoogleR :: SnapSlug -> Handler Html
getHoogleR slug = do
error "getHoogleR"
{- FIXME
dirs <- getDirs
mquery <- lookupGetParam "q"
mpage <- lookupGetParam "page"
@ -52,9 +52,12 @@ getHoogleR slug = do
defaultLayout $ do
setTitle "Hoogle Search"
$(widgetFile "hoogle")
-}

getHoogleDatabaseR :: SnapSlug -> Handler Html
getHoogleDatabaseR slug = do
error "getHoogleDatabaseR"
{-
dirs <- getDirs
Entity _ stackage <- runDB $ getBy404 $ UniqueSnapshot slug
mdatabasePath <- getHoogleDB dirs stackage
@ -167,3 +170,4 @@ runHoogleQuery heDatabase HoogleQueryInput {..} =
modu' = ModuleLink moduname modu
return $ asMap $ singletonMap pkg' [modu']
getPkgModPair _ = Nothing
-}

12
Handler/OldLinks.hs
Normal file
@ -0,0 +1,12 @@
module Handler.OldLinks
( getLtsR
, getNightlyR
) where

import Import

getLtsR :: [Text] -> Handler ()
getLtsR foo = return ()

getNightlyR :: [Text] -> Handler ()
getNightlyR foo = return ()
@ -2,7 +2,14 @@

-- | Lists the package page similar to Hackage.

module Handler.Package where
module Handler.Package
( getPackageR
, getPackageSnapshotsR
, postPackageLikeR
, postPackageUnlikeR
, postPackageTagR
, postPackageUntagR
) where

import Data.Char
import Data.Slug
@ -22,6 +29,8 @@ import Text.Email.Validate
-- | Page metadata package.
getPackageR :: PackageName -> Handler Html
getPackageR pn =
error "getPackageR"
{-
packagePage pn Nothing (selectFirst [DocsName ==. pn] [Desc DocsUploaded])

packagePage :: PackageName
@ -266,6 +275,7 @@ renderEmail = T.decodeUtf8 . toByteString
-- | Format a number with commas nicely.
formatNum :: Int -> Text
formatNum = sformat commas
-}

postPackageLikeR :: PackageName -> Handler ()
postPackageLikeR packageName = maybeAuthId >>= \muid -> case muid of
@ -309,7 +319,8 @@ postPackageUntagR packageName =
Nothing -> error "Need a slug"

getPackageSnapshotsR :: PackageName -> Handler Html
getPackageSnapshotsR pn =
getPackageSnapshotsR pn = error "getPackageSnapshotsR"
{-
do let haddocksLink ident version =
HaddockR ident [concat [toPathPiece pn, "-", toPathPiece version]]
snapshots <- (runDB .
@ -335,3 +346,4 @@ getPackageSnapshotsR pn =
,fromMaybe title (stripPrefix "Stackage build for " title)
,ident
,hasHaddocks)
-}

@ -1,39 +0,0 @@
module Handler.PackageCounts
( getPackageCountsR
) where

import Import hiding (Value (..), groupBy, (==.))
import Data.Slug (mkSlug)
import Database.Esqueleto

data Count = Count
{ name :: Text
, date :: Day
, packages :: Int
}

toCount :: (Value Text, Value UTCTime, Value Int) -> Count
toCount (Value x, Value y, Value z) =
Count x (utctDay y) z

getPackageCountsR :: Handler Html
getPackageCountsR = do
admins <- adminUsers <$> getExtra
counts <- runDB $ do
let slugs = mapMaybe mkSlug $ setToList admins
adminUids <- selectKeysList [UserHandle <-. slugs] []
fmap (map toCount) $ select $ from $ \(s, p) -> do
where_ $
(not_ $ s ^. StackageTitle `like` val "%inclusive") &&.
(s ^. StackageId ==. p ^. PackageStackage) &&.
(s ^. StackageUser `in_` valList adminUids)
groupBy (s ^. StackageTitle, s ^. StackageUploaded)
orderBy [desc $ s ^. StackageUploaded]
return
( s ^. StackageTitle
, s ^. StackageUploaded
, countRows
)
defaultLayout $ do
setTitle "Package counts"
$(widgetFile "package-counts")
@ -9,6 +9,8 @@ import Import
-- FIXME maybe just redirect to the LTS or nightly package list
getPackageListR :: Handler Html
getPackageListR = defaultLayout $ do
error "getPackageListR"
{-
setTitle "Package list"
cachedWidget (20 * 60) "package-list" $ do
let clean (x, y) =
@ -47,3 +49,4 @@ cachedWidget _diff _key widget = do
atomicModifyIORef' ref $ \m -> (insertMap key (addUTCTime diff now, gw) m, ())
return ((), gw)
-}
-}

@ -21,19 +21,10 @@ getProfileR = do
setMessage "Profile updated"
redirect ProfileR
_ -> return ()
(emails, aliases) <- runDB $ (,)
<$> selectList [EmailUser ==. uid] [Asc EmailEmail]
<*> selectList [AliasUser ==. uid] [Asc AliasName]
emails <- runDB $ selectList [EmailUser ==. uid] [Asc EmailEmail]
defaultLayout $ do
setTitle "Your Profile"
$(widgetFile "profile")

aliasToText :: Entity Alias -> Text
aliasToText (Entity _ (Alias _ name target)) = concat
[ toPathPiece name
, ": "
, toPathPiece target
]

putProfileR :: Handler Html
putProfileR = getProfileR

@ -1,15 +0,0 @@
module Handler.Progress where

import Import

getProgressR :: UploadProgressId -> Handler Html
getProgressR key = do
UploadProgress text mdest <- runDB $ get404 key
case mdest of
Nothing -> defaultLayout $ do
addHeader "Refresh" "1"
setTitle "Working..."
[whamlet|<p>#{text}|]
Just url -> do
setMessage $ toHtml text
redirect url
@ -1,20 +0,0 @@
module Handler.RefreshDeprecated where

import Import
import qualified Data.Aeson as Aeson
import Network.HTTP.Conduit (simpleHttp)
import Data.Hackage.DeprecationInfo

getRefreshDeprecatedR :: Handler Html
getRefreshDeprecatedR = do
bs <- simpleHttp "http://hackage.haskell.org/packages/deprecated.json"
case Aeson.decode bs of
Nothing -> return "Failed to parse"
Just info -> do
runDB $ do
deleteWhere ([] :: [Filter Deprecated])
insertMany_ (deprecations info)
runDB $ do
deleteWhere ([] :: [Filter Suggested])
insertMany_ (suggestions info)
return "Done"
@ -10,6 +10,8 @@ type Sitemap = forall m. Monad m => Producer m (SitemapUrl (Route App))

getSitemapR :: Handler TypedContent
getSitemapR = sitemap $ do
error "getSitemapR"
{- FIXME
priority 1.0 $ HomeR

priority 0.9 $ LtsR []
@ -105,3 +107,4 @@ url loc = yield $ SitemapUrl
, sitemapChangeFreq = Nothing
, sitemapPriority = Nothing
}
-}

@ -20,6 +20,8 @@ snapshotsPerPage = 50
-- inclined, or create a single monolithic file.
getAllSnapshotsR :: Handler Html
getAllSnapshotsR = do
error "getAllSnapshotsR"
{-
now' <- liftIO getCurrentTime
currentPageMay <- lookupGetParam "page"
let currentPage :: Int
@ -51,3 +53,4 @@ getAllSnapshotsR = do
let (E.Value ident, E.Value title, E.Value uploaded, E.Value display, E.Value handle') = c
in (ident,title,format (diff True) (diffUTCTime uploaded now'),display,handle')
groupUp now' (c, rs) = (c, (groupBy (on (==) (\(_,_,uploaded,_,_) -> uploaded)) . map (uncrapify now')) rs)
-}

@ -1,23 +1,24 @@
module Handler.StackageHome where
module Handler.StackageHome
( getStackageHomeR
, getStackageCabalConfigR
, getDocsR
, getSnapshotPackagesR
) where

import Import
import Data.Time (FormatTime)
import Data.Slug (SnapSlug)
import qualified Database.Esqueleto as E
import Handler.PackageList (cachedWidget)

getStackageHomeR :: SnapSlug -> Handler Html
getStackageHomeR slug = do
error "getStackageHomeR"
{-
stackage <- runDB $ do
Entity _ stackage <- getBy404 $ UniqueSnapshot slug
return stackage

let minclusive =
if "inclusive" `isSuffixOf` stackageTitle stackage
then Just True
else if "exclusive" `isSuffixOf` stackageTitle stackage
then Just False
else Nothing
let minclusive = Just False
base = maybe 0 (const 1) minclusive :: Int
hoogleForm =
let queryText = "" :: Text
@ -26,78 +27,53 @@ getStackageHomeR slug = do
Entity sid _stackage <- runDB $ getBy404 $ UniqueSnapshot slug
defaultLayout $ do
setTitle $ toHtml $ stackageTitle stackage
cachedWidget (20 * 60) ("package-list-" ++ toPathPiece slug) $ do
let maxPackages = 5000
(packageListClipped, packages') <- handlerToWidget $ runDB $ do
packages' <- E.select $ E.from $ \(m,p) -> do
E.where_ $
(m E.^. MetadataName E.==. p E.^. PackageName') E.&&.
(p E.^. PackageStackage E.==. E.val sid)
E.orderBy [E.asc $ m E.^. MetadataName]
E.groupBy ( m E.^. MetadataName
, m E.^. MetadataSynopsis
)
E.limit maxPackages
return
( m E.^. MetadataName
, m E.^. MetadataSynopsis
, E.max_ (p E.^. PackageVersion)
, E.max_ $ E.case_
[ ( p E.^. PackageHasHaddocks
, p E.^. PackageVersion
)
]
(E.val (Version ""))
)
packageCount <- count [PackageStackage ==. sid]
let packageListClipped = packageCount > maxPackages
return (packageListClipped, packages')
let packages = flip map packages' $ \(name, syn, latestVersion, forceNotNull -> mversion) ->
( E.unValue name
, fmap unVersion $ E.unValue latestVersion
, strip $ E.unValue syn
, (<$> mversion) $ \version -> HaddockR slug $ return $ concat
[ toPathPiece $ E.unValue name
, "-"
, version
let maxPackages = 5000
(packageListClipped, packages') <- handlerToWidget $ runDB $ do
packages' <- E.select $ E.from $ \(m,p) -> do
E.where_ $
(m E.^. MetadataName E.==. p E.^. PackageName') E.&&.
(p E.^. PackageStackage E.==. E.val sid)
E.orderBy [E.asc $ m E.^. MetadataName]
E.groupBy ( m E.^. MetadataName
, m E.^. MetadataSynopsis
)
E.limit maxPackages
return
( m E.^. MetadataName
, m E.^. MetadataSynopsis
, E.max_ (p E.^. PackageVersion)
, E.max_ $ E.case_
[ ( p E.^. PackageHasHaddocks
, p E.^. PackageVersion
)
]
(E.val (Version ""))
)
forceNotNull (E.Value Nothing) = Nothing
forceNotNull (E.Value (Just (Version v)))
| null v = Nothing
| otherwise = Just v
$(widgetFile "stackage-home")
packageCount <- count [PackageStackage ==. sid]
let packageListClipped = packageCount > maxPackages
return (packageListClipped, packages')
let packages = flip map packages' $ \(name, syn, latestVersion, forceNotNull -> mversion) ->
( E.unValue name
, fmap unVersion $ E.unValue latestVersion
, strip $ E.unValue syn
, (<$> mversion) $ \version -> HaddockR slug $ return $ concat
[ toPathPiece $ E.unValue name
, "-"
, version
]
)
forceNotNull (E.Value Nothing) = Nothing
forceNotNull (E.Value (Just (Version v)))
| null v = Nothing
| otherwise = Just v
$(widgetFile "stackage-home")
where strip x = fromMaybe x (stripSuffix "." x)

getStackageMetadataR :: SnapSlug -> Handler TypedContent
getStackageMetadataR slug = do
Entity sid _ <- runDB $ getBy404 $ UniqueSnapshot slug
respondSourceDB typePlain $ do
sendChunkBS "Override packages\n"
sendChunkBS "=================\n"
stream sid True
sendChunkBS "\nPackages from Hackage\n"
sendChunkBS "=====================\n"
stream sid False
where
stream sid isOverwrite =
selectSource
[ PackageStackage ==. sid
, PackageOverwrite ==. isOverwrite
]
[ Asc PackageName'
, Asc PackageVersion
] $= mapC (Chunk . toBuilder . showPackage)

showPackage (Entity _ p) = concat
[ toPathPiece $ packageName' p
, "-"
, toPathPiece $ packageVersion p
, "\n"
]
-}

getStackageCabalConfigR :: SnapSlug -> Handler TypedContent
getStackageCabalConfigR slug = do
error "getStackageCabalConfigR"
{-
Entity sid _ <- runDB $ getBy404 $ UniqueSnapshot slug
render <- getUrlRender

@ -175,19 +151,15 @@ getStackageCabalConfigR slug = do
toBuilder (asText ",\n ") ++
toBuilder (toPathPiece $ packageName' p) ++
constraint p
-}

yearMonthDay :: FormatTime t => t -> String
yearMonthDay = formatTime defaultTimeLocale "%Y-%m-%d"

getOldStackageR :: PackageSetIdent -> [Text] -> Handler ()
getOldStackageR ident pieces = do
Entity _ stackage <- runDB $ getBy404 $ UniqueStackage ident
case parseRoute ("snapshot" : toPathPiece (stackageSlug stackage) : pieces, []) of
Nothing -> notFound
Just route -> redirect (route :: Route App)

getSnapshotPackagesR :: SnapSlug -> Handler Html
getSnapshotPackagesR slug = do
error "getSnapshotPackagesR"
{-
Entity sid _stackage <- runDB $ getBy404 $ UniqueSnapshot slug
defaultLayout $ do
setTitle $ toHtml $ "Package list for " ++ toPathPiece slug
@ -227,9 +199,12 @@ getSnapshotPackagesR slug = do
$(widgetFile "package-list")
where strip x = fromMaybe x (stripSuffix "." x)
mback = Just (SnapshotR slug StackageHomeR, "Return to snapshot")
-}

getDocsR :: SnapSlug -> Handler Html
getDocsR slug = do
error "getDocsR"
{-
Entity sid _stackage <- runDB $ getBy404 $ UniqueSnapshot slug
defaultLayout $ do
setTitle $ toHtml $ "Module list for " ++ toPathPiece slug
@ -254,3 +229,4 @@ getDocsR slug = do
, E.unValue version
)
$(widgetFile "doc-list")
-}

@ -6,6 +6,8 @@ import Data.Slug (SnapSlug)

getStackageIndexR :: SnapSlug -> Handler TypedContent
getStackageIndexR slug = do
error "getStackageIndexR"
{-
Entity _ stackage <- runDB $ getBy404 $ UniqueSnapshot slug
let ident = stackageIdent stackage
msrc <- storeRead $ CabalIndex ident
@ -16,3 +18,4 @@ getStackageIndexR slug = do
addHeader "content-disposition" "attachment; filename=\"00-index.tar.gz\""
neverExpires
respondSource "application/x-gzip" $ mapOutput (Chunk . toBuilder) src
-}

@ -1,16 +1,17 @@
module Handler.StackageSdist where
module Handler.StackageSdist
( getStackageSdistR
) where

import Import
import Data.BlobStore
import Data.Hackage
import Data.Slug (SnapSlug)
import Handler.Package (packagePage)

getStackageSdistR :: SnapSlug -> PackageNameVersion -> Handler TypedContent
getStackageSdistR slug (PNVTarball name version) = do
error "getStackageSdistR"
{-
Entity _ stackage <- runDB $ getBy404 $ UniqueSnapshot slug
let ident = stackageIdent stackage
addDownload (Just ident) name version
msrc1 <- storeRead (CustomSdist ident name version)
msrc <-
case msrc1 of
@ -38,6 +39,7 @@ getStackageSdistR slug (PNVName name) = runDB $ do
redirect $ SnapshotR slug
$ StackageSdistR
$ PNVNameVersion name packageVersion
{- FIXME
getStackageSdistR slug (PNVNameVersion name version) = packagePage
name (Just version)
(do
@ -54,12 +56,5 @@ getStackageSdistR slug (PNVNameVersion name version) = packagePage
, [DocsName ==. name]
]
) >>= sendResponse

addDownload :: Maybe PackageSetIdent
-> PackageName
-> Version
-> Handler ()
addDownload downloadIdent downloadPackage downloadVersion = do
downloadUserAgent <- fmap decodeUtf8 <$> lookupHeader "user-agent"
downloadTimestamp <- liftIO getCurrentTime
runDB $ insert_ Download {..}
-}
-}

@ -20,6 +20,8 @@ getTagListR = do

getTagR :: Slug -> Handler Html
getTagR tagSlug = do
error "getTagR"
{-
-- FIXME arguably: check if this tag is banned. Leaving it as displayed for
-- now, since someone needs to go out of their way to find it.
packages <- fmap (map (\(E.Value t,E.Value s) -> (t,strip s))) $ runDB $
@ -33,3 +35,4 @@ getTagR tagSlug = do
setTitle $ "Stackage tag"
$(widgetFile "tag")
where strip x = fromMaybe x (stripSuffix "." x)
-}

@ -1,351 +0,0 @@
|
||||
module Handler.UploadStackage where
|
||||
|
||||
import Import hiding (catch, get, update)
|
||||
import qualified Import
|
||||
import System.IO.Temp (withSystemTempFile, withSystemTempDirectory, openBinaryTempFile)
|
||||
import Crypto.Hash.Conduit (sinkHash)
|
||||
import Crypto.Hash (Digest, SHA1)
|
||||
import Data.Byteable (toBytes)
|
||||
import qualified Data.ByteString.Base16 as B16
|
||||
import Data.Conduit.Zlib (gzip, ungzip)
|
||||
import qualified Codec.Archive.Tar as Tar
|
||||
import qualified Data.Text as T
|
||||
import Filesystem.Path (splitExtension)
|
||||
import Data.BlobStore
|
||||
import Filesystem (createTree)
|
||||
import Control.Monad.State.Strict (execStateT, get, put, modify)
|
||||
import qualified Codec.Compression.GZip as GZip
|
||||
import Control.Monad.Trans.Resource (allocate)
|
||||
import System.Directory (removeFile, getTemporaryDirectory)
|
||||
import System.Process (runProcess, waitForProcess)
|
||||
import System.Exit (ExitCode (ExitSuccess))
|
||||
import Data.Slug (mkSlug, SnapSlug (..), safeMakeSlug, unSlug)
|
||||
import Control.Debounce
|
||||
|
||||
fileKey :: Text
|
||||
fileKey = "stackage"
|
||||
|
||||
slugKey :: Text
|
||||
slugKey = "slug"
|
||||
|
||||
getUploadStackageR :: Handler Html
|
||||
getUploadStackageR = do
|
||||
_ <- requireAuth
|
||||
defaultLayout $ do
|
||||
setTitle "Upload"
|
||||
$(widgetFile "upload-stackage")
|
||||
|
||||
putUploadStackageR :: Handler TypedContent
|
||||
putUploadStackageR = do
|
||||
uid <- requireAuthIdOrToken
|
||||
|
||||
-- Only admin users can use slugs starting with "lts" and "nightly",
|
||||
-- enforce that here
|
||||
muser <- runDB $ Import.get uid
|
||||
extra <- getExtra
|
||||
let isAdmin =
|
||||
case muser of
|
||||
Nothing -> False
|
||||
Just user -> unSlug (userHandle user) `member` adminUsers extra
|
||||
allowedSlug Nothing = Nothing
|
||||
allowedSlug (Just t)
|
||||
| isAdmin = Just t
|
||||
| "lts" `isPrefixOf` t = Nothing
|
||||
| "nightly" `isPrefixOf` t = Nothing
|
||||
| otherwise = Just t
|
||||
|
||||
mfile <- lookupFile fileKey
|
||||
mslug0 <- allowedSlug <$> lookupPostParam slugKey
|
||||
case mfile of
|
||||
Nothing -> invalidArgs ["Upload missing"]
|
||||
Just file -> do
|
||||
malias <- lookupPostParam "alias"
|
||||
mlts <- lookupPostParam "lts"
|
||||
mnightly <- lookupPostParam "nightly"
|
||||
|
||||
tempDir <- liftIO getTemporaryDirectory
|
||||
(_releaseKey, (fp, handleOut)) <- allocate
|
||||
(openBinaryTempFile tempDir "upload-stackage.")
|
||||
(\(fp, h) -> hClose h `finally` removeFile fp)
|
||||
digest <- fileSource file
|
||||
$$ getZipSink (ZipSink sinkHash <* ZipSink (ungzip =$ sinkHandle handleOut))
|
||||
liftIO $ hClose handleOut
|
||||
|
||||
let bs = toBytes (digest :: Digest SHA1)
|
||||
ident = PackageSetIdent $ decodeUtf8 $ B16.encode bs
|
||||
|
||||
-- Check for duplicates
|
||||
mstackage <- runDB $ getBy $ UniqueStackage ident
|
||||
when (isJust mstackage) $ invalidArgs ["Stackage already exists"]
|
||||
|
||||
app <- getYesod
|
||||
let initProgress = UploadProgress "Upload starting" Nothing
|
||||
key <- runDB $ insert initProgress
|
||||
|
||||
-- We don't want to be writing progress updates to the database too
|
||||
-- frequently, so let's just do it once per second at most.
|
||||
-- Debounce to the rescue!
|
||||
statusRef <- newIORef initProgress
|
||||
writeToDB <- liftIO $ mkDebounce defaultDebounceSettings
|
||||
{ debounceAction = do
|
||||
up <- readIORef statusRef
|
||||
runPool (persistConfig app) (replace key up) (connPool app)
|
||||
}
|
||||
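
-- A minimal standalone sketch of the same debouncing pattern, assuming
-- only the auto-update package's Control.Debounce API; persist is a
-- hypothetical stand-in for the database write:
--
-- flush <- mkDebounce defaultDebounceSettings
-- { debounceAction = readIORef statusRef >>= persist
-- }
--
-- Call flush as often as you like: debounceAction runs at most once per
-- debounce interval, and at least once after the final call.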

let updateHelper :: MonadBase IO m => UploadProgress -> m ()
updateHelper p = do
writeIORef statusRef p
liftBase writeToDB
update :: MonadBase IO m => Text -> m ()
update msg = updateHelper (UploadProgress msg Nothing)
done msg route = do
render <- getUrlRender
updateHelper (UploadProgress msg $ Just $ render route)
onExc e = done ("Exception occurred: " ++ tshow e) ProfileR
setAlias = do
forM_ (malias >>= mkSlug) $ \alias -> do
deleteWhere [AliasUser ==. uid, AliasName ==. alias]
insert_ Alias
{ aliasUser = uid
, aliasName = alias
, aliasTarget = ident
}
whenAdmin = when isAdmin
setLts sid = forM_ mlts
$ \lts -> whenAdmin
$ forM_ (parseLtsPair lts) $ \(major, minor) -> do
mx <- getBy $ UniqueLts major minor
when (isNothing mx) $ insert_ $ Lts major minor sid
setNightly sid = forM_ mnightly $ \nightly -> whenAdmin $ do
now <- liftIO getCurrentTime
let day = utctDay now
mx <- getBy $ UniqueNightly day
when (isNothing mx) $ insert_ Nightly
{ nightlyDay = day
, nightlyGhcVersion = nightly
, nightlyStackage = sid
}

update "Starting"

forkHandler onExc $ do
now <- liftIO getCurrentTime
baseSlug <- fmap SnapSlug $ mkSlug $ fromMaybe (tshow $ utctDay now) mslug0
let initial = Stackage
{ stackageUser = uid
, stackageIdent = ident
, stackageUploaded = now
, stackageTitle = "Untitled Stackage"
, stackageDesc = "No description provided"
, stackageHasHaddocks = False
, stackageSlug = baseSlug
, stackageGhcMajorVersion = Nothing -- Assumption: this file is deprecated
}

-- Evil lazy I/O thanks to tar package
lbs <- readFile $ fpFromString fp
withSystemTempDirectory "build00index." $ \dir -> do
LoopState _ stackage files _ contents cores <- execStateT (loop isAdmin update (Tar.read lbs)) LoopState
{ lsRoot = fpFromString dir
, lsStackage = initial
, lsFiles = mempty
, lsIdent = ident
, lsContents = []
, lsCores = mempty
}
withSystemTempFile "newindex" $ \fp' h -> do
ec <- liftIO $ do
hClose h
let args = "cfz"
: fp'
: map fpToString (setToList files)
ph <- runProcess "tar" args (Just dir) Nothing Nothing Nothing Nothing
waitForProcess ph
if ec == ExitSuccess
then do
sourceFile (fpFromString fp') $$ storeWrite (CabalIndex ident)
sourceFile (fpFromString fp) $$ gzip =$ storeWrite (SnapshotBundle ident)
slug <- runDB $ do
slug <- getUniqueSlug $ stackageSlug stackage
sid <- insert stackage { stackageSlug = slug}
forM_ contents $ \(name, version, overwrite) -> insert_ Package
{ packageStackage = sid
, packageName' = name
, packageVersion = version
, packageOverwrite = overwrite
, packageHasHaddocks = False
, packageCore = Just $ name `member` cores
}

setAlias
setLts sid
setNightly sid

return slug

done "Stackage created" $ SnapshotR slug StackageHomeR
else done "Error creating index file" ProfileR

addHeader "X-Stackage-Ident" $ toPathPiece ident
redirect $ ProgressR key
where
loop _ update Tar.Done = update "Finished processing files"
loop _ _ (Tar.Fail e) = throwM e
loop isAdmin update (Tar.Next entry entries) = do
addEntry isAdmin update entry
loop isAdmin update entries

addEntry isAdmin update entry = do
_ <- update $ "Processing file: " ++ pack (Tar.entryPath entry)
case Tar.entryContent entry of
Tar.NormalFile lbs _ ->
case filename $ fpFromString $ Tar.entryPath entry of
"desc" -> do
$logDebug $ "desc: " ++ tshow lbs
let (title, drop 1 -> desc) = break (== '\n')
$ decodeUtf8
$ toStrict lbs
ls <- get
put ls
{ lsStackage = (lsStackage ls)
{ stackageTitle = title
, stackageDesc = desc
}
}
"slug" -> do
let t = decodeUtf8 $ toStrict lbs
when (isAdmin || not ("lts" `isPrefixOf` t || "nightly" `isPrefixOf` t)) $ do
slug <- safeMakeSlug t False
ls <- get
put ls { lsStackage = (lsStackage ls) { stackageSlug = SnapSlug slug } }
"hackage" -> forM_ (lines $ decodeUtf8 $ toStrict lbs) $ \line ->
case parseName line of
Just (name, version) -> do
$logDebug $ "hackage: " ++ tshow (name, version)
_ <- update $ concat
[ "Adding Hackage package: "
, toPathPiece name
, "-"
, toPathPiece version
]
msrc <- storeRead (HackageCabal name version)
case msrc of
Nothing | name == "base" -> return () -- workaround in case base isn't uploaded to Hackage
Nothing -> invalidArgs ["Unknown Hackage name/version: " ++ tshow (name, version)]
Just src -> addFile False name version src

Nothing -> return ()

"core" -> forM_ (lines $ decodeUtf8 $ toStrict lbs) $ \name ->
modify $ \ls -> ls
{ lsCores = insertSet (PackageName name)
$ lsCores ls
}

fp | (base1, Just "gz") <- splitExtension fp
, (fpToText -> base, Just "tar") <- splitExtension base1 -> do
ident <- lsIdent <$> get
_ <- update $ concat
[ "Extracting cabal file for custom tarball: "
, base
]
(name, version, cabalLBS) <- extractCabal lbs base
sourceLazy lbs $$ storeWrite (CustomSdist ident name version)
addFile True name version $ sourceLazy cabalLBS
_ -> return ()
_ -> return ()
where
addFile isOverride name version src = do
ls <- get
when (isOverride || fp `notMember` lsFiles ls) $ do
let fp' = lsRoot ls </> fp
liftIO $ createTree $ directory fp'
src $$ sinkFile fp'
put ls
{ lsFiles = insertSet fp $ lsFiles ls
, lsContents
= (name, version, isOverride)
: lsContents ls
}
where
fp = mkFP name version

mkFP name version
= fpFromText (toPathPiece name)
</> fpFromText (toPathPiece version)
</> fpFromText (concat
[ toPathPiece name
, "-"
, toPathPiece version
, ".cabal"
])
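
-- For example, mkFP (PackageName "text") (Version "1.1.0.0") gives
-- "text/1.1.0.0/text-1.1.0.0.cabal", the same layout 00-index.tar uses.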

parseName t =
case T.breakOnEnd "-" t of
("", _) -> Nothing
(_, "") -> Nothing
(T.init -> name, version) -> Just (PackageName name, Version version)
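
-- For example, parseName "conduit-1.2.0" is
-- Just (PackageName "conduit", Version "1.2.0"); a bare name without a
-- dash, or a trailing dash, yields Nothing.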

data LoopState = LoopState
{ lsRoot :: !FilePath
, lsStackage :: !Stackage
, lsFiles :: !(Set FilePath)
, lsIdent :: !PackageSetIdent

, lsContents :: ![(PackageName, Version, IsOverride)] -- FIXME use SnocVector when ready
, lsCores :: !(Set PackageName) -- ^ core packages
}

type IsOverride = Bool

extractCabal :: (MonadLogger m, MonadThrow m)
=> LByteString
-> Text -- ^ basename
-> m (PackageName, Version, LByteString)
extractCabal lbs basename' =
loop $ Tar.read $ GZip.decompress lbs
where
loop Tar.Done = error $ "extractCabal: cabal file missing for " ++ unpack basename'
loop (Tar.Fail e) = throwM e
loop (Tar.Next e es) = do
$logDebug $ pack $ Tar.entryPath e
case Tar.entryContent e of
Tar.NormalFile lbs' _
| Just (name, version) <- parseNameVersion (pack $ Tar.entryPath e)
-> return (name, version, lbs')
_ -> loop es

parseNameVersion t = do
[dir, filename'] <- Just $ T.splitOn "/" t
let (name', version) = T.breakOnEnd "-" dir
name <- stripSuffix "-" name'
guard $ name ++ ".cabal" == filename'
return (PackageName name, Version version)
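
-- For example, an entry "foo-1.2/foo.cabal" parses as
-- (PackageName "foo", Version "1.2"); entries whose directory and cabal
-- file name disagree are skipped by the guard.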

-- | Get a unique version of the given slug by appending random numbers to the
-- end.
getUniqueSlug :: MonadIO m => SnapSlug -> ReaderT SqlBackend m SnapSlug
getUniqueSlug base =
loop Nothing
where
loop msuffix = do
slug <- checkSlug $ addSuffix msuffix
ment <- getBy $ UniqueSnapshot slug
case ment of
Nothing -> return slug
Just _ ->
case msuffix of
Nothing -> loop $ Just (1 :: Int)
Just i
| i > 50 -> error "No unique slug found"
| otherwise -> loop $ Just $ i + 1

txt = toPathPiece base

addSuffix Nothing = txt
addSuffix (Just i) = txt ++ pack ('-' : show i)

checkSlug slug =
case fromPathPiece slug of
Nothing -> error $ "Invalid snapshot slug: " ++ unpack slug
Just s -> return s
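
-- For example, if "2015-01-01" is already taken, this tries
-- "2015-01-01-1", "2015-01-01-2", and so on, giving up after 50 attempts.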
@ -1,296 +0,0 @@
|
||||
module Handler.UploadV2
|
||||
( putUploadV2R
|
||||
) where
|
||||
|
||||
import Import
|
||||
import Data.BlobStore
|
||||
import Control.Concurrent.Lifted (threadDelay)
|
||||
import Data.Slug (unSlug, mkSlug, SnapSlug (..))
|
||||
import Control.Monad.Trans.Resource (allocate)
|
||||
import System.Directory (removeFile, getTemporaryDirectory)
|
||||
import System.IO.Temp (openBinaryTempFile, withSystemTempDirectory, withSystemTempFile)
|
||||
import Crypto.Hash.Conduit (sinkHash)
|
||||
import Crypto.Hash (Digest, SHA1)
|
||||
import Data.Byteable (toBytes)
|
||||
import qualified Data.ByteString.Base16 as B16
|
||||
import System.Timeout.Lifted (timeout)
|
||||
import Control.Concurrent.Async (async, cancel, waitCatchSTM)
|
||||
import Yesod.Core.Types (HandlerT (..))
|
||||
import Stackage.Types
|
||||
import Filesystem (createTree)
|
||||
import Filesystem.Path (parent)
|
||||
import Data.Conduit.Process
|
||||
import Data.Yaml (decodeEither')
|
||||
import Distribution.Version (versionBranch)
|
||||
|
||||
putUploadV2R :: Handler TypedContent
|
||||
putUploadV2R = do
|
||||
uid <- requireAuthIdOrToken
|
||||
user <- runDB $ get404 uid
|
||||
extra <- getExtra
|
||||
when (unSlug (userHandle user) `notMember` adminUsers extra)
|
||||
$ permissionDenied "Only admins can upload V2 bundles"
|
||||
|
||||
tempDir <- liftIO getTemporaryDirectory
|
||||
(_releaseKey, (bundleFP, bundleHOut)) <- allocate
|
||||
(openBinaryTempFile tempDir "upload.stackage2")
|
||||
(\(fp, h) -> hClose h `finally` removeFile fp)
|
||||
digest <- rawRequestBody $$ getZipSink
|
||||
(ZipSink (sinkHandle bundleHOut) *>
|
||||
ZipSink sinkHash)
|
||||
liftIO $ hClose bundleHOut
|
||||
|
||||
let digestBS = toBytes (digest :: Digest SHA1)
|
||||
ident = PackageSetIdent $ decodeUtf8 $ B16.encode digestBS
|
||||
|
||||
mstackage <- runDB $ getBy $ UniqueStackage ident
|
||||
when (isJust mstackage) $ invalidArgs ["Bundle already uploaded"]
|
||||
|
||||
status <- liftIO $ newTVarIO ""
|
||||
|
||||
let cont text = do
|
||||
sendChunkBS "CONT: "
|
||||
sendChunkText text
|
||||
sendChunkBS "\n"
|
||||
sendFlush
|
||||
|
||||
-- Grab the internal HandlerT state to perform magic
|
||||
hd <- HandlerT return
|
||||
worker <- fmap snd $ flip allocate cancel $ async $ flip unHandlerT hd
|
||||
$ doUpload status uid ident (fpFromString bundleFP)
|
||||
|
||||
respondSource "text/plain" $ do
|
||||
let displayStatus prev = do
|
||||
memsg <- liftIO $ timeout 20000000 $ atomically $ (do
|
||||
msg <- readTVar status
|
||||
checkSTM (msg /= prev)
|
||||
return (Right msg)) <|> (Left <$> waitCatchSTM worker)
|
||||
case memsg of
|
||||
Nothing -> do
|
||||
cont "Still working"
|
||||
displayStatus prev
|
||||
Just (Left (Left e)) -> do
|
||||
sendChunkText "FAILURE: "
|
||||
sendChunkText $ tshow e
|
||||
sendChunkText "\n"
|
||||
Just (Left (Right t)) -> do
|
||||
sendChunkText "SUCCESS: "
|
||||
sendChunkText t
|
||||
sendChunkText "\n"
|
||||
Just (Right t) -> do
|
||||
cont t
|
||||
displayStatus t
|
||||
displayStatus ""
|
||||
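
-- The 20-second timeout above doubles as a keep-alive: when neither the
-- status TVar changes nor the worker finishes in time, a "Still working"
-- chunk is flushed so intermediaries do not drop the idle connection.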

doUpload :: TVar Text
-> UserId
-> PackageSetIdent
-> FilePath -- ^ temporary bundle file
-> Handler Text
doUpload status uid ident bundleFP = do
say $ "Uploading to persistent storage with ident " ++ toPathPiece ident
sourceFile bundleFP $$ storeWrite (HaddockBundle ident)
threadDelay 1000000 -- FIXME remove

say $ "Unpacking bundle"

(siType, siPlan, siDocMap :: DocMap) <-
withSystemTempDirectory "uploadv2" $ \dir' -> do
let dir = fpFromString dir'
withCheckedProcess
(proc "tar" ["xf", fpToString bundleFP])
{ cwd = Just dir'
} $ \ClosedStream ClosedStream ClosedStream -> return ()

let maxFileSize = 1024 * 1024 * 5
yaml :: FromJSON a => FilePath -> Handler a
yaml fp = do
say $ "Parsing " ++ fpToText fp
bs <- sourceFile (dir </> fp) $$ takeCE maxFileSize =$ foldC
either throwM return $ decodeEither' bs

(,,)
<$> yaml "build-type.yaml"
<*> yaml "build-plan.yaml"
<*> yaml "docs-map.yaml"

now <- liftIO getCurrentTime

let theSiGhcVersion = siGhcVersion $ bpSystemInfo siPlan
ghcVersion = display theSiGhcVersion
ghcMajorVersionMay = case versionBranch theSiGhcVersion of
(a:b:_) -> Just (GhcMajorVersion a b)
_ -> Nothing
slug' <-
case siType of
STNightly -> invalidArgs ["No longer support STNightly, use STNightly2"]
STNightly2 day -> return $ "nightly-" ++ tshow day
STLTS major minor -> return $ concat
[ "lts-"
, tshow major
, "."
, tshow minor
]
title <-
case siType of
STNightly -> invalidArgs ["No longer support STNightly, use STNightly2"]
STNightly2 day -> return $ concat
[ "Stackage Nightly "
, tshow day
, ", GHC "
, ghcVersion
]
STLTS major minor -> return $ concat
[ "LTS Haskell "
, tshow major
, "."
, tshow minor
, ", GHC "
, ghcVersion
]

slug <- do
slug2 <- mkSlug slug'
when (slug' /= unSlug slug2) $ error $ "Slug not available: " ++ show slug'
return $ SnapSlug slug2

mexisting <- runDB $ getBy $ UniqueSnapshot slug
route <- case mexisting of
Just _ -> do
say "Snapshot already exists"
return $ SnapshotR slug StackageHomeR
Nothing -> finishUpload
title ident ghcVersion ghcMajorVersionMay slug now siType siPlan siDocMap
uid say
render <- getUrlRender
return $ render route
where
say = atomically . writeTVar status

finishUpload
:: Text
-> PackageSetIdent
-> Text
-> Maybe GhcMajorVersion
-> SnapSlug
-> UTCTime
-> SnapshotType
-> BuildPlan
-> Map Text PackageDocs
-> UserId
-> (Text -> Handler ())
-> Handler (Route App)
finishUpload
title ident ghcVersion ghcMajorVersionMay slug now siType siPlan siDocMap
uid say = do
say "Creating index tarball"
withSystemTempDirectory "buildindex.v2" $ \(fpFromString -> dir) -> do
files <- forM (mapToList $ fmap ppVersion $ bpPackages siPlan) $ \(name', version') -> do
let mpair = (,)
<$> fromPathPiece (display name')
<*> fromPathPiece (display version')
(name, version) <-
case mpair of
Nothing -> error $ "Could not parse: " ++ show (name', version')
Just pair -> return pair

msrc <- storeRead (HackageCabal name version)
src <-
case msrc of
Nothing -> error $ "Cabal file not found for: " ++ show (name, version)
Just src -> return src

let fp' = fpFromText (toPathPiece name)
</> fpFromText (toPathPiece version)
</> fpFromText (concat
[ toPathPiece name
, "-"
, toPathPiece version
, ".cabal"
])
let fp = dir </> fp'

liftIO $ createTree $ parent fp
src $$ sinkFile fp
return $ fpToString fp'

withSystemTempFile "newindex.v2" $ \fp' h -> do
liftIO $ do
hClose h
let args = "cfz"
: fp'
: files
cp = (proc "tar" args) { cwd = Just $ fpToString dir }
withCheckedProcess cp $ \ClosedStream Inherited Inherited ->
return ()
sourceFile (fpFromString fp') $$ storeWrite (CabalIndex ident)

say $ "Attempting: " ++ tshow (slug, title)
sid <- runDB $ do
sid <- insert Stackage
{ stackageUser = uid
, stackageIdent = ident
, stackageSlug = slug
, stackageUploaded = now
, stackageTitle = title
, stackageDesc = ""
, stackageHasHaddocks = True
, stackageGhcMajorVersion = ghcMajorVersionMay
}
case siType of
STNightly -> invalidArgs ["No longer support STNightly, use STNightly2"]
STNightly2 day -> insert_ Nightly
{ nightlyDay = day
, nightlyGhcVersion = ghcVersion
, nightlyStackage = sid
}
STLTS major minor -> insert_ Lts
{ ltsMajor = major
, ltsMinor = minor
, ltsStackage = sid
}

let cores, nonCores :: Map PackageName Version
cores = mapKeysWith const (PackageName . display)
$ fmap (Version . display)
$ siCorePackages
$ bpSystemInfo siPlan
nonCores
= mapKeysWith const (PackageName . display)
$ fmap (Version . display . ppVersion)
$ bpPackages siPlan
forM_ (mapToList $ cores ++ nonCores) $ \(name, version) -> do
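-- Note: (++) on these Maps is left-biased union, so on a name collision
-- the core package's version wins over the build plan's.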
let PackageName nameT = name
insert_ Package
{ packageStackage = sid
, packageName' = name
, packageVersion = version
, packageHasHaddocks = nameT `member` siDocMap
, packageOverwrite = False
, packageCore = Just $ name `member` cores
}
return sid

say $ concat
[ "New snapshot with ID "
, toPathPiece sid
, " and slug "
, toPathPiece slug
, " created"
]

render <- getUrlRender

say "Updating docmap"
runDB $ forM_ (mapToList siDocMap) $ \(package, PackageDocs version ms) -> do
did <- insert Docs
{ docsName = PackageName package
, docsVersion = Version version
, docsUploaded = now
, docsSnapshot = Just sid
}
forM_ (mapToList ms) $ \(name, pieces) -> do
let url = render $ HaddockR slug pieces
insert_ $ Module did name url

return $ SnapshotR slug StackageHomeR
25
Import.hs
@ -34,28 +34,3 @@ parseLtsPair t1 = do
t3 <- stripPrefix "." t2
(y, "") <- either (const Nothing) Just $ decimal t3
Just (x, y)
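-- For example, parseLtsPair "2.14" evaluates to Just (2, 14); input that
-- is not exactly two dot-separated integers comes back Nothing.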

requireDocs :: Entity Stackage -> Handler ()
requireDocs stackageEnt = do
master <- getYesod
status <- liftIO $ duRequestDocs (appDocUnpacker master) stackageEnt
case status of
USReady -> return ()
USBusy -> (>>= sendResponse) $ defaultLayout $ do
setTitle "Docs unpacking, please wait"
addHeader "Refresh" "1"
msg <- liftIO $ duGetStatus $ appDocUnpacker master
[whamlet|
<div .container>
<p>Docs are currently being unpacked, please wait.
<p>This page will automatically reload every second.
<p>Current status: #{msg}
|]
USFailed e -> do
$logWarn $ "Docs not available: " ++ tshow
( stackageSlug $ entityVal stackageEnt
, e
)
invalidArgs
[ "Docs not available: " ++ e
]

103
config/models
@ -15,36 +15,6 @@ Verkey
email Text
verkey Text

DocsOnS3
slug SnapSlug
UniqueDocsOnS3 slug

Stackage
user UserId
ident PackageSetIdent
slug SnapSlug default="md5((random())::text)"
uploaded UTCTime
title Text
desc Text
hasHaddocks Bool default=false
ghcMajorVersion GhcMajorVersion Maybe
UniqueStackage ident
UniqueSnapshot slug

Alias
user UserId
name Slug
target PackageSetIdent
UniqueAlias user name

Package
stackage StackageId
name' PackageName sql=name
version Version
hasHaddocks Bool default=true
overwrite Bool
core Bool Maybe -- use Maybe to speed up migration
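-- adding a nullable column is metadata-only in PostgreSQL (no default,
-- no table rewrite), which is what keeps this migration cheap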

Tag
package PackageName
tag Slug
@ -56,54 +26,6 @@ Like
voter UserId
UniqueLikePackageVoter package voter

Download
ident PackageSetIdent Maybe
view Text Maybe MigrationOnly
timestamp UTCTime
package PackageName
version Version
userAgent Text Maybe

Metadata
name PackageName
version Version
hash ByteString
deps [Text]
author Text
maintainer Text
licenseName Text
homepage Text
bugReports Text
synopsis Text
sourceRepo [Text]
category Text
library Bool
exes Int
testSuites Int
benchmarks Int

readme Html
changelog Html Maybe
licenseContent Html Maybe

UniqueMetadata name

Docs
name PackageName
version Version
uploaded UTCTime
snapshot StackageId Maybe
Module
docs DocsId
name Text
url Text
UniqueModule docs name

Dependency
dep PackageName
user PackageName
UniqueDependency dep user

BannedTag
tag Slug
UniqueBannedTag tag
@ -111,28 +33,3 @@ BannedTag
Migration
num Int
UniqueMigration num

Nightly
day Day
ghcVersion Text
stackage StackageId
UniqueNightly day

Lts
major Int
minor Int
stackage StackageId
UniqueLts major minor

Deprecated
package PackageName
UniqueDeprecated package

Suggested
package PackageName
insteadOf PackageName
UniqueSuggested package insteadOf

UploadProgress
message Text
dest Text Maybe

@ -11,15 +11,9 @@
/profile ProfileR GET PUT
/email/#EmailId EmailR DELETE
/reset-token ResetTokenR POST
/upload UploadStackageR GET PUT
/upload-haddock/#Text UploadHaddockR GET PUT
/upload-doc-map UploadDocMapR GET PUT

/stackage/#PackageSetIdent/*Texts OldStackageR GET

/snapshot/#SnapSlug SnapshotR:
/ StackageHomeR GET
/metadata StackageMetadataR GET
/cabal.config StackageCabalConfigR GET
/00-index.tar.gz StackageIndexR GET
/package/#PackageNameVersion StackageSdistR GET
@ -30,15 +24,11 @@
/build-plan BuildPlanR GET
/ghc-major-version GhcMajorVersionR GET

/aliases AliasesR PUT
/alias/#Slug/#Slug/*Texts AliasR
/progress/#UploadProgressId ProgressR GET
/system SystemR GET
/haddock/#SnapSlug/*Texts HaddockR GET
/package/#PackageName PackageR GET
/package/#PackageName/snapshots PackageSnapshotsR GET
/package PackageListR GET
/compressor-status CompressorStatusR GET
/package/#PackageName/like PackageLikeR POST
/package/#PackageName/unlike PackageUnlikeR POST
/package/#PackageName/tag PackageTagR POST
@ -54,10 +44,7 @@
/install InstallR GET
/older-releases OlderReleasesR GET

/refresh-deprecated RefreshDeprecatedR GET
/upload2 UploadV2R PUT
/build-version BuildVersionR GET
/package-counts PackageCountsR GET

/download DownloadR GET
/download/snapshots.json DownloadSnapshotsJsonR GET

@ -24,38 +24,28 @@ library
Data.Tag
Data.BlobStore
Data.GhcLinks
Data.Hackage
Data.Hackage.DeprecationInfo
Data.WebsiteContent
Data.Unpacking
Types
Handler.Home
Handler.Snapshots
Handler.Profile
Handler.Email
Handler.ResetToken
Handler.UploadStackage
Handler.StackageHome
Handler.StackageIndex
Handler.StackageSdist
Handler.Aliases
Handler.Alias
Handler.Progress
Handler.System
Handler.Haddock
Handler.Hoogle
Handler.Package
Handler.PackageList
Handler.CompressorStatus
Handler.Tag
Handler.BannedTags
Handler.RefreshDeprecated
Handler.UploadV2
Handler.BuildVersion
Handler.PackageCounts
Handler.Sitemap
Handler.BuildPlan
Handler.Download
Handler.OldLinks

if flag(dev) || flag(library-only)
cpp-options: -DDEVELOPMENT

@ -9,7 +9,7 @@
<img src="/static/img/stackage.png" title="FP Complete">
<div class="nav-collapse collapse">
<ul class="nav">
$forall route <- [AllSnapshotsR,UploadStackageR]
$forall route <- [AllSnapshotsR]
$maybe current <- cur
$if route == current
<li .active>

@ -25,21 +25,6 @@
^{userWidget}
<button .btn>Update

<h2>Aliases

<form method=post action=@{AliasesR}?_method=PUT>
Format: alias name, package set ID
<textarea #aliases name=aliases>#{unlines $ map aliasToText aliases}
<button .btn>Update

$if not $ null aliases
<dl>
$forall Entity _ alias <- aliases
<dt>#{aliasName alias}
<dd>
$with url <- AliasR (userHandle user) (aliasName alias) []
<a href=@{url}>@{url}

<h2>Security token

<p>