Mirror of https://github.com/commercialhaskell/stackage-server.git (synced 2026-02-05 15:50:26 +01:00)

commit 2b4d9a667b

    Merge branch 'new-upload-v2'

    Conflicts:
        Application.hs
@@ -10,7 +10,7 @@ import qualified Aws
 import Control.Concurrent (forkIO, threadDelay)
 import Control.Exception (catch)
 import Control.Monad.Logger (runLoggingT, LoggingT, defaultLogStr)
-import Data.BlobStore (fileStore, storeWrite, cachedS3Store)
+import Data.BlobStore (fileStore, cachedS3Store)
 import Data.Hackage
 import Data.Hackage.DeprecationInfo
 import Data.Unpacking (newDocUnpacker, createHoogleDatabases)
@@ -29,7 +29,7 @@ import Network.Wai.Middleware.RequestLogger
     )
 import qualified Network.Wai.Middleware.RequestLogger as RequestLogger
 import Settings
-import System.Log.FastLogger (newStdoutLoggerSet, newFileLoggerSet, defaultBufSize, flushLogStr, fromLogStr)
+import System.Log.FastLogger (newStdoutLoggerSet, newFileLoggerSet, defaultBufSize, fromLogStr)
 import qualified System.Random.MWC as MWC
 import Yesod.Core.Types (loggerSet, Logger (Logger))
 import Yesod.Default.Config
@@ -67,6 +67,7 @@ import Handler.CompressorStatus
 import Handler.Tag
 import Handler.BannedTags
 import Handler.RefreshDeprecated
+import Handler.UploadV2
 import Handler.Hoogle
 import Handler.BuildVersion
 import Handler.PackageCounts
@@ -311,7 +312,6 @@ appLoadCabalFiles updateDB forceUpdate env dbconf p = do
         insertMany_ (suggestions info)
     $logInfo "Finished updating deprecation tags"

-    uploadHistory0 <- runDB' $ selectSource [] [] $$ sinkUploadHistory
     let toMDPair (E.Value name, E.Value version, E.Value hash') =
             (name, (version, hash'))
     metadata0 <- fmap (mapFromList . map toMDPair)
@@ -320,9 +320,7 @@ appLoadCabalFiles updateDB forceUpdate env dbconf p = do
             , m E.^. MetadataVersion
             , m E.^. MetadataHash
             )
-    UploadState uploadHistory newUploads _ newMD <- loadCabalFiles updateDB forceUpdate uploadHistory0 metadata0
-    $logInfo "Inserting to new uploads"
-    runDB' $ insertMany_ newUploads
+    UploadState _ newMD <- loadCabalFiles updateDB forceUpdate metadata0
     $logInfo $ "Updating metadatas: " ++ tshow (length newMD)
     runDB' $ do
         let newMD' = toList newMD
@@ -1,10 +1,7 @@
 module Data.Hackage
     ( loadCabalFiles
     , sourceHackageSdist
-    , sinkUploadHistory
     , UploadState (..)
-    , UploadHistory
-    , sourceHistory
     ) where

 import ClassyPrelude.Yesod hiding (get)
@@ -17,7 +14,7 @@ import qualified Data.Text as T
 import Data.Conduit.Zlib (ungzip)
 import System.IO.Temp (withSystemTempFile)
 import System.IO (IOMode (ReadMode), openBinaryFile)
-import Model (Uploaded (Uploaded), Metadata (..))
+import Model (Metadata (..))
 import Distribution.PackageDescription.Parse (parsePackageDescription, ParseResult (ParseOk))
 import qualified Distribution.PackageDescription as PD
 import qualified Distribution.Package as PD
@@ -38,15 +35,6 @@ import qualified Documentation.Haddock.Parser as Haddock
 import Documentation.Haddock.Types (DocH (..), Hyperlink (..), Picture (..), Header (..), Example (..))
 import qualified Data.HashMap.Lazy as HM

-sinkUploadHistory :: Monad m => Consumer (Entity Uploaded) m UploadHistory
-sinkUploadHistory =
-    foldlC go mempty
-  where
-    go history (Entity _ (Uploaded name version time)) =
-        case lookup name history of
-            Nothing -> insertMap name (singletonMap version time) history
-            Just vhistory -> insertMap name (insertMap version time vhistory) history
-
 loadCabalFiles :: ( MonadActive m
                   , MonadBaseControl IO m
                   , MonadThrow m
@@ -60,10 +48,9 @@ loadCabalFiles :: ( MonadActive m
                   )
                => Bool -- ^ do the database updating
                -> Bool -- ^ force updates regardless of hash value?
-               -> UploadHistory -- ^ initial
                -> HashMap PackageName (Version, ByteString)
                -> m (UploadState Metadata)
-loadCabalFiles dbUpdates forceUpdate uploadHistory0 metadata0 = (>>= runUploadState) $ flip execStateT (UploadState uploadHistory0 [] metadata1 mempty) $ do
+loadCabalFiles dbUpdates forceUpdate metadata0 = (>>= T.mapM liftIO) $ flip execStateT (UploadState metadata1 mempty) $ do
     HackageRoot root <- liftM getHackageRoot ask
     $logDebug $ "Entering loadCabalFiles, root == " ++ root
     req <- parseUrl $ unpack $ root ++ "/00-index.tar.gz"
@@ -110,8 +97,6 @@ loadCabalFiles dbUpdates forceUpdate uploadHistory0 metadata0 = (>>= runUploadSt
             when toStore $ withAcquire (storeWrite' store key) $ \sink ->
                 sourceLazy lbs $$ sink
             when dbUpdates $ do
-                setUploadDate name version
-
                 case readVersion version of
                     Nothing -> return ()
                     Just dataVersion -> setMetadata
@@ -129,9 +114,6 @@ readVersion v =
         (dv, _):_ -> Just $ pack $ Data.Version.versionBranch dv
         [] -> Nothing

-runUploadState :: MonadIO m => UploadState (IO a) -> m (UploadState a)
-runUploadState (UploadState w x y z) = liftIO $ UploadState w x y <$> T.sequence z
-
 tarSource :: (Exception e, MonadThrow m)
           => Tar.Entries e
           -> Producer m Tar.Entry
@@ -139,55 +121,17 @@ tarSource Tar.Done = return ()
 tarSource (Tar.Fail e) = throwM e
 tarSource (Tar.Next e es) = yield e >> tarSource es

-type UploadHistory = HashMap PackageName (HashMap Version UTCTime)
 data UploadState md = UploadState
-    { usHistory :: !UploadHistory
-    , usChanges :: ![Uploaded]
-    , usMetadata :: !(HashMap PackageName MetaSig)
+    { usMetadata :: !(HashMap PackageName MetaSig)
     , usMetaChanges :: (HashMap PackageName md)
     }
+    deriving (Functor, Foldable, Traversable)

 data MetaSig = MetaSig
     {-# UNPACK #-} !Version
     {-# UNPACK #-} !(UVector Int) -- versionBranch
     {-# UNPACK #-} !ByteString -- hash

-setUploadDate :: ( MonadBaseControl IO m
-                 , MonadThrow m
-                 , MonadIO m
-                 , MonadReader env m
-                 , MonadState (UploadState (IO Metadata)) m
-                 , HasHttpManager env
-                 , MonadLogger m
-                 )
-              => PackageName
-              -> Version
-              -> m ()
-setUploadDate name version = do
-    UploadState history changes us3 us4 <- get
-    case lookup name history >>= lookup version of
-        Just _ -> return ()
-        Nothing -> do
-            req <- parseUrl url
-            $logDebug $ "Requesting: " ++ tshow req
-            lbs <- withResponse req $ \res -> responseBody res $$ sinkLazy
-            let uploadDateT = decodeUtf8 $ toStrict lbs
-            case parseTime defaultTimeLocale "%c" $ unpack uploadDateT of
-                Nothing -> return ()
-                Just time -> do
-                    let vhistory = insertMap version time $ fromMaybe mempty $ lookup name history
-                        history' = insertMap name vhistory history
-                        changes' = Uploaded name version time : changes
-                    put $ UploadState history' changes' us3 us4
-  where
-    url = unpack $ concat
-        [ "http://hackage.haskell.org/package/"
-        , toPathPiece name
-        , "-"
-        , toPathPiece version
-        , "/upload-time"
-        ]
-
 setMetadata :: ( MonadBaseControl IO m
                , MonadThrow m
                , MonadIO m
@@ -207,7 +151,7 @@ setMetadata :: ( MonadBaseControl IO m
             -> ParseResult PD.GenericPackageDescription
             -> m ()
 setMetadata forceUpdate name version dataVersion hash' gpdRes = do
-    UploadState us1 us2 mdMap mdChanges <- get
+    UploadState mdMap mdChanges <- get
     let toUpdate =
             case lookup name mdMap of
                 Just (MetaSig _currVersion currDataVersion currHash) ->
@@ -220,7 +164,7 @@ setMetadata forceUpdate name version dataVersion hash' gpdRes = do
         then case gpdRes of
                 ParseOk _ gpd -> do
                     !md <- getMetadata name version hash' gpd
-                    put $! UploadState us1 us2
+                    put $! UploadState
                         (insertMap name (MetaSig version dataVersion hash') mdMap)
                         (HM.insert name md mdChanges)
                 _ -> return ()
@@ -427,15 +371,6 @@ sourceHackageSdist name version = do
             then storeRead key
             else return Nothing

-sourceHistory :: Monad m => UploadHistory -> Producer m Uploaded
-sourceHistory =
-    mapM_ go . mapToList
-  where
-    go (name, vhistory) =
-        mapM_ go' $ mapToList vhistory
-      where
-        go' (version, time) = yield $ Uploaded name version time
-
 -- FIXME put in conduit-combinators
 parMapMC :: (MonadIO m, MonadBaseControl IO m)
          => Int
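Note on the Data.Hackage hunks above: the per-package upload-history machinery (sinkUploadHistory, setUploadDate, sourceHistory, the UploadHistory map and the Uploaded rows it fed) is gone, and UploadState now tracks only metadata. The new deriving clause is what makes the simplification work: the deferred IO Metadata actions collected while scanning the index can be run in one pass afterwards, so the old runUploadState helper collapses into the (>>= T.mapM liftIO) in the new loadCabalFiles. A minimal, self-contained sketch with simplified stand-in types (not the real module):

    {-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-}
    import Control.Monad.IO.Class (MonadIO, liftIO)
    import qualified Data.Traversable as T

    -- Toy stand-in for the real record: the payload type parameter is the
    -- thing the deriving clause maps, folds and traverses over.
    data UploadState md = UploadState
        { usMetadata    :: [String]
        , usMetaChanges :: [(String, md)]
        }
        deriving (Functor, Foldable, Traversable)

    -- All that (>>= T.mapM liftIO) does to the state returned by execStateT:
    -- run every deferred IO action and keep the surrounding structure.
    runDeferred :: MonadIO m => UploadState (IO a) -> m (UploadState a)
    runDeferred = T.mapM liftIO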
@@ -17,11 +17,14 @@ import qualified System.Random.MWC as MWC
 import GHC.Prim (RealWorld)
 import Text.Blaze (ToMarkup)

-newtype Slug = Slug { unSlug :: Text }
+newtype Slug = Slug Text
     deriving (Show, Read, Eq, Typeable, PersistField, ToMarkup, Ord, Hashable)
 instance PersistFieldSql Slug where
     sqlType = sqlType . liftM unSlug

+unSlug :: Slug -> Text
+unSlug (Slug t) = t
+
 mkSlug :: MonadThrow m => Text -> m Slug
 mkSlug t
     | length t < minLen = throwM $ InvalidSlugException t "Too short"
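Note on the Data.Slug hunk above: unSlug moves from a record selector to an ordinary function, so existing call sites (including the new check in the upload handler) are unchanged. The likely motivation is that without a field name, the wrapped text can no longer be swapped out with record-update syntax, so values keep going through mkSlug's validation. A standalone sketch of the shape (the real type derives several more classes):

    import Data.Text (Text)

    newtype Slug = Slug Text
        deriving (Show, Eq, Ord)

    -- Plain accessor; with no exposed field name, `s { unSlug = ... }`
    -- is no longer possible.
    unSlug :: Slug -> Text
    unSlug (Slug t) = t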
@@ -23,6 +23,8 @@ import Yesod.Core.Types (Logger, GWData)
 import Yesod.Default.Config
 import Yesod.Default.Util (addStaticContentExternal)
 import Yesod.GitRepo
+import Stackage.ServerBundle (SnapshotType, DocMap)
+import Stackage.BuildPlan (BuildPlan)

 -- | The site argument for your application. This can be a good place to
 -- keep settings and values requiring initialization before your application
@@ -158,6 +160,7 @@ instance Yesod App where

     maximumContentLength _ (Just UploadStackageR) = Just 50000000
     maximumContentLength _ (Just UploadHaddockR{}) = Just 100000000
+    maximumContentLength _ (Just UploadV2R) = Just 100000000
     maximumContentLength _ _ = Just 2000000

 instance ToMarkup (Route App) where
@@ -7,7 +7,7 @@ module Handler.Alias
 import Import
 import Data.Slug (Slug)
 import Handler.StackageHome (getStackageHomeR, getStackageMetadataR, getStackageCabalConfigR, getSnapshotPackagesR, getDocsR)
-import Handler.StackageIndex (getStackageIndexR, getStackageBundleR)
+import Handler.StackageIndex (getStackageIndexR)
 import Handler.StackageSdist (getStackageSdistR)
 import Handler.Hoogle (getHoogleR)

@@ -74,7 +74,6 @@ goSid sid pieces = do
         StackageMetadataR -> getStackageMetadataR slug >>= sendResponse
         StackageCabalConfigR -> getStackageCabalConfigR slug >>= sendResponse
         StackageIndexR -> getStackageIndexR slug >>= sendResponse
-        StackageBundleR -> getStackageBundleR slug >>= sendResponse
         StackageSdistR pnv -> getStackageSdistR slug pnv >>= sendResponse
         SnapshotPackagesR -> getSnapshotPackagesR slug >>= sendResponse
         DocsR -> getDocsR slug >>= sendResponse
@@ -17,10 +17,9 @@ getPackageListR = defaultLayout $ do
                 )
         addDocs (x, y) = (x, Nothing, y, Nothing)
     packages <- fmap (map addDocs . uniqueByKey . map clean) $ handlerToWidget $ runDB $
-        E.selectDistinct $ E.from $ \(u,m) -> do
-            E.where_ (m E.^. MetadataName E.==. u E.^. UploadedName)
-            E.orderBy [E.asc $ u E.^. UploadedName]
-            return $ (u E.^. UploadedName
+        E.selectDistinct $ E.from $ \m -> do
+            E.orderBy [E.asc $ m E.^. MetadataName]
+            return $ (m E.^. MetadataName
                      ,m E.^. MetadataSynopsis)
     $(widgetFile "package-list")
   where strip x = fromMaybe x (stripSuffix "." x)
@@ -1,6 +1,5 @@
 module Handler.StackageHome where

-import Data.BlobStore (storeExists)
 import Import
 import Data.Time (FormatTime)
 import Data.Slug (SnapSlug)
@@ -13,7 +12,6 @@ getStackageHomeR slug = do
         Entity _ stackage <- getBy404 $ UniqueSnapshot slug
         return stackage

-    hasBundle <- storeExists $ SnapshotBundle $ stackageIdent stackage
     let minclusive =
             if "inclusive" `isSuffixOf` stackageTitle stackage
                 then Just True
@@ -31,18 +29,17 @@ getStackageHomeR slug = do
     cachedWidget (20 * 60) ("package-list-" ++ toPathPiece slug) $ do
         let maxPackages = 5000
         (packageListClipped, packages') <- handlerToWidget $ runDB $ do
-            packages' <- E.select $ E.from $ \(u,m,p) -> do
+            packages' <- E.select $ E.from $ \(m,p) -> do
                 E.where_ $
-                    (m E.^. MetadataName E.==. u E.^. UploadedName) E.&&.
                     (m E.^. MetadataName E.==. p E.^. PackageName') E.&&.
                     (p E.^. PackageStackage E.==. E.val sid)
-                E.orderBy [E.asc $ u E.^. UploadedName]
-                E.groupBy ( u E.^. UploadedName
+                E.orderBy [E.asc $ m E.^. MetadataName]
+                E.groupBy ( m E.^. MetadataName
                           , m E.^. MetadataSynopsis
                           )
                 E.limit maxPackages
                 return
-                    ( u E.^. UploadedName
+                    ( m E.^. MetadataName
                     , m E.^. MetadataSynopsis
                     , E.max_ (p E.^. PackageVersion)
                     , E.max_ $ E.case_
@@ -188,17 +185,16 @@ getSnapshotPackagesR slug = do
     defaultLayout $ do
         setTitle $ toHtml $ "Package list for " ++ toPathPiece slug
         cachedWidget (20 * 60) ("package-list-" ++ toPathPiece slug) $ do
-            packages' <- handlerToWidget $ runDB $ E.select $ E.from $ \(u,m,p) -> do
+            packages' <- handlerToWidget $ runDB $ E.select $ E.from $ \(m,p) -> do
                 E.where_ $
-                    (m E.^. MetadataName E.==. u E.^. UploadedName) E.&&.
                     (m E.^. MetadataName E.==. p E.^. PackageName') E.&&.
                     (p E.^. PackageStackage E.==. E.val sid)
-                E.orderBy [E.asc $ u E.^. UploadedName]
-                E.groupBy ( u E.^. UploadedName
+                E.orderBy [E.asc $ m E.^. MetadataName]
+                E.groupBy ( m E.^. MetadataName
                           , m E.^. MetadataSynopsis
                           )
                 return
-                    ( u E.^. UploadedName
+                    ( m E.^. MetadataName
                     , m E.^. MetadataSynopsis
                     , E.max_ $ E.case_
                         [ ( p E.^. PackageHasHaddocks
@@ -16,19 +16,3 @@ getStackageIndexR slug = do
     addHeader "content-disposition" "attachment; filename=\"00-index.tar.gz\""
     neverExpires
     respondSource "application/x-gzip" $ mapOutput (Chunk . toBuilder) src
-
-getStackageBundleR :: SnapSlug -> Handler TypedContent
-getStackageBundleR slug = do
-    Entity _ stackage <- runDB $ getBy404 $ UniqueSnapshot slug
-    let ident = stackageIdent stackage
-        slug' = stackageSlug stackage
-    msrc <- storeRead $ SnapshotBundle ident
-    case msrc of
-        Nothing -> notFound
-        Just src -> do
-            addHeader "content-disposition" $ mconcat
-                [ "attachment; filename=\"bundle-"
-                , toPathPiece slug'
-                , ".tar.gz\""
-                ]
-            respondSource "application/x-gzip" $ mapOutput (Chunk . toBuilder) src
@@ -23,6 +23,7 @@ import Stackage.Prelude (display)
 import Filesystem (createTree)
 import Filesystem.Path (parent)
 import Data.Conduit.Process
+import Data.Yaml (decodeEither')

 putUploadV2R :: Handler TypedContent
 putUploadV2R = do
@@ -94,10 +95,26 @@ doUpload status uid ident bundleFP = do
     threadDelay 1000000 -- FIXME remove

     say $ "Unpacking bundle"
-    master <- getYesod
-    liftIO $ haddockUnpacker master True ident

-    SnapshotInfo {..} <- getSnapshotInfoByIdent ident
+    (siType, siPlan, siDocMap :: DocMap) <-
+        withSystemTempDirectory "uploadv2" $ \dir' -> do
+            let dir = fpFromString dir'
+            withCheckedProcess
+                (proc "tar" ["xf", fpToString bundleFP])
+                    { cwd = Just dir'
+                    } $ \ClosedStream ClosedStream ClosedStream -> return ()
+
+            let maxFileSize = 1024 * 1024 * 5
+                yaml :: FromJSON a => FilePath -> Handler a
+                yaml fp = do
+                    say $ "Parsing " ++ fpToText fp
+                    bs <- sourceFile (dir </> fp) $$ takeCE maxFileSize =$ foldC
+                    either throwM return $ decodeEither' bs
+
+            (,,)
+                <$> yaml "build-type.yaml"
+                <*> yaml "build-plan.yaml"
+                <*> yaml "docs-map.yaml"

     now <- liftIO getCurrentTime
     let day = tshow $ utctDay now
@@ -127,9 +144,12 @@ doUpload status uid ident bundleFP = do
         , tshow minor
        , ", GHC "
        , ghcVersion
        ]

-    slug <- SnapSlug <$> mkSlug slug'
+    slug <- do
+        slug2 <- mkSlug slug'
+        when (slug' /= unSlug slug2) $ error $ "Slug not available: " ++ show slug'
+        return $ SnapSlug slug2

     say "Creating index tarball"
     withSystemTempDirectory "buildindex.v2" $ \(fpFromString -> dir) -> do
@@ -183,7 +203,6 @@ doUpload status uid ident bundleFP = do
             , stackageTitle = title
             , stackageDesc = ""
             , stackageHasHaddocks = True
-            , stackageYaml = True
             }
         case siType of
             STNightly -> insert_ Nightly
@@ -196,6 +215,30 @@ doUpload status uid ident bundleFP = do
                 , ltsMinor = minor
                 , ltsStackage = sid
                 }
+
+        let cores :: Set PackageName
+            cores = setFromList
+                  $ map (PackageName . display . fst)
+                  $ mapToList
+                  $ siCorePackages
+                  $ bpSystemInfo siPlan
+        forM_ (mapToList $ fmap ppVersion $ bpPackages siPlan) $ \(name', version') -> do
+            let nameT = display name'
+                mpair = (,)
+                    <$> fromPathPiece nameT
+                    <*> fromPathPiece (display version')
+            (name, version) <-
+                case mpair of
+                    Nothing -> error $ "Could not parse: " ++ show (name', version')
+                    Just pair -> return pair
+            insert_ Package
+                { packageStackage = sid
+                , packageName' = name
+                , packageVersion = version
+                , packageHasHaddocks = nameT `member` siDocMap
+                , packageOverwrite = False
+                , packageCore = Just $ name `member` cores
+                }
         return sid

     say $ concat
@@ -207,6 +250,19 @@ doUpload status uid ident bundleFP = do
         ]

     render <- getUrlRender
+
+    say "Updating docmap"
+    runDB $ forM_ (mapToList siDocMap) $ \(package, PackageDocs version ms) -> do
+        did <- insert Docs
+            { docsName = PackageName package
+            , docsVersion = Version version
+            , docsUploaded = now
+            , docsSnapshot = Just sid
+            }
+        forM_ (mapToList ms) $ \(name, pieces) -> do
+            let url = render $ HaddockR slug pieces
+            insert_ $ Module did name url
+
     return $ render $ SnapshotR slug StackageHomeR
   where
     say = atomically . writeTVar status
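Note on the Handler.UploadV2 hunks above: the handler now untars the uploaded bundle into a temporary directory itself and reads build-type.yaml, build-plan.yaml and docs-map.yaml through a small `yaml` helper that caps how many bytes it will read before handing the result to Data.Yaml's decodeEither'. A standalone sketch of that read-with-a-cap pattern, outside the Handler monad and with plain conduit combinators (the name readYamlCapped is mine, not from the repo):

    import Control.Exception (throwIO)
    import Control.Monad.Trans.Resource (runResourceT)
    import Data.Aeson (FromJSON)
    import qualified Data.ByteString.Lazy as L
    import Data.Conduit (($$))
    import qualified Data.Conduit.Binary as CB
    import Data.Yaml (decodeEither')

    -- Stream at most maxBytes from the file (so an oversized upload cannot
    -- exhaust memory), then decode the YAML strictly and throw on failure.
    readYamlCapped :: FromJSON a => Int -> FilePath -> IO a
    readYamlCapped maxBytes fp = do
        lbs <- runResourceT $ CB.sourceFile fp $$ CB.take maxBytes
        either throwIO return (decodeEither' (L.toStrict lbs))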
@@ -691,7 +691,6 @@ constraints: abstract-deque ==0.3,
             Spock-worker ==0.2.1.3,
             spoon ==0.3.1,
             sqlite-simple ==0.4.8.0,
-            stackage ==0.3.1,
             stateref ==0.3,
             statestack ==0.2.0.3,
             statistics ==0.13.2.1,
@@ -26,12 +26,6 @@ Stackage
     UniqueStackage ident
     UniqueSnapshot slug

-Uploaded
-    name PackageName
-    version Version
-    uploaded UTCTime
-    UniqueUploaded name version
-
 Alias
     user UserId
     name Slug
@@ -21,7 +21,6 @@
     /metadata StackageMetadataR GET
     /cabal.config StackageCabalConfigR GET
     /00-index.tar.gz StackageIndexR GET
-    /bundle StackageBundleR GET
     /package/#PackageNameVersion StackageSdistR GET
     /packages SnapshotPackagesR GET
     /docs DocsR GET
@@ -52,5 +51,6 @@
 /older-releases OlderReleasesR GET

 /refresh-deprecated RefreshDeprecatedR GET
+/upload2 UploadV2R PUT
 /build-version BuildVersionR GET
 /package-counts PackageCountsR GET
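Note on the route added above: /upload2 accepts a PUT handled by putUploadV2R, and Foundation.hs raises maximumContentLength to 100 MB for it. A hypothetical client-side sketch with http-conduit; the host, port, and any authentication the real handler expects are placeholders, not taken from the repo:

    {-# LANGUAGE OverloadedStrings #-}
    import qualified Data.ByteString.Lazy as L
    import Network.HTTP.Client (RequestBody (RequestBodyLBS))
    import Network.HTTP.Conduit (httpLbs, method, newManager, parseUrl,
                                 requestBody, responseStatus, tlsManagerSettings)

    -- PUT a snapshot bundle to the new endpoint and print the response status.
    uploadBundle :: FilePath -> IO ()
    uploadBundle fp = do
        mgr  <- newManager tlsManagerSettings
        req0 <- parseUrl "http://localhost:3000/upload2"   -- placeholder URL
        body <- L.readFile fp
        let req = req0 { method = "PUT", requestBody = RequestBodyLBS body }
        res <- httpLbs req mgr
        print (responseStatus res)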
@@ -1,4 +1,3 @@
--- Stackage snapshot: http://www.stackage.org/stackage/aecbf72b568a63e86a971311fee5475f076043cc
 name: stackage-server
 version: 0.0.0
 cabal-version: >= 1.8
@@ -50,6 +49,7 @@ library
         Handler.Tag
         Handler.BannedTags
         Handler.RefreshDeprecated
+        Handler.UploadV2
         Handler.BuildVersion
         Handler.PackageCounts

@@ -83,7 +83,11 @@ library
        RecordWildCards
        ScopedTypeVariables
        BangPatterns
+       TupleSections
        DeriveGeneric
+       DeriveFunctor
+       DeriveFoldable
+       DeriveTraversable

    build-depends:
        base >= 4
@@ -151,6 +155,8 @@ library
        , formatting
        , blaze-html
        , haddock-library
+       , async
+       , stackage >= 0.4
        , yesod-gitrepo >= 0.1.1
        , hoogle
        , spoon
@@ -74,11 +74,3 @@ $newline never
           <a href=@{doc}>Docs
         <td>
           #{synopsis}
-
-
-<div .container>
-  <p .bottom-links>
-    $if hasBundle
-      <span>
-        <a href=@{SnapshotR slug StackageBundleR} title="This is useful for making modifications to an existing snapshot">
-          \Bundle