mirror of https://github.com/commercialhaskell/stackage-server.git
synced 2026-01-12 04:08:29 +01:00

Fix warnings

parent fe20a6d825
commit 856ac728b4

@@ -43,6 +43,7 @@ dependencies:
 - system-filepath
 - tar
 - template-haskell
+- temporary
 - text
 - these
 - unliftio

@@ -22,9 +22,9 @@ supportedArches = [minBound .. maxBound]
 readGhcLinks :: FilePath -> IO GhcLinks
 readGhcLinks dir = do
     let ghcMajorVersionsPath = dir </> "supported-ghc-major-versions.yaml"
-    Yaml.decodeFile ghcMajorVersionsPath >>= \case
-        Nothing -> return $ GhcLinks HashMap.empty
-        Just (ghcMajorVersions :: [GhcMajorVersion]) -> do
+    Yaml.decodeFileEither ghcMajorVersionsPath >>= \case
+        Left _ -> return $ GhcLinks HashMap.empty
+        Right (ghcMajorVersions :: [GhcMajorVersion]) -> do
             let opts =
                     [ (arch, ver)
                     | arch <- supportedArches

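Note: the hunk above moves from the Maybe-returning Data.Yaml.decodeFile, which is deprecated in newer yaml releases, to decodeFileEither, so the parse failure is matched on Left/Right instead of Nothing/Just. A minimal standalone sketch of the same pattern (the reader function and its result type are illustrative, not from this repo):

    {-# LANGUAGE LambdaCase, ScopedTypeVariables #-}
    import qualified Data.Yaml as Yaml

    -- Hypothetical reader: a parse failure now carries a reason (Left err)
    -- instead of an opaque Nothing; this sketch simply falls back to [].
    readVersions :: FilePath -> IO [String]
    readVersions path =
        Yaml.decodeFileEither path >>= \case
            Left _err                    -> return []
            Right (versions :: [String]) -> return versions
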
@@ -29,7 +29,7 @@ getLatestMatcher man = do
             { requestHeaders = [("User-Agent", "Stackage Server")]
             }
     val <- flip runReaderT man $ withResponse req
-        $ \res -> responseBody res $$ sinkParser json
+        $ \res -> runConduit $ responseBody res .| sinkParser json
     return $ \pattern -> do
         let pattern' = pattern ++ "."
         Object top <- return val

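Note: most hunks in this commit replace the deprecated conduit operators ($$, $=, =$, =$=) with an explicit runConduit and the .| fuse operator, which newer conduit releases standardise on. A self-contained sketch of the rewrite, with an invented example pipeline:

    import Conduit (runConduit, (.|), yieldMany, mapC, sumC)

    doubledSum :: IO Int
    doubledSum =
        -- before: yieldMany [1 .. 10] $$ mapC (* 2) =$= sumC
        runConduit $ yieldMany [1 .. 10] .| mapC (* 2) .| sumC
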
@@ -2,7 +2,6 @@
 module Handler.Hoogle where
 
 import Control.DeepSeq (NFData(..))
-import Control.DeepSeq.Generics (genericRnf)
 import Data.Data (Data)
 import Data.Text.Read (decimal)
 import qualified Hoogle

@@ -106,7 +105,7 @@ data HoogleQueryInput = HoogleQueryInput
 
 data HoogleQueryOutput = HoogleQueryOutput [HoogleResult] (Maybe Int) -- ^ Int == total count
     deriving (Read, Typeable, Data, Show, Eq, Generic)
-instance NFData HoogleQueryOutput where rnf = genericRnf
+instance NFData HoogleQueryOutput
 
 data HoogleResult = HoogleResult
     { hrURL :: String

@@ -128,9 +127,9 @@ data ModuleLink = ModuleLink
     }
     deriving (Eq, Read, Show, Data, Typeable, Ord, Generic)
 
-instance NFData HoogleResult where rnf = genericRnf
-instance NFData PackageLink where rnf = genericRnf
-instance NFData ModuleLink where rnf = genericRnf
+instance NFData HoogleResult
+instance NFData PackageLink
+instance NFData ModuleLink
 
 runHoogleQuery :: (Route App -> Text)
                -> SnapName

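Note: the two hunks above drop the genericRnf definitions from deepseq-generics. With deepseq >= 1.4, rnf has a Generic-based default method, so an empty instance head is enough once the type derives Generic. A small sketch with an illustrative type:

    {-# LANGUAGE DeriveGeneric #-}
    import Control.DeepSeq (NFData, force)
    import GHC.Generics (Generic)

    data Result = Result String Int
        deriving (Show, Generic)

    -- before: instance NFData Result where rnf = genericRnf
    instance NFData Result  -- rnf falls back to its Generic default

    main :: IO ()
    main = print (force (Result "ok" 1))
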
@@ -163,9 +163,9 @@ getHackageRecent latestTime =
     httpSink "https://hackage.haskell.org/packages/recent" sink
   where
     sink _ = parseBytes def
-        =$= concatMapC getDate
-        =$= filterC (<= latestTime)
-        =$= headC
+        .| concatMapC getDate
+        .| filterC (<= latestTime)
+        .| headC
 
     getDate :: Event -> Maybe UTCTime
     getDate (EventContent (ContentText t)) = parseTimeM

@@ -4,9 +4,6 @@ import Import
 import Yesod.Sitemap
 --import Stackage.Database
 
---type SitemapFor a = forall m. Monad m => Conduit a m (SitemapUrl (Route App))
-type Sitemap = forall m. Monad m => Producer m (SitemapUrl (Route App))
-
 getSitemapR :: Handler TypedContent
 getSitemapR = track "Handler.Sitemap.getSitemapR" $ sitemap $ do
     priority 1.0 $ HomeR

@@ -89,7 +86,7 @@ url loc = yield SitemapUrl
     }
 -}
 
-priority :: Double -> Route App -> Sitemap
+priority :: Monad m => Double -> Route App -> ConduitT i (SitemapUrl (Route App)) m ()
 priority p loc = yield SitemapUrl
     { sitemapLoc = loc
     , sitemapLastMod = Nothing

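Note: the Sitemap hunks above (and the signature changes further down) spell out ConduitT i o m r in place of the deprecated Producer/Conduit type synonyms, as newer conduit releases expect. An illustrative sketch, not repo code:

    import Conduit (ConduitT, yieldMany)

    -- before: numbers :: Monad m => Producer m Int
    numbers :: Monad m => ConduitT i Int m ()
    numbers = yieldMany [1 .. 5]
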
@@ -71,7 +71,7 @@ getStackageCabalConfigR name = track "Handler.StackageHome.getStackageCabalConfi
 
     plis <- getPackages sid
 
-    respondSource typePlain $ yieldMany plis $=
+    respondSource typePlain $ yieldMany plis .|
         if isGlobal
             then conduitGlobal render
             else conduitLocal render

@@ -165,7 +165,7 @@ class MonadIO m => GetStackageDatabase m where
 instance MonadIO m => GetStackageDatabase (ReaderT StackageDatabase m) where
     getStackageDatabase = ask
 
-sourcePackages :: MonadResource m => FilePath -> Producer m Tar.Entry
+sourcePackages :: MonadResource m => FilePath -> ConduitT i Tar.Entry m ()
 sourcePackages root = do
     dir <- liftIO $ cloneOrUpdate root "commercialhaskell" "all-cabal-metadata"
     bracketP

@@ -178,14 +178,14 @@ sourcePackages root = do
     liftIO $ runIn dir "git" ["archive", "--output", fp, "--format", "tar", "master"]
     sourceTarFile False fp
 
-sourceBuildPlans :: MonadResource m => FilePath -> Producer m (SnapName, FilePath, Either (IO BuildPlan) (IO DocMap))
+sourceBuildPlans :: MonadResource m => FilePath -> ConduitT i (SnapName, FilePath, Either (IO BuildPlan) (IO DocMap)) m ()
 sourceBuildPlans root = do
     forM_ ["lts-haskell", "stackage-nightly"] $ \repoName -> do
         dir <- liftIO $ cloneOrUpdate root "fpco" repoName
-        sourceDirectory (encodeString dir) =$= concatMapMC (go Left . fromString)
+        sourceDirectory (encodeString dir) .| concatMapMC (go Left . fromString)
         let docdir = dir </> "docs"
         whenM (liftIO $ F.isDirectory docdir) $
-            sourceDirectory (encodeString docdir) =$= concatMapMC (go Right . fromString)
+            sourceDirectory (encodeString docdir) .| concatMapMC (go Right . fromString)
   where
     go wrapper fp | Just name <- nameFromFP fp = liftIO $ do
         let bp = decodeFileEither (encodeString fp) >>= either throwIO return

@@ -248,7 +248,7 @@ createStackageDatabase fp = liftIO $ do
     F.createTree root
     runResourceT $ do
         putStrLn "Updating all-cabal-metadata repo"
-        flip runSqlPool pool $ sourcePackages root $$ getZipSink
+        flip runSqlPool pool $ runConduit $ sourcePackages root .| getZipSink
             ( ZipSink (mapM_C addPackage)
             *> ZipSink (do
                 deprs <- foldlC getDeprecated' []

@@ -268,7 +268,7 @@ createStackageDatabase fp = liftIO $ do
                     loop i
                  in loop (0 :: Int))
             )
-        sourceBuildPlans root $$ mapM_C (\(sname, fp', eval) -> flip runSqlPool pool $ do
+        runConduit $ sourceBuildPlans root .| mapM_C (\(sname, fp', eval) -> flip runSqlPool pool $ do
             let (typ, action) =
                     case eval of
                         Left bp -> ("build-plan", liftIO bp >>= addPlan sname fp')

@@ -372,7 +372,7 @@ addPlan name fp bp = do
             ]
         cp = cp' { cwd = Just $ encodeString $ directory fp }
     t <- withCheckedProcess cp $ \ClosedStream out ClosedStream ->
-        out $$ decodeUtf8C =$ foldC
+        runConduit $ out .| decodeUtf8C .| foldC
     case readMay $ concat $ take 1 $ words t of
         Just created -> return created
         Nothing -> do

@@ -67,9 +67,10 @@ newHoogleLocker toPrint man = mkSingleRun $ \name -> do
     withResponse req man $ \res -> if responseStatus res == status200
         then do
             createTree $ parent (fromString fptmp)
-            runResourceT $ bodyReaderSource (responseBody res)
-                $= ungzip
-                $$ sinkFile fptmp
+            runConduitRes
+              $ bodyReaderSource (responseBody res)
+              .| ungzip
+              .| sinkFile fptmp
             rename (fromString fptmp) fp
             return $ Just $ encodeString fp
         else do

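Note: runConduitRes, used in the cron hunks above and below, is shorthand for runResourceT . runConduit, so the resource scope and the pipeline run collapse into one call. A minimal sketch with placeholder file paths:

    import Conduit (runConduitRes, sourceFile, sinkFile, (.|))

    copyViaConduit :: FilePath -> FilePath -> IO ()
    copyViaConduit src dst =
        -- before: runResourceT $ sourceFile src $$ sinkFile dst
        runConduitRes $ sourceFile src .| sinkFile dst
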
@@ -86,9 +87,10 @@ stackageServerCron = do
     let upload :: FilePath -> ObjectKey -> IO ()
         upload fp key = do
             let fpgz = fp <.> "gz"
-            runResourceT $ sourceFile fp
-                $$ compress 9 (WindowBits 31)
-                =$ CB.sinkFile fpgz
+            runConduitRes
+              $ sourceFile fp
+              .| compress 9 (WindowBits 31)
+              .| CB.sinkFile fpgz
             body <- chunkedFile defaultChunkSize fpgz
             let po =
                     set poACL (Just OPublicRead)

@@ -149,8 +151,9 @@ createHoogleDB db man name = handleAny (\e -> print e $> Nothing) $ do
     unlessM (isFile (fromString tarFP)) $ withResponse req man $ \res -> do
         let tmp = tarFP <.> "tmp"
         createTree $ parent (fromString tmp)
-        runResourceT $ bodyReaderSource (responseBody res)
-            $$ sinkFile tmp
+        runConduitRes
+          $ bodyReaderSource (responseBody res)
+          .| sinkFile tmp
         rename (fromString tmp) (fromString tarFP)
 
     void $ tryIO $ removeTree (fromString bindir)

@@ -158,9 +161,9 @@ createHoogleDB db man name = handleAny (\e -> print e $> Nothing) $ do
     createTree (fromString bindir)
 
     withSystemTempDirectory ("hoogle-" ++ unpack (toPathPiece name)) $ \tmpdir -> do
-        allPackagePairs <- runResourceT
+        allPackagePairs <- runConduitRes
             $ sourceTarFile False tarFP
-            $$ foldMapMC (liftIO . singleDB db name tmpdir)
+            .| foldMapMC (liftIO . singleDB db name tmpdir)
 
         when (null allPackagePairs) $ error $ "No Hoogle .txt files found for " ++ unpack (toPathPiece name)

@@ -5,7 +5,7 @@ module Stackage.Database.Haddock
 import qualified Text.Blaze.Html5 as H
 import qualified Text.Blaze.Html5.Attributes as A
 import qualified Documentation.Haddock.Parser as Haddock
-import Documentation.Haddock.Types (DocH (..), Hyperlink (..), Picture (..), Header (..), Example (..), MetaDoc(..))
+import Documentation.Haddock.Types (DocH (..), Hyperlink (..), Picture (..), Header (..), Example (..), MetaDoc(..), Table (..), TableRow (..), TableCell (..))
 import ClassyPrelude.Conduit
 import Text.Blaze.Html (Html, toHtml)
 

@@ -58,3 +58,12 @@ hToHtml =
     wrapper _ = H.h6
     go (DocMathInline x) = H.pre $ H.code $ toHtml x
     go (DocMathDisplay x) = H.pre $ H.code $ toHtml x
+    go (DocTable (Table header body)) = H.table $ do
+        unless (null header) $ H.thead $ mapM_ goRow header
+        unless (null body) $ H.tbody $ mapM_ goRow body
+
+    goRow (TableRow cells) = H.tr $ forM_ cells $ \(TableCell colspan rowspan content) ->
+        H.td
+            H.! A.colspan (H.toValue colspan)
+            H.! A.rowspan (H.toValue rowspan)
+            $ go content

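Note: the final hunk handles the DocTable constructor that newer haddock-library releases added to DocH; without it, the case analysis in hToHtml is non-exhaustive and GHC's pattern-match warnings fire. A toy illustration of the same exhaustiveness issue, with types invented for the example:

    {-# OPTIONS_GHC -Wincomplete-patterns #-}
    data Doc = DocText String | DocTable [[String]]

    render :: Doc -> String
    render (DocText s)     = s
    -- omitting the next equation makes GHC warn about a non-exhaustive match
    render (DocTable rows) = unlines (map unwords rows)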