Upgrade all the way to lts-22.6

I stopped at 22.6 because I'm using NixOS and ghc-9.6.3 is the last
version available on the stable channel right now. Later snapshots use
9.6.4.
This commit is contained in:
Bryan Richter 2024-02-12 15:15:22 +02:00
parent c1c7d14e15
commit 33e5cb2589
No known key found for this signature in database
GPG Key ID: B202264020068BFB
9 changed files with 87 additions and 106 deletions

View File

@@ -11,6 +11,7 @@ import Data.Conduit.Attoparsec (sinkParser)
 import Data.WebsiteContent
 import Import
 import Yesod.GitRepo
+import qualified Data.Aeson.KeyMap as Aeson

 getDownloadStackListR :: Handler Html
 getDownloadStackListR = track "Handler.DownloadStack.getDownloadStackListR" $ do
@@ -35,14 +36,14 @@ getLatestMatcher man = do
     return $ \pattern' -> do
         let pattern'' = pattern' ++ "."
         Object top <- return val
-        Array assets <- lookup "assets" top
+        Array assets <- Aeson.lookup "assets" top
         headMay $ preferZip $ catMaybes $ map (findMatch pattern'') assets
   where
     findMatch pattern' (Object o) = do
-        String name <- lookup "name" o
+        String name <- Aeson.lookup "name" o
         guard $ not $ ".asc" `isSuffixOf` name
         guard $ pattern' `isInfixOf` name
-        String url <- lookup "browser_download_url" o
+        String url <- Aeson.lookup "browser_download_url" o
         Just url
     findMatch _ _ = Nothing

View File

@@ -12,6 +12,8 @@ import RIO.Time (diffUTCTime, addUTCTime, getCurrentTime)
 import Text.XML.Stream.Parse
 import Data.XML.Types (Event (EventContent), Content (ContentText))
 import qualified Prelude
+import qualified Data.Aeson.Key as Aeson
+import qualified Data.Aeson.KeyMap as Aeson

 getMirrorStatusR :: Handler Html
 getMirrorStatusR = do
@@ -148,7 +150,7 @@ getLastModifiedGit org repo ref = do
 lookupJ :: MonadThrow m => Text -> Value -> m Value
 lookupJ key (Object o) =
-    case lookup key o of
+    case Aeson.lookup (Aeson.fromText key) o of
         Nothing -> error $ "Key not found: " ++ show key
         Just x -> return x
 lookupJ key val = error $ concat

View File

@@ -5,6 +5,7 @@
 {-# LANGUAGE RecordWildCards #-}
 {-# LANGUAGE ScopedTypeVariables #-}
 {-# LANGUAGE TupleSections #-}
+{-# LANGUAGE FlexibleContexts #-}
 module Stackage.Database.Cron
     ( stackageServerCron
     , newHoogleLocker
@@ -16,7 +17,6 @@ module Stackage.Database.Cron
 import Conduit
 import Control.DeepSeq
-import qualified Control.Monad.Trans.AWS as AWS (paginate)
 import Control.SingleRun
 import qualified Data.ByteString.Char8 as BS8
 import qualified Data.Conduit.Binary as CB
@@ -29,9 +29,12 @@ import Data.Yaml (decodeFileEither)
 import Database.Persist hiding (exists)
 import Database.Persist.Postgresql hiding (exists)
 import qualified Hoogle
-import Network.AWS hiding (Request, Response)
-import Network.AWS.Data.Text (toText)
-import Network.AWS.S3
+import Amazonka hiding (Request, length, error)
+import Amazonka.Data.Text (toText)
+import Amazonka.S3
+import Amazonka.S3.ListObjectsV2
+import Amazonka.S3.Lens
+import Amazonka.Lens
 import Network.HTTP.Client
 import Network.HTTP.Client.Conduit (bodyReaderSource)
 import Network.HTTP.Simple (getResponseBody, httpJSONEither)
@@ -39,9 +42,9 @@ import Network.HTTP.Types (status200, status404)
 import Pantry (CabalFileInfo(..), DidUpdateOccur(..),
                HpackExecutable(HpackBundled), PackageIdentifierRevision(..),
                defaultCasaMaxPerRequest, defaultCasaRepoPrefix,
-               defaultHackageSecurityConfig, defaultSnapshotLocation)
-import Pantry.Internal.Stackage (HackageTarballResult(..), PantryConfig(..),
-                                 Storage(..), forceUpdateHackageIndex,
+               defaultPackageIndexConfig,
+               defaultSnapshotLocation, withPantryConfig, PantryConfig)
+import Pantry.Internal.Stackage (HackageTarballResult(..), forceUpdateHackageIndex,
                                  getHackageTarball, packageTreeKey)
 import Path (parseAbsDir, toFilePath)
 import RIO
@@ -54,7 +57,6 @@ import RIO.Process (mkDefaultProcessContext)
 import qualified RIO.Set as Set
 import qualified RIO.Text as T
 import RIO.Time
-import Settings
 import Stackage.Database.Github
 import Stackage.Database.PackageInfo
 import Stackage.Database.Query
@@ -86,11 +88,6 @@ hoogleUrl n haddockBucketUrl = T.concat
 hackageDeprecatedUrl :: Request
 hackageDeprecatedUrl = "https://hackage.haskell.org/packages/deprecated.json"

-withStorage :: (Storage -> IO a) -> IO a
-withStorage inner = do
-    as <- getAppSettings
-    withStackageDatabase False (appDatabase as) (\db -> inner (Storage (runDatabase db) id))
-
 getStackageSnapshotsDir :: RIO StackageCron FilePath
 getStackageSnapshotsDir = do
     cron <- ask
@@ -154,58 +151,52 @@ stackageServerCron StackageCronOptions {..} = do
     catchIO (bindPortTCP 17834 "127.0.0.1") $
     const $ throwString "Stackage Cron loader process already running, exiting."
     connectionCount <- getNumCapabilities
-    withStorage $ \storage -> do
-        lo <- logOptionsHandle stdout True
-        stackageRootDir <- getAppUserDataDirectory "stackage"
-        pantryRootDir <- parseAbsDir (stackageRootDir </> "pantry")
-        createDirectoryIfMissing True (toFilePath pantryRootDir)
-        updateRef <- newMVar True
-        cabalImmutable <- newIORef Map.empty
-        cabalMutable <- newIORef Map.empty
-        gpdCache <- newIORef IntMap.empty
-        defaultProcessContext <- mkDefaultProcessContext
-        aws <- do
-            aws' <- newEnv Discover
-            endpoint <- lookup "AWS_S3_ENDPOINT" <$> getEnvironment
-            pure $ case endpoint of
-                Nothing -> aws'
-                Just ep -> configure (setEndpoint True (BS8.pack ep) 443 s3) aws'
-        withLogFunc (setLogMinLevel scoLogLevel lo) $ \logFunc -> do
-            let pantryConfig =
-                    PantryConfig
-                        { pcHackageSecurity = defaultHackageSecurityConfig
-                        , pcHpackExecutable = HpackBundled
-                        , pcRootDir = pantryRootDir
-                        , pcStorage = storage
-                        , pcUpdateRef = updateRef
-                        , pcParsedCabalFilesRawImmutable = cabalImmutable
-                        , pcParsedCabalFilesMutable = cabalMutable
-                        , pcConnectionCount = connectionCount
-                        , pcCasaRepoPrefix = defaultCasaRepoPrefix
-                        , pcCasaMaxPerRequest = defaultCasaMaxPerRequest
-                        , pcSnapshotLocation = defaultSnapshotLocation
-                        }
-            currentHoogleVersionId <- runRIO logFunc $ do
+    lo <- logOptionsHandle stdout True
+    stackageRootDir <- getAppUserDataDirectory "stackage"
+    pantryRootDir <- parseAbsDir (stackageRootDir </> "pantry")
+    createDirectoryIfMissing True (toFilePath pantryRootDir)
+    gpdCache <- newIORef IntMap.empty
+    defaultProcessContext <- mkDefaultProcessContext
+    aws <- do
+        aws' <- newEnv discover
+        endpoint <- lookup "AWS_S3_ENDPOINT" <$> getEnvironment
+        pure $ case endpoint of
+            Nothing -> aws'
+            Just ep -> configureService (setEndpoint True (BS8.pack ep) 443 Amazonka.S3.defaultService) aws'
+    withLogFunc (setLogMinLevel scoLogLevel lo) $ \logFunc -> do
+        let cronWithPantryConfig :: HasLogFunc env => (PantryConfig -> RIO env a) -> RIO env a
+            cronWithPantryConfig =
+                withPantryConfig
+                    pantryRootDir
+                    defaultPackageIndexConfig
+                    HpackBundled
+                    connectionCount
+                    defaultCasaRepoPrefix
+                    defaultCasaMaxPerRequest
+                    defaultSnapshotLocation
+        currentHoogleVersionId <- runRIO logFunc $ do
+            cronWithPantryConfig $ \pantryConfig -> do
                 runStackageMigrations' pantryConfig
                 getCurrentHoogleVersionIdWithPantryConfig pantryConfig
-            let stackage =
+        let stackage pantryConfig =
                 StackageCron
                     { scPantryConfig = pantryConfig
                     , scStackageRoot = stackageRootDir
                     , scProcessContext = defaultProcessContext
                     , scLogFunc = logFunc
                     , scForceFullUpdate = scoForceUpdate
                     , scCachedGPD = gpdCache
                     , scEnvAWS = aws
                     , scDownloadBucketName = scoDownloadBucketName
                     , scDownloadBucketUrl = scoDownloadBucketUrl
                     , scUploadBucketName = scoUploadBucketName
                     , scSnapshotsRepo = scoSnapshotsRepo
                     , scReportProgress = scoReportProgress
                     , scCacheCabalFiles = scoCacheCabalFiles
                     , scHoogleVersionId = currentHoogleVersionId
                     }
-            runRIO stackage (runStackageUpdate scoDoNotUpload)
+        runRIO logFunc $ cronWithPantryConfig $ \pantryConfig -> runRIO (stackage pantryConfig) (runStackageUpdate scoDoNotUpload)
runStackageUpdate :: Bool -> RIO StackageCron () runStackageUpdate :: Bool -> RIO StackageCron ()
@@ -393,10 +384,11 @@ addPantryPackage sid compiler isHidden flags (PantryPackage pc treeKey) = do
 checkForDocs :: SnapshotId -> SnapName -> ResourceT (RIO StackageCron) ()
 checkForDocs snapshotId snapName = do
     bucketName <- lift (scDownloadBucketName <$> ask)
+    env <- asks scEnvAWS
     mods <-
         runConduit $
-        AWS.paginate (req bucketName) .| concatMapC (^. lovrsContents) .|
-        mapC (\obj -> toText (obj ^. oKey)) .|
+        paginate env (req bucketName) .| concatMapC (fromMaybe [] . (^. listObjectsV2Response_contents)) .|
+        mapC (\obj -> toText (obj ^. object_key)) .|
         concatMapC (T.stripSuffix ".html" >=> T.stripPrefix prefix >=> pathToPackageModule) .|
         sinkList
     -- it is faster to download all modules in this snapshot, than process them with a conduit all
@@ -414,7 +406,7 @@ checkForDocs snapshotId snapName = do
         display snapName
   where
     prefix = textDisplay snapName <> "/"
-    req bucketName = listObjectsV2 (BucketName bucketName) & lovPrefix .~ Just prefix
+    req bucketName = newListObjectsV2 (BucketName bucketName) & listObjectsV2_prefix .~ Just prefix

 -- | This function records all package modules that have documentation available, the ones
 -- that are not found in the snapshot reported back as an error. Besides being run
 -- concurrently this function optimizes the SnapshotPackageId lookup as well, since that can
@@ -670,9 +662,9 @@ uploadSnapshotsJSON = do
     uploadBucket <- scUploadBucketName <$> ask
     let key = ObjectKey "snapshots.json"
     uploadFromRIO key $
-        set poACL (Just OPublicRead) $
-        set poContentType (Just "application/json") $
-        putObject (BucketName uploadBucket) key (toBody snapshots)
+        set putObject_acl (Just ObjectCannedACL_Public_read) $
+        set putObject_contentType (Just "application/json") $
+        newPutObject (BucketName uploadBucket) key (toBody snapshots)

 -- | Writes a gzipped version of hoogle db into temporary file onto the file system and then uploads
 -- it to S3. Temporary file is removed upon completion
@@ -684,14 +676,14 @@ uploadHoogleDB fp key =
         body <- toBody <$> readFileBinary fpgz
         uploadBucket <- scUploadBucketName <$> ask
         uploadFromRIO key $
-            set poACL (Just OPublicRead) $ putObject (BucketName uploadBucket) key body
+            set putObject_acl (Just ObjectCannedACL_Public_read) $ newPutObject (BucketName uploadBucket) key body

-uploadFromRIO :: AWSRequest a => ObjectKey -> a -> RIO StackageCron ()
+uploadFromRIO :: (AWSRequest a, Typeable a, Typeable (AWSResponse a)) => ObjectKey -> a -> RIO StackageCron ()
 uploadFromRIO key po = do
     logInfo $ "Uploading " <> displayShow key <> " to S3 bucket."
-    env <- ask
-    eres <- runResourceT $ runAWS env $ trying _Error $ send po
+    env <- asks scEnvAWS
+    eres <- runResourceT $ trying _Error $ send env po
     case eres of
         Left e ->
             logError $ "Couldn't upload " <> displayShow key <> " to S3 becuase " <> displayShow e
@@ -701,8 +693,9 @@ buildAndUploadHoogleDB :: Bool -> RIO StackageCron ()
 buildAndUploadHoogleDB doNotUpload = do
     snapshots <- lastLtsNightlyWithoutHoogleDb 5 5
     env <- ask
+    awsEnv <- asks scEnvAWS
     bucketUrl <- asks scDownloadBucketUrl
-    locker <- newHoogleLocker (env ^. logFuncL) (env ^. envManager) bucketUrl
+    locker <- newHoogleLocker (env ^. logFuncL) (awsEnv ^. env_manager) bucketUrl
     for_ snapshots $ \(snapshotId, snapName) ->
         unlessM (checkInsertSnapshotHoogleDb False snapshotId) $ do
             logInfo $ "Starting Hoogle database download: " <> display (hoogleKey snapName)
@@ -738,7 +731,8 @@ createHoogleDB snapshotId snapName =
         -- is present it is not in a corrupted state
         unlessM (doesFileExist tarFP) $ do
             req <- parseRequest $ T.unpack tarUrl
-            man <- view envManager
+            env <- asks scEnvAWS
+            let man = env ^. env_manager
             withResponseUnliftIO req {decompress = const True} man $ \res -> do
                 throwErrorStatusCodes req res
                 createDirectoryIfMissing True $ takeDirectory tarFP

View File

@@ -23,7 +23,7 @@ import Distribution.Compiler (CompilerFlavor(GHC))
 import Distribution.Package (Dependency(..))
 import Distribution.PackageDescription (CondTree(..), Condition(..),
                                         ConfVar(..),
-                                        Flag(flagDefault, flagName), FlagName,
+                                        PackageFlag(..), flagDefault, flagName, FlagName,
                                         GenericPackageDescription, author,
                                         condExecutables, condLibrary,
                                         description, genPackageFlags, homepage,
@@ -152,7 +152,7 @@ getCheckCond compiler overrideFlags gpd = go
   where
     go (Var (OS os)) = os == Linux -- arbitrary
    go (Var (Arch arch)) = arch == X86_64 -- arbitrary
-    go (Var (Flag flag)) = fromMaybe False $ Map.lookup flag flags
+    go (Var (PackageFlag flag)) = fromMaybe False $ Map.lookup flag flags
    go (Var (Impl flavor range)) = flavor == compilerFlavor && compilerVersion `withinRange` range
    go (Lit b) = b
    go (CNot c) = not $ go c

View File

@@ -206,7 +206,7 @@ snapshotsJSON = do
             Just n -> (("nightly" A..= printNightly n) :)
     return $ A.object $ nightly lts
   where
-    toObj lts@(major, _) = T.pack ("lts-" <> show major) A..= printLts lts
+    toObj lts@(major, _) = fromString ("lts-" <> show major) A..= printLts lts
    printLts (major, minor) = "lts-" <> show major <> "." <> show minor
    printNightly day = "nightly-" <> T.pack (show day)

View File

@@ -15,6 +15,7 @@
 {-# LANGUAGE TemplateHaskell #-}
 {-# LANGUAGE TypeFamilies #-}
 {-# LANGUAGE UndecidableInstances #-}
+{-# LANGUAGE TypeOperators #-}
 module Stackage.Database.Schema
     ( -- * Database
       run

View File

@@ -50,7 +50,7 @@ module Stackage.Database.Types
 import Data.Aeson
 import qualified Data.Text as T
 import Data.Text.Read (decimal)
-import Network.AWS (Env, HasEnv(..))
+import Amazonka (Env)
 import Pantry (BlobKey(..), CabalFileInfo(..), FileSize(..),
                HasPantryConfig(..), PantryConfig, PackageIdentifierRevision(..), TreeKey(..))
 import Pantry.SHA256 (fromHexText)
@@ -97,9 +97,6 @@
     , scHoogleVersionId :: !VersionId
     }

-instance HasEnv StackageCron where
-    environment = lens scEnvAWS (\c f -> c {scEnvAWS = f})
-
 instance HasLogFunc StackageCron where
     logFuncL = lens scLogFunc (\c f -> c {scLogFunc = f})

View File

@@ -15,6 +15,7 @@ module Stackage.Snapshot.Diff
 import ClassyPrelude (sortOn, toCaseFold)
 import Data.Aeson
+import Data.Aeson.Key
 import qualified Data.Text as T (commonPrefixes)
 import Data.These
 import RIO
@@ -61,7 +62,7 @@ newtype VersionChange = VersionChange { unVersionChange :: These VersionP VersionP }
     deriving (Show, Eq, Generic, Typeable)

 instance ToJSON (WithSnapshotNames VersionChange) where
-    toJSON (WithSnapshotNames (toPathPiece -> aKey) (toPathPiece -> bKey) change) =
+    toJSON (WithSnapshotNames (fromText . toPathPiece -> aKey) (fromText . toPathPiece -> bKey) change) =
         case change of
             VersionChange (This a) -> object [ aKey .= a ]
             VersionChange (That b) -> object [ bKey .= b ]

View File

@@ -1,25 +1,10 @@
-resolver: lts-18.28
+resolver: lts-22.6
 extra-deps:
-- amazonka-1.6.1
-- github: chreekat/amazonka
-  commit: b/1.6.1-r2-compat
-  subdirs: [core]
-- barrier-0.1.1
-- classy-prelude-yesod-1.5.0
-- unliftio-core-0.1.2.0
-- yesod-gitrepo-0.3.0
-- static-bytes-0.1.0
-- companion-0.1.0
-- aeson-warning-parser-0.1.0
-- hpack-0.35.0
-- http-client-0.5.14@sha256:4880b27d6741e331454a1d4c887d96ce3d7d625322c8433983a4b1cd08538577,5348
-- git: https://github.com/commercialhaskell/pantry.git
-  commit: 5df643cc1deb561d9c52a9cb6f593aba2bc4c08e
-drop-packages:
-- Cabal
+- hoogle-5.0.18.4@sha256:1372458e97dff541fcda099236af7936bf93ee6b8c5d15695ee6d9426dff5eed,3171
+- safe-0.3.20@sha256:7813ad56161f57d5162a924de5597d454162a2faed06be6e268b37bb5c19d48d,2312
 nix:
   packages:
   - zlib
   - postgresql
+  - pkg-config