-- fradrive/src/Model/Migration.hs

{-# LANGUAGE UndecidableInstances, GeneralizedNewtypeDeriving #-}
module Model.Migration
( migrateAll
, requiresMigration
) where
import Import.NoModel hiding (Max(..), Last(..))
import Model
import Settings
import Foundation.Type
import Jobs.Types
import Audit.Types
import Model.Migration.Version
import qualified Model.Migration.Types as Legacy
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Text as Text
import qualified Data.Conduit.List as C
import Data.Semigroup (Max(..), Last(..))
import Database.Persist.Sql
import Database.Persist.Sql.Raw.QQ
import Database.Persist.Postgresql
import Text.Read (readMaybe)
import Control.Monad.Except (MonadError(..))
import Utils.Lens (_NoUpload)
import Network.IP.Addr
import qualified Data.Char as Char
import qualified Data.CaseInsensitive as CI
import qualified Data.Aeson as Aeson
import Web.ServerSession.Backend.Persistent.Memcached (migrateMemcachedSqlStorage)
import Data.Conduit.Algorithms.FastCDC (FastCDCParameters(fastCDCMinBlockSize))
import Data.Time.Format.ISO8601 (iso8601Show)
import qualified Data.Time.Zones as TZ
-- Database versions must follow https://pvp.haskell.org:
-- - Breaking changes are instances where manual migration is necessary (via customMigrations; e.g. changing a column's format)
-- - Non-breaking changes are instances where the automatic migration done by persistent is sufficient (e.g. adding a column or table)
-- Note that only one automatic migration is done (after all manual migrations).
-- Manual migrations therefore cannot rely on non-breaking changes having been applied by the time they are executed (e.g. columns that were added as non-breaking changes after InitialVersion may not exist yet)
-- If that is inconvenient, a custom migration between minor version numbers can be formulated using `migration`, `runMigration`, and manually defined `EntityDef`s, so as to use persistent's automatic migration system
-- Database versions must be marked with git tags:
-- The first commit corresponding to a new database version x.x.x must be tagged dbx.x.x
-- Tags should be annotated with a description of the changes affecting the database.
--
-- Example:
-- $ git tag -a db0.0.0 -m "Simplified format of UserTheme"
--
-- Doing so creates a sort of parallel commit history tracking changes to the database schema
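--
-- As an illustrative sketch only (the version numbers and table/column names here are
-- hypothetical and not part of the real migration history), a breaking change is
-- registered by adding an entry to `customMigrations`, keyed by the version pair it
-- bridges, and tagging the corresponding commit:
--
--   , ( AppliedMigrationKey [migrationVersion|43.0.0|] [version|44.0.0|]
--     , whenM (columnExists "example_table" "example_column")
--         [executeQQ|
--           ALTER TABLE "example_table" ALTER COLUMN "example_column" TYPE jsonb USING "example_column"::jsonb;
--         |]
--     )
--
--   $ git tag -a db44.0.0 -m "example_column is stored as jsonb now"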
share [mkPersist sqlSettings, mkMigrate "migrateDBVersioning"]
[persistLowerCase|
AppliedMigration json
from MigrationVersion
to Version
time UTCTime
UniqueAppliedMigration from
Primary from to
deriving Show Eq Ord
|]
migrateAll' :: Migration
migrateAll' = sequence_
[ migrateUniWorX
, migrateMemcachedSqlStorage
, migrateManual
]
migrateAll :: ( MonadLogger m
, MonadResource m
, MonadUnliftIO m
, MonadReader UniWorX m
)
=> ReaderT SqlBackend m ()
migrateAll = do
$logDebugS "Migration" "Initial migration"
mapM_ ($logInfoS "Migration") =<< runMigrationSilent initialMigration
missingMigrations <- getMissingMigrations
let
doCustomMigration acc desc migration = acc <* do
let AppliedMigrationKey appliedMigrationFrom appliedMigrationTo = desc
$logInfoS "Migration" [st|#{tshow appliedMigrationFrom} -> #{tshow appliedMigrationTo}|]
appliedMigrationTime <- liftIO getCurrentTime
_ <- migration
insert AppliedMigration{..}
-- Map.foldlWithKey traverses migrations in ascending order of AppliedMigrationKey
$logDebugS "Migration" "Apply missing migrations"
Map.foldlWithKey doCustomMigration (return ()) missingMigrations
$logDebugS "Migration" "Persistent automatic migration"
mapM_ ($logInfoS "Migration") =<< runMigrationSilent migrateAll'
$logDebugS "Migration" "Migrations marked as always safe"
mapM_ ($logInfoS "Migration") =<< runMigrationSilent migrateAlwaysSafe
requiresMigration :: forall m.
( MonadLogger m
, MonadResource m
)
=> ReaderT SqlBackend m Bool
requiresMigration = mapReaderT (exceptT return return) $ do
initial <- either id (map snd) <$> parseMigration initialMigration
unless (null initial) $ do
$logInfoS "Migration" $ intercalate "; " initial
throwError True
customs <- mapReaderT lift $ getMissingMigrations @_ @(ReaderT UniWorX m)
unless (Map.null customs) $ do
$logInfoS "Migration" . intercalate ", " . map tshow $ Map.keys customs
throwError True
automatic <- either id (map snd) <$> parseMigration migrateAll'
unless (null automatic) $ do
$logInfoS "Migration" $ intercalate "; " automatic
throwError True
-- Does not consider `migrateAlwaysSafe`
return False
-- | Manual migrations required to reach InitialVersion:
initialMigration :: Migration
initialMigration = do
mapM_ migrateEnableExtension ["citext", "pgcrypto"]
migrateDBVersioning
getMissingMigrations :: forall m m'.
( MonadLogger m
, MonadIO m
, MonadResource m'
, MonadReader UniWorX m'
)
=> ReaderT SqlBackend m (Map (Key AppliedMigration) (ReaderT SqlBackend m' ()))
getMissingMigrations = do
$logDebugS "Migration" "Retrieve applied migrations"
appliedMigrations <- selectKeysList [] []
return $ customMigrations `Map.withoutKeys` Set.fromList appliedMigrations
migrateManual :: Migration
migrateManual = do
mapM_ (uncurry addIndex)
[ ("course_application_file_content", "CREATE INDEX course_application_file_content ON course_application_file (content)" )
, ("material_file_content", "CREATE INDEX material_file_content ON material_file (content)" )
, ("course_news_file_content", "CREATE INDEX course_news_file_content ON course_news_file (content)" )
, ("sheet_file_content", "CREATE INDEX sheet_file_content ON sheet_file (content)" )
, ("course_app_instruction_file_content", "CREATE INDEX course_app_instruction_file_content ON course_app_instruction_file (content)")
, ("allocation_matching_log", "CREATE INDEX allocation_matching_log ON allocation_matching (log)" )
, ("submission_file_content", "CREATE INDEX submission_file_content ON submission_file (content)" )
, ("session_file_content", "CREATE INDEX session_file_content ON session_file (content)" )
, ("file_lock_content", "CREATE INDEX file_lock_content ON file_lock (content)" )
, ("user_lower_display_email", "CREATE INDEX user_lower_display_email ON \"user\" (lower(display_email))" )
, ("user_lower_email", "CREATE INDEX user_lower_email ON \"user\" (lower(email))" )
, ("user_lower_ident", "CREATE INDEX user_lower_ident ON \"user\" (lower(ident))" )
, ("submission_sheet", "CREATE INDEX submission_sheet ON submission (sheet)" )
, ("submission_edit_submission", "CREATE INDEX submission_edit_submission ON submission_edit (submission)" )
, ("user_ldap_primary_key", "CREATE INDEX user_ldap_primary_key ON \"user\" (ldap_primary_key)" )
, ("file_content_entry_chunk_hash", "CREATE INDEX file_content_entry_chunk_hash ON \"file_content_entry\" (chunk_hash)" )
, ("sent_mail_bounce_secret", "CREATE INDEX sent_mail_bounce_secret ON \"sent_mail\" (bounce_secret) WHERE bounce_secret IS NOT NULL")
, ("sent_mail_recipient", "CREATE INDEX sent_mail_recipient ON \"sent_mail\" (recipient) WHERE recipient IS NOT NULL")
]
where
addIndex :: Text -> Sql -> Migration
addIndex ixName ixDef = do
res <- lift $ lift [sqlQQ|SELECT EXISTS (SELECT 1 FROM pg_indexes WHERE schemaname = current_schema() AND indexname = #{ixName})|]
alreadyDefined <- case res of
[Single e] -> return e
_other -> return True
unless alreadyDefined $ addMigration False ixDef
-- | Part of `migrateAll` but not checked in `requiresMigration`
migrateAlwaysSafe :: Migration
migrateAlwaysSafe = do
recordedChangelogItems <- lift . lift $ selectList [ ChangelogItemFirstSeenItem <-. universeF ] []
let missingChangelogItems = Set.toList $ Set.fromList universeF `Set.difference` recordedChangelogItems'
where recordedChangelogItems' = Set.fromList [ changelogItemFirstSeenItem | Entity _ ChangelogItemFirstSeen{..} <- recordedChangelogItems ]
unless (null missingChangelogItems) $ do
today <- localDay . TZ.utcToLocalTimeTZ appTZ <$> liftIO getCurrentTime
addMigration False $ do
let sql = [st|INSERT INTO changelog_item_first_seen (item, first_seen) VALUES #{vals}|]
vals = Text.intercalate ", " $ do
item <- missingChangelogItems
let itemDay = Map.findWithDefault today item changelogItemDays
return [st|('#{toPathPiece item}', '#{iso8601Show itemDay}')|]
in sql
{-
Quoting conventions, per the PostgreSQL manual:
  single quotes for string constants, double quotes for table/column names.
QuasiQuoter interpolation:
  ^{TableName} and @{ColumnName} are escaped as table/column names (resolved from the current model);
  #{anything} is escaped as a value.
-}
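--
-- For instance (purely illustrative; `theme` and `uid` are hypothetical bindings, not
-- values used by any migration in this module):
--
--   [executeQQ| UPDATE ^{User} SET @{UserTheme} = #{theme} WHERE @{UserId} = #{uid}; |]
--
-- quotes the entity and field names as identifiers and passes `theme` and `uid` as
-- escaped values.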
customMigrations :: forall m.
( MonadResource m
, MonadReader UniWorX m
)
=> Map (Key AppliedMigration) (ReaderT SqlBackend m ())
customMigrations = Map.fromListWith (>>)
[ ( AppliedMigrationKey [migrationVersion|initial|] [version|0.0.0|]
, whenM (columnExists "user" "theme") $ do -- New theme format
userThemes <- [sqlQQ| SELECT "id", "theme" FROM "user"; |]
forM_ userThemes $ \(uid, Single str) -> case stripPrefix "theme--" str of
Just v
| Just theme <- fromPathPiece v -> update uid [UserTheme =. theme]
other -> error $ "Could not parse theme: " <> show other
)
, ( AppliedMigrationKey [migrationVersion|0.0.0|] [version|1.0.0|]
, whenM (tableExists "sheet") -- Better JSON encoding
[executeQQ|
ALTER TABLE "sheet" ALTER COLUMN "type" TYPE jsonb USING "type"::jsonb;
ALTER TABLE "sheet" ALTER COLUMN "grouping" TYPE jsonb USING "grouping"::jsonb;
|]
)
, ( AppliedMigrationKey [migrationVersion|1.0.0|] [version|2.0.0|]
, whenM (columnExists "school" "id") $ do -- SchoolId is the Shorthand CI Text now
-- Read old table into memory
schoolTable <- [sqlQQ| SELECT "id", "shorthand" FROM "school"; |]
let _sT = schoolTable :: [(Single Int64, Single (CI Text))] -- Types needed
-- Convert columns containing SchoolId
whenM (tableExists "user_admin") $ do
[executeQQ|
ALTER TABLE "user_admin" DROP CONSTRAINT "user_admin_school_fkey";
ALTER TABLE "user_admin" ALTER COLUMN "school" TYPE citext USING "school"::citext;
|]
forM_ schoolTable $ \(Single idnr, Single ssh) ->
[executeQQ|
UPDATE "user_admin" SET "school" = #{ssh} WHERE "school" = #{tshow idnr};
|]
[executeQQ|
ALTER TABLE "user_admin" ADD CONSTRAINT "user_admin_school_fkey"
FOREIGN KEY (school) REFERENCES school(shorthand);
|]
whenM (tableExists "user_lecturer") $ do
[executeQQ|
ALTER TABLE "user_lecturer" DROP CONSTRAINT "user_lecturer_school_fkey";
ALTER TABLE "user_lecturer" ALTER COLUMN "school" TYPE citext USING "school"::citext;
|]
forM_ schoolTable $ \(Single idnr, Single ssh) ->
[executeQQ|
UPDATE "user_lecturer" SET "school" = #{ssh} WHERE "school" = #{tshow idnr};
|]
[executeQQ|
ALTER TABLE "user_lecturer" ADD CONSTRAINT "user_lecturer_school_fkey"
FOREIGN KEY (school) REFERENCES school(shorthand);
|]
whenM (tableExists "course") $ do
[executeQQ|
ALTER TABLE "course" DROP CONSTRAINT "course_school_fkey";
ALTER TABLE "course" ALTER COLUMN "school" TYPE citext USING "school"::citext;
|]
forM_ schoolTable $ \(Single idnr, Single ssh) ->
[executeQQ|
UPDATE "course" SET "school" = #{ssh} WHERE "school" = #{tshow idnr};
|]
[executeQQ|
ALTER TABLE "course" ADD CONSTRAINT "course_school_fkey"
FOREIGN KEY (school) REFERENCES school(shorthand);
|]
[executeQQ|
ALTER TABLE "school" DROP COLUMN "id";
ALTER TABLE "school" ADD PRIMARY KEY (shorthand);
|]
)
, ( AppliedMigrationKey [migrationVersion|2.0.0|] [version|3.0.0|]
, whenM (tableExists "sheet_corrector") $ do -- Load is encoded as JSON now.
correctorLoads <- [sqlQQ| SELECT "id", "load" FROM "sheet_corrector"; |]
forM_ correctorLoads $ \(uid, Single str) -> case readMaybe str of
Just load -> update uid [SheetCorrectorLoad =. load]
_other -> error $ "Could not parse Load: " <> show str
[executeQQ|
ALTER TABLE "sheet_corrector" ALTER COLUMN "load" TYPE jsonb USING "load"::jsonb;
|]
)
, ( AppliedMigrationKey [migrationVersion|3.0.0|] [version|3.1.0|]
, whenM (tableExists "user") $ do
userDisplayNames <- [sqlQQ| SELECT "id", "display_name" FROM "user"; |]
[executeQQ|
ALTER TABLE "user" ADD COLUMN IF NOT EXISTS "surname" text DEFAULT '';
|]
forM_ userDisplayNames $ \(uid, Single str) -> case lastMaybe $ words str of
Just name -> update uid [UserSurname =. name]
_other -> error "Empty userDisplayName found"
)
, ( AppliedMigrationKey [migrationVersion|3.1.0|] [version|3.2.0|]
, whenM (tableExists "sheet")
[executeQQ|
ALTER TABLE "sheet" ADD COLUMN IF NOT EXISTS "upload_mode" jsonb DEFAULT '{ "tag": "Upload", "unpackZips": true }';
|]
)
, ( AppliedMigrationKey [migrationVersion|3.2.0|] [version|4.0.0|]
, whenM (columnExists "user" "plugin")
-- <> is standard SQL for /=
[executeQQ|
DELETE FROM "user" WHERE "plugin" <> 'LDAP';
ALTER TABLE "user" DROP COLUMN "plugin";
ALTER TABLE "user" ADD COLUMN IF NOT EXISTS "authentication" jsonb DEFAULT '"ldap"';
|]
)
, ( AppliedMigrationKey [migrationVersion|4.0.0|] [version|5.0.0|]
, whenM (tableExists "user")
[executeQQ|
ALTER TABLE "user" ADD COLUMN IF NOT EXISTS "notification_settings" jsonb NOT NULL DEFAULT '[]';
|]
)
, ( AppliedMigrationKey [migrationVersion|5.0.0|] [version|6.0.0|]
, whenM (tableExists "sheet") $ do
sheets <- [sqlQQ| SELECT "id", "type" FROM "sheet"; |]
forM_ sheets $ \(sid, Single lsty) -> update sid [SheetType =. Legacy.sheetType lsty]
)
, ( AppliedMigrationKey [migrationVersion|6.0.0|] [version|7.0.0|]
, whenM (tableExists "cluster_config")
[executeQQ|
UPDATE "cluster_config" SET "setting" = 'secret-box-key' WHERE "setting" = 'error-message-key';
|]
)
, ( AppliedMigrationKey [migrationVersion|7.0.0|] [version|8.0.0|]
, whenM (tableExists "sheet")
[executeQQ|
UPDATE "sheet" SET "type" = json_build_object('type', "type"->'type', 'grading', "type"->'') WHERE jsonb_exists("type", '');
UPDATE "sheet" SET "type" = json_build_object('type', "type"->'type', 'grading', json_build_object('type', "type"->'grading'->'type', 'max', "type"->'grading'->'points')) WHERE ("type"->'grading'->'type') = '"points"' AND jsonb_exists("type"->'grading', 'points');
UPDATE "sheet" SET "type" = json_build_object('type', "type"->'type', 'grading', json_build_object('type', "type"->'grading'->'type', 'max', "type"->'grading'->'points', 'passing', 0)) WHERE ("type"->'grading'->'type') = '"pass-points"' AND jsonb_exists("type"->'grading', 'points');
|]
)
, ( AppliedMigrationKey [migrationVersion|8.0.0|] [version|9.0.0|]
, whenM ((\a b c -> a && b && not c) <$> tableExists "study_features" <*> tableExists "course_participant" <*> columnExists "course_participant" "field") $ do
[executeQQ|
ALTER TABLE "course_participant" ADD COLUMN "field" bigint DEFAULT null REFERENCES study_features(id);
ALTER TABLE "study_features" ADD COLUMN IF NOT EXISTS "valid" boolean NOT NULL DEFAULT true;
|]
users <- [sqlQQ| SELECT DISTINCT ON ("user"."id") "user"."id", "study_features"."id" FROM "user", "study_features" WHERE "study_features"."user" = "user"."id" AND "study_features"."valid" AND "study_features"."type" = 'FieldPrimary' ORDER BY "user"."id", random(); |]
forM_ users $ \(uid :: UserId, sfid :: StudyFeaturesId) -> [executeQQ| UPDATE "course_participant" SET "field" = #{sfid} WHERE "user" = #{uid} AND "field" IS NULL; |]
)
, ( AppliedMigrationKey [migrationVersion|9.0.0|] [version|10.0.0|]
, do
whenM (columnExists "study_degree" "shorthand") [executeQQ| UPDATE "study_degree" SET "shorthand" = NULL WHERE "shorthand" = '' |]
whenM (columnExists "study_degree" "name") [executeQQ| UPDATE "study_degree" SET "name" = NULL WHERE "shorthand" = '' |]
whenM (columnExists "study_terms" "shorthand") [executeQQ| UPDATE "study_terms" SET "shorthand" = NULL WHERE "shorthand" = '' |]
whenM (columnExists "study_terms" "name") [executeQQ| UPDATE "study_terms" SET "name" = NULL WHERE "shorthand" = '' |]
)
, ( AppliedMigrationKey [migrationVersion|10.0.0|] [version|11.0.0|]
, whenM ((&&) <$> columnExists "sheet" "upload_mode" <*> columnExists "sheet" "submission_mode") $ do
sheetModes <- [sqlQQ| SELECT "id", "upload_mode", "submission_mode" FROM "sheet"; |]
[executeQQ|
ALTER TABLE "sheet" DROP COLUMN "upload_mode";
ALTER TABLE "sheet" ALTER COLUMN "submission_mode" DROP DEFAULT;
ALTER TABLE "sheet" ALTER COLUMN "submission_mode" TYPE jsonb USING 'null'::jsonb;
|]
forM_ sheetModes $ \(shid :: SheetId, unSingle -> uploadMode :: Legacy.UploadMode, unSingle -> submissionMode :: Legacy.SheetSubmissionMode ) -> do
let submissionMode' = case (submissionMode, uploadMode) of
( Legacy.NoSubmissions , _ ) -> SubmissionMode False Nothing
( Legacy.CorrectorSubmissions, _ ) -> SubmissionMode True Nothing
( Legacy.UserSubmissions , Legacy.NoUpload ) -> SubmissionMode False (Just NoUpload)
( Legacy.UserSubmissions , Legacy.Upload True ) -> SubmissionMode False (Just $ UploadAny True defaultExtensionRestriction True)
( Legacy.UserSubmissions , Legacy.Upload False ) -> SubmissionMode False (Just $ UploadAny False defaultExtensionRestriction True)
[executeQQ| UPDATE "sheet" SET "submission_mode" = #{submissionMode'} WHERE "id" = #{shid}; |]
)
, ( AppliedMigrationKey [migrationVersion|11.0.0|] [version|12.0.0|]
, whenM ((&&) <$> tableExists "tutorial" <*> tableExists "tutorial_user") $ do -- Tutorials were an unused stub before
tableDropEmpty "tutorial"
tableDropEmpty "tutorial_user"
)
, ( AppliedMigrationKey [migrationVersion|12.0.0|] [version|13.0.0|]
, whenM (tableExists "exam") $ -- Exams were an unused stub before
tableDropEmpty "exam"
)
, ( AppliedMigrationKey [migrationVersion|13.0.0|] [version|14.0.0|]
, whenM ((&&) <$> tableExists "exam_occurrence" <*> (not <$> columnExists "exam_occurrence" "name")) $ do
examOccurrences <- [sqlQQ| SELECT "id" FROM "exam_occurrence" ORDER BY "exam"; |]
[executeQQ|
ALTER TABLE "exam_occurrence" ADD COLUMN "name" citext DEFAULT null;
|]
forM_ (zip [0..] examOccurrences) $ \(n :: Natural, Single eoId :: Single ExamOccurrenceId) -> do
let name = [st|occ-#{tshow n}|]
[executeQQ| UPDATE "exam_occurrence" SET "name" = #{name} WHERE "id" = #{eoId} |]
[executeQQ|
ALTER TABLE "exam_occurrence" ALTER COLUMN "name" DROP DEFAULT;
ALTER TABLE "exam_occurrence" ALTER COLUMN "name" SET NOT NULL;
|]
)
, ( AppliedMigrationKey [migrationVersion|14.0.0|] [version|15.0.0|]
, whenM (tableExists "user") $ do
[executeQQ|
ALTER TABLE "user" ADD COLUMN "first_name" text NOT NULL DEFAULT '';
ALTER TABLE "user" ADD COLUMN "title" text DEFAULT null;
|]
let getUsers = rawQuery [st|SELECT "id", "display_name", "surname" FROM "user"|] []
updateUser (uid, firstName) = [executeQQ|UPDATE "user" SET "first_name" = #{firstName} WHERE "id" = #{uid}|]
splitFirstName :: [PersistValue] -> Maybe (UserId, Text)
splitFirstName [fromPersistValue -> Right uid, fromPersistValue -> Right displayName, fromPersistValue -> Right surname] = Just . (uid, ) $ if
| Just givenName <- Text.stripSuffix surname displayName
<|> Text.stripPrefix surname displayName
-> Text.strip givenName
| otherwise
-> Text.replace surname "" displayName
splitFirstName _ = Nothing
runConduit $ getUsers .| C.mapMaybe splitFirstName .| C.mapM_ updateUser
)
, ( AppliedMigrationKey [migrationVersion|15.0.0|] [version|16.0.0|]
, whenM (tableExists "transaction_log") $ do
[executeQQ|
UPDATE transaction_log SET remote = null WHERE remote = #{IPv4 loopbackIP4 :: IP} OR remote = #{IPv6 loopbackIP6 :: IP}
|]
[executeQQ|
ALTER TABLE transaction_log ADD COLUMN "initiator_id" bigint DEFAULT null;
|]
whenM (tableExists "user")
[executeQQ|
UPDATE transaction_log SET initiator_id = "user".id FROM "user" WHERE transaction_log.initiator = "user".ident;
|]
[executeQQ|
ALTER TABLE transaction_log DROP COLUMN initiator;
ALTER TABLE transaction_log RENAME COLUMN initiator_id TO initiator;
ALTER TABLE transaction_log ALTER COLUMN initiator DROP DEFAULT;
|]
let getLogEntries = rawQuery [st|SELECT id, info FROM transaction_log|] []
updateTransactionInfo [fromPersistValue -> Right lid, fromPersistValue -> Right (oldT :: Legacy.Transaction)] = do
newT <- case oldT of
Legacy.TransactionTermEdit tid
-> return . Just . TransactionTermEdit $ TermKey tid
Legacy.TransactionExamRegister (TermKey -> tid) (SchoolKey -> ssh) csh examn uident
-> runMaybeT $ do
guardM . lift $ tablesExist ["user", "exam", "course"]
cid <- MaybeT . getKeyBy $ TermSchoolCourseShort tid ssh csh
eid <- MaybeT . getKeyBy $ UniqueExam cid examn
uid <- MaybeT . getKeyBy $ UniqueAuthentication uident
return $ TransactionExamRegister eid uid
Legacy.TransactionExamDeregister (TermKey -> tid) (SchoolKey -> ssh) csh examn uident
-> runMaybeT $ do
guardM . lift $ tablesExist ["user", "exam", "course"]
cid <- MaybeT . getKeyBy $ TermSchoolCourseShort tid ssh csh
eid <- MaybeT . getKeyBy $ UniqueExam cid examn
uid <- MaybeT . getKeyBy $ UniqueAuthentication uident
return $ TransactionExamRegister eid uid
whenIsJust newT $ \newT' ->
update lid [ TransactionLogInfo =. toJSON newT' ]
updateTransactionInfo _ = return ()
runConduit $ getLogEntries .| C.mapM_ updateTransactionInfo
)
, ( AppliedMigrationKey [migrationVersion|16.0.0|] [version|17.0.0|]
, do
whenM (tableExists "allocation_course") $ do
vals <- [sqlQQ| SELECT "course", "instructions", "application_text", "application_files", "ratings_visible" FROM "allocation_course"; |]
whenM (tableExists "course") $ do
[executeQQ|
ALTER TABLE "course" ADD COLUMN "applications_required" boolean not null default #{False}, ADD COLUMN "applications_instructions" varchar null, ADD COLUMN "applications_text" boolean not null default #{False}, ADD COLUMN "applications_files" jsonb not null default #{NoUpload}, ADD COLUMN "applications_ratings_visible" boolean not null default #{False};
ALTER TABLE "course" ALTER COLUMN "applications_required" DROP DEFAULT, ALTER COLUMN "applications_text" DROP DEFAULT, ALTER COLUMN "applications_files" DROP DEFAULT, ALTER COLUMN "applications_ratings_visible" DROP DEFAULT;
|]
forM_ vals $ \(cid :: CourseId, Single applicationsInstructions :: Single (Maybe Html), Single applicationsText :: Single Bool, Single applicationsFiles :: Single UploadMode, Single applicationsRatingsVisible :: Single Bool) -> do
let appRequired = applicationsText || isn't _NoUpload applicationsFiles
[executeQQ|
UPDATE "course" SET ("applications_required", "applications_instructions", "applications_text", "applications_files", "applications_ratings_visible") = (#{appRequired}, #{applicationsInstructions}, #{applicationsText}, #{applicationsFiles}, #{applicationsRatingsVisible}) WHERE "id" = #{cid};
|]
[executeQQ|
ALTER TABLE "allocation_course" DROP COLUMN "instructions", DROP COLUMN "application_text", DROP COLUMN "application_files", DROP COLUMN "ratings_visible";
|]
whenM ((&&) <$> tableExists "allocation_course_file" <*> (not <$> tableExists "course_app_instruction_file")) $ do
[executeQQ|
CREATe TABLE "course_app_instruction_file"("id" SERIAL8 PRIMARY KEY UNIQUE,"course" INT8 NOT NULL,"file" INT8 NOT NULL);
ALTER TABLE "course_app_instruction_file" ADD CONSTRAINT "unique_course_app_instruction_file" UNIQUE("course","file");
ALTER TABLE "course_app_instruction_file" ADD CONSTRAINT "course_app_instruction_file_course_fkey" FOREIGN KEY("course") REFERENCES "course"("id");
ALTER TABLE "course_app_instruction_file" ADD CONSTRAINT "course_app_instruction_file_file_fkey" FOREIGN KEY("file") REFERENCES "file"("id");
|]
let getFileEntries = rawQuery [st|SELECT "allocation_course_file"."id", "allocation_course"."course", "allocation_course_file"."file" FROM "allocation_course_file" INNER JOIN "allocation_course" ON "allocation_course"."id" = "allocation_course_file"."allocation_course"|] []
moveFileEntry [fromPersistValue -> Right (acfId :: Int64), fromPersistValue -> Right (cid :: CourseId), fromPersistValue -> Right (fid :: Int64)] =
[executeQQ|
INSERT INTO "course_app_instruction_file" ("course", "file") VALUES (#{cid}, #{fid});
DELETE FROM "allocation_course_file" WHERE "id" = #{acfId};
|]
moveFileEntry _ = return ()
runConduit $ getFileEntries .| C.mapM_ moveFileEntry
tableDropEmpty "allocation_course_file"
whenM (tableExists "allocation_application") $
tableDropEmpty "allocation_application"
whenM (tableExists "allocation_application_file") $
tableDropEmpty "allocation_application_file"
)
, ( AppliedMigrationKey [migrationVersion|17.0.0|] [version|18.0.0|]
, do
whenM (tableExists "allocation") $ do
[executeQQ|ALTER TABLE allocation DROP COLUMN IF EXISTS capacity;|]
[executeQQ|ALTER TABLE allocation DROP COLUMN IF EXISTS link_external;|]
[executeQQ|ALTER TABLE allocation DROP COLUMN IF EXISTS register_secret;|]
whenM (tableExists "allocation_deregister") $ do
[executeQQ|ALTER TABLE allocation_deregister DROP COLUMN IF EXISTS allocation;|]
)
, ( AppliedMigrationKey [migrationVersion|18.0.0|] [version|19.0.0|]
, do
[executeQQ|
CREATE TABLE IF NOT EXISTS "user_function" ( "id" serial8 primary key, "user" bigint, "school" citext, "function" text );
|]
whenM (tableExists "user_admin") $ do
let getAdminEntries = rawQuery [st|SELECT user_admin.id, user_admin.user, user_admin.school FROM user_admin;|] []
moveAdminEntry [fromPersistValue -> Right (eId :: Int64), fromPersistValue -> Right (uid :: UserId), fromPersistValue -> Right (sid :: SchoolId)] =
[executeQQ|
INSERT INTO "user_function" ("user", "school", "function") VALUES (#{uid}, #{sid}, #{SchoolAdmin});
DELETE FROM "user_admin" WHERE "id" = #{eId};
|]
moveAdminEntry _ = return ()
runConduit $ getAdminEntries .| C.mapM_ moveAdminEntry
tableDropEmpty "user_admin"
whenM (tableExists "user_lecturer") $ do
let getLecturerEntries = rawQuery [st|SELECT user_lecturer.id, user_lecturer.user, user_lecturer.school FROM user_lecturer;|] []
moveLecturerEntry [fromPersistValue -> Right (eId :: Int64), fromPersistValue -> Right (uid :: UserId), fromPersistValue -> Right (sid :: SchoolId)] =
[executeQQ|
INSERT INTO "user_function" ("user", "school", "function") VALUES (#{uid}, #{sid}, #{SchoolLecturer});
DELETE FROM "user_lecturer" WHERE "id" = #{eId};
|]
moveLecturerEntry _ = return ()
runConduit $ getLecturerEntries .| C.mapM_ moveLecturerEntry
tableDropEmpty "user_lecturer"
whenM (tableExists "invitation") $ do
[executeQQ|
DELETE FROM "invitation" WHERE "for"->'junction' = '"UserLecturer"';
|]
)
, ( AppliedMigrationKey [migrationVersion|19.0.0|] [version|20.0.0|]
, whenM (tableExists "user") $ do
[executeQQ|
ALTER TABLE "user" ADD COLUMN IF NOT EXISTS "display_email" citext;
UPDATE "user" SET "display_email" = "email" WHERE "display_email" IS NULL;
ALTER TABLE "user" ALTER COLUMN "display_email" SET NOT NULL;
|]
)
, ( AppliedMigrationKey [migrationVersion|20.0.0|] [version|21.0.0|]
, whenM (tableExists "exam_part") $ do
[executeQQ|
ALTER TABLE "exam_part" ADD COLUMN IF NOT EXISTS "number" citext;
|]
let getExamEntries = rawQuery [st|SELECT DISTINCT exam FROM exam_part ORDER BY exam;|] []
renameExamParts [fromPersistValue -> Right (eId :: ExamId)] = do
partNames' <- [sqlQQ|SELECT id, name FROM "exam_part" WHERE exam = #{eId};|]
let
partNames :: [(ExamPartId, ExamPartName)]
partNames = foldMap (\(Single epId, Single pName) -> singletonMap epId pName) partNames'
partsSorted = partNames
& sortOn ( map (\x -> maybe (Left x) Right (readMay x :: Maybe Integer))
. groupBy ((==) `on` Char.isDigit)
. CI.foldedCase
. snd
)
& map fst
forM_ (zip [_ExamPartNumber' # 1..] partsSorted) $ \(num :: ExamPartNumber, pId) ->
[executeQQ|
UPDATE "exam_part" SET "number" = #{num} WHERE "id" = #{pId};
|]
renameExamParts _ = return ()
runConduit $ getExamEntries .| C.mapM_ renameExamParts
)
, ( AppliedMigrationKey [migrationVersion|21.0.0|] [version|22.0.0|]
, whenM (tableExists "exam") $ do
oldVersion <- columnExists "exam" "grading_key"
if
| oldVersion -> do
-- Major changes happened to the structure of exams without an appropriate
-- migration; try to remedy that here
tableDropEmpty "exam_part_corrector"
tableDropEmpty "exam_corrector"
tableDropEmpty "exam_result"
tableDropEmpty "exam_registration"
tableDropEmpty "exam_occurrence"
tableDropEmpty "exam_part"
tableDropEmpty "exam"
| otherwise ->
[executeQQ|
ALTER TABLE "exam" ALTER COLUMN "grading_rule" DROP NOT NULL;
ALTER TABLE "exam" ALTER COLUMN "bonus_rule" DROP NOT NULL;
ALTER TABLE "exam" ALTER COLUMN "occurrence_rule" DROP NOT NULL;
UPDATE "exam" SET "grading_rule" = NULL WHERE "grading_rule"->>'rule' = 'manual';
UPDATE "exam" SET "bonus_rule" = NULL WHERE "bonus_rule"->>'rule' = 'no-bonus';
UPDATE "exam" SET "occurrence_rule" = NULL WHERE "occurrence_rule" = '"manual"';
UPDATE "exam" SET "occurrence_rule" = json_build_object('rule', "occurrence_rule");
|]
)
, ( AppliedMigrationKey [migrationVersion|22.0.0|] [version|23.0.0|]
, whenM (tableExists "exam")
[executeQQ|
UPDATE "exam" SET "bonus_rule" = jsonb_insert("bonus_rule", '{round}' :: text[], '0.01' :: jsonb) WHERE "bonus_rule"->>'rule' = 'bonus-points';
|]
)
, ( AppliedMigrationKey [migrationVersion|23.0.0|] [version|24.0.0|]
, whenM (tableExists "course_favourite")
[executeQQ|
ALTER TABLE "course_favourite" RENAME COLUMN "time" TO "last_visit";
ALTER TABLE "course_favourite" ADD COLUMN "reason" jsonb DEFAULT '"visited"'::jsonb;
|]
)
, ( AppliedMigrationKey [migrationVersion|24.0.0|] [version|25.0.0|]
, whenM (tableExists "course_participant") $ do
queryRes <- [sqlQQ|SELECT (EXISTS (SELECT 1 FROM "course_participant" WHERE "allocated" <> false))|]
case queryRes of
[Single False] ->
[executeQQ|
ALTER TABLE "course_participant" DROP COLUMN "allocated";
ALTER TABLE "course_participant" ADD COLUMN "allocated" bigint;
|]
_other -> error "Cannot reconstruct course_participant.allocated"
)
, ( AppliedMigrationKey [migrationVersion|25.0.0|] [version|26.0.0|]
, whenM (tableExists "allocation")
[executeQQ|
CREATE TABLE "allocation_matching" ("id" SERIAL8 PRIMARY KEY UNIQUE, "allocation" INT8 NOT NULL, "fingerprint" BYTEA NOT NULL, "log" INT8 NOT NULL);
INSERT INTO "allocation_matching" ("allocation", "fingerprint", "log") (select "id" as "allocation", "fingerprint", "matching_log" as "log" from "allocation" where not ("matching_log" is null) and not ("fingerprint" is null));
ALTER TABLE "allocation" DROP COLUMN "fingerprint";
ALTER TABLE "allocation" DROP COLUMN "matching_log";
|]
)
, ( AppliedMigrationKey [migrationVersion|26.0.0|] [version|27.0.0|]
, whenM (tableExists "user")
[executeQQ|
ALTER TABLE "user" ADD COLUMN "languages" jsonb;
UPDATE "user" SET "languages" = "mail_languages" where "mail_languages" <> '[]';
ALTER TABLE "user" DROP COLUMN "mail_languages";
|]
)
, ( AppliedMigrationKey [migrationVersion|27.0.0|] [version|28.0.0|]
, whenM (tableExists "exam_part_corrector") $
tableDropEmpty "exam_part_corrector"
)
, ( AppliedMigrationKey [migrationVersion|28.0.0|] [version|29.0.0|]
, whenM (tableExists "study_features")
[executeQQ|
ALTER TABLE "study_features" ADD COLUMN "super_field" bigint;
UPDATE "study_features" SET "super_field" = "field", "field" = "sub_field" WHERE NOT ("sub_field" IS NULL);
ALTER TABLE "study_features" DROP COLUMN "sub_field";
|]
)
, ( AppliedMigrationKey [migrationVersion|29.0.0|] [version|30.0.0|]
, whenM (tableExists "exam")
[executeQQ|
UPDATE "exam" SET "occurrence_rule" = #{ExamRoomManual} WHERE "occurrence_rule" IS NULL;
ALTER TABLE "exam" ALTER COLUMN "occurrence_rule" SET NOT NULL;
|]
)
, ( AppliedMigrationKey [migrationVersion|30.0.0|] [version|31.0.0|]
, whenM ((&&) <$> tableExists "exam" <*> tableExists "exam_result") $ do
queryRes <- [sqlQQ|SELECT exam_result.id, exam_result.result FROM exam_result INNER JOIN exam ON exam_result.exam = exam.id WHERE NOT exam.show_grades;|]
forM_ queryRes $ \(resId :: ExamResultId, Single (res :: ExamResultGrade)) ->
let res' :: ExamResultPassedGrade
res' = Left . view passingGrade <$> res
in [executeQQ|UPDATE exam_result SET result = #{res'} WHERE id = #{resId};|]
)
, ( AppliedMigrationKey [migrationVersion|31.0.0|] [version|32.0.0|]
, whenM (tableExists "exam")
[executeQQ|
ALTER TABLE "exam" ADD COLUMN "grading_mode" character varying;
UPDATE "exam" SET "grading_mode" = 'grades' WHERE "show_grades";
UPDATE "exam" SET "grading_mode" = 'pass' WHERE NOT "show_grades";
ALTER TABLE "exam" DROP COLUMN "show_grades";
ALTER TABLE "exam" ALTER COLUMN "grading_mode" SET NOT NULL;
|]
)
, ( AppliedMigrationKey [migrationVersion|32.0.0|] [version|33.0.0|]
, whenM (tableExists "external_exam")
[executeQQ|
ALTER TABLE "external_exam" ADD COLUMN "grading_mode" character varying;
UPDATE "external_exam" SET "grading_mode" = 'grades' WHERE "show_grades";
UPDATE "external_exam" SET "grading_mode" = 'pass' WHERE NOT "show_grades";
ALTER TABLE "external_exam" DROP COLUMN "show_grades";
ALTER TABLE "external_exam" ALTER COLUMN "grading_mode" SET NOT NULL;
|]
)
, ( AppliedMigrationKey [migrationVersion|33.0.0|] [version|34.0.0|]
, whenM (tableExists "allocation_matching") $
tableDropEmpty "allocation_matching"
)
, ( AppliedMigrationKey [migrationVersion|34.0.0|] [version|35.0.0|]
, do
whenM (tableExists "submission_group") $
tableDropEmpty "submission_group"
whenM (tableExists "submission_group_edit") $
tableDropEmpty "submission_group_edit"
)
, ( AppliedMigrationKey [migrationVersion|35.0.0|] [version|36.0.0|]
, whenM (tableExists "course_participant") $ do
[executeQQ|
ALTER TABLE "course_participant" ADD COLUMN "state" text NOT NULL DEFAULT 'active';
ALTER TABLE "course_participant" ALTER COLUMN "state" DROP DEFAULT;
|]
let getAuditLog = rawQuery [st|SELECT DISTINCT ON ("info") "info", max("time") FROM "transaction_log" GROUP BY "info" ORDER BY "info";|] []
ensureParticipant :: [PersistValue] -> ReaderT SqlBackend m ()
ensureParticipant [fmap Aeson.fromJSON . fromPersistValue -> Right (Aeson.Success TransactionCourseParticipantEdit{..}), fromPersistValue -> Right (time :: UTCTime)] = do
let toAllocated :: [[PersistValue]] -> Maybe AllocationId
toAllocated = either (const Nothing) Just . fromPersistValue <=< listToMaybe <=< listToMaybe
allocated <- toAllocated <$> sourceToList [queryQQ|SELECT "allocation_course".allocation FROM "allocation_deregister" INNER JOIN "allocation_course" ON "allocation_course".course = "allocation_deregister".course WHERE "user" = #{transactionUser} AND "allocation_course"."course" = #{transactionCourse} LIMIT 1;|]
whenM (existsKey transactionCourse `and2M` existsKey transactionUser)
[executeQQ|INSERT INTO "course_participant" ("course", "user", "registration", "state", "allocated") VALUES (#{transactionCourse}, #{transactionUser}, #{time}, #{CourseParticipantInactive False}, #{allocated}) ON CONFLICT DO NOTHING;|]
ensureParticipant _ = return ()
runConduit $ getAuditLog .| C.mapM_ ensureParticipant
)
, ( AppliedMigrationKey [migrationVersion|36.0.0|] [version|37.0.0|]
, whenM (tableExists "session_file") $
tableDropEmpty "session_file"
)
, ( AppliedMigrationKey [migrationVersion|37.0.0|] [version|38.0.0|]
, whenM (tableExists "file") $ do
[executeQQ|
ALTER TABLE "file" ADD COLUMN "hash" BYTEA;
UPDATE "file" SET "hash" = digest("content", 'sha3-512');
|]
let
migrateFromFile :: forall fRef.
( HasFileReference fRef
, PersistRecordBackend fRef SqlBackend
)
=> ([PersistValue] -> (Key fRef, FileReferenceResidual fRef))
-> (Entity fRef -> ReaderT SqlBackend m ())
-> [PersistValue]
-> ReaderT SqlBackend m ()
migrateFromFile toResidual doUpdate ((fromPersistValue -> Right (fId :: Int64)):rest) = do
let (fRefKey, residual) = toResidual rest
fileDat <- [sqlQQ|
SELECT "file".title, "file".modified, "file".hash FROM "file" WHERE "id" = #{fId};
|]
forM_ fileDat $ \case
(fromPersistValue . unSingle -> Right (fileReferenceTitle' :: FilePath), fromPersistValue . unSingle -> Right fileReferenceModified, fromPersistValue . unSingle -> Right fileReferenceContent) -> do
let fileRef fileReferenceTitle = _FileReference # (FileReference{..}, residual)
candidateTitles = fileReferenceTitle' : [ fName <.> ("old-" <> show n) <.> ext | n <- [1..1000] ]
where (fName, ext) = splitExtension fileReferenceTitle'
validTitles <- dropWhileM (fmap (is _Just) . checkUnique . fileRef) candidateTitles
case validTitles of
fTitle : _ -> doUpdate . Entity fRefKey $ fileRef fTitle
_other -> error "Could not make validTitle"
_other -> return ()
migrateFromFile _ _ _ = return ()
whenM (tableExists "submission_file") $ do
[executeQQ|
ALTER TABLE "submission_file" ADD COLUMN "title" VARCHAR;
ALTER TABLE "submission_file" ADD COLUMN "content" BYTEA NULL;
ALTER TABLE "submission_file" ADD COLUMN "modified" TIMESTAMP WITH TIME ZONE;
ALTER TABLE "submission_file" DROP CONSTRAINT "unique_submission_file";
ALTER TABLE "submission_file" ADD CONSTRAINT "unique_submission_file" UNIQUE("submission", "title", "is_update");
|]
let getSubmissionFiles = [queryQQ|SELECT "file", "submission_file"."id", "submission", "is_update", "is_deletion" FROM "submission_file" LEFT OUTER JOIN "file" ON "submission_file"."file" = "file".id ORDER BY "file"."modified" DESC;|]
toResidual [ fromPersistValue -> Right sfId
, fromPersistValue -> Right submissionFileResidualSubmission
, fromPersistValue -> Right submissionFileResidualIsUpdate
, fromPersistValue -> Right submissionFileResidualIsDeletion
]
= (sfId, SubmissionFileResidual{..})
toResidual _ = error "Could not convert SubmissionFile to residual"
runConduit $ getSubmissionFiles .| C.mapM_ (migrateFromFile @SubmissionFile toResidual replaceEntity)
[executeQQ|
ALTER TABLE "submission_file" DROP COLUMN "file";
|]
whenM (tableExists "sheet_file") $ do
[executeQQ|
ALTER TABLE "sheet_file" ADD COLUMN "title" VARCHAR;
ALTER TABLE "sheet_file" ADD COLUMN "content" BYTEA NULL;
ALTER TABLE "sheet_file" ADD COLUMN "modified" TIMESTAMP WITH TIME ZONE;
ALTER TABLE "sheet_file" DROP CONSTRAINT "unique_sheet_file";
ALTER TABLE "sheet_file" ADD CONSTRAINT "unique_sheet_file" UNIQUE("sheet", "type", "title");
|]
let getSheetFiles = [queryQQ|SELECT "file", "sheet_file"."id", "sheet", "type" FROM "sheet_file" LEFT OUTER JOIN "file" ON "sheet_file"."file" = "file".id ORDER BY "file"."modified" DESC;|]
toResidual [ fromPersistValue -> Right shfId
, fromPersistValue -> Right sheetFileResidualSheet
, fromPersistValue -> Right sheetFileResidualType
]
= (shfId, SheetFileResidual{..})
toResidual _ = error "Could not convert SheetFile to residual"
runConduit $ getSheetFiles .| C.mapM_ (migrateFromFile @SheetFile toResidual replaceEntity)
[executeQQ|
ALTER TABLE "sheet_file" DROP COLUMN "file";
|]
whenM (tableExists "course_app_instruction_file") $ do
[executeQQ|
ALTER TABLE "course_app_instruction_file" ADD COLUMN "title" VARCHAR;
ALTER TABLE "course_app_instruction_file" ADD COLUMN "content" BYTEA NULL;
ALTER TABLE "course_app_instruction_file" ADD COLUMN "modified" TIMESTAMP WITH TIME ZONE;
ALTER TABLE "course_app_instruction_file" DROP CONSTRAINT "unique_course_app_instruction_file";
ALTER TABLE "course_app_instruction_file" ADD CONSTRAINT "unique_course_app_instruction_file" UNIQUE("course", "title");
|]
let getCourseAppInstructionFiles = [queryQQ|SELECT "file", "course_app_instruction_file"."id", "course" FROM "course_app_instruction_file" LEFT OUTER JOIN "file" ON "course_app_instruction_file"."file" = "file".id ORDER BY "file"."modified" DESC;|]
toResidual [ fromPersistValue -> Right caifId
, fromPersistValue -> Right courseAppInstructionFileResidualCourse
]
= (caifId, CourseAppInstructionFileResidual{..})
toResidual _ = error "Could not convert CourseAppInstructionFile to residual"
runConduit $ getCourseAppInstructionFiles .| C.mapM_ (migrateFromFile @CourseAppInstructionFile toResidual replaceEntity)
[executeQQ|
ALTER TABLE "course_app_instruction_file" DROP COLUMN "file";
|]
whenM (tableExists "course_news_file") $ do
[executeQQ|
ALTER TABLE "course_news_file" ADD COLUMN "title" VARCHAR;
ALTER TABLE "course_news_file" ADD COLUMN "content" BYTEA NULL;
ALTER TABLE "course_news_file" ADD COLUMN "modified" TIMESTAMP WITH TIME ZONE;
ALTER TABLE "course_news_file" DROP CONSTRAINT "unique_course_news_file";
ALTER TABLE "course_news_file" ADD CONSTRAINT "unique_course_news_file" UNIQUE("news", "title");
|]
let getCourseNewsFiles = [queryQQ|SELECT "file", "course_news_file"."id", "news" FROM "course_news_file" LEFT OUTER JOIN "file" ON "course_news_file"."file" = "file".id ORDER BY "file"."modified" DESC;|]
toResidual [ fromPersistValue -> Right cnfId
, fromPersistValue -> Right courseNewsFileResidualNews
]
= (cnfId, CourseNewsFileResidual{..})
toResidual _ = error "Could not convert CourseNewsFile to residual"
runConduit $ getCourseNewsFiles .| C.mapM_ (migrateFromFile @CourseNewsFile toResidual replaceEntity)
[executeQQ|
ALTER TABLE "course_news_file" DROP COLUMN "file";
|]
whenM (tableExists "material_file") $ do
[executeQQ|
ALTER TABLE "material_file" ADD COLUMN "title" VARCHAR;
ALTER TABLE "material_file" ADD COLUMN "content" BYTEA NULL;
ALTER TABLE "material_file" ADD COLUMN "modified" TIMESTAMP WITH TIME ZONE;
ALTER TABLE "material_file" DROP CONSTRAINT "unique_material_file";
ALTER TABLE "material_file" ADD CONSTRAINT "unique_material_file" UNIQUE("material", "title");
|]
let getMaterialFiles = [queryQQ|SELECT "file", "material_file"."id", "material" FROM "material_file" LEFT OUTER JOIN "file" ON "material_file"."file" = "file".id ORDER BY "file"."modified" DESC;|]
toResidual [ fromPersistValue -> Right shfId
, fromPersistValue -> Right materialFileResidualMaterial
]
= (shfId, MaterialFileResidual{..})
toResidual _ = error "Could not convert MaterialFile to residual"
runConduit $ getMaterialFiles .| C.mapM_ (migrateFromFile @MaterialFile toResidual replaceEntity)
[executeQQ|
ALTER TABLE "material_file" DROP COLUMN "file";
|]
whenM (tableExists "course_application_file") $ do
[executeQQ|
ALTER TABLE "course_application_file" ADD COLUMN "title" VARCHAR;
ALTER TABLE "course_application_file" ADD COLUMN "content" BYTEA NULL;
ALTER TABLE "course_application_file" ADD COLUMN "modified" TIMESTAMP WITH TIME ZONE;
ALTER TABLE "course_application_file" DROP CONSTRAINT "unique_application_file";
ALTER TABLE "course_application_file" ADD CONSTRAINT "unique_course_application_file" UNIQUE("application", "title");
|]
let getCourseApplicationFiles = [queryQQ|SELECT "file", "course_application_file"."id", "application" FROM "course_application_file" LEFT OUTER JOIN "file" ON "course_application_file"."file" = "file".id ORDER BY "file"."modified" DESC;|]
toResidual [ fromPersistValue -> Right cnfId
, fromPersistValue -> Right courseApplicationFileResidualApplication
]
= (cnfId, CourseApplicationFileResidual{..})
toResidual _ = error "Could not convert CourseApplicationFile to residual"
runConduit $ getCourseApplicationFiles .| C.mapM_ (migrateFromFile @CourseApplicationFile toResidual replaceEntity)
[executeQQ|
ALTER TABLE "course_application_file" DROP COLUMN "file";
|]
whenM (tableExists "allocation_matching") $ do
[executeQQ|
ALTER TABLE "allocation_matching" ADD COLUMN "log_ref" BYTEA;
UPDATE "allocation_matching" SET "log_ref" = (SELECT "hash" FROM "file" WHERE "file".id = "log");
ALTER TABLE "allocation_matching" DROP COLUMN "log";
ALTER TABLE "allocation_matching" RENAME COLUMN "log_ref" TO "log";
|]
whenM (tableExists "session_file")
[executeQQ|
ALTER TABLE "session_file" ADD COLUMN "content" BYTEA;
UPDATE "session_file" SET "content" = (SELECT "hash" FROM "file" WHERE "file".id = "session_file"."file");
ALTER TABLE "session_file" DROP COLUMN "file";
|]
[executeQQ|
ALTER TABLE "file" RENAME TO "file_content";
DELETE FROM "file_content" WHERE "content" IS NULL OR "hash" IS NULL;
|]
[executeQQ|
DELETE FROM "file_content"
WHERE "id" IN (
SELECT
"id"
FROM (
SELECT
"id",
ROW_NUMBER() OVER w AS rnum
FROM "file_content"
WINDOW w AS (
PARTITION BY "hash"
ORDER BY "id"
)
) as t
WHERE t.rnum > 1);
|]
[executeQQ|
ALTER TABLE "file_content" DROP COLUMN "title";
ALTER TABLE "file_content" DROP COLUMN "modified";
ALTER TABLE "file_content" DROP COLUMN "id";
|]
)
, ( AppliedMigrationKey [migrationVersion|38.0.0|] [version|39.0.0|]
, whenM (and2M (tableExists "cron_last_exec") (tableExists "allocation")) $ do
let
allocationTimes :: EntityField Allocation (Maybe UTCTime)
-> ReaderT SqlBackend m (MergeHashMap UTCTime (Set AllocationId, Max UTCTime, Last InstanceId))
allocationTimes aField = do
ress <- [sqlQQ|SELECT ^{Allocation}.@{AllocationId},^{Allocation}.@{aField},^{CronLastExec}.@{CronLastExecTime},^{CronLastExec}.@{CronLastExecInstance} FROM ^{Allocation} INNER JOIN ^{CronLastExec} ON ^{CronLastExec}.@{CronLastExecJob}->'job' = '"queue-notification"' AND ^{CronLastExec}.@{CronLastExecJob}->'notification'->'notification' = '"allocation-staff-register"' AND ^{CronLastExec}.@{CronLastExecJob}->'notification'->'allocation' = (^{Allocation}.@{AllocationId} :: text) :: jsonb ORDER BY ^{Allocation}.@{aField} ASC;|]
return . flip foldMap ress $ \(Single allocId, Single allocTime, Single execTime, Single execInstance)
-> _MergeHashMap # HashMap.singleton allocTime (Set.singleton allocId, Max execTime, Last execInstance)
staffRegisterFroms <- allocationTimes AllocationStaffRegisterFrom
forM_ staffRegisterFroms $ \(nAllocations, Max cronLastExecTime, Last cronLastExecInstance) ->
insert_ CronLastExec{ cronLastExecJob = toJSON $ JobQueueNotification NotificationAllocationStaffRegister{..}, .. }
registerFroms <- allocationTimes AllocationRegisterFrom
forM_ registerFroms $ \(nAllocations, Max cronLastExecTime, Last cronLastExecInstance) ->
insert_ CronLastExec{ cronLastExecJob = toJSON $ JobQueueNotification NotificationAllocationRegister{..}, .. }
staffAllocationFroms <- allocationTimes AllocationStaffAllocationFrom
forM_ staffAllocationFroms $ \(nAllocations, Max cronLastExecTime, Last cronLastExecInstance) ->
insert_ CronLastExec{ cronLastExecJob = toJSON $ JobQueueNotification NotificationAllocationAllocation{..}, .. }
registerTos <- allocationTimes AllocationRegisterTo
forM_ registerTos $ \(nAllocations, Max cronLastExecTime, Last cronLastExecInstance) ->
insert_ CronLastExec{ cronLastExecJob = toJSON $ JobQueueNotification NotificationAllocationUnratedApplications{..}, .. }
)
, ( AppliedMigrationKey [migrationVersion|39.0.0|] [version|40.0.0|]
, whenM (tableExists "study_features")
[executeQQ|
ALTER TABLE study_features RENAME updated TO last_observed;
ALTER TABLE study_features ADD COLUMN first_observed timestamp with time zone;
UPDATE study_features SET first_observed = (SELECT MAX(last_observed) FROM study_features as other WHERE other."user" = study_features."user" AND other.degree = study_features.degree AND other.field = study_features.field AND other.type = study_features.type AND other.semester = study_features.semester - 1);
|]
)
, ( AppliedMigrationKey [migrationVersion|40.0.0|] [version|41.0.0|]
, whenM (tableExists "file_content") $ do
chunkingParams <- lift $ view _appFileChunkingParams
[executeQQ|
ALTER TABLE file_content RENAME TO file_content_chunk;
ALTER INDEX file_content_pkey RENAME TO file_content_chunk_pkey;
CREATE TABLE file_content_chunk_unreferenced (id bigserial, hash bytea NOT NULL, since timestamp with time zone NOT NULL);
INSERT INTO file_content_chunk_unreferenced (since, hash) (SELECT unreferenced_since as since, hash FROM file_content_chunk WHERE NOT (unreferenced_since IS NULL));
ALTER TABLE file_content_chunk DROP COLUMN unreferenced_since;
ALTER TABLE file_content_chunk ADD COLUMN content_based boolean NOT NULL DEFAULT false;
UPDATE file_content_chunk SET content_based = true WHERE length(content) <= #{fastCDCMinBlockSize chunkingParams};
CREATE TABLE file_content_entry (id bigserial NOT NULL PRIMARY KEY, hash bytea NOT NULL, ix bigint NOT NULL, chunk_hash bytea NOT NULL);
INSERT INTO file_content_entry (hash, chunk_hash, ix) (SELECT hash, hash as chunk_hash, 0 as ix FROM file_content_chunk);
|]
)
, ( AppliedMigrationKey [migrationVersion|41.0.0|] [version|42.0.0|]
, do
whenM (tableExists "exam")
[executeQQ|
ALTER TABLE exam ADD COLUMN "exam_mode" jsonb NOT NULL DEFAULT #{ExamMode Nothing Nothing Nothing Nothing};
|]
whenM (tableExists "school")
[executeQQ|
ALTER TABLE school ADD COLUMN "exam_discouraged_modes" jsonb NOT NULL DEFAULT #{ExamModeDNF predDNFFalse};
|]
)
, ( AppliedMigrationKey [migrationVersion|42.0.0|] [version|43.0.0|]
, return () -- Unused; used to create and fill `ChangelogItemFirstSeen`
)
]
tableExists :: MonadIO m => Text -> ReaderT SqlBackend m Bool
tableExists table = do
haveTable <- [sqlQQ| SELECT to_regclass(#{table}); |]
case haveTable :: [Maybe (Single PersistValue)] of
[Just _] -> return True
_other -> return False
tablesExist :: MonadIO m => [Text] -> ReaderT SqlBackend m Bool
tablesExist = flip allM tableExists
tableIsEmpty :: MonadIO m => Text -> ReaderT SqlBackend m Bool
tableIsEmpty table = do
res <- rawSql [st|SELECT COUNT(*) FROM "#{table}"|] []
return $ case res of
[unSingle -> rows] -> rows == (0 :: Int64)
_other -> error "tableIsEmpty din't return exactly one result"
tableDropEmpty :: MonadIO m => Text -> ReaderT SqlBackend m ()
tableDropEmpty table = whenM (tableExists table) $ do
isEmpty <- tableIsEmpty table
if
| isEmpty -> rawExecute [st|DROP TABLE "#{table}" CASCADE|] []
| otherwise -> error $ "Table " <> unpack table <> " is not empty"
columnExists :: MonadIO m
=> Text -- ^ Table
-> Text -- ^ Column
-> ReaderT SqlBackend m Bool
columnExists table column = do
haveColumn <- [sqlQQ|SELECT column_name FROM information_schema.columns WHERE table_name=#{table} and column_name=#{column};|]
case haveColumn :: [Single PersistValue] of
[_] -> return True
_other -> return False