-- SPDX-FileCopyrightText: 2022-23 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Steffen Jost <jost@tcs.ifi.lmu.de>,Steffen Jost <s.jost@fraport.de>
--
-- SPDX-License-Identifier: AGPL-3.0-or-later

module Model.Migration.Definitions
  ( ManualMigration(..)
  , migrateManual
  , migrateAlwaysSafe
  , customMigrations
  , columnExists
  ) where

import Import.NoModel hiding (Max(..), Last(..))
import Model
import Model.Types.TH.PathPiece
import Settings
import Foundation.Type
import Audit.Types
import qualified Model.Migration.Types as Legacy
import qualified Data.Map as Map

import qualified Data.Set as Set

import qualified Data.Text as Text

import qualified Data.Conduit.List as C

import Database.Persist.Sql
import Database.Persist.Sql.Raw.QQ

import Text.Read (readMaybe)

import Network.IP.Addr

import qualified Data.Char as Char
import qualified Data.CaseInsensitive as CI

import qualified Data.Aeson as Aeson

import Data.Conduit.Algorithms.FastCDC (FastCDCParameters(fastCDCMinBlockSize))

import Data.Time.Format.ISO8601 (iso8601Show)
import Data.Time.Format

import qualified Data.Time.Zones as TZ


data ManualMigration
  = Migration20180813SimplifyUserTheme
  | Migration20180813SheetJSONB
  | Migration20180823SchoolShorthandPrimaryKey
  | Migration20180918SheetCorrectorLoadJSON
  | Migration20180918UserSurnames
  | Migration20180918SheetUploadMode
  | Migration20180928UserAuthentication
  | Migration20181011UserNotificationSettings
  | Migration20181031SheetTypeRefactor
  | Migration20181129EncodedSecretBoxes
  | Migration20181130SheetTypeRefactor
  | Migration20190319CourseParticipantField
  | Migration20190320BetterStudyShorthands
  | Migration20190421MixedSheetSubmissions
  | Migration20190429Tutorials
  | Migration20190515Exams
  | Migration20190715ExamOccurrenceName
  | Migration20190726UserFirstNamesTitles
  | Migration20190806TransactionLogIds
  | Migration20190828UserFunction
  | Migration20190912UserDisplayEmail
  | Migration20190916ExamPartNumber
  | Migration20190918ExamRulesRefactor
  | Migration20190919ExamBonusRounding
  | Migration20191002FavouriteReason
  | Migration20191125UserLanguages
  | Migration20191126ExamPartCorrector
  | Migration20191128StudyFeaturesSuperField
  | Migration20200111ExamOccurrenceRuleRefactor
  | Migration20200218ExamResultPassedGrade
  | Migration20200218ExamGradingModeMixed
  | Migration20200218ExternalExamGradingModeMixed
  | Migration20200424SubmissionGroups
  | Migration20200504CourseParticipantState
  | Migration20200506SessionFile
  | Migration20200627FileRefactor
  | Migration20200825StudyFeaturesFirstObserved
  | Migration20200902FileChunking
  | Migration20200916ExamMode
  | Migration20201106StoredMarkup
  | Migration20201119RoomTypes
  | Migration20210115ExamPartsFrom
  | Migration20210208StudyFeaturesRelevanceCachedUUIDs
  | Migration20210318CrontabSubmissionRatedNotification
  | Migration20210608SeparateTermActive
  | Migration20230524QualificationUserBlock
  | Migration20230703LmsUserStatus
  deriving (Eq, Ord, Read, Show, Enum, Bounded, Generic)
  deriving anyclass (Universe, Finite)

nullaryPathPiece ''ManualMigration $ \t@(splitCamel -> verbs) -> case verbs of
  ("Migration" : dVerb : vs)
    | Just (d :: Day) <- parseTimeM False defaultTimeLocale "%Y%m%d" (unpack dVerb)
    -> pack (formatTime defaultTimeLocale "%Y-%m-%d" d) <> "--" <> intercalate "-" (map toLower vs)
  _other
    -> terror $ "Could not parse: “" <> t <> "” → " <> tshow verbs
pathPieceJSON ''ManualMigration
pathPieceJSONKey ''ManualMigration
pathPieceHttpApiData ''ManualMigration
derivePersistFieldPathPiece ''ManualMigration

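-- Illustration only (assuming splitCamel puts the leading digit run into its
-- own token): the rendering above turns a constructor name into its date plus
-- a hyphenated, lower-cased suffix, e.g.
--
-- >>> toPathPiece Migration20230703LmsUserStatus
-- "2023-07-03--lms-user-status"
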
migrateManual :: Migration
migrateManual = do
  mapM_ (uncurry addIndex) -- NOTE: indices are created automatically for primary keys and unique constraints; create them manually for frequently filtered columns with small result sets to speed up those queries
    [ ("material_file_content",               "CREATE INDEX material_file_content ON material_file (content)")
    , ("course_news_file_content",            "CREATE INDEX course_news_file_content ON course_news_file (content)")
    , ("sheet_file_content",                  "CREATE INDEX sheet_file_content ON sheet_file (content)")
    , ("submission_file_content",             "CREATE INDEX submission_file_content ON submission_file (content)")
    , ("session_file_content",                "CREATE INDEX session_file_content ON session_file (content)")
    , ("file_lock_content",                   "CREATE INDEX file_lock_content ON file_lock (content)")
    , ("user_lower_display_email",            "CREATE INDEX user_lower_display_email ON \"user\" (lower(display_email))")
    , ("user_lower_email",                    "CREATE INDEX user_lower_email ON \"user\" (lower(email))")
    , ("user_lower_ident",                    "CREATE INDEX user_lower_ident ON \"user\" (lower(ident))")
    , ("submission_sheet",                    "CREATE INDEX submission_sheet ON submission (sheet)")
    , ("submission_edit_submission",          "CREATE INDEX submission_edit_submission ON submission_edit (submission)")
    , ("user_ldap_primary_key",               "CREATE INDEX user_ldap_primary_key ON \"user\" (ldap_primary_key)")
    , ("file_content_entry_chunk_hash",       "CREATE INDEX file_content_entry_chunk_hash ON \"file_content_entry\" (chunk_hash)")
    , ("sent_mail_bounce_secret",             "CREATE INDEX sent_mail_bounce_secret ON \"sent_mail\" (bounce_secret) WHERE bounce_secret IS NOT NULL")
    , ("sent_mail_recipient",                 "CREATE INDEX sent_mail_recipient ON \"sent_mail\" (recipient) WHERE recipient IS NOT NULL")
    , ("study_features_relevance_cached",     "CREATE INDEX study_features_relevance_cached ON \"study_features\" (relevance_cached)")
    , ("submission_rating_by",                "CREATE INDEX submission_rating_by ON submission (rating_by) WHERE rating_by IS NOT NULL")
    , ("exam_corrector_user",                 "CREATE INDEX exam_corrector_user ON exam_corrector (\"user\")")
    , ("submission_rating_time",              "CREATE INDEX submission_rating_time ON submission (rating_time)")
    , ("idx_qualification_user_first_held",   "CREATE INDEX idx_qualification_user_first_held ON \"qualification_user\" (\"first_held\")")
    , ("idx_qualification_user_valid_until",  "CREATE INDEX idx_qualification_user_valid_until ON \"qualification_user\" (\"valid_until\")")
    , ("idx_qualification_user_block_quser",  "CREATE INDEX idx_qualification_user_block_quser ON \"qualification_user_block\" (\"qualification_user\")")
    , ("idx_qualification_user_block_unblock","CREATE INDEX idx_qualification_user_block_unblock ON \"qualification_user_block\" (\"unblock\")")
    , ("idx_qualification_user_block_from",   "CREATE INDEX idx_qualification_user_block_from ON \"qualification_user_block\" (\"from\")")
    , ("idx_print_job_apc_ident",             "CREATE INDEX idx_print_job_apc_ident ON \"print_job\" (\"apc_ident\")")
    , ("idx_user_avs_card_person_id",         "CREATE INDEX idx_user_avs_card_person_id ON \"user_avs_card\" (\"person_id\")")
    , ("idx_lms_report_log_q_ident_time",     "CREATE INDEX idx_lms_report_log_q_ident_time ON \"lms_report_log\" (\"qualification\",\"ident\",\"timestamp\")")
    ]
  where
    addIndex :: Text -> Sql -> Migration
    addIndex ixName ixDef = do
      res <- lift $ lift [sqlQQ|SELECT EXISTS (SELECT 1 FROM pg_indexes WHERE schemaname = current_schema() AND indexname = #{ixName})|]
      alreadyDefined <- case res of
        [Single e] -> return e
        _other     -> return True
      unless alreadyDefined $ addMigration False ixDef

-- | Part of `migrateAll` but not checked in `requiresMigration`
migrateAlwaysSafe :: Migration
migrateAlwaysSafe = do
  recordedChangelogItems <- lift . lift $ selectList [ ChangelogItemFirstSeenItem <-. universeF ] []
  let missingChangelogItems = Set.toList $ Set.fromList universeF `Set.difference` recordedChangelogItems'
        where recordedChangelogItems' = Set.fromList [ changelogItemFirstSeenItem | Entity _ ChangelogItemFirstSeen{..} <- recordedChangelogItems ]
  unless (null missingChangelogItems) $ do
    today <- localDay . TZ.utcToLocalTimeTZ appTZ <$> liftIO getCurrentTime
    addMigration False $ do
      let sql = [st|INSERT INTO changelog_item_first_seen (item, first_seen) VALUES #{vals}|]
          vals = Text.intercalate ", " $ do
            item <- missingChangelogItems
            let itemDay = Map.findWithDefault today item changelogItemDays
            return [st|('#{toPathPiece item}', '#{iso8601Show itemDay}')|]
       in sql

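-- For illustration (an assumption based on the builder above, not emitted
-- verbatim anywhere): with two items missing and seeded on 2023-07-03, the
-- generated statement would look roughly like
--
--   INSERT INTO changelog_item_first_seen (item, first_seen)
--     VALUES ('<item path piece>', '2023-07-03'), ('<item path piece>', '2023-07-03')
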
{-
Confusion about quotes, from the PostgreSQL manual:
  Single quotes for string constants, double quotes for table/column names.

QuasiQuoter: ^{TableName} @{ColumnName} (escaped as table/column name; value determined from the current model);
             #{anything} (escaped as value)
-}

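-- A hypothetical illustration of the conventions above (not part of any
-- migration below):
--
--   [executeQQ|UPDATE ^{Course} SET @{CourseDescription} = #{newDescription} WHERE @{CourseId} = #{cid}|]
--
-- Here ^{Course} and @{CourseDescription}/@{CourseId} are rendered as quoted
-- table/column names taken from the current model, while #{newDescription}
-- and #{cid} are passed as escaped values.
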
customMigrations :: forall m.
     ( MonadResource m
     , MonadReader UniWorX m
     )
  => Map ManualMigration (ReaderT SqlBackend m ())
customMigrations = mapF $ \case
  Migration20180813SimplifyUserTheme -> whenM (columnExists "user" "theme") $ do -- New theme format
    userThemes <- [sqlQQ| SELECT "id", "theme" FROM "user"; |]
    forM_ userThemes $ \(uid, Single str) -> case stripPrefix "theme--" str of
      Just v
        | Just theme <- fromPathPiece v -> update uid [UserTheme =. theme]
      other -> error $ "Could not parse theme: " <> show other

  Migration20180813SheetJSONB -> whenM (tableExists "sheet") -- Better JSON encoding
    [executeQQ|
      ALTER TABLE "sheet" ALTER COLUMN "type" TYPE jsonb USING "type"::jsonb;
      ALTER TABLE "sheet" ALTER COLUMN "grouping" TYPE jsonb USING "grouping"::jsonb;
    |]

  Migration20180823SchoolShorthandPrimaryKey -> whenM (columnExists "school" "id") $ do -- SchoolId is the Shorthand CI Text now
    -- Read old table into memory
    schoolTable <- [sqlQQ| SELECT "id", "shorthand" FROM "school"; |]
    let _sT = schoolTable :: [(Single Int64, Single (CI Text))] -- Types needed
    -- Convert columns containing SchoolId
    whenM (tableExists "user_admin") $ do
      [executeQQ|
        ALTER TABLE "user_admin" DROP CONSTRAINT "user_admin_school_fkey";
        ALTER TABLE "user_admin" ALTER COLUMN "school" TYPE citext USING "school"::citext;
      |]
      forM_ schoolTable $ \(Single idnr, Single ssh) ->
        [executeQQ|
          UPDATE "user_admin" SET "school" = #{ssh} WHERE "school" = #{tshow idnr};
        |]
      [executeQQ|
        ALTER TABLE "user_admin" ADD CONSTRAINT "user_admin_school_fkey"
          FOREIGN KEY (school) REFERENCES school(shorthand);
      |]
    whenM (tableExists "user_lecturer") $ do
      [executeQQ|
        ALTER TABLE "user_lecturer" DROP CONSTRAINT "user_lecturer_school_fkey";
        ALTER TABLE "user_lecturer" ALTER COLUMN "school" TYPE citext USING "school"::citext;
      |]
      forM_ schoolTable $ \(Single idnr, Single ssh) ->
        [executeQQ|
          UPDATE "user_lecturer" SET "school" = #{ssh} WHERE "school" = #{tshow idnr};
        |]
      [executeQQ|
        ALTER TABLE "user_lecturer" ADD CONSTRAINT "user_lecturer_school_fkey"
          FOREIGN KEY (school) REFERENCES school(shorthand);
      |]
    whenM (tableExists "course") $ do
      [executeQQ|
        ALTER TABLE "course" DROP CONSTRAINT "course_school_fkey";
        ALTER TABLE "course" ALTER COLUMN "school" TYPE citext USING "school"::citext;
      |]
      forM_ schoolTable $ \(Single idnr, Single ssh) ->
        [executeQQ|
          UPDATE "course" SET "school" = #{ssh} WHERE "school" = #{tshow idnr};
        |]
      [executeQQ|
        ALTER TABLE "course" ADD CONSTRAINT "course_school_fkey"
          FOREIGN KEY (school) REFERENCES school(shorthand);
      |]
    [executeQQ|
      ALTER TABLE "school" DROP COLUMN "id";
      ALTER TABLE "school" ADD PRIMARY KEY (shorthand);
    |]

  Migration20180918SheetCorrectorLoadJSON -> whenM (tableExists "sheet_corrector") $ do -- Load is encoded as JSON now.
    correctorLoads <- [sqlQQ| SELECT "id", "load" FROM "sheet_corrector"; |]
    forM_ correctorLoads $ \(uid, Single str) -> case readMaybe str of
      Just load -> update uid [SheetCorrectorLoad =. load]
      _other    -> error $ "Could not parse Load: " <> show str
    [executeQQ|
      ALTER TABLE "sheet_corrector" ALTER COLUMN "load" TYPE jsonb USING "load"::jsonb;
    |]

  Migration20180918UserSurnames -> whenM (tableExists "user") $ do
    userDisplayNames <- [sqlQQ| SELECT "id", "display_name" FROM "user"; |]
    [executeQQ|
      ALTER TABLE "user" ADD COLUMN IF NOT EXISTS "surname" text DEFAULT '';
    |]
    forM_ userDisplayNames $ \(uid, Single str) -> case lastMaybe $ words str of
      Just name -> update uid [UserSurname =. name]
      _other    -> error "Empty userDisplayName found"

  Migration20180918SheetUploadMode -> whenM (tableExists "sheet")
    [executeQQ|
      ALTER TABLE "sheet" ADD COLUMN IF NOT EXISTS "upload_mode" jsonb DEFAULT '{ "tag": "Upload", "unpackZips": true }';
    |]

  Migration20180928UserAuthentication -> whenM (columnExists "user" "plugin")
    -- <> is standard sql for /=
    [executeQQ|
      DELETE FROM "user" WHERE "plugin" <> 'LDAP';
      ALTER TABLE "user" DROP COLUMN "plugin";
      ALTER TABLE "user" ADD COLUMN IF NOT EXISTS "authentication" jsonb DEFAULT '"ldap"';
    |]

  Migration20181011UserNotificationSettings -> whenM (tableExists "user")
    [executeQQ|
      ALTER TABLE "user" ADD COLUMN IF NOT EXISTS "notification_settings" jsonb NOT NULL DEFAULT '[]';
    |]

  Migration20181031SheetTypeRefactor -> whenM (tableExists "sheet") $ do
    sheets <- [sqlQQ| SELECT "id", "type" FROM "sheet"; |]
    forM_ sheets $ \(sid, Single lsty) -> update sid [SheetType =. Legacy.sheetType lsty]

  Migration20181129EncodedSecretBoxes -> whenM (tableExists "cluster_config")
    [executeQQ|
      UPDATE "cluster_config" SET "setting" = 'secret-box-key' WHERE "setting" = 'error-message-key';
    |]

  Migration20181130SheetTypeRefactor -> whenM (tableExists "sheet")
    [executeQQ|
      UPDATE "sheet" SET "type" = json_build_object('type', "type"->'type', 'grading', "type"->'') WHERE jsonb_exists("type", '');
      UPDATE "sheet" SET "type" = json_build_object('type', "type"->'type', 'grading', json_build_object('type', "type"->'grading'->'type', 'max', "type"->'grading'->'points')) WHERE ("type"->'grading'->'type') = '"points"' AND jsonb_exists("type"->'grading', 'points');
      UPDATE "sheet" SET "type" = json_build_object('type', "type"->'type', 'grading', json_build_object('type', "type"->'grading'->'type', 'max', "type"->'grading'->'points', 'passing', 0)) WHERE ("type"->'grading'->'type') = '"pass-points"' AND jsonb_exists("type"->'grading', 'points');
    |]

  Migration20190319CourseParticipantField -> whenM ((\a b c -> a && b && not c) <$> tableExists "study_features" <*> tableExists "course_participant" <*> columnExists "course_participant" "field") $ do
    [executeQQ|
      ALTER TABLE "course_participant" ADD COLUMN "field" bigint DEFAULT null REFERENCES study_features(id);
      ALTER TABLE "study_features" ADD COLUMN IF NOT EXISTS "valid" boolean NOT NULL DEFAULT true;
    |]
    users <- [sqlQQ| SELECT DISTINCT ON ("user"."id") "user"."id", "study_features"."id" FROM "user", "study_features" WHERE "study_features"."user" = "user"."id" AND "study_features"."valid" AND "study_features"."type" = 'FieldPrimary' ORDER BY "user"."id", random(); |]
    forM_ users $ \(uid :: UserId, sfid :: StudyFeaturesId) -> [executeQQ| UPDATE "course_participant" SET "field" = #{sfid} WHERE "user" = #{uid} AND "field" IS NULL; |]

  Migration20190320BetterStudyShorthands -> do
    whenM (columnExists "study_degree" "shorthand") [executeQQ| UPDATE "study_degree" SET "shorthand" = NULL WHERE "shorthand" = '' |]
    whenM (columnExists "study_degree" "name")      [executeQQ| UPDATE "study_degree" SET "name" = NULL WHERE "shorthand" = '' |]
    whenM (columnExists "study_terms" "shorthand")  [executeQQ| UPDATE "study_terms" SET "shorthand" = NULL WHERE "shorthand" = '' |]
    whenM (columnExists "study_terms" "name")       [executeQQ| UPDATE "study_terms" SET "name" = NULL WHERE "shorthand" = '' |]

  Migration20190421MixedSheetSubmissions -> whenM ((&&) <$> columnExists "sheet" "upload_mode" <*> columnExists "sheet" "submission_mode") $ do
    sheetModes <- [sqlQQ| SELECT "id", "upload_mode", "submission_mode" FROM "sheet"; |]
    [executeQQ|
      ALTER TABLE "sheet" DROP COLUMN "upload_mode";
      ALTER TABLE "sheet" ALTER COLUMN "submission_mode" DROP DEFAULT;
      ALTER TABLE "sheet" ALTER COLUMN "submission_mode" TYPE jsonb USING 'null'::jsonb;
    |]
    forM_ sheetModes $ \(shid :: SheetId, unSingle -> uploadMode :: Legacy.UploadMode, unSingle -> submissionMode :: Legacy.SheetSubmissionMode) -> do
      let submissionMode' = case (submissionMode, uploadMode) of
            ( Legacy.NoSubmissions       , _                   ) -> SubmissionMode False Nothing
            ( Legacy.CorrectorSubmissions, _                   ) -> SubmissionMode True  Nothing
            ( Legacy.UserSubmissions     , Legacy.NoUpload     ) -> SubmissionMode False (Just NoUpload)
            ( Legacy.UserSubmissions     , Legacy.Upload True  ) -> SubmissionMode False (Just $ UploadAny True  defaultExtensionRestriction True)
            ( Legacy.UserSubmissions     , Legacy.Upload False ) -> SubmissionMode False (Just $ UploadAny False defaultExtensionRestriction True)
      [executeQQ| UPDATE "sheet" SET "submission_mode" = #{submissionMode'} WHERE "id" = #{shid}; |]

  Migration20190429Tutorials -> whenM ((&&) <$> tableExists "tutorial" <*> tableExists "tutorial_user") $ do -- Tutorials were an unused stub before
    tableDropEmpty "tutorial"
    tableDropEmpty "tutorial_user"

  Migration20190515Exams -> whenM (tableExists "exam") $ -- Exams were an unused stub before
    tableDropEmpty "exam"

  Migration20190715ExamOccurrenceName -> whenM ((&&) <$> tableExists "exam_occurrence" <*> (not <$> columnExists "exam_occurrence" "name")) $ do
    examOccurrences <- [sqlQQ| SELECT "id" FROM "exam_occurrence" ORDER BY "exam"; |]
    [executeQQ|
      ALTER TABLE "exam_occurrence" ADD COLUMN "name" citext DEFAULT null;
    |]
    forM_ (zip [0..] examOccurrences) $ \(n :: Natural, Single eoId :: Single ExamOccurrenceId) -> do
      let name = [st|occ-#{tshow n}|]
      [executeQQ| UPDATE "exam_occurrence" SET "name" = #{name} WHERE "id" = #{eoId} |]
    [executeQQ|
      ALTER TABLE "exam_occurrence" ALTER COLUMN "name" DROP DEFAULT;
      ALTER TABLE "exam_occurrence" ALTER COLUMN "name" SET NOT NULL;
    |]

  Migration20190726UserFirstNamesTitles -> whenM (tableExists "user") $ do
    [executeQQ|
      ALTER TABLE "user" ADD COLUMN "first_name" text NOT NULL DEFAULT '';
      ALTER TABLE "user" ADD COLUMN "title" text DEFAULT null;
    |]
    let getUsers = rawQuery [st|SELECT "id", "display_name", "surname" FROM "user"|] []
        updateUser (uid, firstName) = [executeQQ|UPDATE "user" SET "first_name" = #{firstName} WHERE "id" = #{uid}|]
        splitFirstName :: [PersistValue] -> Maybe (UserId, Text)
        splitFirstName [fromPersistValue -> Right uid, fromPersistValue -> Right displayName, fromPersistValue -> Right surname] = Just . (uid, ) $ if
          | Just givenName <- Text.stripSuffix surname displayName
                          <|> Text.stripPrefix surname displayName
            -> Text.strip givenName
          | otherwise
            -> Text.replace surname "…" displayName
        splitFirstName _ = Nothing
    runConduit $ getUsers .| C.mapMaybe splitFirstName .| C.mapM_ updateUser

  Migration20190806TransactionLogIds -> whenM (tableExists "transaction_log") $ do
    [executeQQ|
      UPDATE transaction_log SET remote = null WHERE remote = #{IPv4 loopbackIP4 :: IP} OR remote = #{IPv6 loopbackIP6 :: IP}
    |]

    [executeQQ|
      ALTER TABLE transaction_log ADD COLUMN "initiator_id" bigint DEFAULT null;
    |]

    whenM (tableExists "user")
      [executeQQ|
        UPDATE transaction_log SET initiator_id = "user".id FROM "user" WHERE transaction_log.initiator = "user".ident;
      |]

    [executeQQ|
      ALTER TABLE transaction_log DROP COLUMN initiator;
      ALTER TABLE transaction_log RENAME COLUMN initiator_id TO initiator;
      ALTER TABLE transaction_log ALTER COLUMN initiator DROP DEFAULT;
    |]

    let getLogEntries = rawQuery [st|SELECT id, info FROM transaction_log|] []
        updateTransactionInfo [fromPersistValue -> Right lid, fromPersistValue -> Right (oldT :: Legacy.Transaction)] = do
          newT <- case oldT of
            Legacy.TransactionTermEdit tid
              -> return . Just . TransactionTermEdit $ TermKey tid
            Legacy.TransactionExamRegister (TermKey -> tid) (SchoolKey -> ssh) csh examn uident
              -> runMaybeT $ do
                  guardM . lift $ tablesExist ["user", "exam", "course"]

                  cid <- MaybeT . getKeyBy $ TermSchoolCourseShort tid ssh csh
                  eid <- MaybeT . getKeyBy $ UniqueExam cid examn
                  uid <- MaybeT . getKeyBy $ UniqueAuthentication uident
                  return $ TransactionExamRegister eid uid
            Legacy.TransactionExamDeregister (TermKey -> tid) (SchoolKey -> ssh) csh examn uident
              -> runMaybeT $ do
                  guardM . lift $ tablesExist ["user", "exam", "course"]

                  cid <- MaybeT . getKeyBy $ TermSchoolCourseShort tid ssh csh
                  eid <- MaybeT . getKeyBy $ UniqueExam cid examn
                  uid <- MaybeT . getKeyBy $ UniqueAuthentication uident
                  return $ TransactionExamDeregister eid uid
          whenIsJust newT $ \newT' ->
            update lid [ TransactionLogInfo =. toJSON newT' ]
        updateTransactionInfo _ = return ()
    runConduit $ getLogEntries .| C.mapM_ updateTransactionInfo

  Migration20190828UserFunction -> do
    [executeQQ|
      CREATE TABLE IF NOT EXISTS "user_function" ( "id" serial8 primary key, "user" bigint, "school" citext, "function" text );
    |]

    whenM (tableExists "user_admin") $ do
      let getAdminEntries = rawQuery [st|SELECT user_admin.id, user_admin.user, user_admin.school FROM user_admin;|] []
          moveAdminEntry [fromPersistValue -> Right (eId :: Int64), fromPersistValue -> Right (uid :: UserId), fromPersistValue -> Right (sid :: SchoolId)] =
            [executeQQ|
              INSERT INTO "user_function" ("user", "school", "function") VALUES (#{uid}, #{sid}, #{SchoolAdmin});
              DELETE FROM "user_admin" WHERE "id" = #{eId};
            |]
          moveAdminEntry _ = return ()
      runConduit $ getAdminEntries .| C.mapM_ moveAdminEntry
      tableDropEmpty "user_admin"
    whenM (tableExists "user_lecturer") $ do
      let getLecturerEntries = rawQuery [st|SELECT user_lecturer.id, user_lecturer.user, user_lecturer.school FROM user_lecturer;|] []
          moveLecturerEntry [fromPersistValue -> Right (eId :: Int64), fromPersistValue -> Right (uid :: UserId), fromPersistValue -> Right (sid :: SchoolId)] =
            [executeQQ|
              INSERT INTO "user_function" ("user", "school", "function") VALUES (#{uid}, #{sid}, #{SchoolLecturer});
              DELETE FROM "user_lecturer" WHERE "id" = #{eId};
            |]
          moveLecturerEntry _ = return ()
      runConduit $ getLecturerEntries .| C.mapM_ moveLecturerEntry
      tableDropEmpty "user_lecturer"
    whenM (tableExists "invitation") $ do
      [executeQQ|
        DELETE FROM "invitation" WHERE "for"->'junction' = '"UserLecturer"';
      |]

  Migration20190912UserDisplayEmail -> whenM (tableExists "user") $ do
    [executeQQ|
      ALTER TABLE "user" ADD COLUMN IF NOT EXISTS "display_email" citext;
      UPDATE "user" SET "display_email" = "email" WHERE "display_email" IS NULL;
      ALTER TABLE "user" ALTER COLUMN "display_email" SET NOT NULL;
    |]

  Migration20190916ExamPartNumber -> whenM (tableExists "exam_part") $ do
    [executeQQ|
      ALTER TABLE "exam_part" ADD COLUMN IF NOT EXISTS "number" citext;
    |]

    let getExamEntries = rawQuery [st|SELECT DISTINCT exam FROM exam_part ORDER BY exam;|] []
        renameExamParts [fromPersistValue -> Right (eId :: ExamId)] = do
          partNames' <- [sqlQQ|SELECT id, name FROM "exam_part" WHERE exam = #{eId};|]
          let
            partNames :: [(ExamPartId, ExamPartName)]
            partNames = foldMap (\(Single epId, Single pName) -> singletonMap epId pName) partNames'

            partsSorted = partNames
              & sortOn ( map (\x -> maybe (Left x) Right (readMay x :: Maybe Integer))
                       . groupBy ((==) `on` Char.isDigit)
                       . CI.foldedCase
                       . snd
                       )
              & map fst
          forM_ (zip [_ExamPartNumber' # 1..] partsSorted) $ \(num :: ExamPartNumber, pId) ->
            [executeQQ|
              UPDATE "exam_part" SET "number" = #{num} WHERE "id" = #{pId};
            |]
        renameExamParts _ = return ()
    runConduit $ getExamEntries .| C.mapM_ renameExamParts

  Migration20190918ExamRulesRefactor -> whenM (tableExists "exam") $ do
    oldVersion <- columnExists "exam" "grading_key"
    if
      | oldVersion -> do
          -- Major changes happened to the structure of exams without an appropriate
          -- migration; try to remedy that here
          tableDropEmpty "exam_part_corrector"
          tableDropEmpty "exam_corrector"
          tableDropEmpty "exam_result"
          tableDropEmpty "exam_registration"
          tableDropEmpty "exam_occurrence"
          tableDropEmpty "exam_part"
          tableDropEmpty "exam"
      | otherwise ->
          [executeQQ|
            ALTER TABLE "exam" ALTER COLUMN "grading_rule" DROP NOT NULL;
            ALTER TABLE "exam" ALTER COLUMN "bonus_rule" DROP NOT NULL;
            ALTER TABLE "exam" ALTER COLUMN "occurrence_rule" DROP NOT NULL;

            UPDATE "exam" SET "grading_rule" = NULL WHERE "grading_rule"->>'rule' = 'manual';
            UPDATE "exam" SET "bonus_rule" = NULL WHERE "bonus_rule"->>'rule' = 'no-bonus';
            UPDATE "exam" SET "occurrence_rule" = NULL WHERE "occurrence_rule" = '"manual"';

            UPDATE "exam" SET "occurrence_rule" = json_build_object('rule', "occurrence_rule");
          |]

  Migration20190919ExamBonusRounding -> whenM (tableExists "exam")
    [executeQQ|
      UPDATE "exam" SET "bonus_rule" = jsonb_insert("bonus_rule", '{round}' :: text[], '0.01' :: jsonb) WHERE "bonus_rule"->>'rule' = 'bonus-points';
    |]

  Migration20191002FavouriteReason -> whenM (tableExists "course_favourite")
    [executeQQ|
      ALTER TABLE "course_favourite" RENAME COLUMN "time" TO "last_visit";
      ALTER TABLE "course_favourite" ADD COLUMN "reason" jsonb DEFAULT '"visited"'::jsonb;
    |]

  Migration20191125UserLanguages -> whenM (tableExists "user")
    [executeQQ|
      ALTER TABLE "user" ADD COLUMN "languages" jsonb;
      UPDATE "user" SET "languages" = "mail_languages" where "mail_languages" <> '[]';
      ALTER TABLE "user" DROP COLUMN "mail_languages";
    |]

  Migration20191126ExamPartCorrector -> whenM (tableExists "exam_part_corrector") $
    tableDropEmpty "exam_part_corrector"

  Migration20191128StudyFeaturesSuperField -> whenM (tableExists "study_features")
    [executeQQ|
      ALTER TABLE "study_features" ADD COLUMN "super_field" bigint;
      UPDATE "study_features" SET "super_field" = "field", "field" = "sub_field" WHERE NOT ("sub_field" IS NULL);
      ALTER TABLE "study_features" DROP COLUMN "sub_field";
    |]

  Migration20200111ExamOccurrenceRuleRefactor -> whenM (tableExists "exam")
    [executeQQ|
      UPDATE "exam" SET "occurrence_rule" = #{ExamRoomManual} WHERE "occurrence_rule" IS NULL;
      ALTER TABLE "exam" ALTER COLUMN "occurrence_rule" SET NOT NULL;
    |]

  Migration20200218ExamResultPassedGrade -> whenM ((&&) <$> tableExists "exam" <*> tableExists "exam_result") $ do
    queryRes <- [sqlQQ|SELECT exam_result.id, exam_result.result FROM exam_result INNER JOIN exam ON exam_result.exam = exam.id WHERE NOT exam.show_grades;|]
    forM_ queryRes $ \(resId :: ExamResultId, Single (res :: ExamResultGrade)) ->
      let res' :: ExamResultPassedGrade
          res' = Left . view passingGrade <$> res
       in [executeQQ|UPDATE exam_result SET result = #{res'} WHERE id = #{resId};|]

  Migration20200218ExamGradingModeMixed -> whenM (tableExists "exam")
    [executeQQ|
      ALTER TABLE "exam" ADD COLUMN "grading_mode" character varying;
      UPDATE "exam" SET "grading_mode" = 'grades' WHERE "show_grades";
      UPDATE "exam" SET "grading_mode" = 'pass' WHERE NOT "show_grades";
      ALTER TABLE "exam" DROP COLUMN "show_grades";
      ALTER TABLE "exam" ALTER COLUMN "grading_mode" SET NOT NULL;
    |]

  Migration20200218ExternalExamGradingModeMixed -> whenM (tableExists "external_exam")
    [executeQQ|
      ALTER TABLE "external_exam" ADD COLUMN "grading_mode" character varying;
      UPDATE "external_exam" SET "grading_mode" = 'grades' WHERE "show_grades";
      UPDATE "external_exam" SET "grading_mode" = 'pass' WHERE NOT "show_grades";
      ALTER TABLE "external_exam" DROP COLUMN "show_grades";
      ALTER TABLE "external_exam" ALTER COLUMN "grading_mode" SET NOT NULL;
    |]

  Migration20200424SubmissionGroups -> do
    whenM (tableExists "submission_group") $
      tableDropEmpty "submission_group"
    whenM (tableExists "submission_group_edit") $
      tableDropEmpty "submission_group_edit"

  Migration20200504CourseParticipantState -> whenM (tableExists "course_participant") $ do
    [executeQQ|
      ALTER TABLE "course_participant" ADD COLUMN "state" text NOT NULL DEFAULT 'active';
      ALTER TABLE "course_participant" ALTER COLUMN "state" DROP DEFAULT;
    |]
    let getAuditLog = rawQuery [st|SELECT DISTINCT ON ("info") "info", max("time") FROM "transaction_log" GROUP BY "info" ORDER BY "info";|] []
        ensureParticipant :: [PersistValue] -> ReaderT SqlBackend m ()
        ensureParticipant [fmap Aeson.fromJSON . fromPersistValue -> Right (Aeson.Success TransactionCourseParticipantEdit{..}), fromPersistValue -> Right (time :: UTCTime)] = do
          whenM (existsKey transactionCourse `and2M` existsKey transactionUser)
            [executeQQ|INSERT INTO "course_participant" ("course", "user", "registration", "state") VALUES (#{transactionCourse}, #{transactionUser}, #{time}, #{CourseParticipantInactive False}) ON CONFLICT DO NOTHING;|]
        ensureParticipant _ = return ()
    runConduit $ getAuditLog .| C.mapM_ ensureParticipant

  Migration20200506SessionFile -> whenM (tableExists "session_file") $
    tableDropEmpty "session_file"

  Migration20200627FileRefactor -> whenM (tableExists "file") $ do
    [executeQQ|
      ALTER TABLE "file" ADD COLUMN "hash" BYTEA;
      UPDATE "file" SET "hash" = digest("content", 'sha3-512');
    |]

    let
      migrateFromFile :: forall fRef.
           ( HasFileReference fRef
           , PersistRecordBackend fRef SqlBackend
           )
        => ([PersistValue] -> (Key fRef, FileReferenceResidual fRef))
        -> (Entity fRef -> ReaderT SqlBackend m ())
        -> [PersistValue]
        -> ReaderT SqlBackend m ()
      migrateFromFile toResidual doUpdate ((fromPersistValue -> Right (fId :: Int64)):rest) = do
        let (fRefKey, residual) = toResidual rest
        fileDat <- [sqlQQ|
          SELECT "file".title, "file".modified, "file".hash FROM "file" WHERE "id" = #{fId};
        |]
        forM_ fileDat $ \case
          (fromPersistValue . unSingle -> Right (fileReferenceTitle' :: FilePath), fromPersistValue . unSingle -> Right fileReferenceModified, fromPersistValue . unSingle -> Right fileReferenceContent) -> do
            let fileRef fileReferenceTitle = _FileReference # (FileReference{..}, residual)
                candidateTitles = fileReferenceTitle' : [ fName <.> ("old-" <> show n) <.> ext | n <- [1..1000] ]
                  where (fName, ext) = splitExtension fileReferenceTitle'
            validTitles <- dropWhileM (fmap (is _Just) . checkUnique . fileRef) candidateTitles
            case validTitles of
              fTitle : _ -> doUpdate . Entity fRefKey $ fileRef fTitle
              _other     -> error "Could not make validTitle"
          _other -> return ()
      migrateFromFile _ _ _ = return ()

    whenM (tableExists "submission_file") $ do
      [executeQQ|
        ALTER TABLE "submission_file" ADD COLUMN "title" VARCHAR;
        ALTER TABLE "submission_file" ADD COLUMN "content" BYTEA NULL;
        ALTER TABLE "submission_file" ADD COLUMN "modified" TIMESTAMP WITH TIME ZONE;
        ALTER TABLE "submission_file" DROP CONSTRAINT "unique_submission_file";
        ALTER TABLE "submission_file" ADD CONSTRAINT "unique_submission_file" UNIQUE("submission", "title", "is_update");
      |]
      let getSubmissionFiles = [queryQQ|SELECT "file", "submission_file"."id", "submission", "is_update", "is_deletion" FROM "submission_file" LEFT OUTER JOIN "file" ON "submission_file"."file" = "file".id ORDER BY "file"."modified" DESC;|]
          toResidual [ fromPersistValue -> Right sfId
                     , fromPersistValue -> Right submissionFileResidualSubmission
                     , fromPersistValue -> Right submissionFileResidualIsUpdate
                     , fromPersistValue -> Right submissionFileResidualIsDeletion
                     ]
            = (sfId, SubmissionFileResidual{..})
          toResidual _ = error "Could not convert SubmissionFile to residual"
      runConduit $ getSubmissionFiles .| C.mapM_ (migrateFromFile @SubmissionFile toResidual replaceEntity)
      [executeQQ|
        ALTER TABLE "submission_file" DROP COLUMN "file";
      |]

    whenM (tableExists "sheet_file") $ do
      [executeQQ|
        ALTER TABLE "sheet_file" ADD COLUMN "title" VARCHAR;
        ALTER TABLE "sheet_file" ADD COLUMN "content" BYTEA NULL;
        ALTER TABLE "sheet_file" ADD COLUMN "modified" TIMESTAMP WITH TIME ZONE;
        ALTER TABLE "sheet_file" DROP CONSTRAINT "unique_sheet_file";
        ALTER TABLE "sheet_file" ADD CONSTRAINT "unique_sheet_file" UNIQUE("sheet", "type", "title");
      |]
      let getSheetFiles = [queryQQ|SELECT "file", "sheet_file"."id", "sheet", "type" FROM "sheet_file" LEFT OUTER JOIN "file" ON "sheet_file"."file" = "file".id ORDER BY "file"."modified" DESC;|]
          toResidual [ fromPersistValue -> Right shfId
                     , fromPersistValue -> Right sheetFileResidualSheet
                     , fromPersistValue -> Right sheetFileResidualType
                     ]
            = (shfId, SheetFileResidual{..})
          toResidual _ = error "Could not convert SheetFile to residual"
      runConduit $ getSheetFiles .| C.mapM_ (migrateFromFile @SheetFile toResidual replaceEntity)
      [executeQQ|
        ALTER TABLE "sheet_file" DROP COLUMN "file";
      |]

    whenM (tableExists "course_news_file") $ do
      [executeQQ|
        ALTER TABLE "course_news_file" ADD COLUMN "title" VARCHAR;
        ALTER TABLE "course_news_file" ADD COLUMN "content" BYTEA NULL;
        ALTER TABLE "course_news_file" ADD COLUMN "modified" TIMESTAMP WITH TIME ZONE;
        ALTER TABLE "course_news_file" DROP CONSTRAINT "unique_course_news_file";
        ALTER TABLE "course_news_file" ADD CONSTRAINT "unique_course_news_file" UNIQUE("news", "title");
      |]
      let getCourseNewsFiles = [queryQQ|SELECT "file", "course_news_file"."id", "news" FROM "course_news_file" LEFT OUTER JOIN "file" ON "course_news_file"."file" = "file".id ORDER BY "file"."modified" DESC;|]
          toResidual [ fromPersistValue -> Right cnfId
                     , fromPersistValue -> Right courseNewsFileResidualNews
                     ]
            = (cnfId, CourseNewsFileResidual{..})
          toResidual _ = error "Could not convert CourseNewsFile to residual"
      runConduit $ getCourseNewsFiles .| C.mapM_ (migrateFromFile @CourseNewsFile toResidual replaceEntity)
      [executeQQ|
        ALTER TABLE "course_news_file" DROP COLUMN "file";
      |]

    whenM (tableExists "material_file") $ do
      [executeQQ|
        ALTER TABLE "material_file" ADD COLUMN "title" VARCHAR;
        ALTER TABLE "material_file" ADD COLUMN "content" BYTEA NULL;
        ALTER TABLE "material_file" ADD COLUMN "modified" TIMESTAMP WITH TIME ZONE;
        ALTER TABLE "material_file" DROP CONSTRAINT "unique_material_file";
        ALTER TABLE "material_file" ADD CONSTRAINT "unique_material_file" UNIQUE("material", "title");
      |]
      let getMaterialFiles = [queryQQ|SELECT "file", "material_file"."id", "material" FROM "material_file" LEFT OUTER JOIN "file" ON "material_file"."file" = "file".id ORDER BY "file"."modified" DESC;|]
          toResidual [ fromPersistValue -> Right shfId
                     , fromPersistValue -> Right materialFileResidualMaterial
                     ]
            = (shfId, MaterialFileResidual{..})
          toResidual _ = error "Could not convert MaterialFile to residual"
      runConduit $ getMaterialFiles .| C.mapM_ (migrateFromFile @MaterialFile toResidual replaceEntity)
      [executeQQ|
        ALTER TABLE "material_file" DROP COLUMN "file";
      |]

    whenM (tableExists "session_file")
      [executeQQ|
        ALTER TABLE "session_file" ADD COLUMN "content" BYTEA;
        UPDATE "session_file" SET "content" = (SELECT "hash" FROM "file" WHERE "file".id = "session_file"."file");
        ALTER TABLE "session_file" DROP COLUMN "file";
      |]

    [executeQQ|
      ALTER TABLE "file" RENAME TO "file_content";
      DELETE FROM "file_content" WHERE "content" IS NULL OR "hash" IS NULL;
    |]
    [executeQQ|
      DELETE FROM "file_content"
        WHERE "id" IN (
          SELECT "id"
          FROM (
            SELECT
              "id",
              ROW_NUMBER() OVER w AS rnum
            FROM "file_content"
            WINDOW w AS (
              PARTITION BY "hash"
              ORDER BY "id"
            )
          ) as t
          WHERE t.rnum > 1);
    |]
    [executeQQ|
      ALTER TABLE "file_content" DROP COLUMN "title";
      ALTER TABLE "file_content" DROP COLUMN "modified";
      ALTER TABLE "file_content" DROP COLUMN "id";
    |]

  Migration20200825StudyFeaturesFirstObserved -> whenM (tableExists "study_features")
    [executeQQ|
      ALTER TABLE study_features RENAME updated TO last_observed;
      ALTER TABLE study_features ADD COLUMN first_observed timestamp with time zone;
      UPDATE study_features SET first_observed = (SELECT MAX(last_observed) FROM study_features as other WHERE other."user" = study_features."user" AND other.degree = study_features.degree AND other.field = study_features.field AND other.type = study_features.type AND other.semester = study_features.semester - 1);
    |]

  Migration20200902FileChunking -> whenM (tableExists "file_content") $ do
    chunkingParams <- lift $ view _appFileChunkingParams

    [executeQQ|
      ALTER TABLE file_content RENAME TO file_content_chunk;
      ALTER INDEX file_content_pkey RENAME TO file_content_chunk_pkey;

      CREATE TABLE file_content_chunk_unreferenced (id bigserial, hash bytea NOT NULL, since timestamp with time zone NOT NULL);
      INSERT INTO file_content_chunk_unreferenced (since, hash) (SELECT unreferenced_since as since, hash FROM file_content_chunk WHERE NOT (unreferenced_since IS NULL));
      ALTER TABLE file_content_chunk DROP COLUMN unreferenced_since;

      ALTER TABLE file_content_chunk ADD COLUMN content_based boolean NOT NULL DEFAULT false;
      UPDATE file_content_chunk SET content_based = true WHERE length(content) <= #{fastCDCMinBlockSize chunkingParams};

      CREATE TABLE file_content_entry (id bigserial NOT NULL PRIMARY KEY, hash bytea NOT NULL, ix bigint NOT NULL, chunk_hash bytea NOT NULL);
      INSERT INTO file_content_entry (hash, chunk_hash, ix) (SELECT hash, hash as chunk_hash, 0 as ix FROM file_content_chunk);
    |]

  Migration20200916ExamMode -> do
    whenM (tableExists "exam")
      [executeQQ|
        ALTER TABLE exam ADD COLUMN "exam_mode" jsonb NOT NULL DEFAULT #{ExamMode Nothing Nothing Nothing Nothing};
      |]
    whenM (tableExists "school")
      [executeQQ|
        ALTER TABLE school ADD COLUMN "exam_discouraged_modes" jsonb NOT NULL DEFAULT #{ExamModeDNF predDNFFalse};
      |]

  Migration20201106StoredMarkup ->
    [executeQQ|
      SET client_min_messages TO WARNING;
      ALTER TABLE IF EXISTS ^{Course} ALTER COLUMN @{CourseDescription} TYPE jsonb USING (CASE WHEN @{CourseDescription} IS NOT NULL THEN to_json(@{CourseDescription}) ELSE NULL END);
      ALTER TABLE IF EXISTS ^{CourseEvent} ALTER COLUMN @{CourseEventNote} TYPE jsonb USING (CASE WHEN @{CourseEventNote} IS NOT NULL THEN to_json(@{CourseEventNote}) ELSE NULL END);
      ALTER TABLE IF EXISTS ^{CourseUserNote} ALTER COLUMN @{CourseUserNoteNote} TYPE jsonb USING (CASE WHEN @{CourseUserNoteNote} IS NOT NULL THEN to_json(@{CourseUserNoteNote}) ELSE NULL END);
      ALTER TABLE IF EXISTS ^{Material} ALTER COLUMN @{MaterialDescription} TYPE jsonb USING (CASE WHEN @{MaterialDescription} IS NOT NULL THEN to_json(@{MaterialDescription}) ELSE NULL END);
      ALTER TABLE IF EXISTS ^{CourseNews} ALTER COLUMN @{CourseNewsContent} TYPE jsonb USING (CASE WHEN @{CourseNewsContent} IS NOT NULL THEN to_json(@{CourseNewsContent}) ELSE NULL END);
      ALTER TABLE IF EXISTS ^{CourseNews} ALTER COLUMN @{CourseNewsSummary} TYPE jsonb USING (CASE WHEN @{CourseNewsSummary} IS NOT NULL THEN to_json(@{CourseNewsSummary}) ELSE NULL END);
      ALTER TABLE IF EXISTS ^{Exam} ALTER COLUMN @{ExamDescription} TYPE jsonb USING (CASE WHEN @{ExamDescription} IS NOT NULL THEN to_json(@{ExamDescription}) ELSE NULL END);
      ALTER TABLE IF EXISTS ^{ExamOccurrence} ALTER COLUMN @{ExamOccurrenceDescription} TYPE jsonb USING (CASE WHEN @{ExamOccurrenceDescription} IS NOT NULL THEN to_json(@{ExamOccurrenceDescription}) ELSE NULL END);
      ALTER TABLE IF EXISTS ^{Sheet} ALTER COLUMN @{SheetDescription} TYPE jsonb USING (CASE WHEN @{SheetDescription} IS NOT NULL THEN to_json(@{SheetDescription}) ELSE NULL END);
      ALTER TABLE IF EXISTS ^{Sheet} ALTER COLUMN @{SheetMarkingText} TYPE jsonb USING (CASE WHEN @{SheetMarkingText} IS NOT NULL THEN to_json(@{SheetMarkingText}) ELSE NULL END);
      ALTER TABLE IF EXISTS ^{SystemMessage} ALTER COLUMN @{SystemMessageContent} TYPE jsonb USING (CASE WHEN @{SystemMessageContent} IS NOT NULL THEN to_json(@{SystemMessageContent}) ELSE NULL END);
      ALTER TABLE IF EXISTS ^{SystemMessage} ALTER COLUMN @{SystemMessageSummary} TYPE jsonb USING (CASE WHEN @{SystemMessageSummary} IS NOT NULL THEN to_json(@{SystemMessageSummary}) ELSE NULL END);
      ALTER TABLE IF EXISTS ^{SystemMessageTranslation} ALTER COLUMN @{SystemMessageTranslationContent} TYPE jsonb USING (CASE WHEN @{SystemMessageTranslationContent} IS NOT NULL THEN to_json(@{SystemMessageTranslationContent}) ELSE NULL END);
      ALTER TABLE IF EXISTS ^{SystemMessageTranslation} ALTER COLUMN @{SystemMessageTranslationSummary} TYPE jsonb USING (CASE WHEN @{SystemMessageTranslationSummary} IS NOT NULL THEN to_json(@{SystemMessageTranslationSummary}) ELSE NULL END);
      SET client_min_messages TO NOTICE;
    |]

  Migration20201119RoomTypes -> do
    whenM (tableExists "exam_occurrence") $ do
      [executeQQ|ALTER TABLE "exam_occurrence" ADD COLUMN "room_json" jsonb|]
      let getExamOccurrences = [queryQQ|SELECT "id", "room" FROM "exam_occurrence"|]
          migrateExamOccurrence [ fromPersistValue -> Right (eoId :: ExamOccurrenceId), fromPersistValue -> Right roomText ] = [executeQQ|UPDATE "exam_occurrence" SET "room_json" = #{RoomReferenceSimple roomText} WHERE "id" = #{eoId}|]
          migrateExamOccurrence _ = return ()
       in runConduit $ getExamOccurrences .| C.mapM_ migrateExamOccurrence
      [executeQQ|
        ALTER TABLE "exam_occurrence" DROP COLUMN "room";
        ALTER TABLE "exam_occurrence" RENAME COLUMN "room_json" TO "room";
      |]
    whenM (tableExists "tutorial") $ do
      [executeQQ|ALTER TABLE "tutorial" ADD COLUMN "room_json" jsonb|]
      let getTutorials = [queryQQ|SELECT "id", "room" FROM "tutorial"|]
          migrateTutorial [ fromPersistValue -> Right (tutId :: TutorialId), fromPersistValue -> Right roomText ] = [executeQQ|UPDATE "tutorial" SET "room_json" = #{RoomReferenceSimple roomText} WHERE "id" = #{tutId}|]
          migrateTutorial _ = return ()
       in runConduit $ getTutorials .| C.mapM_ migrateTutorial
      [executeQQ|
        ALTER TABLE "tutorial" DROP COLUMN "room";
        ALTER TABLE "tutorial" RENAME COLUMN "room_json" TO "room";
      |]
    whenM (tableExists "course_event") $ do
      [executeQQ|ALTER TABLE "course_event" ADD COLUMN "room_json" jsonb|]
      let getCourseEvents = [queryQQ|SELECT "id", "room" FROM "course_event"|]
          migrateCourseEvent [ fromPersistValue -> Right (ceId :: CourseEventId), fromPersistValue -> Right roomText ] = [executeQQ|UPDATE "course_event" SET "room_json" = #{RoomReferenceSimple roomText} WHERE "id" = #{ceId}|]
          migrateCourseEvent _ = return ()
       in runConduit $ getCourseEvents .| C.mapM_ migrateCourseEvent
      [executeQQ|
        ALTER TABLE "course_event" DROP COLUMN "room";
        ALTER TABLE "course_event" RENAME COLUMN "room_json" TO "room";
      |]
    whenM (tableExists "course") $ do
      let getCourses = [queryQQ|SELECT "id", "link_external" FROM "course"|]
          migrateCourse [ fromPersistValue -> Right (cId :: CourseId), fromPersistValue -> Right (uriText :: Maybe Text) ]
            | Just uri <- parseURI . unpack =<< uriText = [executeQQ|UPDATE "course" SET "link_external" = #{uri} WHERE "id" = #{cId}|]
            | otherwise = [executeQQ|UPDATE "course" SET "link_external" = NULL WHERE "id" = #{cId}|]
          migrateCourse _ = return ()
       in runConduit $ getCourses .| C.mapM_ migrateCourse

  Migration20210115ExamPartsFrom -> do
    whenM (tableExists "exam") $ do
      [executeQQ|ALTER TABLE "exam" ADD COLUMN "parts_from" timestamp with time zone|]
      let getExam = [queryQQ|SELECT "id", "finished" FROM "exam"|]
          migrateExam [ fromPersistValue -> Right (eId :: ExamId), fromPersistValue -> Right (finished :: Maybe UTCTime) ] = [executeQQ|UPDATE "exam" SET "parts_from" = #{finished} WHERE "id" = #{eId}|]
          migrateExam _ = return ()
       in runConduit $ getExam .| C.mapM_ migrateExam

  Migration20210208StudyFeaturesRelevanceCachedUUIDs ->
    whenM (tableExists "study_features") $ do
      [executeQQ|
        ALTER TABLE "study_features" ADD COLUMN "relevance_cached_uuid" uuid
      |]

      let getStudyFeatures = [queryQQ|SELECT "id" FROM "study_features" WHERE relevance_cached|]
          migrateStudyFeatures genUUID lift' [ fromPersistValue -> Right (sfId :: StudyFeaturesId) ] = do
            uuid <- genUUID
            lift' [executeQQ|UPDATE "study_features" SET "relevance_cached_uuid" = #{uuid} WHERE "id" = #{sfId}|]
          migrateStudyFeatures _ _ _ = return ()
       in runConduit $ getStudyFeatures .| randUUIDC (\genUUID lift' -> C.mapM_ $ migrateStudyFeatures genUUID lift')

      [executeQQ|
        ALTER TABLE "study_features" DROP COLUMN "relevance_cached";
        ALTER TABLE "study_features" RENAME COLUMN "relevance_cached_uuid" TO "relevance_cached";
      |]

  -- Placeholder to inform crontab generation when the switchover happened, so old submissions don't get notified as corrected
  Migration20210318CrontabSubmissionRatedNotification -> return ()

  Migration20210608SeparateTermActive -> do
    now <- liftIO getCurrentTime

    whenM (and2M (tableExists "term") (not <$> tableExists "term_active")) $ do
      [executeQQ|
        CREATE TABLE "term_active" ("id" SERIAL8 PRIMARY KEY UNIQUE, "term" numeric(5,1) NOT NULL, "from" timestamp with time zone NOT NULL)
      |]

      let getTerms = [queryQQ|SELECT "name", "active" FROM "term"|]
          migrateTerms [ fromPersistValue -> Right (tid :: TermId), fromPersistValue -> Right (isActive :: Bool) ] = when isActive
            [executeQQ|INSERT INTO term_active (term, "from") VALUES (#{tid}, #{now})|]
          migrateTerms _ = return ()
       in runConduit $ getTerms .| C.mapM_ migrateTerms

      [executeQQ|
        ALTER TABLE "term" DROP COLUMN "active";
      |]

  Migration20230524QualificationUserBlock ->
    whenM (andM [ not <$> tableExists "qualification_user_block"
                , tableExists "qualification_user"
                , columnExists "qualification_user" "blocked_due"
                ]) $ do
      [executeQQ|
        CREATE TABLE "qualification_user_block"
          ( "id" SERIAL8 PRIMARY KEY UNIQUE
          , "qualification_user" bigint NOT NULL
          , "unblock" boolean NOT NULL
          , "from" timestamp with time zone NOT NULL
          , "reason" character varying NOT NULL
          , "blocker" bigint
          , CONSTRAINT qualification_user_block_qualification_user_fkey FOREIGN KEY ("qualification_user") REFERENCES "qualification_user"(id) ON DELETE CASCADE ON UPDATE CASCADE
          , CONSTRAINT qualification_user_block_blocker_fkey FOREIGN KEY ("blocker") REFERENCES "user"(id)
          );
      |]

      let getBlocks = [queryQQ|SELECT "id", "blocked_due" FROM "qualification_user" WHERE "blocked_due" IS NOT NULL|]
          migrateBlocks [ fromPersistValue -> Right (quid :: QualificationUserId), fromPersistValue -> Right (Just (Legacy.QualificationBlocked{..} :: Legacy.QualificationBlocked)) ] =
            [executeQQ|INSERT INTO "qualification_user_block" ("qualification_user", "unblock", "from", "reason") VALUES (#{quid}, FALSE, #{qualificationBlockedDay}, #{qualificationBlockedReason})|]
          migrateBlocks _ = return ()
       in runConduit $ getBlocks .| C.mapM_ migrateBlocks

      [executeQQ|
        ALTER TABLE "qualification_user" DROP COLUMN "blocked_due";
      |]

  Migration20230703LmsUserStatus ->
    whenM (columnNotExists "lms_user" "status_day") $ do
      [executeQQ|
        ALTER TABLE "lms_user" ADD COLUMN "status_day" date;
        UPDATE "lms_user"
          SET "status_day" = CAST("status"->>'day' AS date)
            , "status" = "status"->'lms-status'
          ;
      |]


tableExists :: MonadIO m => Text -> ReaderT SqlBackend m Bool
tableExists table = do
  haveTable <- [sqlQQ| SELECT to_regclass(#{table}); |]
  case haveTable :: [Maybe (Single PersistValue)] of
    [Just _] -> return True
    _other   -> return False

tablesExist :: MonadIO m => [Text] -> ReaderT SqlBackend m Bool
tablesExist = flip allM tableExists

tableIsEmpty :: MonadIO m => Text -> ReaderT SqlBackend m Bool
tableIsEmpty table = do
  res <- rawSql [st|SELECT COUNT(*) FROM "#{table}"|] []
  return $ case res of
    [unSingle -> rows] -> rows == (0 :: Int64)
    _other             -> error "tableIsEmpty didn't return exactly one result"

tableDropEmpty :: MonadIO m => Text -> ReaderT SqlBackend m ()
tableDropEmpty table = whenM (tableExists table) $ do
  isEmpty <- tableIsEmpty table
  if
    | isEmpty   -> rawExecute [st|DROP TABLE "#{table}" CASCADE|] []
    | otherwise -> error $ "Table " <> unpack table <> " is not empty"

columnExists :: MonadIO m
  => Text -- ^ Table
  -> Text -- ^ Column
  -> ReaderT SqlBackend m Bool -- BEWARE: use tablesExist beforehand!!!
columnExists table column = do
  haveColumn <- [sqlQQ|SELECT column_name FROM information_schema.columns WHERE table_name=#{table} and column_name=#{column};|]
  case haveColumn :: [Single PersistValue] of
    [_]    -> return True
    _other -> return False

-- | equivalent to andM [ tableExists, not <$> columnExists]
columnNotExists :: MonadIO m
  => Text -- ^ Table
  -> Text -- ^ Column
  -> ReaderT SqlBackend m Bool
columnNotExists table column = and2M (tableExists table) (not <$> columnExists table column)
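
-- A minimal usage sketch (an assumption, mirroring the guards used in the
-- migrations above): establish that the table exists before trusting a
-- negative answer from columnExists, since a missing table and a missing
-- column both come back as False from the information_schema lookup, e.g.
--
--   whenM ((&&) <$> tableExists "sheet" <*> columnExists "sheet" "upload_mode") $
--     ...
--
-- or, for the negative case, columnNotExists "sheet" "upload_mode".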