Merge branch 'master' into feat/external-apis

Gregor Kleen 2021-02-15 15:37:05 +01:00
commit 39c0c44c2a
34 changed files with 620 additions and 209 deletions


@ -6,7 +6,7 @@ workflow:
default:
image:
name: fpco/stack-build:lts-16.11
name: fpco/stack-build:lts-16.31
cache: &global_cache
paths:
- node_modules
@ -72,7 +72,8 @@ frontend:build:
- npm run frontend:build
before_script: *npm
needs:
- npm install
- job: npm install
artifacts: true
artifacts:
paths:
- static
@ -80,8 +81,6 @@ frontend:build:
- config/webpack.yml
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
expire_in: "1 day"
dependencies:
- npm install
retry: 2
interruptible: true
@ -91,9 +90,8 @@ frontend:lint:
- npm run frontend:lint
before_script: *npm
needs:
- npm install
dependencies:
- npm install
- job: npm install
artifacts: true
retry: 2
interruptible: true
@ -109,7 +107,8 @@ yesod:build:dev:
script:
- stack build --test --copy-bins --local-bin-path $(pwd)/bin --fast --flag uniworx:-library-only --flag uniworx:dev --flag uniworx:pedantic --no-strip
needs:
- frontend:build
- job: frontend:build
artifacts: true
before_script: &haskell
- rm -rvf /etc/apt/sources.list /etc/apt/sources.list.d
- install -v -T -m 0644 ${APT_SOURCES_LIST} /etc/apt/sources.list
@ -118,18 +117,20 @@ yesod:build:dev:
- install -v -m 0700 -d ~/.ssh
- install -v -T -m 0644 ${SSH_KNOWN_HOSTS} ~/.ssh/known_hosts
- install -v -T -m 0400 ${SSH_DEPLOY_KEY} ~/.ssh/deploy && echo "IdentityFile ~/.ssh/deploy" >> ~/.ssh/config;
- stack install happy
- export PATH="${HOME}/.local/bin:$PATH"
- hash -r
- git restore-mtime
artifacts:
paths:
- bin/
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
expire_in: "1 week"
dependencies:
- frontend:build
rules:
- if: $CI_COMMIT_REF_NAME =~ /(^v[0-9].*)|((^|\/)profile($|\/))/
when: manual
allow_failure: true
- when: always
retry: 2
@ -142,19 +143,19 @@ yesod:build:
script:
- stack build --test --copy-bins --local-bin-path $(pwd)/bin --flag uniworx:-library-only --flag uniworx:-dev --flag uniworx:pedantic --no-strip
needs:
- frontend:build
- job: frontend:build
artifacts: true
before_script: *haskell
artifacts:
paths:
- bin/
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
dependencies:
- frontend:build
rules:
- if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/
when: always
- when: manual
allow_failure: true
retry: 2
interruptible: true
@ -169,19 +170,19 @@ yesod:build:profile:
script:
- stack build --profile --copy-bins --local-bin-path $(pwd)/bin --flag uniworx:-library-only --flag uniworx:-dev --flag uniworx:pedantic --no-strip
needs:
- frontend:build
- job: frontend:build
artifacts: true
before_script: *haskell
artifacts:
paths:
- bin/
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
dependencies:
- frontend:build
rules:
- if: $CI_COMMIT_REF_NAME =~ /(^|\/)profile($|\/)/
when: always
- when: manual
allow_failure: true
retry: 2
interruptible: true
@ -195,7 +196,8 @@ frontend:test:
script:
- npm run frontend:test
needs:
- npm install
- job: npm install
artifacts: true
before_script:
- rm -rvf /etc/apt/sources.list /etc/apt/sources.list.d
- install -v -T -m 0644 ${APT_SOURCES_LIST} /etc/apt/sources.list
@ -206,18 +208,15 @@ frontend:test:
- npm install -g npm
- hash -r
- apt-get install -y --no-install-recommends chromium-browser
dependencies:
- npm install
retry: 2
interruptible: true
parse-changelog:
cache: {}
stage: prepare release
dependencies:
- npm install
needs:
- npm install
- job: npm install
artifacts: true
rules:
- if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/
when: always
@ -243,14 +242,15 @@ parse-changelog:
upload:
cache: {}
variables:
GIT_STRATEGY: none
stage: upload packages
image: curlimages/curl:latest
needs:
- yesod:build
- parse-changelog
dependencies:
- yesod:build
- parse-changelog
- job: yesod:build
artifacts: true
- job: parse-changelog
artifacts: true
rules:
- if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/
when: always
@ -268,13 +268,15 @@ upload:
release:
cache: {}
variables:
GIT_STRATEGY: none
stage: release
image: registry.gitlab.com/gitlab-org/release-cli:latest
needs:
- upload
- parse-changelog
dependencies:
- parse-changelog
- job: upload
artifacts: false
- job: parse-changelog
artifacts: true
rules:
- if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/
when: always
@ -282,11 +284,11 @@ release:
- export VERSION="$(cat .current-version)"
script:
- |
release-cli create --name "${VERSION}" --tag-name $CI_COMMIT_TAG --description ./current-changelog.md \
--assets-link "{\"name\":\"uniworx\",\"url\":\"${PACKAGE_REGISTRY_URL}/${VERSION}/uniworx\",\"filepath\":\"uniworx\"}" \
--assets-link "{\"name\":\"uniworxdb\",\"url\":\"${PACKAGE_REGISTRY_URL}/${VERSION}/uniworxdb\",\"filepath\":\"uniworxdb\"}" \
--assets-link "{\"name\":\"uniworxload\",\"url\":\"${PACKAGE_REGISTRY_URL}/${VERSION}/uniworxload\",\"filepath\":\"uniworxload\"}" \
--assets-link "{\"name\":\"uniworx-wflint\",\"url\":\"${PACKAGE_REGISTRY_URL}/${VERSION}/uniworx-wflint\",\"filepath\":\"uniworx-wflint\"}"
release-cli create --name "${VERSION}" --tag-name $CI_COMMIT_TAG --description .current-changelog.md \
--assets-link "{\"name\":\"uniworx\",\"url\":\"${PACKAGE_REGISTRY_URL}/${VERSION}/uniworx\",\"filepath\":\"/uniworx\"}" \
--assets-link "{\"name\":\"uniworxdb\",\"url\":\"${PACKAGE_REGISTRY_URL}/${VERSION}/uniworxdb\",\"filepath\":\"/uniworxdb\"}" \
--assets-link "{\"name\":\"uniworxload\",\"url\":\"${PACKAGE_REGISTRY_URL}/${VERSION}/uniworxload\",\"filepath\":\"/uniworxload\"}" \
--assets-link "{\"name\":\"uniworx-wflint\",\"url\":\"${PACKAGE_REGISTRY_URL}/${VERSION}/uniworx-wflint\",\"filepath\":\"/uniworx-wflint\"}"
# deploy:uniworx3:
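
Throughout the pipeline above, pairs of a bare `needs:` list and a separate `dependencies:` list are folded into GitLab's structured `needs:` entries, which express job ordering and artifact download in one place. A minimal sketch of the two styles, with an illustrative job name not taken from this pipeline:

    # before: `needs` only orders jobs, `dependencies` selects artifacts
    test:
      needs:
        - build
      dependencies:
        - build

    # after: one structured entry does both; `artifacts: false` would keep
    # the ordering but skip downloading the artifacts
    test:
      needs:
        - job: build
          artifacts: true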

.versionrc.js Normal file

@ -0,0 +1,27 @@
const standardVersionUpdaterYaml = require.resolve('standard-version-updater-yaml');
module.exports = {
scripts: {
// postbump: './sync-versions.hs && git add -- package.yaml', // moved to bumpFiles
postchangelog: 'sed \'s/^### \\[/## [/g\' -i CHANGELOG.md'
},
packageFiles: ['package.json', 'package.yaml'],
bumpFiles: [
{
filename: 'package.json',
type: 'json'
},
{
filename: 'package-lock.json',
type: 'json'
},
{
filename: 'package.yaml',
updater: standardVersionUpdaterYaml
}
],
commitUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/{{hash}}',
compareUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/{{previousTag}}...{{currentTag}}',
issueUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/{{id}}',
userUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/{{user}}'
};


@ -2,6 +2,46 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
## [24.3.0](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v24.2.1...v24.3.0) (2021-02-15)
### Features
* **minio:** use separate bucket for temporary files ([1cd79d3](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/1cd79d35e2761d84bb904a77e74d5cacb0b2244c))
* **personalised-sheet-files:** restrict download by exam ([a8f2688](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/a8f268852a256209f6ab187167c2a1c066618c4c))
### Bug Fixes
* **exam-bonus:** avoid divide by zero if all sheets are bonus ([0fd7e86](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/0fd7e86695f47047c9e4e1bb8efe9477103707ab)), closes [#671](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/671)
* **exam-bonus:** fix rounding ([854fa6b](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/854fa6b968ed01d60dea0d9ba6fc93d37e5ec361)), closes [#672](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/672)
## [24.2.1](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v24.2.0...v24.2.1) (2021-02-11)
### Bug Fixes
* **arc:** reduce lock contention ([1be391f](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/1be391f5f5bf2588939fea92809dd629c0a69d99))
## [24.2.0](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v24.1.5...v24.2.0) (2021-02-10)
### Features
* implement in-memory cache for file download ([36debd8](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/36debd865f6e74856c74bd658dc4694140183fed))
### Bug Fixes
* unbreak arc ([8ecb460](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/8ecb460f39f48557b5935b1cd18709ba197d3490))
* **jobs:** prevent offloading instances from deleting cron last exec ([e61b561](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/e61b5611b1568180aa3ccfc3e3b981eb9a13cd53))
## [24.1.5](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v24.1.4...v24.1.5) (2021-02-09)
## [24.1.4](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v24.1.3...v24.1.4) (2021-02-09)
## [24.1.3](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v24.1.2...v24.1.3) (2021-02-09)
## [24.1.2](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v24.1.1...v24.1.2) (2021-02-09)
## [24.1.1](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v24.1.0...v24.1.1) (2021-02-09)


@ -168,6 +168,7 @@ upload-cache:
auto-discover-region: "_env:UPLOAD_S3_AUTO_DISCOVER_REGION:true"
disable-cert-validation: "_env:UPLOAD_S3_DISABLE_CERT_VALIDATION:false"
upload-cache-bucket: "uni2work-uploads"
upload-tmp-bucket: "uni2work-tmp"
inject-files: 601
rechunk-files: 1201
@ -273,3 +274,7 @@ fallback-personalised-sheet-files-keys-expire: 2419200
download-token-expire: 604801
memcache-auth: true
file-source-arc:
maximum-ghost: 512
maximum-weight: 1073741824 # 1GiB


@ -1,6 +1,7 @@
database:
database: "_env:PGDATABASE_TEST:uniworx_test"
upload-cache-bucket: "uni2work-test-uploads"
upload-tmp-bucket: "uni2work-test-tmp"
log-settings:
detailed: true


@ -3003,6 +3003,9 @@ PersonalisedSheetFilesIgnored count@Int64: Es #{pluralDE count "wurde" "wurden"}
PersonalisedSheetFilesIgnoredIntro: Es wurden die folgenden Dateien ignoriert:
CourseUserHasPersonalisedSheetFilesFilter: Teilnehmer hat personalisierte Übungsblatt-Dateien für
SheetPersonalisedFilesUsersList: Liste von Teilnehmern mit personalisierten Übungsblatt-Dateien
PersonalisedSheetFilesDownloadRestrictByExamNone: Keine Einschränkung
PersonalisedSheetFilesDownloadRestrictByExam: Nur Prüfungsteilnehmer
PersonalisedSheetFilesDownloadRestrictByExamTip: Sollen nur personalisierte Übungsblatt-Dateien für jene Kursteilnehmer exportiert werden, die auch Teilnehmer einer bestimmten Prüfung sind?
AdminCrontabNotGenerated: (Noch) keine Crontab generiert
CronMatchAsap: ASAP


@ -3004,6 +3004,9 @@ PersonalisedSheetFilesIgnored count: #{count} uploaded #{pluralEN count "file wa
PersonalisedSheetFilesIgnoredIntro: The following files were ignored:
CourseUserHasPersonalisedSheetFilesFilter: Participant has personalised sheet files for
SheetPersonalisedFilesUsersList: List of course participants who have personalised sheet files
PersonalisedSheetFilesDownloadRestrictByExamNone: No restriction
PersonalisedSheetFilesDownloadRestrictByExam: Restrict to exam participants
PersonalisedSheetFilesDownloadRestrictByExamTip: Only download personalised sheet files for participants also registered for a certain exam?
AdminCrontabNotGenerated: Crontab not (yet) generated
CronMatchAsap: ASAP

package-lock.json generated

@ -1,6 +1,6 @@
{
"name": "uni2work",
"version": "24.1.2",
"version": "24.3.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@ -19531,6 +19531,15 @@
}
}
},
"standard-version-updater-yaml": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/standard-version-updater-yaml/-/standard-version-updater-yaml-1.0.2.tgz",
"integrity": "sha512-hTaNAnsQ7HznYbt489qVPYs4lvZ5q6pVwZJ7kmPMhYmvNzq7hZnQoImTYvEB9hgkx/moBJkqF38Dp82xy+dqvw==",
"dev": true,
"requires": {
"yaml": "^1.10.0"
}
},
"static-extend": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz",


@ -1,6 +1,6 @@
{
"name": "uni2work",
"version": "24.1.2",
"version": "24.3.0",
"description": "",
"keywords": [],
"author": "",
@ -41,16 +41,6 @@
"git add"
]
},
"standard-version": {
"scripts": {
"postbump": "./sync-versions.hs && git add -- package.yaml",
"postchangelog": "sed 's/^### \\[/## [/g' -i CHANGELOG.md"
},
"commitUrlFormat": "https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/{{hash}}",
"compareUrlFormat": "https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/{{previousTag}}...{{currentTag}}",
"issueUrlFormat": "https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/{{id}}",
"userUrlFormat": "https://gitlab2.rz.ifi.lmu.de/{{user}}"
},
"browserslist": [
"defaults"
],
@ -108,6 +98,7 @@
"sass-loader": "^7.3.1",
"semver": "^6.3.0",
"standard-version": "^9.1.0",
"standard-version-updater-yaml": "^1.0.2",
"style-loader": "^0.23.1",
"terser-webpack-plugin": "^2.3.8",
"tmp": "^0.1.0",


@ -1,6 +1,5 @@
name: uniworx
version: 24.1.2
name: uniworx
version: 24.3.0
dependencies:
- base
- yesod
@ -173,14 +172,14 @@ dependencies:
- list-t
- topograph
- network-uri
- psqueues
- nonce
other-extensions:
- GeneralizedNewtypeDeriving
- IncoherentInstances
- OverloadedLists
- UndecidableInstances
- ApplicativeDo
default-extensions:
- OverloadedStrings
- PartialTypeSignatures
@ -235,7 +234,6 @@ default-extensions:
- EmptyDataDeriving
- StandaloneKindSignatures
- NoStarIsType
ghc-options:
- -Wall
- -Wmissing-home-modules
@ -248,7 +246,6 @@ ghc-options:
- -fno-max-relevant-binds
- -j
- -freduction-depth=0
when:
- condition: flag(pedantic)
ghc-options:
@ -264,13 +261,8 @@ when:
else:
ghc-options:
- -O -fllvm
# The library contains all of our application code. The executable
# defined below is just a thin wrapper.
library:
source-dirs: src
# Runnable executable for our application
executables:
uniworx:
main: main.hs
@ -327,8 +319,6 @@ executables:
when:
- condition: flag(library-only)
buildable: false
# Test suite
tests:
yesod:
main: Main.hs
@ -360,8 +350,6 @@ tests:
when:
- condition: "!flag(pedantic)"
buildable: false
# Define flags used by "yesod devel" to make compilation faster
flags:
library-only:
description: Build for use with "yesod devel"


@ -104,6 +104,8 @@ import Web.ServerSession.Core (StorageException(..))
import GHC.RTS.Flags (getRTSFlags)
import qualified Prometheus
-- Import all relevant handler modules here.
-- (HPack takes care to add new modules to our cabal file nowadays.)
import Handler.News
@ -185,6 +187,11 @@ makeFoundation appSettings''@AppSettings{..} = do
appJobState <- liftIO newEmptyTMVarIO
appHealthReport <- liftIO $ newTVarIO Set.empty
appFileSourceARC <- for appFileSourceARCConf $ \ARCConf{..} -> do
ah <- initARCHandle arccMaximumGhost arccMaximumWeight
void . Prometheus.register $ arcMetrics ARCFileSource ah
return ah
-- We need a log function to create a connection pool. We need a connection
-- pool to create our foundation. And we need our foundation to get a
-- logging function. To get out of this loop, we initially create a
@ -274,8 +281,9 @@ makeFoundation appSettings''@AppSettings{..} = do
conn <- Minio.connect minioConf
let isBucketExists Minio.BucketAlreadyOwnedByYou = True
isBucketExists _ = False
either throwM return <=< Minio.runMinioWith conn $
either throwM return <=< Minio.runMinioWith conn $ do
handleIf isBucketExists (const $ return ()) $ Minio.makeBucket appUploadCacheBucket Nothing
handleIf isBucketExists (const $ return ()) $ Minio.makeBucket appUploadTmpBucket Nothing
return conn
$logDebugS "Runtime configuration" $ tshow appSettings'


@ -60,6 +60,7 @@ data UniWorX = UniWorX
, appUploadCache :: Maybe MinioConn
, appVerpSecret :: VerpSecret
, appAuthKey :: Auth.Key
, appFileSourceARC :: Maybe (ARCHandle (FileContentChunkReference, (Int, Int)) Int ByteString)
}
makeLenses_ ''UniWorX


@ -681,7 +681,7 @@ postCUsersR tid ssh csh = do
shId <- runDB . getKeyBy404 $ CourseSheet cid shn
archiveName <- fmap (flip addExtension (unpack extensionZip) . unpack) . ap getMessageRender . pure $
MsgPersonalisedSheetFilesArchiveName courseTerm courseSchool courseShorthand shn
sendResponse <=< serveZipArchive' archiveName $ sourcePersonalisedSheetFiles cid (Just shId) (Just selectedUsers) anonMode
sendResponse <=< serveZipArchive' archiveName $ sourcePersonalisedSheetFiles cid (Just shId) (Just selectedUsers) anonMode Set.empty
let headingLong = [whamlet|_{MsgMenuCourseMembers} #{courseName} #{tid}|]


@ -50,6 +50,8 @@ import qualified Control.Monad.State as State
import Control.Monad.Memo (MemoStateT, MonadMemo(..), for2)
import Utils.Memo
{-# ANN module ("HLint: ignore Use newtype instead of data" :: String) #-}
data PersonalisedSheetFileUnresolved a
= PSFUnresolvedDirectory a
@ -60,6 +62,18 @@ data PersonalisedSheetFileUnresolved a
makePrisms ''PersonalisedSheetFileUnresolved
data PersonalisedSheetFilesRestriction
= PSFRExamRegistered { psfrExam :: ExamId }
deriving (Eq, Ord, Read, Show, Generic, Typeable)
makeLenses_ ''PersonalisedSheetFilesRestriction
data PersonalisedSheetFilesForm = PersonalisedSheetFilesForm
{ psffAnonymous :: PersonalisedSheetFilesDownloadAnonymous
, psffRestrictions :: Set PersonalisedSheetFilesRestriction
} deriving (Eq, Ord, Read, Show, Generic, Typeable)
personalisedSheetFileTypes :: [SheetFileType]
personalisedSheetFileTypes = filter (/= SheetMarking) universeF
@ -199,8 +213,9 @@ sourcePersonalisedSheetFiles :: forall m.
-> Maybe SheetId
-> Maybe (Set UserId)
-> PersonalisedSheetFilesDownloadAnonymous
-> Set PersonalisedSheetFilesRestriction
-> ConduitT () (Either PersonalisedSheetFile DBFile) (SqlPersistT m) ()
sourcePersonalisedSheetFiles cId mbsid mbuids anonMode = do
sourcePersonalisedSheetFiles cId mbsid mbuids anonMode restrs = do
(mbIdx, cIDKey) <- lift . newPersonalisedFilesKey $ maybe (Left cId) Right mbsid
let
genSuffixes uid = case anonMode of
@ -232,6 +247,10 @@ sourcePersonalisedSheetFiles cId mbsid mbuids anonMode = do
case mbuids of
Just uids -> E.where_ $ courseParticipant E.^. CourseParticipantUser `E.in_` E.valList (Set.toList uids)
Nothing -> E.where_ $ courseParticipant E.^. CourseParticipantState E.==. E.val CourseParticipantActive
forM_ restrs $ \case
PSFRExamRegistered{..} -> E.where_ . E.exists . E.from $ \examRegistration ->
E.where_ $ examRegistration E.^. ExamRegistrationExam E.==. E.val psfrExam
E.&&. examRegistration E.^. ExamRegistrationUser E.==. courseParticipant E.^. CourseParticipantUser
return (courseParticipant, personalisedSheetFile)
toRefs = awaitForever $ \(Entity _ cPart@CourseParticipant{..}, mbPFile) -> do
@ -372,23 +391,26 @@ getPersonalFilesR cId mbsid = do
<*> traverse get404 mbsid
cRoute <- getCurrentRoute
((anonRes, anonFormWdgt), anonEnctype) <- runFormGet . renderAForm FormStandard $
apopt (selectField optionsFinite) (fslI MsgPersonalisedSheetFilesDownloadAnonymousField & setTooltip MsgPersonalisedSheetFilesDownloadAnonymousFieldTip) (Just PersonalisedSheetFilesDownloadAnonymous)
let toRestrictions = maybe Set.empty $ Set.singleton . PSFRExamRegistered
((psfRes, psfWdgt), psfEnctype) <- runFormGet . renderAForm FormStandard $ PersonalisedSheetFilesForm
<$> apopt (selectField optionsFinite) (fslI MsgPersonalisedSheetFilesDownloadAnonymousField & setTooltip MsgPersonalisedSheetFilesDownloadAnonymousFieldTip) (Just PersonalisedSheetFilesDownloadAnonymous)
<*> fmap toRestrictions (aopt (examField (Just $ SomeMessage MsgPersonalisedSheetFilesDownloadRestrictByExamNone) cId) (fslI MsgPersonalisedSheetFilesDownloadRestrictByExam & setTooltip MsgPersonalisedSheetFilesDownloadRestrictByExamTip) (Just $ mbSheet ^? _Just . _sheetType . _examPart . from _SqlKey))
formResult anonRes $ \anonMode -> do
formResult psfRes $ \PersonalisedSheetFilesForm{..} -> do
archiveName <- fmap (flip addExtension (unpack extensionZip) . unpack) . ap getMessageRender . pure $ case mbSheet of
Nothing -> MsgCoursePersonalisedSheetFilesArchiveName courseTerm courseSchool courseShorthand
Just Sheet{..} -> MsgPersonalisedSheetFilesArchiveName courseTerm courseSchool courseShorthand sheetName
sendResponse <=< serveZipArchive' archiveName $ sourcePersonalisedSheetFiles cId mbsid Nothing anonMode
sendResponse <=< serveZipArchive' archiveName $ sourcePersonalisedSheetFiles cId mbsid Nothing psffAnonymous psffRestrictions
isModal <- hasCustomHeader HeaderIsModal
fmap toTypedContent . siteLayoutMsg MsgMenuSheetPersonalisedFiles $ do
setTitleI MsgMenuSheetPersonalisedFiles
wrapForm anonFormWdgt def
wrapForm psfWdgt def
{ formMethod = GET
, formAction = SomeRoute <$> cRoute
, formEncoding = anonEnctype
, formEncoding = psfEnctype
, formAttrs = formAttrs def <> bool mempty [("uw-no-navigate-away-prompt", ""), ("target", "_blank")] isModal
}


@ -170,6 +170,7 @@ examResultBonus bonusRule bonusPossible bonusAchieved = case bonusRule of
scalePasses :: Integer -> Rational
-- ^ Rescale passes so count of all sheets with pass is worth as many points as sum of all sheets with points
scalePasses passes
| pointsPossible <= 0, passesPossible <= 0 = 1 -- This arbitrarily identifies a pass as being worth one point if all sheets are `Bonus`; maybe weird
| pointsPossible <= 0 = toRational mPoints / fromInteger passesPossible
| passesPossible <= 0 = 0
| otherwise = fromInteger passes / fromInteger passesPossible * toRational pointsPossible
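-- Worked illustration (not part of the source): with passesPossible = 3
-- and pointsPossible = 20, scalePasses 2 = 2/3 * 20 = 40/3 points.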
@ -177,7 +178,7 @@ examResultBonus bonusRule bonusPossible bonusAchieved = case bonusRule of
passesPossible = getSum $ numSheetsPasses bonusPossible
pointsPossible = getSum $ sumSheetsPoints bonusPossible - sumSheetsPassPoints bonusPossible
roundToPoints' mult = (* mult) . roundToPoints . (/ toRational mult)
roundToPoints' mult = (* mult) . (realToFrac :: Uni -> Points) . roundToPoints . (/ toRational mult)
examGrade :: ( MonoFoldable mono
, Element mono ~ ExamResultPoints


@ -31,6 +31,30 @@ data SourceFilesException
deriving anyclass (Exception)
fileChunkARC :: ( MonadHandler m
, HandlerSite m ~ UniWorX
)
=> (FileContentChunkReference, (Int, Int))
-> m (Maybe ByteString)
-> m (Maybe ByteString)
fileChunkARC k getChunkDB = do
arc <- getsYesod appFileSourceARC
case arc of
Nothing -> getChunkDB
Just ah -> do
cachedARC' ah k $ \case
Nothing -> do
chunk' <- getChunkDB
for chunk' $ \chunk -> do
let w = length chunk
$logDebugS "fileChunkARC" "ARC miss"
liftIO $ observeSourcedChunk StorageDB w
return (chunk, w)
Just x@(_, w) -> do
$logDebugS "fileChunkARC" "ARC hit"
liftIO $ Just x <$ observeSourcedChunk StorageARC w
sourceFileDB :: (MonadThrow m, MonadHandler m, HandlerSite m ~ UniWorX)
=> FileContentReference -> ConduitT () ByteString (SqlPersistT m) ()
sourceFileDB fileReference = do
@ -38,13 +62,13 @@ sourceFileDB fileReference = do
let retrieveChunk chunkHash = \case
Nothing -> return Nothing
Just start -> do
chunk <- E.selectMaybe . E.from $ \fileContentChunk -> do
E.where_ $ fileContentChunk E.^. FileContentChunkId E.==. E.val chunkHash
return $ E.substring (fileContentChunk E.^. FileContentChunkContent) (E.val start) (E.val dbChunksize)
let getChunkDB = fmap (fmap E.unValue) . E.selectMaybe . E.from $ \fileContentChunk -> do
E.where_ $ fileContentChunk E.^. FileContentChunkId E.==. E.val chunkHash
return $ E.substring (fileContentChunk E.^. FileContentChunkContent) (E.val start) (E.val dbChunksize)
chunk <- fileChunkARC (unFileContentChunkKey chunkHash, (start, dbChunksize)) getChunkDB
case chunk of
Nothing -> throwM SourceFilesContentUnavailable
Just (E.Value c) -> do
observeSourcedChunk StorageDB $ olength c
Just c -> do
return . Just . (c, ) $ if
| olength c >= dbChunksize -> Just $ start + dbChunksize
| otherwise -> Nothing
@ -185,13 +209,13 @@ respondFileConditional representationLastModified cType FileReference{..} = do
forM_ relevantChunks $ \(chunkHash, offset, cLength)
-> let retrieveChunk = \case
Just (start, cLength') | cLength' > 0 -> do
chunk <- E.selectMaybe . E.from $ \fileContentChunk -> do
E.where_ $ fileContentChunk E.^. FileContentChunkHash E.==. E.val chunkHash
return $ E.substring (fileContentChunk E.^. FileContentChunkContent) (E.val start) (E.val $ min cLength' dbChunksize)
let getChunkDB = fmap (fmap E.unValue) . E.selectMaybe . E.from $ \fileContentChunk -> do
E.where_ $ fileContentChunk E.^. FileContentChunkHash E.==. E.val chunkHash
return $ E.substring (fileContentChunk E.^. FileContentChunkContent) (E.val start) (E.val $ min cLength' dbChunksize)
chunk <- fileChunkARC (chunkHash, (fromIntegral start, fromIntegral $ min cLength' dbChunksize)) getChunkDB
case chunk of
Nothing -> throwM SourceFilesContentUnavailable
Just (E.Value c) -> do
observeSourcedChunk StorageDB $ olength c
Just c -> do
return . Just . (c, ) $ if
| fromIntegral (olength c) >= min cLength' dbChunksize
-> Just (start + dbChunksize, cLength' - fromIntegral (olength c))


@ -501,8 +501,8 @@ handleJobs' wNum = C.mapM_ $ \jctl -> hoist delimitInternalState . withJobWorker
handleCmd (JobCtlPerform jId) = do
jMode <- getsYesod $ view _appJobMode
case jMode of
JobsLocal{} -> performLocal
JobsOffload -> performOffload
_otherwise -> performLocal
where
performOffload = hoist atomically $ do
JobOffloadHandler{..} <- lift . readTMVar =<< asks jobOffload
@ -520,8 +520,9 @@ handleJobs' wNum = C.mapM_ $ \jctl -> hoist delimitInternalState . withJobWorker
instanceID' <- getsYesod $ view instanceID
now <- liftIO getCurrentTime
jMode <- getsYesod $ view _appJobMode
let cleanup = do
when queuedJobWriteLastExec $
when (queuedJobWriteLastExec && modeWriteLastExec) $
void $ upsertBy
(UniqueCronLastExec queuedJobContent)
CronLastExec
@ -533,20 +534,25 @@ handleJobs' wNum = C.mapM_ $ \jctl -> hoist delimitInternalState . withJobWorker
, CronLastExecInstance =. instanceID'
]
delete jId
modeWriteLastExec = case jMode of
JobsDrop{..} -> jobsWriteFakeLastExec
_otherwise -> True
case performJob content of
JobHandlerAtomic act -> runDBJobs . setSerializableBatch $ do
act & withJobWorkerState wNum (JobWorkerExecJob content)
hoist lift cleanup
JobHandlerException act -> do
act & withJobWorkerState wNum (JobWorkerExecJob content)
runDB $ setSerializableBatch cleanup
JobHandlerAtomicWithFinalizer act fin -> do
res <- runDBJobs . setSerializableBatch $ do
res <- act & withJobWorkerState wNum (JobWorkerExecJob content)
case jMode of
JobsDrop{} -> runDB $ setSerializableBatch cleanup
_otherwise -> case performJob content of
JobHandlerAtomic act -> runDBJobs . setSerializableBatch $ do
act & withJobWorkerState wNum (JobWorkerExecJob content)
hoist lift cleanup
return res
fin res
JobHandlerException act -> do
act & withJobWorkerState wNum (JobWorkerExecJob content)
runDB $ setSerializableBatch cleanup
JobHandlerAtomicWithFinalizer act fin -> do
res <- runDBJobs . setSerializableBatch $ do
res <- act & withJobWorkerState wNum (JobWorkerExecJob content)
hoist lift cleanup
return res
fin res
handleCmd JobCtlDetermineCrontab = do
$logDebugS logIdent "DetermineCrontab..."
newCTab <- liftHandler . runDB $ setSerializableBatch determineCrontab'
@ -615,21 +621,26 @@ jLocked jId act = flip evalStateT False $ do
pruneLastExecs :: Crontab JobCtl -> DB ()
pruneLastExecs crontab = runConduit $ selectSource [] [] .| C.mapM_ ensureCrontab
pruneLastExecs crontab = do
jMode <- getsYesod $ view _appJobMode
when (is _JobsLocal jMode) $ do
Sum deleted <- runConduit $ selectSource [] [] .| C.foldMapM ensureCrontab
when (deleted > 0) $
$logInfoS "pruneLastExeces" [st|Deleted #{deleted} entries|]
where
ensureCrontab (Entity leId CronLastExec{..}) = void . runMaybeT $ do
ensureCrontab :: Entity CronLastExec -> DB (Sum Natural)
ensureCrontab (Entity leId CronLastExec{..}) = maybeT (return mempty) $ do
now <- liftIO getCurrentTime
flushInterval <- MaybeT . getsYesod . view $ appSettings . _appJobFlushInterval
if
| abs (now `diffUTCTime` cronLastExecTime) > flushInterval * 2
-> return ()
-> return mempty
| Just job <- Aeson.parseMaybe parseJSON cronLastExecJob
, not $ HashMap.member (JobCtlQueue job) crontab
-> lift $ delete leId
-> Sum 1 <$ lift (delete leId)
| otherwise
-> return ()
-> return mempty
determineCrontab' :: DB (Crontab JobCtl)
determineCrontab' = (\ct -> ct <$ pruneLastExecs ct) =<< determineCrontab


@ -49,7 +49,7 @@ determineCrontab = execWriterT $ do
}
Nothing -> mempty
when (is _JobsLocal appJobMode) $ do
when (isn't _JobsOffload appJobMode) $ do
case appJobFlushInterval of
Just interval -> tell $ HashMap.singleton
JobCtlFlush


@ -29,12 +29,11 @@ mkJobOffloadHandler :: forall m.
=> PostgresConf -> JobMode
-> Maybe (m JobOffloadHandler)
mkJobOffloadHandler dbConf jMode
| is _JobsLocal jMode, hasn't (_jobsAcceptOffload . only True) jMode = Nothing
| not shouldListen = Nothing
| otherwise = Just $ do
jobOffloadOutgoing <- newTVarIO mempty
jobOffloadHandler <- allocateAsync . bracket (liftIO . PG.connectPostgreSQL $ pgConnStr dbConf) (liftIO . PG.close) $ \pgConn -> do
myPid <- liftIO $ PG.getBackendPID pgConn
let shouldListen = has (_jobsAcceptOffload . only True) jMode
when shouldListen $
void . liftIO $ PG.execute pgConn "LISTEN ?" (PG.Only $ PG.Identifier jobOffloadChannel)
@ -68,3 +67,4 @@ mkJobOffloadHandler dbConf jMode
Right jId -> void . liftIO $ PG.execute pgConn "NOTIFY ?, ?" (PG.Identifier jobOffloadChannel, encodeUtf8 $ toPathPiece jId)
return JobOffloadHandler{..}
where shouldListen = has (_jobsAcceptOffload . only True) jMode


@ -41,6 +41,8 @@ classifyChangelogItem = \case
ChangelogStoredMarkup -> ChangelogItemBugfix
ChangelogFixPersonalisedSheetFilesKeep -> ChangelogItemBugfix
ChangelogHonorRoomHidden -> ChangelogItemBugfix
ChangelogFixSheetBonusRounding -> ChangelogItemBugfix
ChangelogFixExamBonusAllSheetsBonus -> ChangelogItemBugfix
_other -> ChangelogItemFeature
changelogItemDays :: Map ChangelogItem Day


@ -188,7 +188,7 @@ data AppSettings = AppSettings
, appMemcachedConf :: Maybe MemcachedConf
, appUploadCacheConf :: Maybe Minio.ConnectInfo
, appUploadCacheBucket :: Minio.Bucket
, appUploadCacheBucket, appUploadTmpBucket :: Minio.Bucket
, appInjectFiles :: Maybe NominalDiffTime
, appRechunkFiles :: Maybe NominalDiffTime
, appCheckMissingFiles :: Maybe NominalDiffTime
@ -216,10 +216,16 @@ data AppSettings = AppSettings
, appStudyFeaturesRecacheRelevanceInterval :: NominalDiffTime
, appMemcacheAuth :: Bool
, appFileSourceARCConf :: Maybe (ARCConf Int)
} deriving Show
data JobMode = JobsLocal { jobsAcceptOffload :: Bool }
| JobsOffload
| JobsDrop
{ jobsAcceptOffload :: Bool
, jobsWriteFakeLastExec :: Bool
}
deriving (Eq, Ord, Read, Show, Generic, Typeable)
deriving anyclass (Hashable)
@ -335,6 +341,11 @@ data VerpMode = VerpNone
| Verp { verpPrefix :: Text, verpSeparator :: Char }
deriving (Eq, Show, Read, Generic)
data ARCConf w = ARCConf
{ arccMaximumGhost :: Int
, arccMaximumWeight :: w
} deriving (Eq, Ord, Read, Show, Generic, Typeable)
nullaryPathPiece ''ApprootScope $ camelToPathPiece' 1
pathPieceJSON ''ApprootScope
pathPieceJSONKey ''ApprootScope
@ -361,6 +372,10 @@ deriveJSON defaultOptions
, constructorTagModifier = camelToPathPiece' 1
} ''JobMode
deriveJSON defaultOptions
{ fieldLabelModifier = camelToPathPiece' 1
} ''ARCConf
instance FromJSON LdapConf where
parseJSON = withObject "LdapConf" $ \o -> do
ldapTls <- o .:? "tls"
@ -611,6 +626,7 @@ instance FromJSON AppSettings where
appUploadCacheConf <- assertM (not . null . Minio.connectHost) <$> o .:? "upload-cache"
appUploadCacheBucket <- o .: "upload-cache-bucket"
appUploadTmpBucket <- o .: "upload-tmp-bucket"
appFallbackPersonalisedSheetFilesKeysExpire <- o .: "fallback-personalised-sheet-files-keys-expire"
@ -623,6 +639,8 @@ instance FromJSON AppSettings where
appStudyFeaturesRecacheRelevanceWithin <- o .:? "study-features-recache-relevance-within"
appStudyFeaturesRecacheRelevanceInterval <- o .: "study-features-recache-relevance-interval"
appFileSourceARCConf <- assertM ((||) <$> ((> 0) . arccMaximumGhost) <*> ((> 0) . arccMaximumWeight)) <$> o .:? "file-source-arc"
return AppSettings{..}
makeClassy_ ''AppSettings


@ -38,6 +38,7 @@ import Utils.I18n as Utils
import Utils.NTop as Utils
import Utils.HttpConditional as Utils
import Utils.Persist as Utils
import Utils.ARC as Utils
import Text.Blaze (Markup, ToMarkup(..))

src/Utils/ARC.hs Normal file

@ -0,0 +1,275 @@
module Utils.ARC
( ARCTick
, ARC, initARC
, arcAlterF, lookupARC, insertARC
, ARCHandle, initARCHandle, cachedARC, cachedARC'
, readARCHandle
, arcRecentSize, arcFrequentSize, arcGhostRecentSize, arcGhostFrequentSize
, getARCRecentWeight, getARCFrequentWeight
, describeARC
) where
import ClassyPrelude
import Data.OrdPSQ (OrdPSQ)
import qualified Data.OrdPSQ as OrdPSQ
import Control.Lens
-- https://web.archive.org/web/20210115184012/https://dbs.uni-leipzig.de/file/ARC.pdf
newtype ARCTick = ARCTick { _getARCTick :: Word64 }
deriving (Eq, Ord, Show, Typeable)
deriving newtype (NFData)
makeLenses ''ARCTick
data ARC k w v = ARC
{ arcRecent, arcFrequent :: !(OrdPSQ k ARCTick (v, w))
, arcGhostRecent, arcGhostFrequent :: !(OrdPSQ k ARCTick ())
, arcRecentWeight, arcFrequentWeight :: !w
, arcTargetRecent, arcMaximumWeight :: !w
, arcMaximumGhost :: !Int
}
instance (NFData k, NFData w, NFData v) => NFData (ARC k w v) where
rnf ARC{..} = rnf arcRecent
`seq` rnf arcFrequent
`seq` rnf arcGhostRecent
`seq` rnf arcGhostFrequent
`seq` rnf arcRecentWeight
`seq` rnf arcFrequentWeight
`seq` rnf arcTargetRecent
`seq` rnf arcMaximumWeight
`seq` rnf arcMaximumGhost
describeARC :: Show w
=> ARC k w v
-> String
describeARC ARC{..} = intercalate ", "
[ "arcRecent: " <> show (OrdPSQ.size arcRecent)
, "arcFrequent: " <> show (OrdPSQ.size arcFrequent)
, "arcGhostRecent: " <> show (OrdPSQ.size arcGhostRecent)
, "arcGhostFrequent: " <> show (OrdPSQ.size arcGhostFrequent)
, "arcRecentWeight: " <> show arcRecentWeight
, "arcFrequentWeight: " <> show arcFrequentWeight
, "arcTargetRecent: " <> show arcTargetRecent
, "arcMaximumWeight: " <> show arcMaximumWeight
, "arcMaximumGhost: " <> show arcMaximumGhost
]
arcRecentSize, arcFrequentSize, arcGhostRecentSize, arcGhostFrequentSize :: ARC k w v -> Int
arcRecentSize = OrdPSQ.size . arcRecent
arcFrequentSize = OrdPSQ.size . arcFrequent
arcGhostRecentSize = OrdPSQ.size . arcGhostRecent
arcGhostFrequentSize = OrdPSQ.size . arcGhostFrequent
getARCRecentWeight, getARCFrequentWeight :: ARC k w v -> w
getARCRecentWeight = arcRecentWeight
getARCFrequentWeight = arcFrequentWeight
initialARCTick :: ARCTick
initialARCTick = ARCTick 0
initARC :: forall k w v.
Integral w
=> Int -- ^ @arcMaximumGhost@
-> w -- ^ @arcMaximumWeight@
-> (ARC k w v, ARCTick)
initARC arcMaximumGhost arcMaximumWeight
| arcMaximumWeight < 0 = error "initARC given negative maximum weight"
| arcMaximumGhost < 0 = error "initARC given negative maximum ghost size"
| otherwise = (, initialARCTick) ARC
{ arcRecent = OrdPSQ.empty
, arcFrequent = OrdPSQ.empty
, arcGhostRecent = OrdPSQ.empty
, arcGhostFrequent = OrdPSQ.empty
, arcRecentWeight = 0
, arcFrequentWeight = 0
, arcMaximumWeight
, arcTargetRecent = 0
, arcMaximumGhost
}
infixl 6 |-
(|-) :: (Num a, Ord a) => a -> a -> a
(|-) m s
| s >= m = 0
| otherwise = m - s
arcAlterF :: forall f k w v.
( Ord k
, Functor f
, Integral w
)
=> k
-> (Maybe (v, w) -> f (Maybe (v, w)))
-> ARC k w v
-> ARCTick -> f (ARC k w v, ARCTick)
-- | Unchecked precondition: item weights are always less than `arcMaximumWeight`
arcAlterF k f oldARC@ARC{..} now
| later <= initialARCTick = uncurry (arcAlterF k f) $ initARC arcMaximumGhost arcMaximumWeight
| otherwise = (, later) <$> if
| Just (_p, x@(_, w), arcFrequent') <- OrdPSQ.deleteView k arcFrequent
-> f (Just x) <&> \(fromMaybe x -> x'@(_, w'))
-> let (arcFrequent'', arcFrequentWeight'', arcGhostFrequent') = evictToSize (arcMaximumWeight |- arcTargetRecent |- w') arcFrequent' (arcFrequentWeight - w) arcGhostFrequent
in oldARC
{ arcFrequent = OrdPSQ.insert k now x' arcFrequent''
, arcFrequentWeight = arcFrequentWeight'' + w'
, arcGhostFrequent = arcGhostFrequent'
}
| Just (_p, x@(_, w), arcRecent') <- OrdPSQ.deleteView k arcRecent
-> f (Just x) <&> \(fromMaybe x -> x'@(_, w'))
-> let (arcFrequent', arcFrequentWeight', arcGhostFrequent') = evictToSize (arcMaximumWeight |- arcTargetRecent |- w') arcFrequent arcFrequentWeight arcGhostFrequent
in oldARC
{ arcRecent = arcRecent'
, arcRecentWeight = arcRecentWeight - w
, arcFrequent = OrdPSQ.insert k now x' arcFrequent'
, arcFrequentWeight = arcFrequentWeight' + w'
, arcGhostFrequent = arcGhostFrequent'
}
| Just (_p, (), arcGhostRecent') <- OrdPSQ.deleteView k arcGhostRecent
-> f Nothing <&> \case
Nothing -> oldARC
{ arcGhostRecent = OrdPSQ.insert k now () arcGhostRecent'
}
Just x@(_, w)
-> let arcTargetRecent' = min arcMaximumWeight $ arcTargetRecent + max avgWeight (round $ toRational (OrdPSQ.size arcGhostFrequent) / toRational (OrdPSQ.size arcGhostRecent) * toRational avgWeight)
(arcFrequent', arcFrequentWeight', arcGhostFrequent') = evictToSize (arcMaximumWeight |- arcTargetRecent' |- w) arcFrequent arcFrequentWeight arcGhostFrequent
(arcRecent', arcRecentWeight', arcGhostRecent'') = evictToSize (max arcTargetRecent' $ arcMaximumWeight |- arcFrequentWeight' |- w) arcRecent arcRecentWeight arcGhostRecent'
in oldARC
{ arcRecent = arcRecent'
, arcFrequent = OrdPSQ.insert k now x arcFrequent'
, arcGhostRecent = arcGhostRecent''
, arcGhostFrequent = arcGhostFrequent'
, arcRecentWeight = arcRecentWeight'
, arcFrequentWeight = arcFrequentWeight' + w
, arcTargetRecent = arcTargetRecent'
}
| Just (_p, (), arcGhostFrequent') <- OrdPSQ.deleteView k arcGhostFrequent
-> f Nothing <&> \case
Nothing -> oldARC
{ arcGhostFrequent = OrdPSQ.insert k now () arcGhostFrequent'
}
Just x@(_, w)
-> let arcTargetRecent' = arcTargetRecent |- max avgWeight (round $ toRational (OrdPSQ.size arcGhostRecent) / toRational (OrdPSQ.size arcGhostFrequent) * toRational avgWeight)
(arcFrequent', arcFrequentWeight', arcGhostFrequent'') = evictToSize (arcMaximumWeight |- arcTargetRecent' |- w) arcFrequent arcFrequentWeight arcGhostFrequent'
(arcRecent', arcRecentWeight', arcGhostRecent') = evictToSize (max arcTargetRecent' $ arcMaximumWeight |- arcFrequentWeight' |- w) arcRecent arcRecentWeight arcGhostRecent
in oldARC
{ arcRecent = arcRecent'
, arcFrequent = OrdPSQ.insert k now x arcFrequent'
, arcGhostRecent = arcGhostRecent'
, arcGhostFrequent = arcGhostFrequent''
, arcRecentWeight = arcRecentWeight'
, arcFrequentWeight = arcFrequentWeight' + w
, arcTargetRecent = arcTargetRecent'
}
| otherwise -> f Nothing <&> \case
Nothing -> oldARC
{ arcGhostRecent = OrdPSQ.insert k now () $ evictGhostToCount arcGhostRecent
}
Just x@(_, w)
-> let (arcRecent', arcRecentWeight', arcGhostRecent') = evictToSize (max arcTargetRecent (arcMaximumWeight |- arcFrequentWeight) |- w) arcRecent arcRecentWeight arcGhostRecent
in oldARC
{ arcRecent = OrdPSQ.insert k now x arcRecent'
, arcRecentWeight = arcRecentWeight' + w
, arcGhostRecent = arcGhostRecent'
}
where
avgWeight = round $ toRational (arcRecentWeight + arcFrequentWeight) / toRational (OrdPSQ.size arcFrequent + OrdPSQ.size arcRecent)
later :: ARCTick
later = over getARCTick succ now
evictToSize :: w -> OrdPSQ k ARCTick (v, w) -> w -> OrdPSQ k ARCTick () -> (OrdPSQ k ARCTick (v, w), w, OrdPSQ k ARCTick ())
evictToSize tSize c cSize ghostC
| cSize <= tSize = (c, cSize, ghostC)
| Just (k', p', (_, w'), c') <- OrdPSQ.minView c = evictToSize tSize c' (cSize - w') . evictGhostToCount $ OrdPSQ.insert k' p' () ghostC
| otherwise = error "evictToSize: cannot reach required size through eviction"
evictGhostToCount :: OrdPSQ k ARCTick () -> OrdPSQ k ARCTick ()
evictGhostToCount c
| OrdPSQ.size c <= arcMaximumGhost = c
| Just (_, _, _, c') <- OrdPSQ.minView c = evictGhostToCount c'
| otherwise = error "evictGhostToCount: cannot reach required count through eviction"
lookupARC :: forall k w v.
( Ord k
, Integral w
)
=> k
-> (ARC k w v, ARCTick)
-> Maybe (v, w)
lookupARC k = getConst . uncurry (arcAlterF k Const)
insertARC :: forall k w v.
( Ord k
, Integral w
)
=> k
-> Maybe (v, w)
-> ARC k w v
-> ARCTick -> (ARC k w v, ARCTick)
insertARC k newVal = (runIdentity .) . arcAlterF k (const $ pure newVal)
newtype ARCHandle k w v = ARCHandle { _getARCHandle :: IORef (ARC k w v, ARCTick) }
deriving (Eq, Typeable)
initARCHandle :: forall k w v m.
( MonadIO m
, Integral w
)
=> Int -- ^ @arcMaximumGhost@
-> w -- ^ @arcMaximumWeight@
-> m (ARCHandle k w v)
initARCHandle maxGhost maxWeight = fmap ARCHandle . newIORef $ initARC maxGhost maxWeight
cachedARC' :: forall k w v m.
( MonadIO m
, Ord k
, Integral w
, NFData k, NFData w, NFData v
)
=> ARCHandle k w v
-> k
-> (Maybe (v, w) -> m (Maybe (v, w)))
-> m (Maybe v)
cachedARC' (ARCHandle arcVar) k f = do
oldVal <- lookupARC k <$> readIORef arcVar
newVal <- f oldVal
modifyIORef' arcVar $ force . uncurry (insertARC k newVal)
-- Using `modifyIORef'` instead of `atomicModifyIORef'` might very
-- well drop newer values computed during the update.
--
-- Currently we accept that to reduce lock contention.
--
-- Another alternative would be to use "optimistic locking",
-- i.e. read the current value of `arcVar`, compute an updated
-- version, and write it back atomically iff the `ARCTick` hasn't
-- changed.
--
-- This was not implemented to avoid the overhead and contention
-- likely associated with the atomic transaction required for the
-- "compare and swap"
return $ view _1 <$> newVal
cachedARC :: forall k w v m.
( MonadIO m
, Ord k
, Integral w
, NFData k, NFData w, NFData v
)
=> ARCHandle k w v
-> k
-> (Maybe (v, w) -> m (v, w))
-> m v
cachedARC h k f = fromMaybe (error "cachedARC: cachedARC' returned Nothing") <$> cachedARC' h k (fmap Just . f)
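-- A sketch (not part of this commit) of the "optimistic locking"
-- alternative described in the comment inside `cachedARC'` above: read
-- the state, compute the update, and commit atomically only if the
-- `ARCTick` is unchanged; otherwise re-run the update action. The
-- retry bound of 3 is an arbitrary illustration.
cachedARCCas :: forall k w v m.
                ( MonadIO m
                , Ord k
                , Integral w
                , NFData k, NFData w, NFData v
                )
             => ARCHandle k w v
             -> k
             -> (Maybe (v, w) -> m (Maybe (v, w)))
             -> m (Maybe v)
cachedARCCas (ARCHandle arcVar) k f = go (3 :: Int)
  where
    go n = do
      st@(_, tick0) <- readIORef arcVar
      newVal <- f $ lookupARC k st
      committed <- atomicModifyIORef' arcVar $ \st'@(_, tick') ->
        if tick' == tick0
          then (force $ uncurry (insertARC k newVal) st', True)
          else (st', False)
      if committed || n <= 0
        then return $ view _1 <$> newVal -- give up caching after repeated races
        else go (n - 1)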
readARCHandle :: MonadIO m
=> ARCHandle k w v
-> m (ARC k w v, ARCTick)
readARCHandle (ARCHandle arcVar) = readIORef arcVar
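
For orientation, a minimal usage sketch of this interface (key, weight budget and payload are illustrative; in the file-source cache that consumes this module, the weight is the chunk's byte length). Assumes ClassyPrelude is in scope, as in this module:

    demoARC :: IO ()
    demoARC = do
      ah <- initARCHandle 64 (1024 * 1024 :: Int) -- maxGhost, 1 MiB weight budget
      let load Nothing = do -- miss: fetch the payload and report its weight
            let bs = "expensive payload" :: ByteString
            return (bs, length bs)
          load (Just hit) = return hit -- hit: reuse the cached value and weight
      chunk <- cachedARC ah ("chunk-1" :: Text) load
      print chunk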


@ -29,9 +29,7 @@ import qualified Data.Sequence as Seq
import Database.Persist.Sql (deleteWhereCount)
import Control.Monad.Trans.Resource (allocate)
import qualified Data.UUID.V4 as UUID
import Control.Monad.Trans.Resource (allocate, register, release)
import qualified Database.Esqueleto as E
import qualified Database.Esqueleto.Utils as E
@ -40,6 +38,9 @@ import Data.Conduit.Algorithms.FastCDC (fastCDC)
import Control.Monad.Trans.Cont
import qualified Crypto.Nonce as Nonce
import System.IO.Unsafe
sinkFileDB :: (MonadCatch m, MonadHandler m, HandlerSite m ~ UniWorX, MonadUnliftIO m, BackendCompatible SqlBackend (YesodPersistBackend UniWorX), YesodPersist UniWorX)
=> Bool -- ^ Replace? Use only in serializable transaction
@ -96,12 +97,17 @@ sinkFileDB doReplace fileContentContent = do
where fileContentChunkContentBased = True
minioTmpGenerator :: Nonce.Generator
minioTmpGenerator = unsafePerformIO Nonce.new
{-# NOINLINE minioTmpGenerator #-}
sinkFileMinio :: (MonadCatch m, MonadHandler m, HandlerSite m ~ UniWorX, MonadUnliftIO m)
=> ConduitT () ByteString m ()
-> MaybeT m FileContentReference
-- ^ Cannot deal with zero length uploads
sinkFileMinio fileContentContent = do
uploadBucket <- getsYesod $ views appSettings appUploadCacheBucket
tmpBucket <- getsYesod $ views appSettings appUploadTmpBucket
chunk <- liftIO newEmptyMVar
let putChunks = do
nextChunk <- await
@ -119,16 +125,17 @@ sinkFileMinio fileContentContent = do
.| Crypto.sinkHash
runAppMinio $ do
tmpUUID <- liftIO UUID.nextRandom
let uploadName = ".tmp." <> toPathPiece tmpUUID
pooOptions = Minio.defaultPutObjectOptions
uploadName <- Nonce.nonce128urlT minioTmpGenerator
let pooOptions = Minio.defaultPutObjectOptions
{ Minio.pooCacheControl = Just "immutable"
}
Minio.putObject uploadBucket uploadName (C.unfoldM (\x -> fmap (, x) <$> takeMVar chunk) ()) Nothing pooOptions
removeObject <- withRunInIO $ \runInIO -> runInIO . register . runInIO $ Minio.removeObject tmpBucket uploadName
Minio.putObject tmpBucket uploadName (C.unfoldM (\x -> fmap (, x) <$> takeMVar chunk) ()) Nothing pooOptions
fileContentHash <- review _Wrapped <$> waitAsync sinkAsync
let dstName = minioFileReference # fileContentHash
copySrc = Minio.defaultSourceInfo
{ Minio.srcBucket = uploadBucket, Minio.srcObject = uploadName
{ Minio.srcBucket = tmpBucket
, Minio.srcObject = uploadName
}
copyDst = Minio.defaultDestinationInfo
{ Minio.dstBucket = uploadBucket
@ -137,7 +144,7 @@ sinkFileMinio fileContentContent = do
uploadExists <- handleIf minioIsDoesNotExist (const $ return False) $ True <$ Minio.statObject uploadBucket dstName Minio.defaultGetObjectOptions
unless uploadExists $
Minio.copyObject copyDst copySrc
Minio.removeObject uploadBucket uploadName
release removeObject
return fileContentHash
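
The temporary-object cleanup above relies on resourcet's register/release pair (imported at the top of the file). A minimal sketch of that idiom, with an illustrative cleanup action:

    example :: ResourceT IO ()
    example = do
      -- `register` queues a finalizer and returns its ReleaseKey; it runs
      -- when `release` is called or, at the latest, when the enclosing
      -- ResourceT scope exits (including on exceptions).
      key <- register $ putStrLn "remove temporary object"
      -- ... work that may fail; the finalizer stays armed ...
      release key -- run it now, exactly once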


@ -15,6 +15,8 @@ module Utils.Metrics
, observeDeletedUnreferencedFiles, observeDeletedUnreferencedChunks, observeInjectedFiles, observeRechunkedFiles
, registerJobWorkerQueueDepth
, observeMissingFiles
, ARCMetrics, ARCLabel(..)
, arcMetrics
) where
import Import.NoModel hiding (Vector, Info)
@ -228,6 +230,41 @@ missingFiles = unsafeRegister . vector "ref" $ gauge info
where info = Info "uni2work_missing_files_count"
"Number of files referenced from within database that are missing"
data ARCMetrics = ARCMetrics
data ARCLabel = ARCFileSource
deriving (Eq, Ord, Read, Show, Enum, Bounded, Generic, Typeable)
deriving anyclass (Universe, Finite)
nullaryPathPiece ''ARCLabel $ camelToPathPiece' 1
arcMetrics :: Integral w
=> ARCLabel
-> ARCHandle k w v
-> Metric ARCMetrics
arcMetrics lbl ah = Metric $ return (ARCMetrics, collectARCMetrics)
where
relabel l s (SampleGroup i t ss) = SampleGroup i t . flip map ss $ \(Sample k lbls v) -> Sample k ((l, s) : filter (views _1 $ (/=) l) lbls) v
labelArc = relabel "arc"
collectARCMetrics = map (labelArc $ toPathPiece lbl) <$> do
(arc, _) <- readARCHandle ah
return
[ SampleGroup sizeInfo GaugeType
[ Sample "arc_size" [("lru", "ghost-recent")] . encodeUtf8 . tshow $ arcGhostRecentSize arc
, Sample "arc_size" [("lru", "recent")] . encodeUtf8 . tshow $ arcRecentSize arc
, Sample "arc_size" [("lru", "frequent")] . encodeUtf8 . tshow $ arcFrequentSize arc
, Sample "arc_size" [("lru", "ghost-frequent")] . encodeUtf8 . tshow $ arcGhostFrequentSize arc
]
, SampleGroup weightInfo GaugeType
[ Sample "arc_weight" [("lru", "recent")] . encodeUtf8 . tshow . toInteger $ getARCRecentWeight arc
, Sample "arc_weight" [("lru", "frequent")] . encodeUtf8 . tshow . toInteger $ getARCFrequentWeight arc
]
]
sizeInfo = Info "arc_size"
"Number of entries in the ARC LRUs"
weightInfo = Info "arc_weight"
"Sum of weights of entries in the ARC LRUs"
withHealthReportMetrics :: MonadIO m => m HealthReport -> m HealthReport
withHealthReportMetrics act = do
@ -336,7 +373,7 @@ observeLoginOutcome plugin outcome
registerJobHeldLocksCount :: MonadIO m => TVar (Set QueuedJobId) -> m ()
registerJobHeldLocksCount = liftIO . void . register . jobHeldLocksCount
data FileChunkStorage = StorageMinio | StorageDB
data FileChunkStorage = StorageMinio | StorageDB | StorageARC
deriving (Eq, Ord, Read, Show, Enum, Bounded, Generic, Typeable)
deriving anyclass (Universe, Finite)
nullaryPathPiece ''FileChunkStorage $ camelToPathPiece' 1


@ -1,81 +0,0 @@
#!/usr/bin/env stack
-- stack runghc --package libyaml --package aeson --package unordered-containers --package text
{-# LANGUAGE OverloadedStrings
, LambdaCase
, PackageImports
, NamedFieldPuns
, RecordWildCards
#-}
import "libyaml" Text.Libyaml
import Control.Monad.Trans.Resource
import Data.Conduit
import qualified Data.Conduit.List as C
import qualified Data.Aeson as JSON
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as CBS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.HashMap.Lazy as HashMap
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.IO as Text
import qualified Data.Text.Encoding as Text
import Text.Printf
import System.IO (stderr)
main :: IO ()
main = do
Just packageJSON <- JSON.decode <$> LBS.readFile "package.json"
let
newVersion :: Text
Just (JSON.String newVersion) = HashMap.lookup ("version" :: Text) packageJSON
updatePackageYaml newVersion
updatePackageYaml :: Text -> IO ()
updatePackageYaml newVersion = do
(oldVersion, start, end) <- runResourceT . runConduit . (.|) (decodeFileMarked "package.yaml") $ do
awaitUntil $ \case
MarkedEvent{ yamlEvent = EventMappingStart _ _ _ } -> True
_ -> False
awaitUntil $ \case
MarkedEvent{ yamlEvent = EventScalar s _ _ _ }
| s == "version" -> True
_ -> False
_ <- await -- Throw away "version: "
Just MarkedEvent{ yamlEvent = EventScalar oldVersion' _ _ _, .. } <- await
let oldVersion = Text.decodeUtf8 oldVersion'
return (oldVersion, yamlStartMark, yamlEndMark)
encNewVersion <- runResourceT . runConduit . (.| encode) $ C.sourceList
[ EventStreamStart
, EventDocumentStart
, EventScalar (Text.encodeUtf8 newVersion) NoTag Any Nothing
, EventDocumentEnd
, EventStreamEnd
]
hPrintf stderr "package.yaml: %s -> %s\n" oldVersion newVersion
packageYaml <- BS.readFile "package.yaml"
BS.writeFile "package.yaml" . mconcat $
[ BS.take (fromIntegral $ yamlIndex start) packageYaml
, Text.encodeUtf8 . Text.strip $ Text.decodeUtf8 encNewVersion
, BS.drop (fromIntegral $ yamlIndex end) packageYaml
]
where
awaitUntil :: Monad m => (i -> Bool) -> ConduitM i o m ()
awaitUntil pred = do
nextIn <- await
case nextIn of
Nothing -> error "Ran out of input in awaitUntil"
Just inp
| pred inp -> leftover inp
Just _ -> awaitUntil pred


@ -0,0 +1,2 @@
$newline never
Automatische Berechnung von Prüfungsboni führt nun nicht mehr zu einem Fehler, wenn alle Übungsblätter im Bonus-Modus sind.


@ -0,0 +1,2 @@
$newline never
Automatic calculation of exam bonuses now no longer causes an error if all exercise sheets are set to Bonus.


@ -0,0 +1,2 @@
$newline never
Bonuspunkte werden nun wieder korrekt gerundet


@ -0,0 +1,2 @@
$newline never
Exam bonus points are now rounded correctly again


@ -0,0 +1,2 @@
$newline never
Beim Herunterladen von personalisierten Übungsblatt-Dateien kann der Download nun auf jene Teilnehmer eingeschränkt werden, die auch für eine bestimmte Prüfung registriert sind.


@ -0,0 +1,2 @@
$newline never
When downloading personalised exercise sheet files, the download can now be restricted to only those participants who are also registered for a certain exam.


@ -25,6 +25,8 @@ import Database.Persist.Sql (transactionUndo)
import Data.Bitraversable
import qualified Data.Set as Set
instance Arbitrary (FileReferenceResidual PersonalisedSheetFile) where
arbitrary = PersonalisedSheetFileResidual
@ -82,7 +84,7 @@ spec = withApp . describe "Personalised sheet file zip encoding" $ do
loadResolved (f, fRes) = (, fRes) <$> traverse toPureFile f
recoveredFiles <- runConduit $
sourcePersonalisedSheetFiles cid (Just shid) Nothing anonMode
sourcePersonalisedSheetFiles cid (Just shid) Nothing anonMode Set.empty
.| resolvePersonalisedSheetFiles fpL isDirectory cid shid
.| C.mapM loadFile
.| C.foldMap pure


@ -42,11 +42,12 @@ spec = withApp . describe "File handling" $ do
suppliedContent <- runConduit $ transPipe generalize fileContentContent
.| C.sinkLazy
readContent <- runConduit $ sourceFileMinio fileReferenceContentContent
.| C.takeE (succ $ olength suppliedContent)
.| C.sinkLazy
for_ [1..10] $ \_i -> do
readContent <- runConduit $ sourceFileMinio fileReferenceContentContent
.| C.takeE (succ $ olength suppliedContent)
.| C.sinkLazy
liftIO $ readContent `shouldBe` suppliedContent
liftIO $ readContent `shouldBe` suppliedContent
describe "DB" $ do
modifyMaxSuccess (`div` 10) . it "roundtrips" $ \(tSite, _) -> property $ do
fileContentContent <- arbitrary
@ -61,9 +62,10 @@ spec = withApp . describe "File handling" $ do
suppliedContent <- runConduit $ transPipe generalize fileContentContent
.| C.sinkLazy
readContent <- runDB . runConduit
$ sourceFileDB fRef'
.| C.takeE (succ $ olength suppliedContent)
.| C.sinkLazy
for_ [1..10] $ \_i -> do
readContent <- runDB . runConduit
$ sourceFileDB fRef'
.| C.takeE (succ $ olength suppliedContent)
.| C.sinkLazy
liftIO $ readContent `shouldBe` suppliedContent
liftIO $ readContent `shouldBe` suppliedContent