Merge remote-tracking branch 'origin/145-build-system-rewrite' into 144-ghc-update
commit da0d02f711

17  .babelrc
@@ -1,17 +0,0 @@
{
  "presets": [
    ["env", {
      "useBuiltIns": "usage",
      "targets": { "node": "current" }
    }
    ]
  ],
  "plugins": [
    ["@babel/plugin-proposal-decorators", { "legacy": true }],
    ["@babel/plugin-proposal-class-properties", { "loose": true }],
    ["@babel/plugin-proposal-private-methods", { "loose": true }],
    ["@babel/plugin-proposal-private-property-in-object", { "loose": true }],
    ["@babel/plugin-transform-modules-commonjs"],
    ["@babel/transform-runtime"]
  ]
}
@@ -1 +0,0 @@
**/*
@@ -1,3 +0,0 @@
SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>

SPDX-License-Identifier: AGPL-3.0-or-later
@@ -1,30 +0,0 @@
{
  "env": {
    "browser": true,
    "es6": true,
    "jasmine": true
  },
  "extends": "eslint:recommended",
  "globals": {
    "Atomics": "readonly",
    "SharedArrayBuffer": "readonly",
    "flatpickr": "readonly",
    "$": "readonly"
  },
  "parser": "@babel/eslint-parser",
  "parserOptions": {
    "ecmaVersion": 2018,
    "requireConfigFile": false,
    "ecmaFeatures": {
      "legacyDecorators": true
    }
  },
  "rules": {
    "no-console": "off",
    "no-extra-semi": "off",
    "semi": ["error", "always"],
    "comma-dangle": ["error", "always-multiline"],
    "quotes": ["error", "single"],
    "no-var": "error"
  }
}
5  .gitignore vendored

@@ -1,5 +1,8 @@
.Dockerfile
dist*
node_modules/
assets/icons
bin/
*.hi
*.o
*.sqlite3
@@ -7,6 +10,8 @@ node_modules/
*.sqlite3-wal
.hsenv*
cabal-dev/
.cache/
.stack/
.stack-work/
yesod-devel/
.cabal-sandbox

802  .gitlab-ci.yml
@@ -2,6 +2,16 @@
#
# SPDX-License-Identifier: AGPL-3.0-or-later

### IMPORTANT NOTICE ###
# Our pipeline consists of static and dynamic parts.
#
# This file only contains the static parts of our pipeline.
# Dynamic jobs are defined in .gitlab-ci/frontend.yml and .gitlab-ci/backend.yml.
# These are used as a template to generate downstream (child) pipelines during
# the runtime of the upstream (parent) pipeline.
###
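#
# Illustrative flow (a sketch summarising the configuration below, not extra
# configuration): the "setup:dynamic" job renders the templates with
# .gitlab-ci/dynamci.pl, and a trigger job starts the child pipeline from the
# rendered artifact:
#
#   - cat .gitlab-ci/frontend.yml | .gitlab-ci/dynamci.pl FRONTEND_IMAGE_VERSION=42 > frontend.yml
#
#   frontend:
#     trigger:
#       strategy: depend
#       include:
#         - artifact: frontend.yml
#           job: setup:dynamic
#
# The value 42 is made up for the example; the real version is derived from
# "git log" inside the job.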

# workflow:
#   rules:
#     - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
@@ -10,349 +20,553 @@
#     - if: $CI_COMMIT_TAG =~ /^t/
#     - if: $CI_COMMIT_TAG =~ /^d/

default:
  image:
    name: registry.uniworx.de/uniworx/containers/nix-attic:latest

variables:
  NIX_PATH: "nixpkgs=http://nixos.org/channels/nixos-23.11/nixexprs.tar.xz"
  AWS_SHARED_CREDENTIALS_FILE: "/etc/aws/credentials"
  IMAGE_BUILDER: quay.io/buildah/stable:latest
  FRONTEND_IMAGE_DEPENDENCIES:
    docker/frontend/Dockerfile
    package.json
    package-lock.json
    webpack.config.js
  BACKEND_IMAGE_DEPENDENCIES:
    docker/backend/Dockerfile
    package.yaml
    stack.yaml
    stack.yaml.lock

  TRANSFER_METER_FREQUENCY: "2s"

  NIX_CONFIG: |-
    extra-substituters = https://cache.iog.io
    extra-trusted-public-keys = hydra.iohk.io:f/Ea+s+dFdN+3Y/G+FDgSq+a5NEWhJGzdjvKNGv0/EQ=
default:
  image:
    name: ${CI_REGISTRY}/uniworx/containers/debian:12.5
    entrypoint: [""]
  docker:
    platform: x86_64
  artifacts:
    name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
    expire_in: "1 day"
  retry: 2
  interruptible: true


stages:
  - setup
  - lint
  - build
  - test
  - container
  - prepare release
  - frontend
  - backend
  - release

node modules:

setup:dynamic:
  stage: setup
  cache:
    - &npm-cache
      key: default-npm
      paths:
        - .npm
        - node_modules
  before_script:
    - apt-get -y update
    - apt-get -y install git
  script:
    - "nix shell .#gnumake .#gup .#nodejs_21 --command make node_modules"
  before_script: &nix-before
    - git config --global init.defaultBranch master
    - install -v -m 0700 -d ~/.ssh
    - install -v -T -m 0644 "${SSH_KNOWN_HOSTS}" ~/.ssh/known_hosts
    - install -v -T -m 0400 "${SSH_DEPLOY_KEY}" ~/.ssh/deploy && echo "IdentityFile ~/.ssh/deploy" >> ~/.ssh/config;
    - install -v -T -m 0644 "${FONTAWESOME_NPM_AUTH_FILE}" /etc/fontawesome-token
    - install -v -T -m 0644 "${NIX_NETRC}" /etc/nix/netrc
    - FRONTEND_IMAGE_VERSION=`git log ${FRONTEND_IMAGE_DEPENDENCIES} | grep '^commit' | wc --lines | tee frontend-image-version`
    - BACKEND_IMAGE_VERSION=`git log ${BACKEND_IMAGE_DEPENDENCIES} | grep '^commit' | wc --lines | tee backend-image-version`
    - cat .gitlab-ci/frontend.yml | .gitlab-ci/dynamci.pl FRONTEND_IMAGE_VERSION=${FRONTEND_IMAGE_VERSION} > frontend.yml
    - cat .gitlab-ci/backend.yml | .gitlab-ci/dynamci.pl BACKEND_IMAGE_VERSION=${BACKEND_IMAGE_VERSION} > backend.yml
  artifacts:
    paths:
      - node_modules/
    name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
    expire_in: "1 day"
  retry: 2
  interruptible: true
      - frontend-image-version
      - backend-image-version
      - frontend.yml
      - backend.yml
  rules:
    - when: always

frontend lint:
  stage: lint
  script:
    - "nix shell .#gnumake .#gup .#nodejs_21 --command make frontend-lint"
  before_script: *nix-before
setup:containers:frontend: &setup-container
  stage: setup
  needs:
    - job: node modules
      artifacts: true
  retry: 2
  interruptible: true

frontend build:
  stage: build
  cache:
    - &frontend-cache
      key: default-frontend
      paths:
        - .well-known-cache
    - setup:dynamic
  image: ${IMAGE_BUILDER}
  variables:
    IMAGE_TYPE: frontend
  before_script: &container-before
    - IMAGE_VERSION=`cat ${IMAGE_TYPE}-image-version`
    - IMAGE_TAG=${IMAGE_TYPE}/${CI_COMMIT_REF_SLUG}:${IMAGE_VERSION}
    - REGISTRY_DESTINATION=${CI_REGISTRY_IMAGE}/${IMAGE_TYPE}/${CI_COMMIT_REF_SLUG}:${IMAGE_VERSION}
  script:
    - "nix shell .#gnumake .#gup .#nodejs_21 --command make frontend-build"
  before_script: *nix-before
    - curl --request GET --header "PRIVATE-TOKEN:${REGISTRY_AUTH_TOKEN}" "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/registry/repositories?tags=true" > .gitlab-ci/.container-registry-contents.json
    - IMAGE_EXISTS=`grep \""${REGISTRY_DESTINATION}"\" .gitlab-ci/.container-registry-contents.json`
    - cat .gitlab-ci/.container-registry-contents.json
    - test "${CI_JOB_MANUAL}" = true && echo "Force rebuilding container."
    - >
      if [ -z "${IMAGE_EXISTS}" -o "${CI_JOB_MANUAL}" = true ] ; then
        echo "Building image..."
        buildah bud -t ${IMAGE_TAG} --build-arg PROJECT_DIR=${CI_PROJECT_DIR} --volume ${CI_PROJECT_DIR}:/tmp/${CI_PROJECT_DIR} --file docker/${IMAGE_TYPE}/Dockerfile
        buildah push --creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" ${IMAGE_TAG} ${REGISTRY_DESTINATION}
      else
        echo "Image ${IMAGE_TAG} already exists in the container registry. Skipping build."
      fi
  rules:
    - if: $CI_MERGE_REQUEST_ID
      when: never
    - when: always
setup:containers:frontend:wait: &setup-container-wait
  stage: setup
  needs:
    - job: node modules
      artifacts: true
    - job: frontend lint # pipeline performance
      artifacts: false
  artifacts:
    paths:
      - static/
      - well-known/
      - config/webpack.yml
    name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
    expire_in: "1 day"
  retry: 2
  interruptible: true

backend lint:
  stage: lint
    - setup:dynamic
  before_script: *container-before
  script:
    - "nix shell .#gnumake .#coreutils .#stack --command make CI=1 backend-lint"
  before_script: *nix-before
  retry: 2
  interruptible: true
    - apt-get -y update && apt-get -y install curl
    - >
      while ! curl --request GET --header "PRIVATE-TOKEN:${REGISTRY_AUTH_TOKEN}" "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/registry/repositories?tags=true" | grep \""${REGISTRY_DESTINATION}"\" ; do
        echo "Waiting for container ${IMAGE_TAG} to appear in the container registry..."
        sleep 5
      done
  rules:
    - if: $CI_MERGE_REQUEST_ID
      when: always
    - when: never

backend build:
  stage: build
  cache:
    - &stack-cache
      key: default-stack
      paths:
        - .stack/
        - .stack-work/
setup:containers:backend:
  <<: *setup-container
  variables:
    IMAGE_TYPE: backend
setup:containers:backend:wait:
  <<: *setup-container-wait
  variables:
    IMAGE_TYPE: backend

  script:
    - "nix shell .#gnumake .#coreutils .#stack --command make CI=1 backend-build"
    - cp $(stack path --dist-dir)/build/hlint/hlint bin/test-hlint
    - cp $(stack path --dist-dir)/build/yesod/yesod bin/test-yesod
  before_script: *nix-before

frontend:
  stage: frontend
  variables:
    GIT_STRATEGY: clone
  needs:
    - job: node modules # transitive
      artifacts: false
    - job: frontend lint # transitive
      artifacts: false
    - job: frontend build
      artifacts: true
    - job: backend lint # pipeline performance
      artifacts: false
  artifacts:
    paths:
      - bin/
    name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
    expire_in: "1 day"
  retry: 2
  interruptible: true
  resource_group: ram
    - setup:dynamic
    - job: setup:containers:frontend
      optional: true
  trigger:
    strategy: depend
    include:
      - artifact: frontend.yml
        job: setup:dynamic

uniworx:exe:uniworxdb:
  stage: build
  script:
    - xzcat uniworx:lib:uniworx.nar.xz | nix-store --import
    - nix -L build -o result ".#uniworx:exe:uniworxdb"
    - nix-store --export $(nix-store -qR result) | xz -T0 -2 > uniworx:exe:uniworxdb.nar.xz
  before_script: *nix-before
  needs:
    - job: node modules # transitive
      artifacts: false
    - job: frontend build # transitive
      artifacts: false
  artifacts:
    paths:
      - uniworx:exe:uniworxdb.nar.xz
    name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
    expire_in: "1 day"
  retry: 2
  interruptible: true

uniworx:exe:uniworxload:
  stage: build
  script:
    - xzcat uniworx:lib:uniworx.nar.xz | nix-store --import
    - nix -L build -o result ".#uniworx:exe:uniworxload"
    - nix-store --export $(nix-store -qR result) | xz -T0 -2 > uniworx:exe:uniworxload.nar.xz
  before_script: *nix-before
backend:dev: &backend
  stage: backend
  variables:
    GIT_STRATEGY: clone
  needs:
    - job: node modules # transitive
      artifacts: false
    - job: frontend build # transitive
      artifacts: false
  artifacts:
    paths:
      - uniworx:exe:uniworxload.nar.xz
    name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
    expire_in: "1 day"
  retry: 2
  interruptible: true
    - setup:dynamic
    - job: setup:containers:backend
      optional: true
    - frontend
  trigger:
    strategy: depend
    include:
      - artifact: backend.yml
        job: setup:dynamic

check:
  stage: test
  script:
    - xzcat frontend.nar.xz | nix-store --import
    - xzcat uniworx:lib:uniworx.nar.xz | nix-store --import
    - nix -L flake check .
  before_script: *nix-before
  needs:
    - job: node modules # transitive
      artifacts: false
    - job: frontend build
      artifacts: true
  retry: 2
  interruptible: true
backend:prod:
  <<: *backend
  variables:
    PROD_BUILD: -prod

container:
  stage: container
  script:
    - xzcat uniworx:exe:uniworx.nar.xz | nix-store --import
    - cp -pr --reflink=auto -L $(nix build --print-out-paths ".#uniworxDocker") uniworx.tar.gz
  before_script: *nix-before
  needs:
    - job: node modules # transitive
      artifacts: false
    - job: frontend build # transitive
      artifacts: false
    - job: check # sanity
      artifacts: false
  artifacts:
    paths:
      - uniworx.tar.gz
    name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
    expire_in: "1 day"
  retry: 2
  interruptible: true

release:changelog:
  stage: release
  rules: &release-rules
    - if: $CI_COMMIT_TAG =~ /^v/
test container:
  stage: container
    - if: $CI_COMMIT_TAG =~ /^v[0-9\.]+$/
    - if: $CI_COMMIT_TAG =~ /^v[0-9\.]+-test-.*$/
  script:
    - xzcat uniworx:exe:uniworx.nar.xz | nix-store --import
    - cp -pr --reflink=auto -L $(nix build --print-out-paths ".#uniworxTestDocker") uniworx.tar.gz
  before_script: *nix-before
  needs:
    - job: node modules # transitive
      artifacts: false
    - job: frontend build # transitive
      artifacts: false
    - job: check # sanity
      artifacts: false
  artifacts:
    paths:
      - uniworx.tar.gz
    name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
    expire_in: "1 day"
  retry: 2
  interruptible: true
  rules: &test-release-rules
    - if: $CI_COMMIT_TAG =~ /^t/

parse changelog:
  stage: prepare release
  needs:
    - job: node modules
      artifacts: true
  rules: *release-rules
  before_script: *nix-before
  script:
    - xzcat node-dependencies.nar.xz | nix-store --import
    - nix -L run ".#jqChangelogJson" -- -r '.versions[0].version' > .current-version
    - nix -L run ".#jqChangelogJson" -- -r '.versions[0].body' > .current-changelog.md
    - echo "Preparing release..."
    - ./.gitlab-ci/version.pl > .current-version
    - echo "VERSION=$(cat .current-version)" >> build.env
    - ./.gitlab-ci/version.pl -changelog CHANGELOG.md
  artifacts:
    reports:
      dotenv: build.env
    paths:
      - .current-version
      - .current-changelog.md
      - .changelog.md
    name: "changelog-${CI_COMMIT_SHORT_SHA}"
    expire_in: "1 day"
  retry: 2
  interruptible: true
parse test changelog:
  stage: prepare release
  needs:
    - job: node modules
      artifacts: true
  rules: *test-release-rules
  before_script: *nix-before
  script:
    - xzcat node-dependencies.nar.xz | nix-store --import
    - nix -L run ".#jqChangelogJson" -- -r '.versions[0].version' > .current-version
    - nix -L run ".#jqChangelogJson" -- -r '.versions[0].body' > .current-changelog.md
    - echo "VERSION=$(cat .current-version)" >> build.env
  artifacts:
    reports:
      dotenv: build.env
    paths:
      - .current-version
      - .current-changelog.md
    name: "changelog-${CI_COMMIT_SHORT_SHA}"
    expire_in: "1 day"
  retry: 2
  interruptible: true

upload container:
  variables:
    GIT_STRATEGY: none
release:container:
  stage: release
  image: quay.io/skopeo/stable:latest
  script:
    - skopeo --insecure-policy copy --dest-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" docker-archive://$(pwd)/uniworx.tar.gz docker://${CI_REGISTRY_IMAGE}:${VERSION}
    - skopeo --insecure-policy copy --src-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" --dest-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" docker://${CI_REGISTRY_IMAGE}:${VERSION} docker://${CI_REGISTRY_IMAGE}:latest
  needs:
    - job: node modules # transitive
      artifacts: false
    - job: frontend build # transitive
      artifacts: false
    - job: container
      artifacts: true
    - job: parse changelog
      artifacts: true
    - job: check # sanity
      artifacts: false
  rules: *release-rules
  retry: 2
upload test container:
  variables:
    GIT_STRATEGY: none
  stage: release
  image: quay.io/skopeo/stable:latest
  image: ${IMAGE_BUILDER}
  script:
    - skopeo --insecure-policy copy --dest-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" docker-archive://$(pwd)/uniworx.tar.gz docker://${CI_REGISTRY}/fradrive/fradrive/test:${CI_COMMIT_REF_NAME}
    - skopeo --insecure-policy copy --src-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" --dest-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" docker://${CI_REGISTRY}/fradrive/fradrive/test:${CI_COMMIT_REF_NAME} docker://${CI_REGISTRY}/fradrive/fradrive/test:latest
    - echo "Building container for release ${VERSION}..."
    - buildah bud --tag fradrive:${VERSION} --file docker/fradrive/Dockerfile
    - buildah add --chown uniworx:uniworx bin/uniworx /bin/uniworx
    - echo "Pushing container ${VERSION} to container registry..."
    - buildah push --creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" fradrive:${VERSION} ${CI_REGISTRY_IMAGE}/fradrive:${VERSION}
  needs:
    - job: node modules # transitive
      artifacts: false
    - job: frontend build # transitive
      artifacts: false
    - job: test container
      artifacts: true
    - job: parse test changelog
      artifacts: true
    - job: check # sanity
      artifacts: false
  rules: *test-release-rules
  retry: 2
    - frontend # sanity
    - backend:prod # sanity
    - release:changelog

release:
  variables:
    GIT_STRATEGY: none
release:release:
  stage: release
  image: registry.gitlab.com/gitlab-org/release-cli:latest
  rules: *release-rules
  script:
    - echo "Will create release ${VERSION}..."
  release:
    name: '$VERSION'
    tag_name: '$CI_COMMIT_TAG'
    description: .current-changelog.md
  needs:
    - job: check # sanity
      artifacts: false
    - job: parse changelog
      artifacts: true
test release:
  variables:
    GIT_STRATEGY: none
  stage: release
  image: registry.gitlab.com/gitlab-org/release-cli:latest
  rules: *test-release-rules
  script:
    - echo "Will create test release ${VERSION}-test..."
    - echo "Creating release ${VERSION}"
  release:
    name: "${VERSION}-test"
    tag_name: '$CI_COMMIT_TAG'
    description: .current-changelog.md
    name: "${VERSION}"
    tag_name: "${CI_COMMIT_TAG}"
    description: .changelog.md
  needs:
    - job: check # sanity
      artifacts: false
    - job: parse test changelog
      artifacts: true
    - frontend # sanity
    - backend:prod # sanity
    - release:changelog
    - release:container
  retry: 0




# frontend dependencies:
#   stage: setup
#   cache:
#     - &npm-cache
#       key: default-npm
#       paths: &npm-paths
#         - node_modules/
#         - .npm/
#         - .npmrc
#   script:
#     - make node_modules
#   artifacts:
#     paths: *npm-paths

# well-known:
#   stage: setup
#   script:
#     - make well-known
#   needs:
#     - job: frontend dependencies
#       artifacts: true
#   cache:
#     - &frontend-cache
#       key: default-frontend
#       paths:
#         - .well-known-cache
#   artifacts:
#     paths:
#       - well-known/
#       - .well-known-cache/

# # TODO: cache is always uploaded even if up-to-date; prevent re-upload when up-to-date
# backend dependencies:
#   stage: setup
#   cache:
#     - &stack-cache
#       key: default-stack
#       paths:
#         - .stack/
#         - .stack-work/
#   script:
#     - make backend-dependencies-prod
#   artifacts:
#     paths:
#       - .stack/
#       - .stack-work/

# frontend build:
#   stage: build
#   cache:
#     - *frontend-cache
#   script:
#     - make frontend-build
#   needs:
#     - job: frontend dependencies
#       artifacts: true
#     - job: well-known
#       artifacts: true
#   artifacts:
#     paths:
#       - static/
#       - config/webpack.yml

# # TODO: .stack-work cache not working
# backend build:
#   stage: build
#   cache:
#     - *stack-cache
#   script:
#     - make bin/uniworx
#     # - find .stack-work
#     # - cp $(stack path --dist-dir)/build/hlint/hlint bin/test-hlint
#     # - cp $(stack path --dist-dir)/build/yesod/yesod bin/test-yesod
#   needs:
#     - job: frontend dependencies # transitive
#       artifacts: false
#     - job: well-known
#       artifacts: true
#     - job: backend dependencies
#       artifacts: true
#     - job: frontend build
#       artifacts: true
#   artifacts:
#     paths:
#       - bin/
#   resource_group: ram

# # TODO: part of backend build; probably deprecated
# # uniworxdb:
# #   stage: build
# #   script:
# #     - make bin/uniworxdb
# #   needs:
# #     # TODO: no frontend needed
# #     - job: frontend dependencies # transitive
# #       artifacts: false
# #     - job: frontend build # transitive
# #       artifacts: false
# #   artifacts:
# #     paths:
# #       - bin/uniworxdb

# # TODO: part of backend build; probably deprecated
# # TODO: rewrite
# # uniworx:exe:uniworxload:
# #   stage: build
# #   script:
# #     - xzcat uniworx:lib:uniworx.nar.xz | nix-store --import
# #     - nix -L build -o result ".#uniworx:exe:uniworxload"
# #     - nix-store --export $(nix-store -qR result) | xz -T0 -2 > uniworx:exe:uniworxload.nar.xz
# #   needs:
# #     - job: frontend dependencies # transitive
# #       artifacts: false
# #     - job: frontend build # transitive
# #       artifacts: false
# #   artifacts:
# #     paths:
# #       - uniworx:exe:uniworxload.nar.xz

# frontend lint:
#   stage: lint
#   script:
#     - make frontend-lint
#   cache:
#     - *frontend-cache
#   needs:
#     - job: frontend dependencies
#       artifacts: true
#     - job: well-known # TODO: is this really needed?
#       artifacts: true

# backend lint:
#   stage: lint
#   cache:
#     - *stack-cache
#   script:
#     # TODO: - make backend-lint-dev
#     - make backend-lint-prod
#   needs:
#     - job: backend dependencies
#       artifacts: true
#     - job: backend build
#       artifacts: true
#     - job: frontend build
#       artifacts: true
#     - job: well-known
#       artifacts: true

# frontend test:
#   stage: test
#   script:
#     - make frontend-test
#   cache: *frontend-cache
#   needs:
#     - job: frontend dependencies
#       artifacts: true
#   # TODO: configure report artifacts

# backend test:
#   stage: test
#   script:
#     - make backend-test-prod
#   cache: *stack-cache
#   needs:
#     - job: well-known
#       artifacts: true
#     - job: frontend build
#       artifacts: true
#     - job: backend dependencies
#       artifacts: true
#     - job: backend build
#       artifacts: true
#   # TODO: configure report artifacts

# # TODO: unify prod and test versions
# # TODO: rewrite
# container:
#   stage: container
#   script:
#     - xzcat uniworx:exe:uniworx.nar.xz | nix-store --import
#     - cp -pr --reflink=auto -L $(nix build --print-out-paths ".#uniworxDocker") uniworx.tar.gz
#   needs:
#     - job: frontend dependencies # transitive
#       artifacts: false
#     - job: frontend build # transitive
#       artifacts: false
#     - job: frontend test # sanity
#       artifacts: false
#     - job: backend test # sanity
#       artifacts: false
#   artifacts:
#     paths:
#       - uniworx.tar.gz
#   rules: &release-rules
#     - if: $CI_COMMIT_TAG =~ /^v/
# # TODO: rewrite
# test container:
#   stage: container
#   script:
#     - xzcat uniworx:exe:uniworx.nar.xz | nix-store --import
#     - cp -pr --reflink=auto -L $(nix build --print-out-paths ".#uniworxTestDocker") uniworx.tar.gz
#   needs:
#     - job: frontend dependencies # transitive
#       artifacts: false
#     - job: frontend build # transitive
#       artifacts: false
#     - job: frontend test # sanity
#       artifacts: false
#     - job: backend test # sanity
#       artifacts: false
#   artifacts:
#     paths:
#       - uniworx.tar.gz
#   rules: &test-release-rules
#     - if: $CI_COMMIT_TAG =~ /^t/

# # TODO: unify prod and test versions
# # TODO: rewrite
# parse changelog:
#   stage: prepare release
#   needs:
#     - job: frontend dependencies
#       artifacts: true
#   rules: *release-rules
#   script:
#     - xzcat node-dependencies.nar.xz | nix-store --import
#     - nix -L run ".#jqChangelogJson" -- -r '.versions[0].version' > .current-version
#     - nix -L run ".#jqChangelogJson" -- -r '.versions[0].body' > .current-changelog.md
#     - echo "VERSION=$(cat .current-version)" >> build.env
#   artifacts:
#     reports:
#       dotenv: build.env
#     paths:
#       - .current-version
#       - .current-changelog.md
#     name: "changelog-${CI_COMMIT_SHORT_SHA}"
#     expire_in: "1 day"
# # TODO: rewrite
# parse test changelog:
#   stage: prepare release
#   needs:
#     - job: frontend dependencies
#       artifacts: true
#   rules: *test-release-rules
#   script:
#     - xzcat node-dependencies.nar.xz | nix-store --import
#     - nix -L run ".#jqChangelogJson" -- -r '.versions[0].version' > .current-version
#     - nix -L run ".#jqChangelogJson" -- -r '.versions[0].body' > .current-changelog.md
#     - echo "VERSION=$(cat .current-version)" >> build.env
#   artifacts:
#     reports:
#       dotenv: build.env
#     paths:
#       - .current-version
#       - .current-changelog.md
#     name: "changelog-${CI_COMMIT_SHORT_SHA}"

# # TODO: unify prod and test versions
# # TODO: rewrite
# upload container:
#   variables:
#     GIT_STRATEGY: none
#   stage: release
#   image: quay.io/skopeo/stable:latest
#   script:
#     - skopeo --insecure-policy copy --dest-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" docker-archive://$(pwd)/uniworx.tar.gz docker://${CI_REGISTRY_IMAGE}:${VERSION}
#     - skopeo --insecure-policy copy --src-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" --dest-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" docker://${CI_REGISTRY_IMAGE}:${VERSION} docker://${CI_REGISTRY_IMAGE}:latest
#   needs:
#     - job: frontend dependencies # transitive
#       artifacts: false
#     - job: frontend build # transitive
#       artifacts: false
#     - job: container
#       artifacts: true
#     - job: parse changelog
#       artifacts: true
#     - job: frontend test # sanity
#       artifacts: false
#     - job: backend test # sanity
#       artifacts: false
#   rules: *release-rules
# # TODO: rewrite
# upload test container:
#   variables:
#     GIT_STRATEGY: none
#   stage: release
#   image: quay.io/skopeo/stable:latest
#   script:
#     - skopeo --insecure-policy copy --dest-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" docker-archive://$(pwd)/uniworx.tar.gz docker://${CI_REGISTRY}/fradrive/fradrive/test:${CI_COMMIT_REF_NAME}
#     - skopeo --insecure-policy copy --src-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" --dest-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" docker://${CI_REGISTRY}/fradrive/fradrive/test:${CI_COMMIT_REF_NAME} docker://${CI_REGISTRY}/fradrive/fradrive/test:latest
#   needs:
#     - job: frontend dependencies # transitive
#       artifacts: false
#     - job: frontend build # transitive
#       artifacts: false
#     - job: test container
#       artifacts: true
#     - job: parse test changelog
#       artifacts: true
#     - job: frontend test # sanity
#       artifacts: false
#     - job: backend test # sanity
#       artifacts: false
#   rules: *test-release-rules

# # TODO: unify prod and test versions
# # TODO: rewrite
# release:
#   variables:
#     GIT_STRATEGY: none
#   stage: release
#   image: registry.gitlab.com/gitlab-org/release-cli:latest
#   rules: *release-rules
#   script:
#     - echo "Will create release ${VERSION}..."
#   release:
#     name: '$VERSION'
#     tag_name: '$CI_COMMIT_TAG'
#     description: .current-changelog.md
#   needs:
#     - job: frontend test # sanity
#       artifacts: false
#     - job: backend test # sanity
#       artifacts: false
#     - job: parse changelog
#       artifacts: true
#   retry: 0
# # TODO: rewrite
# test release:
#   variables:
#     GIT_STRATEGY: none
#   stage: release
#   image: registry.gitlab.com/gitlab-org/release-cli:latest
#   rules: *test-release-rules
#   script:
#     - echo "Will create test release ${VERSION}-test..."
#   release:
#     name: "${VERSION}-test"
#     tag_name: '$CI_COMMIT_TAG'
#     description: .current-changelog.md
#   needs:
#     - job: frontend test # sanity
#       artifacts: false
#     - job: backend test # sanity
#       artifacts: false
#     - job: parse test changelog
#       artifacts: true
#   retry: 0
72  .gitlab-ci/backend.yml Normal file

@@ -0,0 +1,72 @@
# SPDX-FileCopyrightText: 2024 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

### IMPORTANT NOTICE ###
# Our pipeline consists of static and dynamic parts.
#
# This file only contains the dynamic backend parts of our pipeline.
# Static jobs are defined in .gitlab-ci.yml.
#
# The marker "#dyn#" (without quotes) will be replaced by concrete values.
###
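#
# Example of the substitution (the value 42 is made up): running
#   .gitlab-ci/dynamci.pl BACKEND_IMAGE_VERSION=42 < .gitlab-ci/backend.yml
# turns the line "BACKEND_IMAGE_VERSION: #dyn#" below into
#   BACKEND_IMAGE_VERSION: 42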

variables:
  BACKEND_IMAGE_VERSION: #dyn#

stages:
  - compile
  - lint
  - test
  - release

default:
  image:
    name: ${CI_REGISTRY_IMAGE}/backend/${CI_COMMIT_REF_SLUG}:${BACKEND_IMAGE_VERSION}
    entrypoint: [""]
  docker:
    platform: x86_64
  artifacts:
    name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
    expire_in: "1 day"
  retry: 2
  interruptible: true


compile:
  stage: compile
  script:
    - make -- backend-build${PROD_BUILD}
  artifacts:
    paths:
      - ${CI_PROJECT_DIR}/.stack-work/
      - bin/
  cache: &backend-cache

lint:
  stage: lint
  script:
    - make -- backend-lint${PROD_BUILD}
  cache: *backend-cache

test:
  stage: test
  needs:
    - compile
  script:
    - make -- backend-test${PROD_BUILD}
  cache: *backend-cache

container:
  stage: release
  image: ${IMAGE_BUILDER}
  needs:
    - compile
    - lint
    - test
  script:
    - IMAGE_TAG=`./.gitlab-ci/version.pl`
    - buildah bud -t ${IMAGE_TAG} --volume ${CI_PROJECT_DIR}/bin/:/tmp/uniworx-bin --file docker/fradrive/Dockerfile
    - buildah push --creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" ${IMAGE_TAG} ${CI_REGISTRY_IMAGE}/${IMAGE_TAG}
  rules:
    - if: '$PROD_BUILD == "-prod"'

@@ -1,25 +0,0 @@
diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc
index 514ab3bf9..25dab18bb 100644
--- a/src/libstore/filetransfer.cc
+++ b/src/libstore/filetransfer.cc
@@ -696,6 +696,8 @@ struct curlFileTransfer : public FileTransfer
     std::string scheme = get(params, "scheme").value_or("");
     std::string endpoint = get(params, "endpoint").value_or("");

+    debug("enqueueFileTransfer: scheme: %s", scheme);
+
     S3Helper s3Helper(profile, region, scheme, endpoint);

     // FIXME: implement ETag
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
index 6bfbee044..ff406e5e4 100644
--- a/src/libstore/s3-binary-cache-store.cc
+++ b/src/libstore/s3-binary-cache-store.cc
@@ -126,6 +126,7 @@ ref<Aws::Client::ClientConfiguration> S3Helper::makeConfig(const string & region
     initAWS();
     auto res = make_ref<Aws::Client::ClientConfiguration>();
     res->region = region;
+    debug("configuring scheme %s", scheme);
     if (!scheme.empty()) {
         res->scheme = Aws::Http::SchemeMapper::FromString(scheme.c_str());
     }
@@ -1,3 +0,0 @@
SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>

SPDX-License-Identifier: AGPL-3.0-or-later

43  .gitlab-ci/dynamci.pl Executable file

@@ -0,0 +1,43 @@
#!/usr/bin/env perl

use strict;
use warnings;

my %kv = ();

for(@ARGV) {
    if(!m#^\s*([^=\s]+)\s*=\s*(.*?)\s*$#) {
        die "$0: Bad parameter '$_': Not a key=value pair\n"
    }
    $kv{$1} = $2;
}

my $invar = 0;

LOOP: while(my $line = <STDIN>) {
    if(1==$invar) {
        if($line=~m/^(\s+)(\S+)(\s*:\s*)(\S+)(.*)/) {
            my ($pre1, $key, $pre2, $var, $post) = ($1, $2, $3, $4, $5);
            if('#dyn#' eq $var) {
                if(not exists $kv{$key}) {
                    die "$0: No value given for key '$key' in the parameters but it is defined in input!\n"
                }
                my $v = $kv{$key};
                delete $kv{$key};
                print "$pre1$key$pre2$v$post\n";
                next LOOP;
            }
        } elsif($line=~m/^[^#\t ]/) {
            $invar = 2
        }
    }
    if(0==$invar and $line=~m#^\s*variables\s*:\s*$#) {
        $invar = 1;
    }
    print $line;
}

my @rem = sort keys %kv;

die "$0: Variables occur in parameter but not in input: @rem!\n" if @rem;
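
# Usage sketch (the value 42 is illustrative; the pipeline derives the real
# number from "git log"):
#
#   .gitlab-ci/dynamci.pl BACKEND_IMAGE_VERSION=42 \
#     < .gitlab-ci/backend.yml > backend.yml
#
# Every "#dyn#" marker in the top-level "variables:" block is replaced by the
# matching key=value parameter; the script dies on unmatched markers or
# leftover parameters, so mismatches fail the pipeline early rather than
# silently.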

@@ -1,13 +0,0 @@
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
index 6bfbee044..51d86c4e6 100644
--- a/src/libstore/s3-binary-cache-store.cc
+++ b/src/libstore/s3-binary-cache-store.cc
@@ -209,7 +209,7 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual
     S3Helper s3Helper;

     S3BinaryCacheStoreImpl(
-        const std::string & scheme,
+        const std::string & uriScheme,
         const std::string & bucketName,
         const Params & params)
         : StoreConfig(params)
@@ -1,3 +0,0 @@
SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>

SPDX-License-Identifier: AGPL-3.0-or-later

62  .gitlab-ci/frontend.yml Normal file

@@ -0,0 +1,62 @@
# SPDX-FileCopyrightText: 2024 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

### IMPORTANT NOTICE ###
# Our pipeline consists of static and dynamic parts.
#
# This file only contains the dynamic frontend parts of our pipeline.
# Static jobs are defined in .gitlab-ci.yml.
#
# The marker "#dyn#" (without quotes) will be replaced by concrete values.
###

variables:
  FRONTEND_IMAGE_VERSION: #dyn#

stages:
  - compile
  - lint
  - test

default:
  image:
    name: ${CI_REGISTRY_IMAGE}/frontend/${CI_COMMIT_REF_SLUG}:${FRONTEND_IMAGE_VERSION}
    entrypoint: [""]
  docker:
    platform: x86_64
  artifacts:
    name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
    expire_in: "1 day"
  retry: 2
  interruptible: true


compile:
  stage: compile
  script:
    - make -- frontend-build
  artifacts:
    paths:
      - ${CI_PROJECT_DIR}/node_modules
      - ${CI_PROJECT_DIR}/well-known
  cache:
    - &frontend-cache
      key: default-frontend
      paths:
        - ${CI_PROJECT_DIR}/.npm/
        - ${CI_PROJECT_DIR}/.well-known-cache/

lint:
  stage: lint
  script:
    - make -- frontend-lint
  cache: *frontend-cache

test:
  stage: test
  needs:
    - compile
  script:
    - make -- frontend-test
  cache: *frontend-cache
@@ -1,11 +0,0 @@
#!/usr/bin/env bash

# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

#!/usr/bin/bash

export MC_HOST_minio=http://$(cat /minio-gitlab-runner-cache/accesskey):$(cat /minio-gitlab-runner-cache/secretkey)@minio-gitlab-runner-cache

mc mb --ignore-existing minio/nix-cache
@@ -1,8 +0,0 @@
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

{ pkgs ? import <nixpkgs> {} }:
pkgs.nixUnstable.overrideAttrs (oldAttrs: {
  patches = oldAttrs.patches or [] ++ [ ./fix-aws-scheme.patch ];
})
@@ -1,14 +0,0 @@
#!/usr/bin/env bash

# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

#!/usr/bin/bash

set -eu
set -f # disable globbing
export IFS=' '

echo "Signing and uploading paths" $OUT_PATHS
exec nix copy --to "s3://nix-cache?region=us-east-1&scheme=http&endpoint=minio-gitlab-runner-cache&secret-key=${NIX_CACHE_KEYFILE}" $OUT_PATHS

617  .gitlab-ci/version.pl Executable file

@@ -0,0 +1,617 @@
#!/usr/bin/env perl

use strict;
use warnings;

use Data::Dumper;

# Version changes:
# v[x].[y].[z] -- Main version number
# v[x].[y].[z]-test-[branchstring]-num -- test/branch/devel version number
# on main/master: biggest version so far, incremented by the occurring changes
# on other branches: find the base version, be it a branch string, the old
# format or a main version number, and increment from there. Increment the
# version number, but on a global conflict use a new version number.

# Actions and their results
# chore    -> +patch
# feat     -> +minor
# fix      -> +patch
# [a-z]+!  -> +major
# perf     -> +patch
# refactor -> +patch
# test     -> +patch
# style    -> +patch
# revert   -> =
# docs     -> +patch
# build    -> =
# ci       -> =
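#
# Worked example (hypothetical history): starting from tag v1.4.2, the commits
#   feat: add csv export   -> +minor
#   fix: correct rounding  -> +patch
#   ci: tweak caching      -> =
# yield v1.5.0 on main/master (the patch bump is absorbed by the minor bump);
# on any other branch the same history instead produces a test tag such as
# v1.4.2-test-a-0.1.0.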

# parameters with default values
my %par = ();
my %parKinds = (
    vcslog=>{
        arity=>1,
        def=>'git log --pretty=tformat:"%H :::: %d :::: %s"',
        help=>'set command which outputs the log information to be used; reads from STDIN if value is set to "-"',
    },
    vcstags=>{
        arity=>1,
        def=>'git tag',
        help=>'set command which outputs the used tags',
    },
    vcsbranch=>{
        arity=>1,
        def=>'git rev-parse --abbrev-ref HEAD',
        help=>'set command to find out the current branch name',
    },
    kind=>{
        arity=>1,
        def=>'v',
        help=>'set tag kind of version numbers; this option resets autokind to "". Implemented kinds: v: main version; t: test version',
        auto=>sub { $par{autokind}='' },
    },
    autokind=>{
        arity=>1,
        def=>'main=v,master=v,test=t,*=t',
        help=>'determine the tag kind from branch name instead of fixed value; use the first fitting glob',
    },
    change=>{
        arity=>1,
        def=>'chore=patch,feat=minor,feature=minor,fix=patch,BREAK=major,perf=patch,refactor=patch,test=patch,style=patch,revert=null,docs=patch,build=null,ci=null',
        help=>'how to react on which commit type; can be partially given. Actions are: "null", "major", "minor", "patch" or state "invalid" for removing this type',
    },
    changelog=>{
        arity=>1,
        def=>'',
        help=>'File to add the changelog to; no changelog is written if this parameter is empty.'
    },
    changelogout=>{
        arity=>1,
        def=>'',
        help=>'Use this file name to write the changelog to, but use "changelog" to read the old changelog. If not set for both versions the parameter changelog is used.',
    },
    vcsurl=>{
        arity=>1,
        def=>'',
        help=>'Repository URL for changelog; for example "https://gitlab.example.doc/proj/proj/"',
    },
    v=>{def=>0,arity=>0,help=>'verbose'},
    h=>{def=>0,arity=>0,help=>'help'},
);

for my $k(keys %parKinds) {
    $par{$k} = $parKinds{$k}{def}
}

#for my $p(@ARGV) {
#
#}
{
    my $i = 0;
    while($i<@ARGV) {
        if($ARGV[$i]=~m#^-(.*)#) {
            my $key = $1;
            if(not exists $parKinds{$key}) {
                die "$0: Unknown parameter: -$key\n";
            }
            my $pk = $parKinds{$key};
            die "$0: Too few parameters for '-$key'\n" if $i+$pk->{arity}>@ARGV;
            my @par = @ARGV[$i+1..$i+$pk->{arity}];
            #warn "<< @par >>";
            $i++;
            $i += $pk->{arity};
            if($pk->{arity}) {
                $par{$key} = $par[0]
            } else {
                $par{$key}=1
            }
            if(exists $pk->{auto}) {
                $pk->{auto}->()
            }
        } else {
            die "$0: Bad parameter: $ARGV[$i]\n"
        }
    }
}

if($par{'h'}) {
    print "Usage: $0 [flags and options]\n\nAvailable options:\n";
    for my $k(sort keys %parKinds) {
        print "  -$k\n    $parKinds{$k}{help}\n";
        if($parKinds{$k}{arity}) {
            print "    Default value: $parKinds{$k}{def}\n";
        } else {
            print "    This is a flag and not an option\n";
        }
        print "\n";
    }
    exit 0
}

if($par{autokind}) {
    chomp(my $branch = `$par{vcsbranch}`); # strip the trailing newline so exact rules like 'master=v' can match
    my @rules = split /,/, $par{autokind};
    RULES: {
        for my $r(@rules) {
            if($r!~m#(.*)=(.*)#) {
                die "$0: Bad rule in autokind: $r\n";
            }
            my ($glob, $kind) = ($1, $2);
            if(globString($glob, $branch)) {
                $par{'kind'} = $kind;
                last RULES
            }
        }
        warn "$0: No autokind rule matches; leaving the kind unchanged.\n"
    }
}


if($par{'v'}) {
    print "VERBOSE: Parameters\n";
    for my $k(sort keys %par) {
        print "  $k: $par{$k}\n"
    }
}

my %typeReact = ();
for my $as(split /,/, $par{change}) {
    if($as=~m#(.*)=(.*)#) {
        $typeReact{$1} = $2;
    } else {
        warn "$0: Unexpected change parameter: '$as'"
    }
}

if($par{changelog} and not $par{vcsurl}) {
    die "Parameter 'changelog' given, but parameter 'vcsurl' is not. Please state the url of your repository for computation of a changelog.\n"
}

#my @have = split /\n/, `$par{vcstags}`;
#
#my @keep = grep { $_ } map { m#^($par{kind})([0-9].*)# ? [$1,$2] : undef } @have;
#
#my @oldVersions = ();

sub globString {
    my ($glob, $string) = @_;
    my @glob = map { m#\*# ? '*' : $_ } $glob=~m#(\?|\*+|[^\?\*]+)#g;
    my %matchCache = ();
    my $match = undef;
    my $matchCore = sub {
        my ($i, $j) = @_;
        return 1 if $i==@glob and $j==length $string;
        return 0 if $i>=@glob or $j>=length $string;
        return $match->($i+1,$j+1) if '?' eq $glob[$i];
        if('*' eq $glob[$i]) {
            for my $jj($j..length($string)) {
                return 1 if $match->($i+1, $jj);
            }
            return 0;
        }
        return $match->($i+1, $j+length($glob[$i])) if
            $glob[$i] eq substr($string, $j, length($glob[$i]));
        return 0
    };
    $match = sub {
        my ($i, $j) = @_;
        my $ij = "$i $j";
        my $res = $matchCache{$ij};
        if(not defined $res) {
            $res = $matchCore->($i, $j);
            $matchCache{$ij} = $res;
        }
        $res
    };
    $match->(0,0);
}
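# Examples (illustrative): globString('ma*', 'master') and
# globString('1??-*', '145-build-system-rewrite') both match, while
# globString('main', 'master') does not. The match is anchored at both ends
# and memoised per (glob position, string position) pair, so patterns with
# several '*' segments stay cheap.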

sub parseVersion {
    my $v = shift;
    if(not defined $v) {
        my $c = join " ", caller;
        warn "$0: internal error (parseVersion called on undef at $c)\n";
        return undef
    }
    my ($pre,$ma,$mi,$p,$sp,$brn,$brv) = ();
    if($v=~m#^([a-z]*)([0-9]+)$#) {
        $pre = $1;
        $ma = $2;
    } elsif($v=~m#^([a-z]*)([0-9]+)\.([0-9]+)$#) {
        $pre = $1;
        $ma = $2;
        $mi = $3
    } elsif($v=~m#^([a-z]*)([0-9]+)\.([0-9]+)\.([0-9]+)$#) {
        $pre = $1;
        $ma = $2;
        $mi = $3;
        $p = $4;
    } elsif($v=~m#^([a-z]*)([0-9]+)\.([0-9]+)\.([0-9]+)-test-([a-z]+)-([0-9\.]+)$#) {
        $pre = $1;
        $ma = $2;
        $mi = $3;
        $p = $4;
        # groups five and six capture branch name and branch version; a test
        # version carries no subpatch
        $brn = $5;
        $brv = $6;
    } elsif($v=~m#^([a-z]*)([0-9]+)\.([0-9]+)\.([0-9]+)-(.*)$#) {
        $pre = $1;
        $ma = $2;
        $mi = $3;
        $p = $4;
        $sp = $5;
    } else {
        warn "$0: unexpected old version number: $v\n" if $par{v};
        return undef
    }
    $pre = 'v' if '' eq $pre;
    return {
        prefix=>$pre,
        major=>$ma,
        minor=>$mi,
        patch=>$p,
        subpatch=>$sp,
        branchname=>$brn,
        branchversion=>$brv,
    }
}
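# Examples (illustrative): parseVersion('v2.7.1-test-b-0.3.0') yields
# prefix 'v', major 2, minor 7, patch 1, branchname 'b', branchversion '0.3.0';
# parseVersion('t3.1') yields prefix 't', major 3, minor 1.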

#@oldVersions = sort {
#    ($a->{major} // 0) <=> ($b->{major} // 0) ||
#    ($a->{minor} // 0) <=> ($b->{minor} // 0) ||
#    ($a->{patch} // 0) <=> ($b->{patch} // 0) ||
#    ($a->{subpatch} // '') <=> ($b->{subpatch} // '')
#} @oldVersions;

sub vsCompare {
    my ($vp, $wp) = @_;
    my ($v, $w) = ($vp, $wp);
    my ($verr, $werr) = (0,0);
    unless(ref $v) {
        eval { $v = parseVersion($v) };
        $verr = 1 if $@ or not defined $v;
    }
    unless(ref $w) {
        eval { $w = parseVersion($w) };
        $werr = 1 if $@ or not defined $w;
    }
    if($verr and $werr) {
        return $vp cmp $wp;
    }
    if($verr) {
        return -1
    }
    if($werr) {
        return 1
    }
    #for($v, $w) {
    #    $_ = parseVersion($_) unless ref $_;
    #}
    if('v' eq $v->{prefix} and 'v' eq $w->{prefix}) {
        return(
            ($v->{major} // 0) <=> ($w->{major} // 0) ||
            ($v->{minor} // 0) <=> ($w->{minor} // 0) ||
            ($v->{patch} // 0) <=> ($w->{patch} // 0) ||
            ($v->{branchname} // '') cmp ($w->{branchname} // '') ||
            ($v->{branchversion} // '') <=> ($w->{branchversion} // '') ||
            ($v->{subpatch} // '') cmp ($w->{subpatch} // '')
        )
    } elsif('v' eq $v->{prefix} and 'v' ne $w->{prefix}) {
        return 1;
    } elsif('v' ne $v->{prefix} and 'v' eq $w->{prefix}) {
        return -1;
    } else {
        return vsStringDebug($v) cmp vsStringDebug($w)
    }
}

sub vsStringDebug {
    my $v = shift;
    my $ret =
        ("[" . ($v->{prefix} // 'undef') . "]") .
        ($v->{major} // 'undef') . "." .
        ($v->{minor} // 'undef') . "." .
        ($v->{patch} // 'undef');
    $ret .= "-[$v->{subpatch}]" if defined $v->{subpatch};
    $ret .= "-test-" . ($v->{branchname} // 'undef') . "-" . ($v->{branchversion} // 'undef');
    return $ret
}

sub vsString {
    my $v = shift;
    my $ret =
        ($v->{major} // 0) . "." .
        ($v->{minor} // 0) . "." .
        ($v->{patch} // 0);
    $ret .= "-$v->{subpatch}" if defined $v->{subpatch};
    return $ret
}

sub vsJustVersion {
    my $v = shift;
    my $ret =
        ($v->{major} // 0) . "." .
        ($v->{minor} // 0) . "." .
        ($v->{patch} // 0);
    return $ret
}

sub vsTestVersion {
    my $v = shift;
    my $ret =
        'v' .
        ($v->{major} // 0) . "." .
        ($v->{minor} // 0) . "." .
        ($v->{patch} // 0) . "-test-" .
        # the separating dash is required so parseVersion can read the tag back
        ($v->{branchname} // 'a') . "-" .
        ($v->{branchversion} // '0.0.0');
    return $ret
}

#print vsStringDebug($_), "\n" for @oldVersions;

#print " << $_->[1] >>\n" for @keep;

my @versionsOrig = ();
if('-' eq $par{vcslog}) {
    @versionsOrig = <STDIN>;
    chomp for @versionsOrig
} else {
    @versionsOrig = split /\n/, `$par{vcslog}`;
}
my @versions = ();
for my $v(@versionsOrig) {
    if($v=~m#^(.*?\S)\s*::::\s*(.*?)\s*::::\s*(.*)#) {
        push @versions, {
            hash => $1,
            meta => $2,
            subject => $3
        }
    }
}

#print Data::Dumper::Dumper(\@versions);

my @change = ();
my $tag = undef;

my @versionPast = ();

VERSION: for my $v(@versions) {
    #if($v->{meta}=~m#tag\s*:\s*\Q$par{kind}\E(.*)\)#) {
    #    $tag=$1;
    #    last VERSION
    #}
    if($v->{meta}=~m#tag\s*:\s*([vtd]b?[0-9\.]+(?:-.*)?)\)#) {
        $v->{version} = $1;
        push @versionPast, $v->{version}
    }
    next if $v->{subject}=~m#^\s*(?:Merge (?:branch|remote)|Revert )#;
    if($v->{subject}=~m#^\s*([a-z]+)\s*(!?)\s*#) {
        my ($type, $break) = ($1, $2);
        if(exists $typeReact{$type}) {
            my $react = $typeReact{$type};
            next VERSION if 'null' eq $react;
            # record the reaction on the commit record itself so the tag
            # search further down can see it
            $v->{react} = $react;
            push @change, $v
        } else {
            warn "$0: cannot react on commit message '$v->{subject}', type '$type' unknown\n" if $par{v};
        }
    } else {
        warn "$0: commit message not parseable: $v->{subject}\n" if $par{v};
    }
}

#$tag = parseVersion($tag);

for my $r(reverse @change) {
    if('major' eq $r->{react}) {
        $tag->{major}++;
        $tag->{minor}=0;
        $tag->{patch}=0;
        $tag->{subpatch}=undef;
    } elsif('minor' eq $r->{react}) {
        $tag->{minor}++;
        $tag->{patch}=0;
        $tag->{subpatch}=undef;
    } elsif('patch' eq $r->{react}) {
        $tag->{patch}++;
        $tag->{subpatch}=undef;
    } else {
        die "$0: Cannot perform modification '$r->{react}' (probably internal error)"
    }
}

#print Data::Dumper::Dumper(\@change, $tag);
#for my $c(@change) {
#    print "==\n";
#    for my $k(sort keys %$c) {
#        print "  $k: $c->{$k}\n"
#    }
#    print "\n"
#}
#
#print "\n";
#for my $v(@versionPast) {
#    my $vv = vsStringDebug(parseVersion($v));
#    print "VERSION $v --> $vv\n"
#}

my @allVersions = split /\n/, `$par{vcstags}`;

my @sortAll = sort {vsCompare($b, $a)} @allVersions;
my @sortSee = sort {vsCompare($b, $a)} @versionPast;
#print "all: $sortAll[0] -- see: $sortSee[0]\n";
#
#print vsString($tag), "\n";

my $mainVersion = 'v' eq $par{kind};

my $highStart = $mainVersion ? $sortAll[0] : $sortSee[0];
my $highSee = $sortSee[0];
my %reactCollect = ();
SEARCHVERSION: for my $v(@versions) {
    # collect the reactions of all commits since the most recent tag visible
    # on this branch, then stop
    last SEARCHVERSION if $v->{version} and $highSee eq $v->{version};
    next unless $v->{react};
    $reactCollect{$v->{react}} = 1;
}

sub justVersionInc {
    my ($v, $react) = @_;
    my $vv = parseVersion($v);
    $vv->{patch}++ if $react->{patch};
    do {$vv->{minor}++; $vv->{patch}=0} if $react->{minor};
    do {$vv->{major}++; $vv->{minor}=0; $vv->{patch}=0} if $react->{major};
    return vsJustVersion($vv);
}

my $newVersion = undef;

if($mainVersion) {
    $newVersion = "v" . justVersionInc($highStart, \%reactCollect);
} else {
    my $v = parseVersion($highStart);
    if(exists $v->{branchname}) {
        $v->{branchversion} = justVersionInc($v->{branchversion} // '0.0.0', \%reactCollect);
    } else {
        $v->{branchname} = 'a';
        $v->{branchversion} = '0.0.0';
    }
    $newVersion = vsTestVersion($v);
}

my %allVersions = ();
for(@allVersions) {
    $allVersions{$_} = 1
}
while(exists $allVersions{$newVersion}) {
    if($mainVersion) {
        die "$0: probably internal error (collision in main version)\n"
    }
    my $v = parseVersion($newVersion);
    $v->{branchname} //= 'a';
    $v->{branchname}++;
    $newVersion = vsTestVersion($v);
}

print "$newVersion\n";
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# If we want a changelog
|
||||
if($par{changelog}) {
|
||||
#print "Changelog file: '$par{changelog}'\n";
|
||||
# TODO at the moment we only extend a changelog; starting with a fresh one is not supportet yet
|
||||
my $fh = undef;
|
||||
open($fh, '<', $par{changelog}) or die "Could not read changelog file '$par{changelog}', because: $!";
|
||||
my @changelog = <$fh>;
|
||||
close $fh;
|
||||
my %seen = ();
|
||||
my @sects = ([]);
|
||||
for(@changelog) {
|
||||
push @sects, [] if m/^## /;
|
||||
push @{$sects[-1]}, $_;
|
||||
if(m#/commit/([a-f0-9]+)\s*\)\s*\)\s*$#) {
|
||||
$seen{$1} = 1;
|
||||
}
|
||||
}
|
||||
my $head = shift @sects;
|
||||
#print Data::Dumper::Dumper($head);
|
||||
#print " << $sects[0][0] >>\n";
|
||||
if($sects[0][0]=~m/^##\s*\[([^\]\[]+)\]\(/ and $1 eq $newVersion) {
|
||||
shift @sects;
|
||||
}
|
||||
for my $s(@sects) {
|
||||
my $hh = $s->[0];
|
||||
chomp $hh;
|
||||
my $cnt = @$s;
|
||||
#print " $hh\n $cnt lines\n\n"
|
||||
}
|
||||
#print Data::Dumper::Dumper($versions[0]);
|
||||
for my $v(@versions) {
|
||||
#print Data::Dumper::Dumper($v);
|
||||
my $hash = $v->{hash};
|
||||
my $see = 'new';
|
||||
$see = 'old' if $seen{$hash};
|
||||
#print "$hash -> $see ($v->{subject})\n";
|
||||
}
|
||||
my $changelogout = $par{changelogout} || $par{changelog};
|
||||
my $changelogfh = undef;
|
||||
open($changelogfh, '>', $changelogout) or die "$0: Could not write '$changelogout', because: $!\n";
|
||||
my %extend = ();
|
||||
my %when = (
|
||||
'fix' => 'Bug Fixes',
|
||||
'hotfix' => 'Bug Fixes',
|
||||
'feat' => 'Features',
|
||||
'feature' => 'Features',
|
||||
);
|
||||
SELECTCHANGELOG: for my $v(@versions) {
|
||||
last SELECTCHANGELOG if $seen{$v->{hash}};
|
||||
next unless $v->{subject}=~m#^\s*([a-z]+)\s*(!?)\s*((?:\(.*?\))?)\s*:\s*(.*?)\s*$#i;
|
||||
my ($kind, $break, $context, $msg) = ($1, $2, $3, $4);
|
||||
my $where = $when{$kind};
|
||||
$where = 'BREAKING CHANGES' if '!' eq $break;
|
||||
next unless $where;
|
||||
my $short = substr $v->{hash}, 0, 7;
|
||||
my $contS = '';
|
||||
if($context=~m#\((.*)\)#) {
|
||||
$contS = "**$1:** ";
|
||||
}
|
||||
my $row = qq#* $contS$msg ([$short]($par{vcsurl}commit/$v->{hash}))#;
|
||||
push @{$extend{$where}}, {
|
||||
msg=>$msg,
|
||||
context=>$context,
|
||||
orig=>$v,
|
||||
row=>$row,
|
||||
};
|
||||
}
	#print Data::Dumper::Dumper(\%extend);
	my $preVersion = '';
	if($sects[0][0]=~m/^##\s*\[([^\]\[]+)\]\(/) {
		$preVersion = $1;
		$preVersion =~ s#^v?#v#;
	}
	my $today = do {
		my @time = localtime;
		my $year = $time[5]+1900;
		my $month = $time[4]+1;
		my $day = $time[3];
		sprintf("%04i-%02i-%02i", $year, $month, $day)
	};
	print $changelogfh qq!# Changelog

All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.

## [$newVersion]($par{vcsurl}/compare/$preVersion...$newVersion) ($today)

!;
	for my $variant('BREAKING CHANGES', 'Features', 'Bug Fixes') {
		my @all = map {$_->{row}} @{$extend{$variant}};
		next unless @all;
		my $msg = join "\n", @all;
		print $changelogfh qq/### $variant\n\n$msg\n\n/
	}
	for(@sects) {
		print $changelogfh $_ for @$_
	}
}

@ -1,14 +0,0 @@
#!/usr/bin/env bash

# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

#!/usr/bin/bash

mkdir -p $(dirname ${AWS_SHARED_CREDENTIALS_FILE})
cat > ${AWS_SHARED_CREDENTIALS_FILE} <<EOF
[default]
aws_access_key_id = $(cat /minio-gitlab-runner-cache/accesskey)
aws_secret_access_key = $(cat /minio-gitlab-runner-cache/secretkey)
EOF
12
.npmrc.gup
@ -1,12 +0,0 @@
#!/usr/bin/env bash

set -e

if command -V gup 1>&- 2>&-; then
	gup --always
fi

cat >${1:-.npmrc} <<EOF
@fortawesome:registry=https://npm.fontawesome.com/
//npm.fontawesome.com/:_authToken=${FONTAWESOME_NPM_AUTH_TOKEN}
EOF
9
.reuse/dep5
Normal file
@ -0,0 +1,9 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: FraDrive
Upstream-Contact: Sarah Vaupel <sarah.vaupel@uniworx.de>
Source: https://gitlab.uniworx.de/fradrive/fradrive

Files: assets/fonts/fradrive/*
Copyright: 2010 Google Corporation with Reserved Font Arimo, Tinos and Cousine
Copyright: 2012 Red Hat, Inc. with Reserved Font Name Liberation
License: OFL-1.1-RFN
24
CHANGELOG.md
@ -2,6 +2,30 @@

All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.

## [27.4.59](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v27.4.58...v27.4.59) (2024-02-13)


### Bug Fixes

* **sql:** remove potential bug in relation to missing parenthesis after not_ ([42695cf](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/42695cf5ef9f21691dc027f1ec97d57eec72f03e))

## [27.4.58](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v27.4.57...v27.4.58) (2024-02-08)


### Bug Fixes

* **health:** negative interface routes working as intended now ([3303c4e](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/3303c4eebf928e527d2f9c1eb6e2495c10b94b13))
* **lms:** previously failed notifications will be sent again ([263894b](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/263894b05899ce55635d790f5334729fbc655ecc))

## [27.4.57](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v27.4.56...v27.4.57) (2024-02-06)


### Bug Fixes

* **course:** fix [#147](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/147) abort add participant aborts now ([d332c0c](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/d332c0c11afd8b1dfe1343659f0b1626c968bbde))
* **health:** fix [#151](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/151) by offering route /health/interface/* ([c71814d](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/c71814d1ef1efc16c278136dfd6ebd86bd1d20db))
* **health:** fix [#153](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/153) and offer interface health route matching ([ce3852e](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/ce3852e3d365e62b32d181d58b7cbcc749e49373))

## [27.4.56](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v27.4.55...v27.4.56) (2023-12-20)

102
LICENSES/OFL-1.1-RFN.txt
Normal file
@ -0,0 +1,102 @@
Digitized data copyright (c) 2010 Google Corporation
with Reserved Font Arimo, Tinos and Cousine.
Copyright (c) 2012 Red Hat, Inc.
with Reserved Font Name Liberation.

This Font Software is licensed under the SIL Open Font License,
Version 1.1.

This license is copied below, and is also available with a FAQ at:
http://scripts.sil.org/OFL

SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007

PREAMBLE The goals of the Open Font License (OFL) are to stimulate
worldwide development of collaborative font projects, to support the font
creation efforts of academic and linguistic communities, and to provide
a free and open framework in which fonts may be shared and improved in
partnership with others.

The OFL allows the licensed fonts to be used, studied, modified and
redistributed freely as long as they are not sold by themselves.
The fonts, including any derivative works, can be bundled, embedded,
redistributed and/or sold with any software provided that any reserved
names are not used by derivative works. The fonts and derivatives,
however, cannot be released under any other type of license. The
requirement for fonts to remain under this license does not apply to
any document created using the fonts or their derivatives.


DEFINITIONS
"Font Software" refers to the set of files released by the Copyright
Holder(s) under this license and clearly marked as such.
This may include source files, build scripts and documentation.

"Reserved Font Name" refers to any names specified as such after the
copyright statement(s).

"Original Version" refers to the collection of Font Software components
as distributed by the Copyright Holder(s).

"Modified Version" refers to any derivative made by adding to, deleting,
or substituting -- in part or in whole --
any of the components of the Original Version, by changing formats or
by porting the Font Software to a new environment.

"Author" refers to any designer, engineer, programmer, technical writer
or other person who contributed to the Font Software.


PERMISSION & CONDITIONS

Permission is hereby granted, free of charge, to any person obtaining a
copy of the Font Software, to use, study, copy, merge, embed, modify,
redistribute, and sell modified and unmodified copies of the Font
Software, subject to the following conditions:

1) Neither the Font Software nor any of its individual components, in
Original or Modified Versions, may be sold by itself.

2) Original or Modified Versions of the Font Software may be bundled,
redistributed and/or sold with any software, provided that each copy
contains the above copyright notice and this license. These can be
included either as stand-alone text files, human-readable headers or
in the appropriate machine-readable metadata fields within text or
binary files as long as those fields can be easily viewed by the user.

3) No Modified Version of the Font Software may use the Reserved Font
Name(s) unless explicit written permission is granted by the
corresponding Copyright Holder. This restriction only applies to the
primary font name as presented to the users.

4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font
Software shall not be used to promote, endorse or advertise any
Modified Version, except to acknowledge the contribution(s) of the
Copyright Holder(s) and the Author(s) or with their explicit written
permission.

5) The Font Software, modified or unmodified, in part or in whole, must
be distributed entirely under this license, and must not be distributed
under any other license. The requirement for fonts to remain under
this license does not apply to any document created using the Font
Software.


TERMINATION
This license becomes null and void if any of the above conditions are not met.


DISCLAIMER
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER
DEALINGS IN THE FONT SOFTWARE.
370
Makefile
@ -1,117 +1,277 @@
db ?= -cf
SHELL=bash

export CONTAINER_COMMAND ?= podman
export IN_CONTAINER ?= false
export IN_CI ?= false
export WATCH
export db ?= -cf
export DEV_PORT_HTTP
export DEV_PORT_HTTPS

###########################
##### GENERAL TARGETS #####

.PHONY: help
help:
	@echo "Nothing to see here, go away"

.PHONY: build
build: frontend-build backend-build

#.PHONY: start
#start: frontend-build
#	$(MAKE) --jobs=2 frontend-watch backend-start

.PHONY: lint
lint: frontend-lint backend-lint

.PHONY: test
test: frontend-test backend-test i18n-check

.PHONY: backend-%
backend-%: .develop.env .stack-work.lock # --stack-work-$*
	$(MAKE) -- --backend-$*
#	( $(MAKE) -- --backend-$* || $(MAKE) -- --stack-krow-$* ) && ( $(MAKE) -- --stack-krow-$* )

.PHONY: --backend-build
--backend-build: .develop.env
	stack build --fast --profile --library-profiling --executable-profiling --flag uniworx:dev --flag uniworx:-library-only

#.PHONY: --backend-start
#--backend-start: .develop.env
#	./start.sh

.PHONY: backend-lint
backend-lint: .develop.env
	stack build --test --fast --flag uniworx:dev --flag uniworx:library-only uniworx:test:hlint

.PHONY: --backend-test
--backend-test: .develop.env
	stack build --test --coverage --fast --flag uniworx:dev --flag uniworx:library-only

.PHONY: i18n-check
i18n-check:
	./missing-translations.sh
	@echo No missing translations.

.PHONY: database
database: .develop.env .stack-work.lock # --stack-work-build
#	$(MAKE) -- --database
	( $(MAKE) -- --database || $(MAKE) -- --stack-krow-build ) && ( $(MAKE) -- --stack-krow-build )

.PHONY: --database
--database: .develop.env
	stack build --fast --flag uniworx:-library-only --flag uniworx:dev
	export SERVER_SESSION_ACID_FALLBACK=$${SERVER_SESSION_ACID_FALLBACK:-true}
	stack exec uniworxdb -- $(db)

.PHONY: frontend-%
frontend-%: node_modules
	$(MAKE) -- --frontend-$*

.PHONY: --frontend-build
--frontend-build:
	npx -- webpack --progress $(WATCH)

.PHONY: --frontend-watch
--frontend-watch: WATCH=--watch
--frontend-watch: --frontend-build

.PHONY: --frontend-lint
--frontend-lint: .eslintrc.json
	npx -- eslint frontend/src $(FIX)
	@echo Hooray! There are no hints.

.PHONY: --frontend-test
--frontend-test: karma.conf.js
	npx -- karma start --conf karma.conf.js $(WATCH)

.PHONY: --frontend-test-watch
--frontend-test-watch: WATCH=--single-run false
--frontend-test-watch: --frontend-test

node_modules: .npmrc package.json
	npm ci --cache .npm --prefer-offline

.npmrc:
	command gup .npmrc
.PHONY: all
all:
	@echo "TODO"

.PHONY: clean
clean:
	rm -rf node_modules
	rm -rf .stack-work .stack-work-build .stack-work-run .stack-work-test .stack-work-doc
	rm -rf node_modules .npm .cache assets/icons well-known static/
	rm -rf .stack .stack-work .stack-work-build .stack-work-run .stack-work-test .stack-work-doc
	rm -rf bin/

.PHONY: release
release:
	./.gitlab-ci/version.pl -changelog CHANGELOG.md
	git add CHANGELOG.md
	VERSION=`.gitlab-ci/version.pl` ; \
	git tag $${VERSION} ; \
	git commit -m "chore(release): $${VERSION}" ; \
	git push

##### GENERAL TARGETS #####
###########################


############################################
##### UNIFIED FRONTEND/BACKEND TARGETS #####

.PHONY: serve
serve:
	$(MAKE) serve-database &
	$(MAKE) serve-frontend &
	$(MAKE) serve-backend

.PHONY: compile
compile: compile-frontend compile-backend

.PHONY: lint
lint: lint-frontend lint-backend

.PHONY: test
test: test-frontend test-backend i18n-check

##### UNIFIED FRONTEND/BACKEND TARGETS #####
############################################


############################
##### FRONTEND TARGETS #####

.PHONY: %-frontend
%-frontend: FRADRIVE_SERVICE=frontend
%-frontend: --image-build --containerized-%-frontend;

.PHONY: --%-frontend
#--%-frontend: node_modules well-known;
#--%-frontend: --containerized---node_modules-frontend --containerized---well-known-frontend;
--%-frontend: --containerized---frontend-dependencies-frontend;

.PHONY: --compile-frontend
--compile-frontend:
	npx -- webpack --progress $(WATCH)

.PHONY: --serve-frontend
--serve-frontend: WATCH=--watch
--serve-frontend: --compile-frontend;

.PHONY: --lint-frontend
--lint-frontend: eslint.config.js
	npx -- eslint frontend/src $(FIX)
	@echo Hooray! There are no hints.

.PHONY: --test-frontend
--test-frontend: karma.conf.cjs
	@echo Karma frontend tests are currently broken after npm update and have therefore been temporarily disabled.
#	npx -- karma start --conf karma.conf.cjs $(WATCH)

## TODO: rewrite
#.PHONY: --test-frontend-watch
#--test-frontend-watch: WATCH=--single-run false
#--test-frontend-watch: --test-frontend;
#
#node_modules: package.json package-lock.json
#	$(MAKE) -- --containerized---node_modules-frontend
#.PHONY: --node_modules
#--node_modules: package.json package-lock.json
#	npm ci --cache .npm --prefer-offline
#
#package-lock.json: package.json
#	$(MAKE) -- --image-run---package-lock.json
#.PHONY: --package-lock.json
#--package-lock.json: package.json
#	npm install --cache .npm --prefer-offline
#
#assets: node_modules
#	$(MAKE) -- --image-run---assets
#.PHONY: --assets/icons
#--assets: node_modules
#	./utils/renamer.pl node_modules/@fortawesome/fontawesome-free/svgs/solid utils/rename-fa.json assets/icons/fradrive
#	./utils/renamer.pl node_modules/@fortawesome/fontawesome-free/svgs/regular utils/rename-fa.json assets/icons/fradrive
#
#well-known: node_modules assets well-known/.well-known
#	$(MAKE) -- --containerized---well-known-frontend
#.PHONY: --well-known
#--well-known: --node_modules assets well-known/.well-known
#	npx webpack --progress
## mark well-known directory as clean after successful webpack run:
#	touch well-known/.well-known

.PHONY: --frontend-dependencies
--frontend-dependencies: node_modules package.json package-lock.json assets

node_modules: package.json package-lock.json
	npm ci --cache .npm --prefer-offline
package-lock.json: package.json
	npm install --cache .npm --prefer-offline
assets: node_modules
	./utils/renamer.pl node_modules/@fortawesome/fontawesome-free/svgs/solid utils/rename-fa.json assets/icons/fradrive
	./utils/renamer.pl node_modules/@fortawesome/fontawesome-free/svgs/regular utils/rename-fa.json assets/icons/fradrive
well-known: node_modules assets well-known/.well-known
	npx webpack --progress
	touch well-known/.well-known


well-known/.well-known:
	# no-op target

##### FRONTEND TARGETS #####
############################


###########################
##### BACKEND TARGETS #####

.PHONY: %-backend
%-backend: FRADRIVE_SERVICE=backend
%-backend: --image-build --containerized-%-dev-backend;

.PHONY: %-prod-backend
%-prod-backend: FRADRIVE_SERVICE=backend
%-prod-backend: --image-build --containerized-%-prod-backend;

.PHONY: --%-dev-backend
--%-dev-backend: FRADRIVE_SERVICE=backend
--%-dev-backend: stackopts=--flag uniworx:dev
--%-dev-backend: --image-build --containerized-%-backend;

.PHONY: --%-prod-backend
--%-prod-backend: FRADRIVE_SERVICE=backend
--%-prod-backend: stackopts=--flag uniworx:-dev
--%-prod-backend: --image-build --containerized-%-backend;

.PHONY: serve-backend
serve-backend:
	export DEV_PORT_HTTP=`netstat -tulan | perl -le 'use strict;use warnings;my %p=();my $addr=qr((?:\d+.\d+.\d+.\d+|[0-9a-f:]+));while(<>){ if(m#$addr:(\d+)\s+$addr:(?:\d+|\*)\s+#) { $p{$1}=1 }}; my $port = 3000; $port++ while $p{$port}; print $port'` ; \
	export DEV_PORT_HTTPS=`netstat -tulan | perl -le 'use strict;use warnings;my %p=();my $addr=qr((?:\d+.\d+.\d+.\d+|[0-9a-f:]+));while(<>){ if(m#$addr:(\d+)\s+$addr:(?:\d+|\*)\s+#) { $p{$1}=1 }}; my $port = 3443; $port++ while $p{$port}; print $port'` ; \
	echo "a bit more: ${DEV_PORT_HTTP}" ; \
	$(MAKE) -- --containerized---serve-dev-backend DEV_PORT_HTTP=${DEV_PORT_HTTP} DEV_PORT_HTTPS=${DEV_PORT_HTTPS}
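# Readable sketch of the inline port scan above (a hypothetical standalone
# Perl script, not invoked anywhere): feed it `netstat -tulan` on stdin and
# it prints the first free port at or above 3000. Dots in the address
# pattern are escaped here, unlike in the terser one-liner.
#
#   my %in_use;
#   my $addr = qr/(?:\d+\.\d+\.\d+\.\d+|[0-9a-f:]+)/;
#   while (<STDIN>) { $in_use{$1} = 1 if m/$addr:(\d+)\s+$addr:(?:\d+|\*)\s+/ }
#   my $port = 3000;
#   $port++ while $in_use{$port};
#   print "$port\n";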
.PHONY: --serve-dev-backend
--serve-dev-backend:
	./start.sh

.PHONY: --compile-backend
--compile-backend:
	stack build --fast --profile --library-profiling --executable-profiling --flag uniworx:-library-only --copy-bins --local-bin-path $$(pwd)/bin $(stackopts)

.PHONY: --lint-backend
--lint-backend:
	stack build --test --fast --flag uniworx:library-only uniworx:test:hlint $(stackopts)

.PHONY: --test-backend
--test-backend:
	stack build --test --coverage --fast --flag uniworx:library-only $(stackopts)

# TODO: maybe deprecated
.PHONY: database
database: .stack .stack-work.lock compile-backend-prod
	export SERVER_SESSION_ACID_FALLBACK=$${SERVER_SESSION_ACID_FALLBACK:-true}
	export AVSPASS=$${AVSPASS:-nopasswordset}
	stack exec uniworxdb -- $(db)

.PHONY: serve-database
serve-database: --containerized-database

.PHONY: .stack
.stack: stack.yaml stack.yaml.lock
.stack:
	$(MAKE) -- --image-run---.stack
--.stack: stack.yaml stack.yaml.lock
	stack build --fast --only-dependencies $(stackopts)

.stack-work.lock:
	[ "$${FLOCKER}" != "$$0" ] && exec env FLOCKER="$$0" flock -en .stack-work.lock "$$0" "$$@" || :

##### BACKEND TARGETS #####
###########################


#############################
##### CONTAINER TARGETS #####

.PHONY: --containerized-database
--containerized-database: FRADRIVE_SERVICE=database
--containerized-database: --image-build
	if [ "$(IN_CONTAINER)" == "false" ] ; then \
		$(CONTAINER_COMMAND) run fradrive/$(FRADRIVE_SERVICE) ; \
	fi

.PHONY: --containerized-%-frontend
--containerized-%-frontend: FRADRIVE_SERVICE=frontend
#--containerized-%-frontend: --image-build --image-run-%-frontend;
--containerized-%-frontend: --image-build
	if [ "$(IN_CONTAINER)" == "false" ] ; then \
		$(MAKE) -- --image-run-$*-frontend ; \
	fi

.PHONY: --containerized-%-backend
--containerized-%-backend: FRADRIVE_SERVICE=backend
#--containerized-%-backend: --image-build --image-run-%-backend;
--containerized-%-backend: --image-build
	if [ "$(IN_CONTAINER)" == "false" ] ; then \
		$(MAKE) -- --image-run-$*-backend ; \
	fi

.PHONY: image-rebuild
image-rebuild-%:
	$(MAKE) -- --image-build FRADRIVE_SERVICE=$* NO_CACHE=--no-cache
.PHONY: --image-build
--image-build:
	rm -f .Dockerfile
	ln -s docker/$(FRADRIVE_SERVICE)/Dockerfile .Dockerfile
	MOUNT_DIR=/mnt/fradrive; \
	PROJECT_DIR=/mnt/fradrive; \
	if [ "$(IN_CI)" == "true" ] ; then \
		PROJECT_DIR=/fradrive; \
	fi; \
	if [ "$(IN_CONTAINER)" == "false" ] ; then \
		$(CONTAINER_COMMAND) build $(NO_CACHE) -v $(PWD):$${MOUNT_DIR} --env IN_CONTAINER=true --build-arg MOUNT_DIR=$${MOUNT_DIR} --build-arg PROJECT_DIR=$${PROJECT_DIR} --tag fradrive/$(FRADRIVE_SERVICE) --file $(PWD)/.Dockerfile ; \
	fi

.PHONY: --image-run-%
--image-run-%:
ifeq ($(IN_CONTAINER),true)
	$(MAKE) -- $*
else
	$(CONTAINER_COMMAND) run -v $(PWD):/mnt/fradrive --env FRADRIVE_MAKE_TARGET=$* fradrive/$(FRADRIVE_SERVICE)
endif
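# (Outside a container, --image-run-% starts the matching fradrive/* image
# and hands the requested target to its entrypoint via FRADRIVE_MAKE_TARGET;
# inside one, it simply runs the target directly.)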

##### CONTAINER TARGETS #####
#############################


.PHONY: i18n-check
i18n-check: --image-run---i18n-check
.PHONY: --i18n-check
--i18n-check:
	./missing-translations.sh
	@echo No missing translations.

.PHONY: %.lock
%.lock:
	[ -e $@ ] || touch $@
	flock -en $@ true
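# What `flock -en $@ true` above verifies, sketched in Perl (hypothetical,
# not wired into any target): taking an exclusive non-blocking lock fails
# exactly when another process still holds the lock file.
#
#   use Fcntl qw(:flock);
#   open(my $fh, '<', '.stack-work.lock') or die "open: $!";
#   flock($fh, LOCK_EX | LOCK_NB) or die "lock is held elsewhere\n";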

.PHONY: .develop.env
.develop.env:
	[[ ! -z "$(CI)" || ! -z "$$PORT_OFFSET" ]] || ( echo "Not in develop!" 1>&2 ; exit 1 )

# .PHONY: --stack-work-%
# --stack-work-%: .stack-work.lock
# 	[[ -d .stack-work && -d .stack-work-$* ]] && (echo ".stack-work collides with .stack-work-$*, please resolve manually" 1>&2; exit 1) || :
# 	[[ ! -d .stack-work-$* ]] || mv .stack-work-$* .stack-work
#
# .PHONY: --stack-krow-%
# --stack-krow-%:
# 	[[ ! -d .stack-work-$* ]] || ( echo ".stack-work-$* already exists, please resolve manually" 1>&2 ; exit 1 )
# 	mv .stack-work .stack-work-$*
# 	flock -u .stack-work.lock true

# watch:
# 	while true; do \
#
# start: frontend-build frontend-build-watch yesod-start
# 	run-s frontend:build --parallel "frontend:build:watch" "yesod:start"
16
assets/fonts/fradrive/AUTHORS
Normal file
@ -0,0 +1,16 @@
AUTHORS

Current Contributors (sorted alphabetically):

- Vishal Vijayraghavan <vishalvvr at fedoraproject dot org>
  Project Owner/ Maintainer (Current)
  Red Hat, Inc.

Previous Contributors
- Pravin Satpute <psatpute at redhat dot com>
  Project Owner/ Maintainer
  Red Hat, Inc.

- Steve Matteson
  Original Designer
  Ascender, Inc.
79
assets/fonts/fradrive/ChangeLog
Normal file
@ -0,0 +1,79 @@
* Thu Sep 30 2021 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
- Releasing liberation-fonts 2.1.5 version
- Resolves #40: More fixes to Superscript and subscript Numbers missing issues
-- fixed inconsistent weight, missing glyphs and GSUB issues

* Tue May 04 2021 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
- Releasing liberation-fonts 2.1.4 version
- Resolves #40: Superscript and subscript Numbers missing
- Resolves #24: Gender symbol are inconsistent in Sans

* Tue Feb 23 2021 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
- Releasing liberation-fonts 2.1.3 version
- Resolves #37: U+2262,2669,266C too wide
- Resolves Bugzilla #1464310: Tilded G not works with Liberation Sans and Serif

* Mon Dec 21 2020 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
- Releasing liberation-fonts 2.1.2 version
- Resolves #25: liberation-fonts doesn't have proper <==> symbol
- Resolves #33: Liberation Mono: U+20BF is too wide
- Resolves #14: Liberation mono fonts are not recognized as monospace by fontconfig and cairo

* Wed Jun 03 2020 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
- Releasing liberation-fonts 2.1.1 version
- Few more fixes to Bugzilla #1072095: Liberation Sans renders most Latin combining characters incorrectly

* Mon Feb 10 2020 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
- Releasing liberation-fonts 2.1.0 version
- Updated release versioning scheme
- Resolved Bugzilla #1072095: Liberation Sans renders most Latin combining characters incorrectly
- Resolved Pagure issue-11: Combining diaerasis below does not work except U
- Resolved GitHub issue-19: Incorrect glyph name mapped to unicode
- Resolved Pagure issue-5: Incorrect glyph of Cent sign (U+00A2) in Sans and Mono style
- Resolved Pagure issue-28 : U+25D2 and U+25D3 circle with lower / upper half black are backwards

* Mon Mar 4 2019 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
- Releasing liberation-fonts 2.00.5 version
- Resolved issue-10: Improving lowercase Cyrillic glyphs localized for Macedonian and Serbian, Patch fix by Dimitrij Mijoski
- Resolved #1014357: U+266B incorrect glyph with extra beam
-- Added two new glyphs U+266C and U+2669
- Resolved issue-13: COMBINING LONG SOLIDUS OVERLAY (U+0338) not centred on base character.
- Validated Missing Points at Extrema, Non-integral coordinates, Wrong Direction issues for newly added and existing glyphs

* Mon Nov 05 2018 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
- Releasing liberation-fonts 2.00.4 version, it includes few bug fixes and enhancements as follows:
-- Added Bitcoin sign #1533798
-- Fixed Incorrect lowercase Cyrillic BE for Macedonian language in liberation v2 (look like Greek delta), Patch fix by Dimitrij Mijoski #1574410
-- Fixed Liberation Sans Mono Enhancement Request: Modification needed for "l" Character, Patch fix by Nikolaus Waxweiler #1574410

* Tue Sep 18 2018 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
- Resolved #1574410: Incorrect lowercase Cyrillic BE for Macedonian language in liberation v2 (look like Greek delta)
- Patch fix by Dimitrij Mijoski: https://pagure.io/liberation-fonts/pull-request/21
- Updated LiberationMono-Bold, LiberationMono-Regular, LiberationSans-Bold, LiberationSans-Regular, LiberationSerif-Bold, LiberationSerif-Regular

* Thu May 17 2018 Pravin Satpute <psatpute AT redhat DOT com> - 2.00.3
- Releasing liberation-fonts 2.00.3 version, it includes fix for few bugs.
- This release was pending from long time, will work on other open bugs
post this release.

* Tue Oct 14 2014 Pravin Satpute <psatpute AT redhat DOT com>
- Resolved #1096336: Liberation 2.00.x missing unicode hyphen (U+2010)
- Added U+2007 character in Liberation Mono
- Imported missing gpos tables from Arimo #1072095
- Missing MIDDLE DOT (u+00B7) glyph for Liberation Sans Italic #1084493
- Rendering of Unicode tie bars could be improved #1076190

* Thu Oct 04 2012 Pravin Satpute <psatpute AT redhat DOT com>
- Resolved "Glyphs with multiple unicode encodings inhibit subsetting" #851790
- Resolved #851791, #854601 and #851825
- Following GASP table version as per Liberation old version. (Anti-aliasing disabled)
- Added support for Serbian glyphs for wikipedia #657849
- In Monospace fonts, isFixedPitch bit set via script for getting it recognized as Monospace in putty.exe

* Fri Jul 06 2012 Pravin Satpute <psatpute AT redhat DOT com>
- Initial version of Liberation fonts based on croscore fonts version 1.21.0
- Converted TTF files into SFD files to be open source.
- Update Copyright and License file
- set fsType bit to 0, Installable Embedding is allowed.
- Absolute value in HHeadAscent/Descent values for maintaining Metric compatibility.
BIN
assets/fonts/fradrive/FRADriveMono-Bold.ttf
Normal file
Binary file not shown.
BIN
assets/fonts/fradrive/FRADriveMono-BoldItalic.ttf
Normal file
Binary file not shown.
BIN
assets/fonts/fradrive/FRADriveMono-Italic.ttf
Normal file
Binary file not shown.
BIN
assets/fonts/fradrive/FRADriveMono-Regular.ttf
Normal file
Binary file not shown.
BIN
assets/fonts/fradrive/FRADriveSans-Bold.ttf
Normal file
Binary file not shown.
BIN
assets/fonts/fradrive/FRADriveSans-BoldItalic.ttf
Normal file
Binary file not shown.
BIN
assets/fonts/fradrive/FRADriveSans-Italic.ttf
Normal file
Binary file not shown.
BIN
assets/fonts/fradrive/FRADriveSans-Regular.ttf
Normal file
Binary file not shown.
86
assets/fonts/fradrive/README.md
Normal file
@ -0,0 +1,86 @@
Liberation Fonts
=================

The Liberation Fonts are a font collection which aims to provide document
layout compatibility with Times New Roman, Arial, and Courier New.


Requirements
=================

* [fontforge](http://fontforge.sourceforge.net)
* [python fonttools](https://pypi.org/project/fonttools/)


Install
============

1. Get the source
```
$ git clone https://github.com/liberationfonts/liberation-fonts.git
```

- Or download the tar.gz file from [releases](https://github.com/fontforge/fontforge/releases).

- Extract the tar file:
```
$ tar zxvf liberation-fonts-[VERSION].tar.gz
```
2. Build from the source
```
$ cd liberation-fonts or $ cd liberation-fonts-[VERSION]
$ make
```
The binary font files will be available in the 'liberation-fonts-ttf-[VERSION]' directory.

3. Install to system

Fedora Users :
- One can manually install the fonts by copying the TTFs to `~/.fonts` for user-wide usage
- and/or to `/usr/share/fonts/liberation` for system-wide availability.
- Then run `fc-cache` to refresh the font cache.

Other distributions :
please check the corresponding documentation.


Usage
==========

Simply select the preferred Liberation font in your application and start using it.


License
============

This Font Software is licensed under the SIL Open Font License,
Version 1.1.

Please read the file "LICENSE" for details.


For Maintainers
====================

1. Before packaging a new release based on a new source tarball, you have to update the version suffix in the Makefile:
```
VER = [NEW_VERSION]
```
2. After updating the Makefile VER attribute, update all font metadata by executing:
```
$ make versionupdate
```
You can verify the changes using ftinfo/otfinfo or fontforge itself.
3. It is highly recommended that the file 'ChangeLog' is updated to reflect the changes.

4. Create a tarball with the following command:
```
$ make dist
```
The new versioned tarball will be available in the dist/ folder as `liberation-fonts-[NEW_VERSION].tar.gz`.
5. Create a GitHub tag for [NEW_VERSION] and upload the dist tarball.

Credits
============

Please read the file "AUTHORS" for the list of contributors.
4
assets/fonts/fradrive/TODO
Normal file
@ -0,0 +1,4 @@
Here are the todos for the next release
1) Serbian glyph for wikipedia https://bugzilla.redhat.com/show_bug.cgi?id=657849
2) Liberation Mono not recognizing as Mono in Windows application #861003
   - presently it is a patch, we have to update zero width characters to fixed width
23
babel.config.cjs
Normal file
@ -0,0 +1,23 @@
module.exports = (api) => {
  api.cache(true);
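  // (api.cache(true) marks this config as stable across builds, letting
  // Babel compute it once and reuse the cached result.)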

  const presets = [
    [ '@babel/preset-env'
    ]
  ];

  const plugins = [
    ["@babel/plugin-proposal-decorators", { "legacy": true }],
    ["@babel/plugin-syntax-dynamic-import"],
    ["@babel/plugin-transform-class-properties", { "loose": true }],
    ["@babel/plugin-transform-private-methods", { "loose": true }],
    ["@babel/plugin-transform-private-property-in-object", { "loose": true }],
    ["@babel/plugin-transform-modules-commonjs"],
    ["@babel/transform-runtime"],
  ];

  return {
    presets,
    plugins,
  };
}
34
compose.yaml
Normal file
@ -0,0 +1,34 @@
services:
  frontend:
    # image: registry.uniworx.de/fradrive/fradrive/frontend # TODO: reference to current branch required; how to do that here?
    # pull_policy: if_not_present
    build:
      dockerfile: ./docker/frontend/Dockerfile
      context: .
    environment:
      - PROJECT_DIR=/fradrive
    volumes:
      - &fradrive-mnt .:/tmp/fradrive
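      # (&fradrive-mnt names this bind mount as a YAML anchor; the backend
      # service below reuses the identical mount via *fradrive-mnt.)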
  backend:
    # image: registry.uniworx.de/fradrive/fradrive/backend
    # pull_policy: if_not_present
    build:
      dockerfile: ./docker/backend/Dockerfile
      context: ./
    volumes:
      - *fradrive-mnt
    depends_on:
      - frontend
  database:
    # image: registry.uniworx.de/fradrive/fradrive/database
    # pull_policy: if_not_present
    build: ./docker/database
    ports:
      - "9876:5432"
    # privileged: true

# driver: local
# driver_opts:
#   type: none
#   o: bind
#   device: ./
29
docker/backend/Dockerfile
Normal file
@ -0,0 +1,29 @@
FROM registry.uniworx.de/uniworx/containers/debian:12.5

RUN apt-get -y update && apt-get -y install haskell-stack git
RUN apt-get -y update && apt-get -y install alex g++ happy libghc-zlib-dev libpq-dev libsodium-dev locales locales-all pkg-config
RUN apt-get -y update && apt-get -y install llvm

ENV LANG=en_US.UTF-8

# Locally these two should be identical, so that compilation results are written out into the mounted file tree.
# In CI pipelines these two should differ, so that the container caches the compilation results.
ARG MOUNT_DIR=/mnt/fradrive
ARG PROJECT_DIR=/fradrive
RUN mkdir -p ${PROJECT_DIR}
RUN if [ "${PROJECT_DIR}" != "${MOUNT_DIR}" ] ; then cp -r ${MOUNT_DIR}/* ${PROJECT_DIR} ; fi
WORKDIR ${PROJECT_DIR}
ENV HOME=${PROJECT_DIR}
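# Illustration (hypothetical invocations, mirroring the Makefile's
# --image-build target): a local build passes identical directories so
# compile artifacts land in the bind mount; CI keeps the default
# PROJECT_DIR inside the image so the layer cache holds the artifacts.
#
#   podman build --build-arg MOUNT_DIR=/mnt/fradrive --build-arg PROJECT_DIR=/mnt/fradrive ...
#   podman build --build-arg MOUNT_DIR=/mnt/fradrive --build-arg PROJECT_DIR=/fradrive ...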

RUN mkdir -p /.stack
ENV STACK_ROOT=/.stack

RUN make .stack STACK_ROOT=${STACK_ROOT} IN_CONTAINER=true
RUN stack build yesod-bin

ENV FRADRIVE_MAKE_TARGET=serve-backend
ENTRYPOINT make -- ${FRADRIVE_MAKE_TARGET} STACK_ROOT=${STACK_ROOT} IN_CONTAINER=true

# expose the develop port
ENV DEV_PORT=3000
EXPOSE ${DEV_PORT}/tcp
1
docker/backend/dev_port.sh
Normal file
@ -0,0 +1 @@
netstat -tulan | perl -le 'use strict;use warnings;my %p=();my $addr=qr((?:\d+.\d+.\d+.\d+|[0-9a-f:]+));while(<>){ if(m#$addr:(\d+)\s+$addr:(?:\d+|\*)\s+#) { $p{$1}=1 }}; my $port = 3000; $port++ while $p{$port}; print $port'
19
docker/database/Dockerfile
Normal file
@ -0,0 +1,19 @@
FROM registry.uniworx.de/fradrive/fradrive/postgres:12

# Allow connecting to the database without password authentication
ENV POSTGRES_HOST_AUTH_METHOD=trust

RUN cp /mnt/fradrive/docker/database/schema.sql /schema.sql && chown postgres:postgres /schema.sql
RUN cp /mnt/fradrive/docker/database/initdb.sh /etc/fradrive-db && chmod 755 /etc/fradrive-db

USER postgres

# Copying postgresql.conf and pg_hba.conf resulted in an error ("Invalid data directory" or similar); using -o/--options in initdb.sh instead
# COPY --chown=postgres:postgres --chmod=644 ./postgresql.conf /etc/postgresql/12/main/postgresql.conf
# COPY --chown=postgres:postgres --chmod=644 ./pg_hba.conf /etc/postgresql/12/main/pg_hba.conf
# ADD ./schema.sql /schema.sql
# ADD --chmod=755 ./initdb.sh /etc/fradrive-db

ENTRYPOINT /etc/fradrive-db
EXPOSE $DEV_PORT_HTTP/tcp
EXPOSE $DEV_PORT_HTTPS/tcp
14
docker/database/initdb.sh
Normal file
@ -0,0 +1,14 @@
#!/bin/bash

# Init and start the postgres daemon
initdb --no-locale
pg_ctl start -w -o "-c listen_addresses=0.0.0.0 -c unix_socket_permissions=0700 -c max_connections=9990 -c shared_preload_libraries=pg_stat_statements -c session_preload_libraries=auto_explain -c auto_explain.log_min_duration=100ms" # COPY of postgresql.conf and pg_hba.conf resulted in error (Invalid data directory)
POSTGRID=`cat /var/lib/postgresql/data/postmaster.pid | perl -le '<>=~m#(\d+)# and print $1'`

# Create uniworx and uniworx_test database
psql -f /schema.sql postgres

# Wait for postgres daemon to terminate
while [ -e /proc/$POSTGRID ]; do
	sleep 0.5;
done
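# (Polling /proc/$POSTGRID keeps this entrypoint -- and with it the
# container -- alive exactly as long as the postgres daemon runs.)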
1
docker/database/pg_hba.conf
Normal file
@ -0,0 +1 @@
local all all trust
6
docker/database/postgresql.conf
Normal file
@ -0,0 +1,6 @@
listen_addresses=0.0.0.0
unix_socket_permissions=0700
max_connections=9990
shared_preload_libraries=pg_stat_statements
session_preload_libraries=auto_explain
auto_explain.log_min_duration=100ms
5
docker/database/schema.sql
Normal file
@ -0,0 +1,5 @@
CREATE USER uniworx WITH SUPERUSER;
CREATE DATABASE uniworx_test;
GRANT ALL ON DATABASE uniworx_test TO uniworx;
CREATE DATABASE uniworx;
GRANT ALL ON DATABASE uniworx TO uniworx;
31
docker/fradrive/Dockerfile
Normal file
@ -0,0 +1,31 @@
FROM debian:12.5

RUN apt-get -y update

# setup locales
RUN apt-get -y install locales locales-all
RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && \
    locale-gen
ENV LANG=en_US.UTF-8 LANGUAGE=en_US:en LC_ALL=en_US.UTF-8

# Binary runtime dependencies
# TODO: minimize texlive dependencies, switch to basic schemes where possible
RUN apt-get -y install texlive-latex-recommended texlive-latex-extra texlive-luatex texlive-fonts-recommended texlive-fonts-extra texlive-lang-english texlive-lang-german

# Add uniworx user and directories
RUN mkdir -p /var/lib
RUN mkdir -p /var/log
RUN groupadd -r uniworx
RUN useradd -r -g uniworx -d /var/lib/uniworx -M uniworx --uid 999
RUN mkdir -p /var/lib/uniworx && chown -R uniworx:uniworx /var/lib/uniworx
RUN mkdir -p /var/log/uniworx && chown -R uniworx:uniworx /var/log/uniworx

# TODO: is this still needed?
# RUN install -d -g uniworx -o uniworx -m 0750 /var/lib/uniworx
# RUN install -d -g uniworx -o uniworx -m 0755 /var/log/uniworx
RUN cp /tmp/uniworx-bin/uniworx /usr/bin/uniworx

USER uniworx
ENTRYPOINT fradrive-entrypoint.sh
EXPOSE 8080/tcp
VOLUME /var/lib/uniworx /var/log
19
docker/fradrive/fradrive-entrypoint.sh
Normal file
@ -0,0 +1,19 @@
#!/bin/bash -xe

cTime=$(date -Is)

# export LOGDEST=/var/log/uniworx/${cTime}.log # kubernetes prefers log via stdout

typeset -a configs

configDir=${CONFIG_DIR-/cfg}
configs=()
if [[ -d "${configDir}" ]]; then
	while IFS= read -d $'\0' cfg; do
		configs+=("${cfg}")
	done < <(find "${configDir}" \( -name '*.yml' -o -name '*.yaml' \) -print0 | sort -rz)
fi
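# (The loop above collects all *.yml/*.yaml files under ${configDir} in
# reverse name order; NUL delimiters keep paths with spaces intact.)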

cd /var/lib/uniworx

exec -- uniworx "${configs[@]}"
31
docker/frontend/Dockerfile
Normal file
@ -0,0 +1,31 @@
FROM registry.uniworx.de/uniworx/containers/debian:12.5

# Basic dependencies
RUN apt-get -y update && apt-get -y install curl npm

# Build and watch dependencies
RUN apt-get -y update && apt-get -y install exiftool

# Test dependencies
RUN apt-get -y update && apt-get -y install chromium
ENV CHROME_BIN=chromium

# TODO: use dotenv for npm version?
RUN npm install -g n
RUN n 20.17.0

# Locally these two should be identical, so that compilation results are written out into the mounted file tree.
# In CI pipelines these two should differ, so that the container caches the compilation results.
ARG MOUNT_DIR=/mnt/fradrive
ARG PROJECT_DIR=/fradrive
RUN mkdir -p ${PROJECT_DIR}
RUN if [ "${PROJECT_DIR}" != "${MOUNT_DIR}" ] ; then cp -r ${MOUNT_DIR}/* ${PROJECT_DIR} ; fi
WORKDIR ${PROJECT_DIR}
ENV HOME=${PROJECT_DIR}

#RUN make node_modules IN_CONTAINER=true
#RUN make well-known IN_CONTAINER=true
RUN make -- --frontend-dependencies

ENV FRADRIVE_MAKE_TARGET=watch-frontend
ENTRYPOINT make -- ${FRADRIVE_MAKE_TARGET} IN_CONTAINER=true CHROME_BIN=${CHROME_BIN}
33
docker/podman/Dockerfile
Normal file
@ -0,0 +1,33 @@
# Debian-based podman daemon image for building docker images
# inside docker containers (e.g. gitlab runners).
#
# Yoinked with love from:
# https://www.redhat.com/sysadmin/podman-inside-container

FROM registry.uniworx.de/uniworx/containers/debian:12.5

RUN apt-get -y update

RUN apt-get -y install make podman podman-compose fuse-overlayfs

RUN useradd podman; \
    echo podman:10000:5000 > /etc/subuid; \
    echo podman:10000:5000 > /etc/subgid;
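# (The subuid/subgid entries above hand the unprivileged 'podman' user a
# 5000-wide UID/GID range starting at 10000, which rootless podman needs
# for its user namespaces.)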

VOLUME /var/lib/containers
VOLUME /home/podman/.local/share/containers

ADD https://raw.githubusercontent.com/containers/image_build/main/podman/containers.conf /etc/containers/containers.conf
ADD https://raw.githubusercontent.com/containers/image_build/main/podman/podman-containers.conf /home/podman/.config/containers/containers.conf

RUN chown podman:podman -R /home/podman

# chmod containers.conf and adjust storage.conf to enable Fuse storage.
# RUN chmod 644 /etc/containers/containers.conf; sed -i -e 's|^#mount_program|mount_program|g' -e '/additionalimage.*/a "/var/lib/shared",' -e 's|^mountopt[[:space:]]*=.*$|mountopt = "nodev,fsync=0"|g' /etc/containers/containers.conf
# RUN echo -e '[storage]\ndriver="zfs"\nmount_program="zfs"\nadditionalimage=/var/lib/shared\nmountopt="nodev,fsync=0"' >> /etc/containers/containers.conf
RUN chmod 644 /etc/containers/containers.conf
RUN echo '[storage]\ndriver="overlay"\n[storage.options.overlay]\nforce_mask="private"\nmount_program="/usr/bin/fuse-overlayfs"\nmountopt="nodev"' >> /etc/containers/containers.conf
RUN mkdir -p /root/.config/containers/ && echo '[storage]\ndriver="overlay"\n[storage.options.overlay]\nforce_mask="private"\nmount_program="/usr/bin/fuse-overlayfs"\nmountopt="nodev"' > /root/.config/containers/storage.conf
RUN mkdir -p /var/lib/shared/overlay-images /var/lib/shared/overlay-layers /var/lib/shared/vfs-images /var/lib/shared/vfs-layers; touch /var/lib/shared/overlay-images/images.lock; touch /var/lib/shared/overlay-layers/layers.lock; touch /var/lib/shared/vfs-images/images.lock; touch /var/lib/shared/vfs-layers/layers.lock

ENV _CONTAINERS_USERNS_CONFIGURED=""
33
eslint.config.js
Normal file
@ -0,0 +1,33 @@
import js from "@eslint/js";
import globals from "globals";
import babelParser from "@babel/eslint-parser";

export default [
  js.configs.recommended,
  {
    files: ["**/*.js"],
    plugins: {},
    languageOptions: {
      ecmaVersion: 2018,
      globals: {
        ...globals.browser,
        ...globals.es6,
        ...globals.node,
        ...globals.jasmine,
        Atomics: "readonly",
        SharedArrayBuffer: "readonly",
        flatpickr: "readonly",
        $: "readonly",
      },
      parser: babelParser,
    },
    rules: {
      "no-console": "off",
      "no-extra-semi": "off",
      "semi": ["error", "always"],
      "comma-dangle": ["error", "always-multiline"],
      "quotes": ["error", "single"],
      "no-var": "error",
    },
  },
];
85
flake.lock
@ -219,6 +219,23 @@
"type": "github"
}
},
"flake-compat": {
"flake": false,
"locked": {
"lastModified": 1672831974,
"narHash": "sha256-z9k3MfslLjWQfnjBtEtJZdq3H7kyi2kQtUThfTgdRk0=",
"owner": "input-output-hk",
"repo": "flake-compat",
"rev": "45f2638735f8cdc40fe302742b79f248d23eb368",
"type": "github"
},
"original": {
"owner": "input-output-hk",
"ref": "hkm/gitlab-fix",
"repo": "flake-compat",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems"
@ -295,11 +312,11 @@
"hackage": {
"flake": false,
"locked": {
"lastModified": 1708993343,
"narHash": "sha256-8EbbR5ReQK61yP/7VYtFSCerBXSE59VtfV+Wahdsuqg=",
"lastModified": 1705796710,
"narHash": "sha256-BdAqEqx6rdp8O8lu9yW1nXa8/da7+/QPgVjCJVEXyWw=",
"owner": "input-output-hk",
"repo": "hackage.nix",
"rev": "f823c9258e9316cb4da256fc93e9c0407f0c296a",
"rev": "31d4fed569912819adbf66b580489b45dc80a29a",
"type": "github"
},
"original": {
@ -330,7 +347,6 @@
"hpc-coveralls": "hpc-coveralls",
"hydra": "hydra",
"iserv-proxy": "iserv-proxy",
"nix-tools-static": "nix-tools-static",
"nixpkgs": [
"haskell-nix",
"nixpkgs-unstable"
@ -347,11 +363,11 @@
"stackage": "stackage"
},
"locked": {
"lastModified": 1708994988,
"narHash": "sha256-GQD4phMqbuEmSvUpcmTp1W9InABwuegI4EFhBOyMM8Q=",
"lastModified": 1705798224,
"narHash": "sha256-/zJa0hC58vLD8PqTEQNeN9EJAQpbS+YluJhLVstgqY8=",
"owner": "input-output-hk",
"repo": "haskell.nix",
"rev": "edd4f75235c050c0ee476e1fe28f757b068cf465",
"rev": "2a31673a97ed3efbae9835ea7334528d2bc4b6ab",
"type": "github"
},
"original": {
@ -568,6 +584,22 @@
"type": "github"
}
},
"lowdown-src": {
"flake": false,
"locked": {
"lastModified": 1633514407,
"narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=",
"owner": "kristapsdz",
"repo": "lowdown",
"rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8",
"type": "github"
},
"original": {
"owner": "kristapsdz",
"repo": "lowdown",
"type": "github"
}
},
"memcached-binary": {
"flake": false,
"locked": {
@ -623,23 +655,6 @@
"type": "github"
}
},
"nix-tools-static": {
"flake": false,
"locked": {
"lastModified": 1706266250,
"narHash": "sha256-9t+GRk3eO9muCtKdNAwBtNBZ5dH1xHcnS17WaQyftwA=",
"owner": "input-output-hk",
"repo": "haskell-nix-example",
"rev": "580cb6db546a7777dad3b9c0fa487a366c045c4e",
"type": "github"
},
"original": {
"owner": "input-output-hk",
"ref": "nix",
"repo": "haskell-nix-example",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1657693803,
@ -768,6 +783,22 @@
"type": "github"
}
},
"nixpkgs-recent": {
"locked": {
"lastModified": 1659446231,
"narHash": "sha256-hekabNdTdgR/iLsgce5TGWmfIDZ86qjPhxDg/8TlzhE=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "eabc38219184cc3e04a974fe31857d8e0eac098d",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-21.11-darwin",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-regression": {
"locked": {
"lastModified": 1643052045,
@ -874,11 +905,11 @@
"stackage": {
"flake": false,
"locked": {
"lastModified": 1708992583,
"narHash": "sha256-g9sClypeNUzEjc2aFaTyaptFc5dXt3zA3HnfRV5AN2g=",
"lastModified": 1705795852,
"narHash": "sha256-Po+1G5KgHVRbP/PzK3HgdI1ZS7XJtP63vJmpSZMvFV8=",
"owner": "input-output-hk",
"repo": "stackage.nix",
"rev": "08e1deb077c113cde815daa21683c7fbc71ee4aa",
"rev": "8adfc78e62d3dbc3498a03579a50f3cf70cd4328",
"type": "github"
},
"original": {

@ -107,7 +107,9 @@
        (final: prev: let
          pkgs = import nixpkgs { inherit system; };
        in {
          inherit (pkgs) gup dockerTools nodejs_21 glibcLocalesUtf8 tzdata exiftools bzip2 glibc chromium minio minio-client skopeo; inherit (pkgs.stdenv) fetchurlBoot make; inherit (pkgs.coreutils) touch;
          inherit (pkgs-recent) gup dockerTools nodejs-14_x glibcLocalesUtf8 tzdata chromium minio minio-client skopeo;
          inherit (pkgs-recent.stdenv) fetchurlBoot make;
          inherit (pkgs-recent.coreutils) touch;
          stack = pkgs.symlinkJoin {
            inherit (pkgs.stack) name;
            paths = [pkgs.stack];
@ -131,8 +133,6 @@

      inherit (pkgs.lib) recursiveUpdate;
    in {
      packages = pkgs;

      devShell = import ./shell.nix { pkgs = self.legacyPackages.${system}; nixpkgsPath = nixpkgs; };

      legacyPackages = pkgs.lib.foldr (overlay: acc: acc // recursiveUpdate (overlay self.legacyPackages.${system} pkgs) pkgs) {} overlays;
@ -1,14 +1,18 @@
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
// SPDX-FileCopyrightText: 2022-2024 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor.kleen@ifi.lmu.de>, David Mosbach <david.mosbach@uniworx.de>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
// SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design

@use "~@fortawesome/fontawesome-pro/scss/fontawesome" with ( $fa-font-path: "~@fortawesome/fontawesome-pro/webfonts" )
//@use "~@fortawesome/fontawesome-pro/scss/fontawesome" with ( $fa-font-path: "~@fortawesome/fontawesome-pro/webfonts" )

@forward "~@fortawesome/fontawesome-pro/scss/fontawesome"
//@forward "~@fortawesome/fontawesome-pro/scss/fontawesome"

@use "~@fortawesome/fontawesome-pro/scss/solid"
//@use "~@fortawesome/fontawesome-pro/scss/solid"

@use "icons.scss"

$icons: '~/assets/icons/fradrive'

@function ico-content($ico)
  @return url('#{$icons}/#{$ico}.svg')

@use "~typeface-roboto" as roboto
@use "~typeface-source-sans-pro" as source-sans-pro
@use "~typeface-source-code-pro" as source-code-pro

@ -1,9 +1,10 @@
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>,Steffen Jost <jost@cip.ifi.lmu.de>,Wolfgang Witt <Wolfgang.Witt@campus.lmu.de>
// SPDX-FileCopyrightText: 2022-2024 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>,Steffen Jost <jost@cip.ifi.lmu.de>,Wolfgang Witt <Wolfgang.Witt@campus.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
// SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design

@use "common" as *
@use "icons"

\:root
  // THEME INDEPENDENT COLORS
@ -27,9 +28,9 @@
  --color-fontsec: #5b5861

  // FONTS
  --font-base: "Source Sans Pro", "Trebuchet MS", sans-serif
  --font-logo: "Roboto", var(--font-base)
  --font-monospace: "Source Code Pro", monospace
  --font-base: 'Arial', sans-serif
  --font-logo: var(--font-base)
  --font-monospace: 'Arial Mono', monospace

  // DIMENSIONS
  --header-height: 100px
@ -62,6 +63,7 @@ body
    --color-link: var(--color-font)
    --color-link-hover: var(--color-font)
    --color-lmu-box-border: var(--color-lightwhite)
    --filter-primary: invert(7%) sepia(83%) saturate(4889%) hue-rotate(241deg) brightness(106%) contrast(169%)

  &.theme--lavender
    --color-primary: #584c9c
@ -71,6 +73,7 @@
    --color-darker: #3c2765
    --color-link: var(--color-dark)
    --color-link-hover: var(--color-darker)
    --filter-primary: invert(28%) sepia(36%) saturate(1286%) hue-rotate(212deg) brightness(97%) contrast(83%)

  &.theme--neutral-blue
    --color-primary: #3E606F
@ -78,6 +81,7 @@
    --color-lighter: rgb(145, 159, 170)
    --color-dark: rgb(42, 74, 88)
    --color-darker: #193441
    --filter-primary: invert(35%) sepia(8%) saturate(2168%) hue-rotate(153deg) brightness(88%) contrast(80%)

  &.theme--aberdeen-reds
    --color-primary: #820333
@ -85,6 +89,7 @@
    --color-lighter: #F0433A
    --color-dark: #540032
    --color-darker: #2E112D
    --filter-primary: invert(12%) sepia(38%) saturate(6051%) hue-rotate(322deg) brightness(91%) contrast(110%)

  &.theme--moss-green
    --color-primary: #5C996B
@ -92,6 +97,7 @@
    --color-lighter: #99FFB2
    --color-dark: #3D6647
    --color-darker: #1F3324
    --filter-primary: invert(57%) sepia(19%) saturate(788%) hue-rotate(82deg) brightness(92%) contrast(87%)

  &.theme--sky-love
    --color-primary: #87ABE5
@ -101,6 +107,7 @@
    --color-darker: #6B7BC9
    --color-link: var(--color-lightblack)
    --color-link-hover: var(--color-darker)
    --filter-primary: invert(55%) sepia(47%) saturate(394%) hue-rotate(180deg) brightness(115%) contrast(80%)

  // END THEMES

@ -263,6 +270,9 @@ button:not(.btn-link),
  &.btn-danger
    background-color: var(--color-error-dark)

    span
      @extend .white-ico

.buttongroup
  display: grid
  grid: min-content / auto-flow max-content
@ -450,9 +460,9 @@ input[type="button"].btn-info:not(.btn-link):hover,
    color: inherit

    &::before
      @extend .fas
      @extend .ico-link

      content: fa-content($fa-var-link)
      content:""
      margin-right: 0.25em

    &.table__th-link::before
@ -655,7 +665,6 @@ section
    margin: 0 auto 0.5rem

    &::before
      @extend .fas

      position: absolute
      display: flex
@ -675,6 +684,13 @@ section
  &.notification--broad
    max-width: none

  .fas
    position: absolute
    left: 20px
    top: 0
    height: 100%
    width: 60px

  &:first-child
    margin-top: 0
  &:last-child
@ -1271,6 +1287,9 @@ ul.breadcrumbs__list
    margin: 0 5px
    margin-top: 1px

    span
      @extend .white-ico

a.breadcrumbs__home
  opacity: 0.5
  margin-right: 7px
@ -1281,6 +1300,10 @@ a.breadcrumbs__home
  &:hover
    opacity: 1

  i
    @extend .white-ico


.breadcrumbs__last-item
  font-weight: 600
  opacity: 1

164
frontend/src/icons.scss
Normal file
@ -0,0 +1,164 @@
// SPDX-FileCopyrightText: 2024 David Mosbach <david.mosbach@uniworx.de>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
// SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design


$ico-width: 30px;

$icons: new,
        ok,
        not-ok,
        warning,
        problem,
        visible,
        invisible,
        course-favourite-manual,
        course-favourite-automatic,
        course-favourite-off,
        enrol-true,
        enrol-false,
        planned,
        announce,
        exam,
        exam-register-true,
        exam-register-false,
        exam-auto-occurrence-nudge-up,
        exam-auto-occurrence-nudge-down,
        exam-auto-occurrence-ignore,
        exam-auto-occurrence-reconsider,
        comment-true,
        comment-false,
        link,
        file-donwload,
        file-upload,
        file-zip,
        file-csv,
        sft-question,
        sft-hint,
        sft-solution,
        sft-marking,
        email,
        register-template,
        no-correctors,
        remove-user,
        tooltip-default,
        notification-success,
        notification-info,
        notification-warning,
        notification-error,
        notification-nonactive,
        favourite,
        language,
        nav-container-close,
        page-action-children-close,
        menu-news,
        menu-help,
        menu-profile,
        menu-login,
        menu-logout,
        breadcrumbs-home,
        menu-extra,
        menu-course-list,
        menu-corrections,
        menu-exams,
        menu-admin,
        menu-lms,
        menu-qualification,
        page-action-primary-expand,
        page-action-secondary,
        breadcrumb-separator,
        file-upload-session,
        standalone-field-error,
        file-user,
        notification,
        notification-sent,
        no-notification,
        personal-identification,
        menu-workflows,
        video,
        submission-user-duplicate,
        submission-no-users,
        reset,
        blocked,
        certificate,
        print-center,
        letter,
        at,
        supervisor,
        supervisor-foreign,
        waiting-for-user,
        expired,
        locked,
        unlocked,
        trash,
        reset-tries,
        company,
        edit,
        user-edit,
        placeholder,
        loading;


@each $name in $icons {
  .ico-#{$name} {
    background-image: url('../../assets/icons/fradrive/#{$name}.svg');
    background-size: contain;
    background-repeat: no-repeat;
    background-position: center;
    aspect-ratio: 1/1;
    min-width: 1em;
    font-size: inherit;
    display: inline-block;
    vertical-align: middle;
  }
}

.white-ico {
  filter: invert(100%) sepia(20%) saturate(901%) hue-rotate(47deg) brightness(106%) contrast(101%);
}

.fw-ico {
  width: $ico-width;
}

.small-ico {
  font-size: 1em;
}

.medium-ico {
  font-size: 1.5em;
}

.large-ico {
  font-size: 2em;
}

.ico-spin {
  animation-name: ico-spin;
  animation-delay: 0s;
  animation-duration: 3s;
  animation-direction: normal;
  animation-iteration-count: infinite;
  animation-timing-function: linear;
}

@media (prefers-reduced-motion: reduce) {
  .ico-spin {
    animation-delay: -1ms;
    animation-duration: 1ms;
    animation-iteration-count: 1;
    transition-delay: 0s;
    transition-duration: 0s;
  }
}

@keyframes ico-spin {
  0% {
    transform: rotate(0deg);
  }
  100% {
    transform: rotate(360deg);
  }
}

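The @each loop above stamps out one .ico-* class per SVG in assets/icons/fradrive/, and the JavaScript hunks later in this commit consume exactly these classes in place of the old Font Awesome ones (fa-fw, fa-trash, ...). A minimal, hypothetical sketch of building such an icon from JavaScript; only the generated class names (ico-*, fw-ico, ico-spin, white-ico) come from the commit, everything else is illustrative:

// Minimal sketch: build a fixed-width, spinning "loading" icon from the
// classes generated by the @each loop in icons.scss. The helper name and
// options object are hypothetical, not part of the commit.
function makeIcon(name, { fixedWidth = false, spin = false } = {}) {
  const icon = document.createElement('span');
  icon.classList.add('fas', `ico-${name}`);      // 'fas' stays as the generic icon marker class
  if (fixedWidth) icon.classList.add('fw-ico');  // width: $ico-width (30px)
  if (spin) icon.classList.add('ico-spin');      // honours prefers-reduced-motion
  return icon;
}

document.body.appendChild(makeIcon('loading', { fixedWidth: true, spin: true }));

Because the icons are plain background images rather than font glyphs, recolouring happens through CSS filters (.white-ico, var(--filter-primary)) instead of color, which is why the sass hunks in this commit add filter rules wherever .fas elements appear.
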
@ -1,8 +1,8 @@
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Johannes Eder <ederj@cip.ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>
// SPDX-FileCopyrightText: 2022-2024 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor.kleen@ifi.lmu.de>,Johannes Eder <ederj@cip.ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>
//
// SPDX-License-Identifier: AGPL-3.0-or-later

/* global global:writable */
/* global:writable */

import semver from 'semver';
import sodium from 'sodium-javascript';
@ -365,7 +365,7 @@ export class StorageManager {
  }

  addHistoryListener(listener, options=this._options, ...args) {
    const modified_listener = (function(event, ...listener_args) { // eslint-disable-line no-unused-vars
    const modified_listener = (function(event, ...listener_args) {

      // do not propagate popstate events with empty state
      if(event.state === null)
@ -498,13 +498,11 @@ function encrypt(plaintext, key) {
  if (!plaintext) return '';
  if (!key) throw new Error('Cannot encrypt plaintext without a valid key!');

  /* eslint-disable no-undef */
  // TODO use const if possible
  let plaintextB = Buffer.from(plaintext);
  let cipherB = Buffer.alloc(plaintextB.length + sodium.crypto_secretbox_MACBYTES);
  let nonceB = Buffer.alloc(sodium.crypto_secretbox_NONCEBYTES);
  let keyB = Buffer.from(key);
  /* eslint-enable no-undef */

  sodium.crypto_secretbox_easy(cipherB, plaintextB, nonceB, keyB);

@ -520,13 +518,11 @@ function decrypt(ciphertext, key) {
  if (!ciphertext) return '';
  if (!key) throw new Error('Cannot decrypt ciphertext without a valid key!');

  /* eslint-disable no-undef */
  // TODO use const if possible
  let cipherB = Buffer.from(ciphertext);
  let plaintextB = Buffer.alloc(cipherB.length - sodium.crypto_secretbox_MACBYTES);
  let nonceB = Buffer.alloc(sodium.crypto_secretbox_NONCEBYTES);
  let keyB = Buffer.from(key);
  /* eslint-enable no-undef */

  sodium.crypto_secretbox_open_easy(plaintextB, cipherB, nonceB, keyB);

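These helpers wrap libsodium's secretbox primitive: encrypt allocates the ciphertext buffer as message length plus crypto_secretbox_MACBYTES, decrypt does the inverse, and both use the zero-filled nonce buffer allocated above (safe only as long as a given key never encrypts more than one distinct message). A minimal round-trip sketch under those assumptions; apart from the sodium-javascript calls mirrored from the hunks, all names are illustrative:

// Illustrative round trip for the secretbox helpers above.
const sodium = require('sodium-javascript');

// A fresh random key of the required length; key handling is out of scope here.
const key = Buffer.alloc(sodium.crypto_secretbox_KEYBYTES);
sodium.randombytes_buf(key);

const message = Buffer.from('hello');
const cipher = Buffer.alloc(message.length + sodium.crypto_secretbox_MACBYTES);
const nonce = Buffer.alloc(sodium.crypto_secretbox_NONCEBYTES); // zero nonce, as in the diff

sodium.crypto_secretbox_easy(cipher, message, nonce, key);

const opened = Buffer.alloc(cipher.length - sodium.crypto_secretbox_MACBYTES);
sodium.crypto_secretbox_open_easy(opened, cipher, nonce, key); // verification fails on MAC mismatch
console.log(opened.toString()); // 'hello'
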
@ -171,7 +171,7 @@ export class Alerts {
    }
  };

  _createAlertElement(type, content, icon = 'info-circle') {
  _createAlertElement(type, content, icon = 'notification-info') {
    const alertElement = document.createElement('div');
    alertElement.classList.add(ALERT_CLASS, 'alert-' + type);

@ -179,7 +179,7 @@ export class Alerts {
    alertCloser.classList.add(ALERT_CLOSER_CLASS);

    const alertIcon = document.createElement('div');
    alertIcon.classList.add(ALERT_ICON_CLASS, 'fas', 'fa-' + icon);
    alertIcon.classList.add(ALERT_ICON_CLASS, 'ico-' + icon);

    const alertContent = document.createElement('div');
    alertContent.classList.add(ALERT_CONTENT_CLASS);

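The default icon argument switches from the Font Awesome name 'info-circle' to the sprite name 'notification-info', and the icon element drops the 'fas fa-*' pair for a single 'ico-*' class. Roughly the markup a default info alert now produces, assuming illustrative values for the class constants (closer element omitted):

// Illustrative: what _createAlertElement('info', 'Saved.') roughly yields
// after this change, assuming ALERT_CLASS = 'alert' and
// ALERT_ICON_CLASS = 'alert__icon' (both hypothetical here).
const exampleMarkup = `
  <div class="alert alert-info">
    <div class="alert__icon ico-notification-info"></div>
    <div class="alert__content">Saved.</div>
  </div>`;
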
@ -1,9 +1,10 @@
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
// SPDX-FileCopyrightText: 2022-2024 Gregor Kleen <gregor.kleen@ifi.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
// SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design

@use "../../common" as *
@use '../../icons'

.alerts
  position: fixed
@ -24,9 +25,9 @@
  cursor: pointer

  &::before
    @extend .fas
    @extend .ico-nav-container-close

    content: fa-content($fa-var-chevron-up)
    content: ""
    position: absolute
    left: 50%
    top: 0
@ -111,7 +112,7 @@
.alert__icon
  text-align: right
  position: absolute
  left: 0px
  left: 8px
  bottom: 0
  width: 50px
  height: 100%
@ -152,9 +153,9 @@
  color: white

  &::before
    @extend .fas
    @extend .ico-not-ok

    content: fa-content($fa-var-times)
    content: ""
    position: absolute
    top: 50%
    left: 50%

@ -1,4 +1,4 @@
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>
// SPDX-FileCopyrightText: 2022-2024 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
//
// SPDX-License-Identifier: AGPL-3.0-or-later

@ -301,7 +301,7 @@ export class ExamCorrect {
      users: [user],
      status: STATUS.LOADING,
    };
    if (results && results !== {}) rowInfo.results = results;
    if (results && results != {}) rowInfo.results = results;
    if (result !== undefined) rowInfo.result = result;
    this._addRow(rowInfo);

@ -461,7 +461,7 @@ export class ExamCorrect {
    for (let [k, v] of Object.entries(newEntry.results)) {
      const resultCell = row.cells.item(this._cIndices.get(k));
      if (v === null) {
        resultCell.innerHTML = '<i class="fas fa-fw fa-trash"></i>';
        resultCell.innerHTML = '<i class="fas fw-ico ico-trash"></i>';
        resultCell.classList.remove('exam-correct--result-unconfirmed');
      } else if (v && v.result !== undefined && v.result !== null) {
        resultCell.innerHTML = v.result;
@ -499,7 +499,7 @@ export class ExamCorrect {
      else
        html = examResult.status;
    } else if (examResult === null) {
      html = '<i class="fas fa-fw fa-trash"></i>';
      html = '<i class="fas fw-ico ico-trash"></i>';
    }

    return html;
@ -598,7 +598,7 @@ export class ExamCorrect {
    const partCell = document.createElement('TD');

    if (partResult === null) {
      partCell.innerHTML = '<i class="fas fa-fw fa-trash"></i>';
      partCell.innerHTML = '<i class="fas fw-ico ico-trash"></i>';
    } else {
      partCell.innerHTML = partResult;
    }
@ -683,10 +683,10 @@ function userToHTML(user) {
}

function setStatus(elem, status) {
  const successClasses = ['fas', 'fa-fw', 'fa-check', 'exam-correct--success'];
  const ambiguousClasses = ['fas', 'fa-fw', 'fa-question', 'exam-correct--ambiguous'];
  const errorClasses = ['fas', 'fa-fw', 'fa-times', 'exam-correct--error'];
  const loadingClasses = ['fas', 'fa-fw', 'fa-spinner-third', 'fa-spin'];
  const successClasses = ['fas', 'fw-ico', 'ico-ok', 'exam-correct--success'];
  const ambiguousClasses = ['fas', 'fw-ico', 'ico-menu-help', 'exam-correct--ambiguous'];
  const errorClasses = ['fas', 'fw-ico', 'ico-not-ok', 'exam-correct--error'];
  const loadingClasses = ['fas', 'fw-ico', 'ico-loading', 'ico-spin'];

  elem.classList.remove(...successClasses, ...ambiguousClasses, ...errorClasses, ...loadingClasses);

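setStatus is the one place where per-row status icons are rendered, so the migration only has to swap the four class arrays. The hunk ends at the classList.remove call; a hypothetical sketch of how such a helper typically completes, with the status-to-class mapping assumed rather than taken from the commit:

// Hypothetical continuation of setStatus: clear all four class sets
// (as in the hunk above), then apply the set matching the new status.
function applyStatus(elem, status, classSets) {
  const { successClasses, ambiguousClasses, errorClasses, loadingClasses } = classSets;
  elem.classList.remove(...successClasses, ...ambiguousClasses, ...errorClasses, ...loadingClasses);
  const byStatus = {
    success: successClasses,
    ambiguous: ambiguousClasses,
    error: errorClasses,
    loading: loadingClasses,
  };
  elem.classList.add(...(byStatus[status] ?? []));
}
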
@ -1,4 +1,4 @@
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>
// SPDX-FileCopyrightText: 2022-2024 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
//
// SPDX-License-Identifier: AGPL-3.0-or-later

@ -104,8 +104,8 @@ export class HideColumns {

    const hider = document.createElement('span');

    const hiderIcon = document.createElement('i');
    hiderIcon.classList.add('fas', 'fa-fw');
    const hiderIcon = document.createElement('span');
    hiderIcon.classList.add('fas');
    hider.appendChild(hiderIcon);

    const hiderContent = document.createElement('span');
@ -240,8 +240,8 @@ export class HideColumns {

  updateHiderIcon(hider, hidden) {
    Array.from(hider.getElementsByClassName('fas')).forEach(hiderIcon => {
      hiderIcon.classList.remove(hidden ? 'fa-eye' : 'fa-eye-slash');
      hiderIcon.classList.add(hidden ? 'fa-eye-slash' : 'fa-eye');
      hiderIcon.classList.remove(hidden ? 'ico-visible' : 'ico-invisible');
      hiderIcon.classList.add(hidden ? 'ico-invisible' : 'ico-visible');
    });
  }

@ -1,8 +1,10 @@
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
// SPDX-FileCopyrightText: 2022-2024 Gregor Kleen <gregor.kleen@ifi.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
// SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design

@use "../../icons.scss"

.table-hider
  background-color: #fff
  color: var(--color-link)
@ -16,6 +18,9 @@
  transform-origin: top
  z-index: 1

  .fas
    filter: var(--filter-primary)

  &:hover
    background-color: var(--color-grey-light)

@ -66,5 +71,9 @@
  &:empty
    margin: 0

  .fas
    @extend .white-ico


.hide-columns--hidden-cell
  display: none

@ -1,4 +1,4 @@
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>
// SPDX-FileCopyrightText: 2022-2024 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
//
// SPDX-License-Identifier: AGPL-3.0-or-later

@ -42,7 +42,7 @@ export class Password {
    this._wrapperEl.appendChild(this._toggleContainerEl);

    this._iconEl = document.createElement('i');
    this._iconEl.classList.add('fas', 'fa-fw');
    this._iconEl.classList.add('fas');
    this._toggleContainerEl.appendChild(this._iconEl);

    parentEl.insertBefore(this._wrapperEl, siblingEl);
@ -91,7 +91,7 @@ export class Password {

  updateVisibleIcon(visible) {
    function visibleClass(visible) {
      return 'fa-' + (visible ? 'eye' : 'eye-slash');
      return `ico-${visible ? '' : 'in'}visible`;
    }

    this._iconEl.classList.remove(visibleClass(!visible));

@ -2,7 +2,7 @@
//
// SPDX-License-Identifier: AGPL-3.0-or-later

/* global global:writable */
/* global:writable */

import { Utility } from '../../core/utility';
import { Datepicker } from '../form/datepicker';

@ -1,4 +1,4 @@
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
// SPDX-FileCopyrightText: 2022-2024 Gregor Kleen <gregor.kleen@ifi.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
// SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design
@ -98,9 +98,10 @@ div.modal__trigger
  z-index: 20

  &::before
    @extend .fas
    @extend .ico-not-ok
    @extend .white-ico

    content: fa-content($fa-var-times)
    content: ""
    color: white

.modal__content

@ -1,8 +1,10 @@
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
// SPDX-FileCopyrightText: 2022-2024 Gregor Kleen <gregor.kleen@ifi.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
// SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design

@use '../../icons.scss'

.navbar-container
  position: relative

@ -170,6 +172,9 @@
  transition: opacity 0.2s ease
  margin-bottom: 7px

  span
    @extend .white-ico

.navbar__link-label
  transition: opacity .2s ease
  padding: 2px 4px
@ -253,6 +258,11 @@
  .navbar__link-wrapper
    color: var(--color-dark)

    .navbar__link-icon
      .white-ico
        filter: var(--filter-primary)


.navbar__list-item--active .navbar__link-wrapper
  color: var(--color-dark)

@ -263,6 +273,7 @@
  .navbar__link-icon
    opacity: 1


// sticky state
.navbar--sticky
  height: var(--header-height-collapsed)

@ -1,9 +1,10 @@
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
// SPDX-FileCopyrightText: 2022-2024 Gregor Kleen <gregor.kleen@ifi.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
// SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design

@use "../../common" as *
@use "../../icons.scss"

.tooltip
  position: relative
@ -63,11 +64,19 @@
  .table__th &
    color: white

    .fas
      @extend .white-ico

.tooltip.tooltip__inline
  .tooltip__handle
    height: 1.0rem
    line-height: 1.0rem
    font-size: 1.0rem
    .fas
      filter: var(--filter-primary)
    .table__th & .fas
      @extend .white-ico


.tooltip__content
  position: absolute
@ -112,6 +121,11 @@
    left: unset
    top: unset
    transform: unset
  .tooltip__handle
    .fas
      filter: var(--filter-primary)
    .table__th & .fas
      @extend .white-ico

.tooltip--spread
  width: 100%
@ -123,10 +137,10 @@
  padding: 4px 17px 4px 4px

  &::after
    @extend .fas
    @extend .fa-fw
    @extend .ico-notification-nonactive
    @extend .fw-ico

    content: '\f129'
    content: ''
    position: absolute
    right: 2px
    top: 6px

@ -1,4 +1,4 @@
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>
// SPDX-FileCopyrightText: 2022-2024 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor.kleen@ifi.lmu.de>, Sarah Vaupel <vaupel.sarah@campus.lmu.de>
//
// SPDX-License-Identifier: AGPL-3.0-or-later

@ -20,7 +20,7 @@ module.exports = function(config) {
    logLevel: config.LOG_WARN, //config.LOG_DISABLE, config.LOG_ERROR, config.LOG_INFO, config.LOG_DEBUG

    //list of frameworks you want to use, only jasmine is installed automatically
    frameworks: ['jasmine'],
    frameworks: ['jasmine', 'browserify'],
    //list of browsers to launch and capture
    browsers: ['ChromeHeadless'],
    //list of reporters to use
@ -56,8 +56,11 @@ module.exports = function(config) {
    },
    preprocessors: {
      //add webpack as preprocessor to support require() in test-suits .js files
      './frontend/src/**/*.js': ['webpack']
      './frontend/src/**/*.js': ['browserify']
    },
    plugins: [
      'karma-browserify'
    ],
    webpackMiddleware: {
      //turn off webpack bash output when run the tests
      noInfo: true,

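The Karma change swaps the webpack preprocessor for karma-browserify, so require() calls in the matched spec files are bundled before the suite runs in ChromeHeadless. A hypothetical spec illustrating what the preprocessor enables; the path and the assertion are illustrative, though the Utility export itself appears in imports elsewhere in this commit:

// Hypothetical spec under frontend/src/: karma-browserify resolves this
// require() before the suite runs in the headless browser.
const { Utility } = require('./core/utility');

describe('Utility', function() {
  it('is bundled into the browser suite', function() {
    expect(Utility).toBeDefined();
  });
});
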
@ -1,11 +1,9 @@
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Steffen Jost <jost@tcs.ifi.lmu.de>
# SPDX-FileCopyrightText: 2022-24 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Steffen Jost <jost@tcs.ifi.lmu.de>,Steffen Jost <s.jost@fraport.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

FAQLoginExpired: Mein Passwort ist abgelaufen und muss erneuert werden
FAQNoCampusAccount: Ich habe keine Fraport AG Kennung (Büko-Login); kann ich trotzdem Zugang zum System erhalten?
FAQForgottenPassword: Ich habe mein Passwort vergessen
FAQCampusCantLogin: Ich kann mich mit meiner Fraport AG Kennung (Büko-Login) nicht anmelden
FAQCourseCorrectorsTutors: Wie kann ich Ausbilder oder Korrektoren für meine Kursart konfigurieren?
FAQNotLecturerHowToCreateCourses: Wie kann ich eine neue Kursart anlegen?
FAQExamPoints: Warum kann ich bei meiner Klausur keine Punkte eintragen?
FAQInvalidCredentialsAdAccountDisabled: Ich kann mich nicht anmelden und bekomme die Meldung „Benutzereintrag gesperrt“
FAQNotLecturerHowToCreateCourses: Wie kann ich eine neue Kursart anlegen?

@ -1,11 +1,9 @@
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Steffen Jost <jost@tcs.ifi.lmu.de>
# SPDX-FileCopyrightText: 2022-24 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Steffen Jost <jost@tcs.ifi.lmu.de>,Steffen Jost <s.jost@fraport.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

FAQLoginExpired: My password expired
FAQNoCampusAccount: I don't have Fraport AG credentials (Büko login); can I still get access?
FAQForgottenPassword: I have forgotten my password
FAQCampusCantLogin: I can't log in using my Fraport AG credentials (Büko login)
FAQCourseCorrectorsTutors: How can I add instructors or correctors to my course?
FAQNotLecturerHowToCreateCourses: How can I create new courses?
FAQExamPoints: Why can't I enter achievements for my exam as points?
FAQInvalidCredentialsAdAccountDisabled: I can't log in and am instead given the message “Account disabled”
FAQNotLecturerHowToCreateCourses: How can I create new courses?

@ -121,6 +121,13 @@ ProblemsAvsSynchHeading: Synchronisation AVS Fahrberechtigungen
ProblemsAvsErrorHeading: Fehlermeldungen
ProblemsInterfaceSince: Berücksichtigt werden nur Erfolge und Fehler seit

InterfacesOk: Schnittstellen sind ok.
InterfacesFail n@Int: #{pluralDEeN n "Schnittstellenproblem"}!
InterfaceStatus !ident-ok: Status
InterfaceName: Schnittstelle
InterfaceLastSynch: Zuletzt
InterfaceSubtype: Betreffend
InterfaceWrite: Schreibend
InterfaceWrite: Schreibend
InterfaceSuccess: Rückmeldung
InterfaceInfo: Nachricht
InterfaceFreshness: Prüfungszeitraum (h)

@ -121,6 +121,13 @@ ProblemsAvsSynchHeading: Synchronisation AVS Driving Licences
ProblemsAvsErrorHeading: Error Log
ProblemsInterfaceSince: Only considering successes and errors since

InterfacesOk: Interfaces are ok.
InterfacesFail n: #{pluralENsN n "interface problem"}!
InterfaceStatus: Status
InterfaceName: Interface
InterfaceLastSynch: Last
InterfaceSubtype: Affecting
InterfaceWrite: Write
InterfaceWrite: Write
InterfaceSuccess: Returned
InterfaceInfo: Message
InterfaceFreshness: Check hours

@ -95,7 +95,7 @@ CourseParticipantsInvited n@Int: #{n} #{pluralDE n "Einladung" "Einladungen"} pe
CourseParticipantsAlreadyRegistered n@Int: #{n} #{pluralDE n "Teinehmer:in" "Teilnehmer:innen"} #{pluralDE n "ist" "sind"} bereits zur Kursart angemeldet
CourseParticipantsAlreadyTutorialMember n@Int: #{n} #{pluralDE n "Teinehmer:in" "Teilnehmer:innen"} #{pluralDE n "ist" "sind"} bereits in dieser Kurs angemeldet
CourseParticipantsRegistered n@Int: #{n} #{pluralDE n "Teinehmer:in" "Teilnehmer:innen"} erfolgreich zur Kursart angemeldet
CourseParticipantsRegisteredTutorial n@Int: #{n} #{pluralDE n "Teinehmer:in" "Teilnehmer:innen"} erfolgreich zur Kurs angemeldet
CourseParticipantsRegisteredTutorial n@Int: #{n} #{pluralDE n "Teinehmer:in" "Teilnehmer:innen"} erfolgreich zum Kurs angemeldet
CourseParticipantsRegisterConfirmationHeading: Teilnehmer:innen hinzufügen
CourseParticipantsRegisterUnnecessary: Alle angeforderten Anmeldungen sind bereits vorhanden. Es wurden keine Änderungen vorgenommen.
CourseParticipantsRegisterConfirmInvalid: Ungültiges Bestätigungsformular!

@ -23,6 +23,7 @@ MenuPayments: Zahlungsbedingungen

MenuInstance: Instanz-Identifikation
MenuHealth: Instanz-Zustand
MenuHealthInterface: Schnittstellen Zustand
MenuHelp: Hilfe
MenuProfile: Anpassen
MenuLogin !ident-ok: Login
@ -145,6 +146,8 @@ MenuLdap: LDAP Schnittstelle
MenuApc: Druckerei
MenuPrintSend: Manueller Briefversand
MenuPrintDownload: Brief herunterladen
MenuPrintLog: LPR Schnittstelle
MenuPrintAck: Druckbestätigung

MenuApiDocs: API-Dokumentation (Englisch)
MenuSwagger !ident-ok: OpenAPI 2.0 (Swagger)

@ -23,6 +23,7 @@ MenuPayments: Payment Terms

MenuInstance: Instance identification
MenuHealth: Instance health
MenuHealthInterface: Interface health
MenuHelp: Support
MenuProfile: Settings
MenuLogin: Login
@ -145,6 +146,8 @@ MenuLdap: LDAP Interface
MenuApc: Printing
MenuPrintSend: Send Letter
MenuPrintDownload: Download Letter
MenuPrintLog: LPR Interface
MenuPrintAck: Acknowledge Printing

MenuApiDocs: API documentation
MenuSwagger: OpenAPI 2.0 (Swagger)

@ -14,9 +14,18 @@ TransactionLog
InterfaceLog
  interface Text
  subtype Text
  write Bool -- requestMethod /= GET, i.e. True implies a write to FRADrive
  write Bool -- requestMethod /= GET, i.e. True implies a write to FRADrive
  time UTCTime
  rows Int Maybe -- number of datasets transmitted
  info Text -- addtional status information
  rows Int Maybe -- number of datasets transmitted
  info Text -- addtional status information
  success Bool default=true -- false logs a failure; but it will be overwritten by next transaction, but logged in TransactionLog
  UniqueInterfaceSubtypeWrite interface subtype write
  deriving Eq Read Show Generic
  deriving Eq Read Show Generic

InterfaceHealth
  interface Text
  subtype Text Maybe
  write Bool Maybe
  hours Int
  UniqueInterfaceHealth interface subtype write !force -- Note that nullable fields must be either empty or unique
  deriving Eq Read Show Generic

@ -20,7 +20,7 @@ Qualification
  SchoolQualificationShort school shorthand -- must be unique per school and shorthand
  SchoolQualificationName school name -- must be unique per school and name
  -- across all schools, only one qualification may be a driving licence:
  UniqueQualificationAvsLicence avsLicence !force
  UniqueQualificationAvsLicence avsLicence !force -- either empty or unique
  -- NOTE: two NULL values are not equal for the purpose of Uniqueness constraints!
  deriving Eq Generic

@ -1,15 +0,0 @@
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

final: prev:
let
  awsPatch = prev.fetchurl {
    url = "https://github.com/gkleen/nix/commit/fd67a0f927ec0711eba59714939ff939fc95db38.diff";
    hash = "sha256-1dJ9zGQvYu5b47O2NjdggSSinlGQDcqBwXoZcKUGfYQ=";
  };
in {
  nixUnstable = prev.nixUnstable.overrideAttrs (oldAttrs: {
    patches = oldAttrs.patches or [] ++ [ awsPatch ];
  });
}

217
nix/develop.nix
@ -1,217 +0,0 @@
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

{ pkgs
, prev ? pkgs
, doPortOffset ? true
, doDevelopEnv ? true
}:

with prev.lib;

let
  withDevelop = action: ''
    #!${pkgs.zsh}/bin/zsh -e

    ${optionalString doDevelopEnv ''
      basePath=$(pwd)
      exec 4<>''${basePath}/.develop.env

      flockRes=
      set +e
      ${pkgs.util-linux}/bin/flock -en 4; flockRes=$?
      set -e
      if [[ ''${flockRes} -ne 0 ]]; then
        echo "Could not take exclusive lock; is another develop running?" >&2
        exit ''${flockRes}
      fi
    ''}

    cleanup() {
      set +e -x
      type cleanup_postgres &>/dev/null && cleanup_postgres
      type cleanup_widget_memcached &>/dev/null && cleanup_widget_memcached
      type cleanup_session_memcached &>/dev/null && cleanup_session_memcached
      type cleanup_cache_memcached &>/dev/null && cleanup_cache_memcached
      type cleanup_minio &>/dev/null && cleanup_minio
      type cleanup_maildev &>/dev/null && cleanup_maildev

      ${optionalString doDevelopEnv ''
        [ -f "''${basePath}/.develop.env" ] && rm -vf "''${basePath}/.develop.env"
      ''}
      set +x
    }

    trap cleanup EXIT

    export PORT_OFFSET=${if doPortOffset then "$(((16#$(echo \"fradrive $(whoami)\" | sha256sum | head -c 16)) % 1000))" else "0"}

    if [[ -z "$PGHOST" ]]; then
      set -xe

      pgDir=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} postgresql.XXXXXX)
      pgSockDir=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} postgresql.sock.XXXXXX)
      pgLogFile=$(mktemp --tmpdir=''${XDG_RUNTIME_DIR} postgresql.XXXXXX.log)
      initdb --no-locale -D ''${pgDir}
      pg_ctl start -D ''${pgDir} -l ''${pgLogFile} -w -o "-k ''${pgSockDir} -c listen_addresses=''' -c hba_file='${postgresHba}' -c unix_socket_permissions=0700 -c max_connections=9990 -c shared_preload_libraries=pg_stat_statements -c auto_explain.log_min_duration=100ms"
      psql -h ''${pgSockDir} -f ${postgresSchema} postgres
      printf "Postgres logfile is %s\nPostgres socket directory is %s\n" ''${pgLogFile} ''${pgSockDir}

      export PGHOST=''${pgSockDir}
      export PGLOG=''${pgLogFile}

      cleanup_postgres() {
        set +e -x
        pg_ctl stop -D ''${pgDir}
        rm -rvf ''${pgDir} ''${pgSockDir} ''${pgLogFile}
        set +x
      }

      set +xe
    fi

    if [[ -z "$WIDGET_MEMCACHED_HOST" ]]; then
      set -xe

      memcached -l localhost -p $(($PORT_OFFSET + 11211)) &>/dev/null &
      widget_memcached_pid=$!

      export WIDGET_MEMCACHED_HOST=localhost
      export WIDGET_MEMCACHED_PORT=$(($PORT_OFFSET + 11211))

      cleanup_widget_memcached() {
        [[ -n "$widget_memcached_pid" ]] && kill $widget_memcached_pid
      }

      set +xe
    fi

    if [[ -z "$SESSION_MEMCACHED_HOST" ]]; then
      set -xe

      memcached -l localhost -p $(($PORT_OFFSET + 11212)) &>/dev/null &
      session_memcached_pid=$!

      export SESSION_MEMCACHED_HOST=localhost
      export SESSION_MEMCACHED_PORT=$(($PORT_OFFSET + 11212))

      cleanup_session_memcached() {
        [[ -n "$session_memcached_pid" ]] && kill $session_memcached_pid
      }

      set +xe
    fi

    if [[ -z "$MEMCACHED_HOST" ]]; then
      set -xe

      memcached -l localhost -p $(($PORT_OFFSET + 11213)) &>/dev/null &
      memcached_pid=$!

      export MEMCACHED_HOST=localhost
      export MEMCACHED_PORT=$(($PORT_OFFSET + 11212))

      cleanup_session_memcached() {
        [[ -n "$memcached_pid" ]] && kill $memcached_pid
      }

      set +xe
    fi

    if [[ -z "$UPLOAD_S3_HOST" ]]; then
      set -xe

      cleanup_minio() {
        [[ -n "$minio_pid" ]] && kill $minio_pid
        [[ -n "''${MINIO_DIR}" ]] && rm -rvf ''${MINIO_DIR}
        [[ -n "''${MINIO_LOGFILE}" ]] && rm -rvf ''${MINIO_LOGFILE}
      }

      export MINIO_DIR=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} minio.XXXXXX)
      export MINIO_LOGFILE=$(mktemp --tmpdir=''${XDG_RUNTIME_DIR} minio.XXXXXX.log)
      export MINIO_ACCESS_KEY=$(${pkgs.pwgen}/bin/pwgen -s 16 1)
      export MINIO_SECRET_KEY=$(${pkgs.pwgen}/bin/pwgen -s 32 1)

      minio server --address localhost:$(($PORT_OFFSET + 9000)) ''${MINIO_DIR} &>''${MINIO_LOGFILE} &
      minio_pid=$!

      export UPLOAD_S3_HOST=localhost
      export UPLOAD_S3_PORT=$(($PORT_OFFSET + 9000))
      export UPLOAD_S3_SSL=false
      export UPLOAD_S3_KEY_ID=''${MINIO_ACCESS_KEY}
      export UPLOAD_S3_KEY=''${MINIO_SECRET_KEY}

      sleep 1

      set +xe
    fi

    ${optionalString (pkgs.nodePackages ? "maildev") ''
      if [[ -z "$SMTPHOST" ]]; then
        set -xe

        cleanup_maildev() {
          [[ -n "$maildev_pid" ]] && kill $maildev_pid
        }

        TMPDIR=''${XDG_RUNTIME_DIR} ${pkgs.nodePackages.maildev}/bin/maildev --smtp $(($PORT_OFFSET + 1025)) --web $(($PORT_OFFSET + 8080)) --ip localhost --web-ip localhost &>/dev/null &
        maildev_pid=$!

        export SMTPHOST=localhost
        export SMTPPORT=$(($PORT_OFFSET + 1025))
        export SMTPSSL=none

        set +xe
      fi
    ''}

    ${optionalString doDevelopEnv ''
      set -xe

      cat >&4 <<EOF
      PORT_OFFSET=''${PORT_OFFSET}

      PGHOST=''${pgSockDir}
      PGLOG=''${pgLogFile}

      WIDGET_MEMCACHED_HOST=localhost
      WIDGET_MEMCACHED_PORT=$(($PORT_OFFSET + 11211))

      SESSION_MEMCACHED_HOST=localhost
      SESSION_MEMCACHED_PORT=$(($PORT_OFFSET + 11212))

      MEMCACHED_HOST=localhost
      MEMCACHED_PORT=$(($PORT_OFFSET + 11212))

      MINIO_DIR=''${MINIO_DIR}
      MINIO_LOGFILE=''${MINIO_LOGFILE}
      UPLOAD_S3_HOST=localhost
      UPLOAD_S3_PORT=$(($PORT_OFFSET + 9000))
      UPLOAD_S3_SSL=false
      UPLOAD_S3_KEY_ID=''${MINIO_ACCESS_KEY}
      UPLOAD_S3_KEY=''${MINIO_SECRET_KEY}

      SMTPHOST=''${SMTPHOST}
      SMTPPORT=''${SMTPPORT}
      SMTPSSL=''${SMTPSSL}
      EOF

      set +xe
    ''}

    ${action}
  '';

  postgresSchema = prev.writeText "schema.sql" ''
    CREATE USER uniworx WITH SUPERUSER;
    CREATE DATABASE uniworx_test;
    GRANT ALL ON DATABASE uniworx_test TO uniworx;
    CREATE DATABASE uniworx;
    GRANT ALL ON DATABASE uniworx TO uniworx;
  '';

  postgresHba = prev.writeText "hba_file" ''
    local all all trust
  '';
in withDevelop

@ -1,116 +0,0 @@
# SPDX-FileCopyrightText: 2022-2023 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor@kleen.consulting>, Steffen Jost <jost@tcs.ifi.lmu.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

{ self }: final: prev:

with prev.lib;

let
  created =
    let
      fromDate = builtins.readFile (prev.runCommand "date" { nativeBuildInputs = with final; [ coreutils ]; } ''
        printf '%s' $(date -Is -d '@${toString self.lastModified}') > $out
      '');
    in if self ? lastModified then fromDate else "1970-01-01T00:00:01Z";

  mkUniworxDocker = { isTest }: prev.dockerTools.buildImage {
    name = "uniworx${optionalString isTest "-test"}";
    tag =
      let
        versionFile = if isTest then ./test-version.json else ./version.json;
      in (builtins.fromJSON (prev.lib.readFile versionFile)).version;
    inherit created;

    contents = with final; [
      uniworx.uniworx.components.exes.uniworx
      prev.dockerTools.binSh findutils coreutils
      iana-etc
      # for PDF creation with Pandoc and LuaTeX
      #cups # needed for interface with print center -- did not work as intended, requires lpd running
      busybox # should provide a working lpr -- to be tested
      htop
      pdftk # for encrypting pdfs
      #texlive.combined.scheme-medium # too large for container in LMU build environment.
      (texlive.combine {
        inherit (texlive) scheme-basic
          babel-german babel-english booktabs textpos
          enumitem eurosym koma-script parskip xcolor dejavu
          # required fro LuaTeX
          luatexbase lualatex-math unicode-math selnolig
        ;
      })
      # just for manual testing within the pod, may be removef for production?
      curl wget netcat openldap
      unixtools.netstat htop gnugrep
      locale
    ];

    runAsRoot = ''
      #!${final.stdenv.shell}

      ${prev.dockerTools.shadowSetup}

      mkdir -p /var/lib

      groupadd -r uniworx
      useradd -r -g uniworx -d /var/lib/uniworx -M uniworx --uid 999
      install -d -g uniworx -o uniworx -m 0750 /var/lib/uniworx

      mkdir -p /var/log
      install -d -g uniworx -o uniworx -m 0755 /var/log/uniworx

      # just to see how to create directories here
      mkdir -p /testdir
    '';

    config =
      let
        entrypoint = prev.writeScriptBin "uniworx-entrypoint" ''
          #!${final.zsh}/bin/zsh -xe

          cTime=$(date -Is)

          # export LOGDEST=/var/log/uniworx/''${cTime}.log # kubernetes prefers log via stdout
          typeset -a configs
          configs=()
          configDir=''${CONFIG_DIR-/cfg}
          if [[ -d "''${configDir}" ]]; then
            while IFS= read -d $'\0' cfg; do
              configs+=("''${(q)cfg}")
            done < <(find "''${configDir}" \( -name '*.yml' -o -name '*.yaml' \) -print0 | sort -rz)
          fi
          configs+=('${uniworxConfig}')
          cd /var/lib/uniworx
          exec -- uniworx ''${configs}
        '';
        postgresSchema = prev.writeText "schema.sql" ''
          CREATE USER uniworx WITH SUPERUSER;
          CREATE DATABASE uniworx;
          GRANT ALL ON DATABASE uniworx TO uniworx;
        '';

        postgresHba = prev.writeText "hba_file" ''
          local all all trust
        '';
        uniworxConfig = prev.writeText "uni2work.yml" ''
          port: 8080
          approot: "_env:APPROOT:http://localhost:8080"
        '';
      in {
        Cmd = [ "${entrypoint}/bin/uniworx-entrypoint" ];
        User = "uniworx:uniworx";
        ExposedPorts = {
          "8080/tcp" = {};
        };
        Volumes = {
          "/var/lib/uniworx" = {};
          "/var/log" = {};
        };
      };
  };
in
mapAttrs (_name: mkUniworxDocker) {
  uniworxTestDocker = { isTest = true; };
  uniworxDocker = { isTest = false; };
}

@ -1,3 +0,0 @@
{
  "version": "27.4.18"
}
@ -1,3 +0,0 @@
SPDX-FileCopyrightText: 2022-2023 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor.kleen@ifi.lmu.de>, Steffen Jost <jost@tcs.ifi.lmu.de>

SPDX-License-Identifier: AGPL-3.0-or-later
@ -1,3 +0,0 @@
{
  "version": "27.4.56"
}
@ -1,3 +0,0 @@
SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Steffen Jost <jost@tcs.ifi.lmu.de>

SPDX-License-Identifier: AGPL-3.0-or-later

@ -1,17 +0,0 @@
# This file has been generated by node2nix 1.11.1. Do not edit!

{pkgs ? import <nixpkgs> {
    inherit system;
  }, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_14"}:

let
  nodeEnv = import ./node-env.nix {
    inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
    inherit pkgs nodejs;
    libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
  };
in
import ./node-packages.nix {
  inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
  inherit nodeEnv;
}

@ -1,689 +0,0 @@
# This file originates from node2nix

{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:

let
  # Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
  utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;

  python = if nodejs ? python then nodejs.python else python2;

  # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
  tarWrapper = runCommand "tarWrapper" {} ''
    mkdir -p $out/bin

    cat > $out/bin/tar <<EOF
    #! ${stdenv.shell} -e
    $(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
    EOF

    chmod +x $out/bin/tar
  '';

  # Function that generates a TGZ file from a NPM project
  buildNodeSourceDist =
    { name, version, src, ... }:

    stdenv.mkDerivation {
      name = "node-tarball-${name}-${version}";
      inherit src;
      buildInputs = [ nodejs ];
      buildPhase = ''
        export HOME=$TMPDIR
        tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
      '';
      installPhase = ''
        mkdir -p $out/tarballs
        mv $tgzFile $out/tarballs
        mkdir -p $out/nix-support
        echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
      '';
    };

  # Common shell logic
  installPackage = writeShellScript "install-package" ''
    installPackage() {
      local packageName=$1 src=$2

      local strippedName

      local DIR=$PWD
      cd $TMPDIR

      unpackFile $src

      # Make the base dir in which the target dependency resides first
      mkdir -p "$(dirname "$DIR/$packageName")"

      if [ -f "$src" ]
      then
          # Figure out what directory has been unpacked
          packageDir="$(find . -maxdepth 1 -type d | tail -1)"

          # Restore write permissions to make building work
          find "$packageDir" -type d -exec chmod u+x {} \;
          chmod -R u+w "$packageDir"

          # Move the extracted tarball into the output folder
          mv "$packageDir" "$DIR/$packageName"
      elif [ -d "$src" ]
      then
          # Get a stripped name (without hash) of the source directory.
          # On old nixpkgs it's already set internally.
          if [ -z "$strippedName" ]
          then
              strippedName="$(stripHash $src)"
          fi

          # Restore write permissions to make building work
          chmod -R u+w "$strippedName"

          # Move the extracted directory into the output folder
          mv "$strippedName" "$DIR/$packageName"
      fi

      # Change to the package directory to install dependencies
      cd "$DIR/$packageName"
    }
  '';

  # Bundle the dependencies of the package
  #
  # Only include dependencies if they don't exist. They may also be bundled in the package.
  includeDependencies = {dependencies}:
    lib.optionalString (dependencies != []) (
      ''
        mkdir -p node_modules
        cd node_modules
      ''
      + (lib.concatMapStrings (dependency:
        ''
          if [ ! -e "${dependency.packageName}" ]; then
              ${composePackage dependency}
          fi
        ''
      ) dependencies)
      + ''
        cd ..
      ''
    );

  # Recursively composes the dependencies of a package
  composePackage = { name, packageName, src, dependencies ? [], ... }@args:
    builtins.addErrorContext "while evaluating node package '${packageName}'" ''
      installPackage "${packageName}" "${src}"
      ${includeDependencies { inherit dependencies; }}
      cd ..
      ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
    '';

  pinpointDependencies = {dependencies, production}:
    let
      pinpointDependenciesFromPackageJSON = writeTextFile {
        name = "pinpointDependencies.js";
        text = ''
          var fs = require('fs');
          var path = require('path');

          function resolveDependencyVersion(location, name) {
              if(location == process.env['NIX_STORE']) {
                  return null;
              } else {
                  var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");

                  if(fs.existsSync(dependencyPackageJSON)) {
                      var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));

                      if(dependencyPackageObj.name == name) {
                          return dependencyPackageObj.version;
                      }
                  } else {
                      return resolveDependencyVersion(path.resolve(location, ".."), name);
                  }
              }
          }

          function replaceDependencies(dependencies) {
              if(typeof dependencies == "object" && dependencies !== null) {
                  for(var dependency in dependencies) {
                      var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);

                      if(resolvedVersion === null) {
                          process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
                      } else {
                          dependencies[dependency] = resolvedVersion;
                      }
                  }
              }
          }

          /* Read the package.json configuration */
          var packageObj = JSON.parse(fs.readFileSync('./package.json'));

          /* Pinpoint all dependencies */
          replaceDependencies(packageObj.dependencies);
          if(process.argv[2] == "development") {
              replaceDependencies(packageObj.devDependencies);
          }
          else {
              packageObj.devDependencies = {};
          }
          replaceDependencies(packageObj.optionalDependencies);
          replaceDependencies(packageObj.peerDependencies);

          /* Write the fixed package.json file */
          fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
        '';
      };
    in
    ''
      node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}

      ${lib.optionalString (dependencies != [])
        ''
          if [ -d node_modules ]
          then
              cd node_modules
              ${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
              cd ..
          fi
        ''}
    '';

  # Recursively traverses all dependencies of a package and pinpoints all
  # dependencies in the package.json file to the versions that are actually
  # being used.

  pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
    ''
      if [ -d "${packageName}" ]
      then
          cd "${packageName}"
          ${pinpointDependencies { inherit dependencies production; }}
          cd ..
          ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
      fi
    '';

  # Extract the Node.js source code which is used to compile packages with
  # native bindings
  nodeSources = runCommand "node-sources" {} ''
    tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
    mv node-* $out
  '';

  # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
  addIntegrityFieldsScript = writeTextFile {
    name = "addintegrityfields.js";
    text = ''
      var fs = require('fs');
      var path = require('path');

      function augmentDependencies(baseDir, dependencies) {
          for(var dependencyName in dependencies) {
              var dependency = dependencies[dependencyName];

              // Open package.json and augment metadata fields
              var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
              var packageJSONPath = path.join(packageJSONDir, "package.json");

              if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
                  console.log("Adding metadata fields to: "+packageJSONPath);
                  var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));

                  if(dependency.integrity) {
                      packageObj["_integrity"] = dependency.integrity;
                  } else {
                      packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
                  }

                  if(dependency.resolved) {
                      packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
                  } else {
                      packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
                  }

                  if(dependency.from !== undefined) { // Adopt from property if one has been provided
                      packageObj["_from"] = dependency.from;
                  }

                  fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
              }

              // Augment transitive dependencies
              if(dependency.dependencies !== undefined) {
                  augmentDependencies(packageJSONDir, dependency.dependencies);
              }
          }
      }

      if(fs.existsSync("./package-lock.json")) {
          var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));

          if(![1, 2].includes(packageLock.lockfileVersion)) {
              process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
              process.exit(1);
          }

          if(packageLock.dependencies !== undefined) {
              augmentDependencies(".", packageLock.dependencies);
          }
      }
    '';
  };

  # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
  reconstructPackageLock = writeTextFile {
    name = "reconstructpackagelock.js";
    text = ''
      var fs = require('fs');
      var path = require('path');

      var packageObj = JSON.parse(fs.readFileSync("package.json"));

      var lockObj = {
          name: packageObj.name,
          version: packageObj.version,
          lockfileVersion: 2,
          requires: true,
          packages: {
              "": {
                  name: packageObj.name,
                  version: packageObj.version,
                  license: packageObj.license,
                  bin: packageObj.bin,
                  dependencies: packageObj.dependencies,
                  engines: packageObj.engines,
                  optionalDependencies: packageObj.optionalDependencies
              }
          },
          dependencies: {}
      };

      function augmentPackageJSON(filePath, packages, dependencies) {
          var packageJSON = path.join(filePath, "package.json");
          if(fs.existsSync(packageJSON)) {
              var packageObj = JSON.parse(fs.readFileSync(packageJSON));
              packages[filePath] = {
                  version: packageObj.version,
                  integrity: "sha1-000000000000000000000000000=",
                  dependencies: packageObj.dependencies,
                  engines: packageObj.engines,
                  optionalDependencies: packageObj.optionalDependencies
              };
              dependencies[packageObj.name] = {
                  version: packageObj.version,
                  integrity: "sha1-000000000000000000000000000=",
                  dependencies: {}
              };
              processDependencies(path.join(filePath, "node_modules"), packages, dependencies[packageObj.name].dependencies);
          }
      }

      function processDependencies(dir, packages, dependencies) {
          if(fs.existsSync(dir)) {
              var files = fs.readdirSync(dir);

              files.forEach(function(entry) {
                  var filePath = path.join(dir, entry);
                  var stats = fs.statSync(filePath);

                  if(stats.isDirectory()) {
                      if(entry.substr(0, 1) == "@") {
                          // When we encounter a namespace folder, augment all packages belonging to the scope
                          var pkgFiles = fs.readdirSync(filePath);

                          pkgFiles.forEach(function(entry) {
                              if(stats.isDirectory()) {
                                  var pkgFilePath = path.join(filePath, entry);
                                  augmentPackageJSON(pkgFilePath, packages, dependencies);
                              }
                          });
                      } else {
                          augmentPackageJSON(filePath, packages, dependencies);
                      }
                  }
              });
          }
      }

      processDependencies("node_modules", lockObj.packages, lockObj.dependencies);

      fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
    '';
  };

  # Script that links bins defined in package.json to the node_modules bin directory
  # NPM does not do this for top-level packages itself anymore as of v7
  linkBinsScript = writeTextFile {
    name = "linkbins.js";
    text = ''
      var fs = require('fs');
      var path = require('path');

      var packageObj = JSON.parse(fs.readFileSync("package.json"));

      var nodeModules = Array(packageObj.name.split("/").length).fill("..").join(path.sep);

      if(packageObj.bin !== undefined) {
          fs.mkdirSync(path.join(nodeModules, ".bin"))

          if(typeof packageObj.bin == "object") {
              Object.keys(packageObj.bin).forEach(function(exe) {
                  if(fs.existsSync(packageObj.bin[exe])) {
                      console.log("linking bin '" + exe + "'");
                      fs.symlinkSync(
                          path.join("..", packageObj.name, packageObj.bin[exe]),
                          path.join(nodeModules, ".bin", exe)
                      );
                  }
                  else {
                      console.log("skipping non-existent bin '" + exe + "'");
                  }
              })
          }
          else {
              if(fs.existsSync(packageObj.bin)) {
                  console.log("linking bin '" + packageObj.bin + "'");
                  fs.symlinkSync(
                      path.join("..", packageObj.name, packageObj.bin),
                      path.join(nodeModules, ".bin", packageObj.name.split("/").pop())
                  );
              }
              else {
                  console.log("skipping non-existent bin '" + packageObj.bin + "'");
              }
          }
      }
      else if(packageObj.directories !== undefined && packageObj.directories.bin !== undefined) {
          fs.mkdirSync(path.join(nodeModules, ".bin"))

          fs.readdirSync(packageObj.directories.bin).forEach(function(exe) {
              if(fs.existsSync(path.join(packageObj.directories.bin, exe))) {
                  console.log("linking bin '" + exe + "'");
                  fs.symlinkSync(
                      path.join("..", packageObj.name, packageObj.directories.bin, exe),
                      path.join(nodeModules, ".bin", exe)
                  );
              }
              else {
                  console.log("skipping non-existent bin '" + exe + "'");
              }
          })
      }
    '';
  };

  prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
    let
      forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
    in
    ''
      # Pinpoint the versions of all dependencies to the ones that are actually being used
      echo "pinpointing versions of dependencies..."
      source $pinpointDependenciesScriptPath

      # Patch the shebangs of the bundled modules to prevent them from
      # calling executables outside the Nix store as much as possible
      patchShebangs .

      # Deploy the Node.js package by running npm install. Since the
      # dependencies have been provided already by ourselves, it should not
      # attempt to install them again, which is good, because we want to make
      # it Nix's responsibility. If it needs to install any dependencies
      # anyway (e.g. because the dependency parameters are
      # incomplete/incorrect), it fails.
      #
      # The other responsibilities of NPM are kept -- version checks, build
      # steps, postprocessing etc.

      export HOME=$TMPDIR
      cd "${packageName}"
      runHook preRebuild

      ${lib.optionalString bypassCache ''
        ${lib.optionalString reconstructLock ''
          if [ -f package-lock.json ]
          then
              echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
              echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
              rm package-lock.json
          else
              echo "No package-lock.json file found, reconstructing..."
          fi

          node ${reconstructPackageLock}
        ''}

        node ${addIntegrityFieldsScript}
      ''}

      npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild

      runHook postRebuild

      if [ "''${dontNpmInstall-}" != "1" ]
      then
          # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
          rm -f npm-shrinkwrap.json

          npm ${forceOfflineFlag} --nodedir=${nodeSources} --no-bin-links --ignore-scripts ${npmFlags} ${lib.optionalString production "--production"} install
      fi

      # Link executables defined in package.json
      node ${linkBinsScript}
    '';

  # Builds and composes an NPM package including all its dependencies
  buildNodePackage =
    { name
    , packageName
    , version ? null
    , dependencies ? []
    , buildInputs ? []
    , production ? true
    , npmFlags ? ""
    , dontNpmInstall ? false
    , bypassCache ? false
    , reconstructLock ? false
    , preRebuild ? ""
    , dontStrip ? true
    , unpackPhase ? "true"
    , buildPhase ? "true"
    , meta ? {}
    , ... }@args:

    let
      extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
    in
    stdenv.mkDerivation ({
      name = "${name}${if version == null then "" else "-${version}"}";
      buildInputs = [ tarWrapper python nodejs ]
        ++ lib.optional (stdenv.isLinux) utillinux
        ++ lib.optional (stdenv.isDarwin) libtool
        ++ buildInputs;

      inherit nodejs;

      inherit dontStrip; # Stripping may fail a build for some package deployments
      inherit dontNpmInstall preRebuild unpackPhase buildPhase;

      compositionScript = composePackage args;
      pinpointDependenciesScript = pinpointDependenciesOfPackage args;

      passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];

      installPhase = ''
        source ${installPackage}

        # Create and enter a root node_modules/ folder
        mkdir -p $out/lib/node_modules
        cd $out/lib/node_modules

        # Compose the package and all its dependencies
        source $compositionScriptPath

        ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}

        # Create symlink to the deployed executable folder, if applicable
        if [ -d "$out/lib/node_modules/.bin" ]
        then
            ln -s $out/lib/node_modules/.bin $out/bin

            # Fixup all executables
            ls $out/bin/* | while read i
            do
                file="$(readlink -f "$i")"
                chmod u+rwx "$file"
                if isScript "$file"
                then
                    sed -i 's/\r$//' "$file" # convert crlf to lf
                fi
            done
        fi

        # Create symlinks to the deployed manual page folders, if applicable
        if [ -d "$out/lib/node_modules/${packageName}/man" ]
        then
            mkdir -p $out/share
            for dir in "$out/lib/node_modules/${packageName}/man/"*
            do
                mkdir -p $out/share/man/$(basename "$dir")
                for page in "$dir"/*
                do
                    ln -s $page $out/share/man/$(basename "$dir")
                done
            done
        fi

        # Run post install hook, if provided
        runHook postInstall
      '';

      meta = {
        # default to Node.js' platforms
        platforms = nodejs.meta.platforms;
      } // meta;
    } // extraArgs);

  # Builds a node environment (a node_modules folder and a set of binaries)
  buildNodeDependencies =
    { name
    , packageName
    , version ? null
|
||||
, src
|
||||
, dependencies ? []
|
||||
, buildInputs ? []
|
||||
, production ? true
|
||||
, npmFlags ? ""
|
||||
, dontNpmInstall ? false
|
||||
, bypassCache ? false
|
||||
, reconstructLock ? false
|
||||
, dontStrip ? true
|
||||
, unpackPhase ? "true"
|
||||
, buildPhase ? "true"
|
||||
, ... }@args:
|
||||
|
||||
let
|
||||
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
|
||||
in
|
||||
stdenv.mkDerivation ({
|
||||
name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";
|
||||
|
||||
buildInputs = [ tarWrapper python nodejs ]
|
||||
++ lib.optional (stdenv.isLinux) utillinux
|
||||
++ lib.optional (stdenv.isDarwin) libtool
|
||||
++ buildInputs;
|
||||
|
||||
inherit dontStrip; # Stripping may fail a build for some package deployments
|
||||
inherit dontNpmInstall unpackPhase buildPhase;
|
||||
|
||||
includeScript = includeDependencies { inherit dependencies; };
|
||||
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
|
||||
|
||||
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
|
||||
|
||||
installPhase = ''
|
||||
source ${installPackage}
|
||||
|
||||
mkdir -p $out/${packageName}
|
||||
cd $out/${packageName}
|
||||
|
||||
source $includeScriptPath
|
||||
|
||||
# Create fake package.json to make the npm commands work properly
|
||||
cp ${src}/package.json .
|
||||
chmod 644 package.json
|
||||
${lib.optionalString bypassCache ''
|
||||
if [ -f ${src}/package-lock.json ]
|
||||
then
|
||||
cp ${src}/package-lock.json .
|
||||
chmod 644 package-lock.json
|
||||
fi
|
||||
''}
|
||||
|
||||
# Go to the parent folder to make sure that all packages are pinpointed
|
||||
cd ..
|
||||
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
|
||||
|
||||
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
|
||||
|
||||
# Expose the executables that were installed
|
||||
cd ..
|
||||
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
|
||||
|
||||
mv ${packageName} lib
|
||||
ln -s $out/lib/node_modules/.bin $out/bin
|
||||
'';
|
||||
} // extraArgs);
|
||||
|
||||
# Builds a development shell
|
||||
buildNodeShell =
|
||||
{ name
|
||||
, packageName
|
||||
, version ? null
|
||||
, src
|
||||
, dependencies ? []
|
||||
, buildInputs ? []
|
||||
, production ? true
|
||||
, npmFlags ? ""
|
||||
, dontNpmInstall ? false
|
||||
, bypassCache ? false
|
||||
, reconstructLock ? false
|
||||
, dontStrip ? true
|
||||
, unpackPhase ? "true"
|
||||
, buildPhase ? "true"
|
||||
, ... }@args:
|
||||
|
||||
let
|
||||
nodeDependencies = buildNodeDependencies args;
|
||||
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
|
||||
in
|
||||
stdenv.mkDerivation ({
|
||||
name = "node-shell-${name}${if version == null then "" else "-${version}"}";
|
||||
|
||||
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
|
||||
buildCommand = ''
|
||||
mkdir -p $out/bin
|
||||
cat > $out/bin/shell <<EOF
|
||||
#! ${stdenv.shell} -e
|
||||
$shellHook
|
||||
exec ${stdenv.shell}
|
||||
EOF
|
||||
chmod +x $out/bin/shell
|
||||
'';
|
||||
|
||||
# Provide the dependencies in a development shell through the NODE_PATH environment variable
|
||||
inherit nodeDependencies;
|
||||
shellHook = lib.optionalString (dependencies != []) ''
|
||||
export NODE_PATH=${nodeDependencies}/lib/node_modules
|
||||
export PATH="${nodeDependencies}/bin:$PATH"
|
||||
'';
|
||||
} // extraArgs);
|
||||
in
|
||||
{
|
||||
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
|
||||
buildNodePackage = lib.makeOverridable buildNodePackage;
|
||||
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
|
||||
buildNodeShell = lib.makeOverridable buildNodeShell;
|
||||
}
|
||||
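Each builder exported above is wrapped in lib.makeOverridable, so the derivation a builder returns carries an override attribute and individual arguments can be swapped without editing the generated files. A minimal sketch, assuming a nodeEnv bound as in the generated default.nix further down (not part of this commit; the extra npmFlags value is purely illustrative):

  (nodeEnv.buildNodePackage args).override {
    production = false;               # also install devDependencies
    npmFlags = "--loglevel verbose";  # assumed flags, for illustration only
  }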
@ -1,3 +0,0 @@
SPDX-FileCopyrightText: 2022-2023 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor@kleen.consulting>

SPDX-License-Identifier: AGPL-3.0-or-later
@ -1,40 +0,0 @@
# This file has been generated by node2nix 1.11.1. Do not edit!

{nodeEnv, fetchurl, fetchgit, nix-gitignore, stdenv, lib, globalBuildInputs ? []}:

let
  sources = {};
  args = {
    name = "uni2work";
    packageName = "uni2work";
    version = "27.4.56";
    src = ../..;
    buildInputs = globalBuildInputs;
    meta = {
      description = "";
      license = "AGPL-3.0-or-later";
    };
    production = true;
    bypassCache = true;
    reconstructLock = false;
  };
in
{
  args = args;
  sources = sources;
  tarball = nodeEnv.buildNodeSourceDist args;
  package = nodeEnv.buildNodePackage args;
  shell = nodeEnv.buildNodeShell args;
  nodeDependencies = nodeEnv.buildNodeDependencies (lib.overrideExisting args {
    src = stdenv.mkDerivation {
      name = args.name + "-package-json";
      src = nix-gitignore.gitignoreSourcePure [
        "*"
        "!package.json"
        "!package-lock.json"
      ] args.src;
      dontBuild = true;
      installPhase = "mkdir -p $out; cp -r ./* $out;";
    };
  });
}
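The src override on nodeDependencies above is the usual node2nix caching trick: the expensive npm-install derivation depends only on package.json and package-lock.json, so edits elsewhere in the tree never force a re-install. A rough equivalent of that filter, written with builtins.filterSource as a sketch (not part of this commit):

  src = builtins.filterSource
    (path: type: builtins.elem (baseNameOf path) [ "package.json" "package-lock.json" ])
    ../..;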
@ -1,3 +0,0 @@
SPDX-FileCopyrightText: 2022-2023 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor@kleen.consulting>

SPDX-License-Identifier: AGPL-3.0-or-later
@ -1,8 +0,0 @@
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

final: prev:
{
  nodePackages = (import ./node2nix.nix { pkgs = final; inherit (final.config) system; }) // prev.nodePackages;
}
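For context, an overlay of this shape is applied when nixpkgs is instantiated; a minimal sketch (the overlay's file name is assumed, not taken from this commit):

  import <nixpkgs> {
    overlays = [ (import ./overlay.nix) ];
  }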
@ -1,689 +0,0 @@
# This file originates from node2nix

{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:

let
  # Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
  utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;

  python = if nodejs ? python then nodejs.python else python2;

  # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
  tarWrapper = runCommand "tarWrapper" {} ''
    mkdir -p $out/bin

    cat > $out/bin/tar <<EOF
    #! ${stdenv.shell} -e
    $(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
    EOF

    chmod +x $out/bin/tar
  '';

  # Function that generates a TGZ file from a NPM project
  buildNodeSourceDist =
    { name, version, src, ... }:

    stdenv.mkDerivation {
      name = "node-tarball-${name}-${version}";
      inherit src;
      buildInputs = [ nodejs ];
      buildPhase = ''
        export HOME=$TMPDIR
        tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
      '';
      installPhase = ''
        mkdir -p $out/tarballs
        mv $tgzFile $out/tarballs
        mkdir -p $out/nix-support
        echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
      '';
    };

  # Common shell logic
  installPackage = writeShellScript "install-package" ''
    installPackage() {
      local packageName=$1 src=$2

      local strippedName

      local DIR=$PWD
      cd $TMPDIR

      unpackFile $src

      # Make the base dir in which the target dependency resides first
      mkdir -p "$(dirname "$DIR/$packageName")"

      if [ -f "$src" ]
      then
          # Figure out what directory has been unpacked
          packageDir="$(find . -maxdepth 1 -type d | tail -1)"

          # Restore write permissions to make building work
          find "$packageDir" -type d -exec chmod u+x {} \;
          chmod -R u+w "$packageDir"

          # Move the extracted tarball into the output folder
          mv "$packageDir" "$DIR/$packageName"
      elif [ -d "$src" ]
      then
          # Get a stripped name (without hash) of the source directory.
          # On old nixpkgs it's already set internally.
          if [ -z "$strippedName" ]
          then
              strippedName="$(stripHash $src)"
          fi

          # Restore write permissions to make building work
          chmod -R u+w "$strippedName"

          # Move the extracted directory into the output folder
          mv "$strippedName" "$DIR/$packageName"
      fi

      # Change to the package directory to install dependencies
      cd "$DIR/$packageName"
    }
  '';

  # Bundle the dependencies of the package
  #
  # Only include dependencies if they don't exist. They may also be bundled in the package.
  includeDependencies = {dependencies}:
    lib.optionalString (dependencies != []) (
      ''
        mkdir -p node_modules
        cd node_modules
      ''
      + (lib.concatMapStrings (dependency:
        ''
          if [ ! -e "${dependency.packageName}" ]; then
              ${composePackage dependency}
          fi
        ''
      ) dependencies)
      + ''
        cd ..
      ''
    );

  # Recursively composes the dependencies of a package
  composePackage = { name, packageName, src, dependencies ? [], ... }@args:
    builtins.addErrorContext "while evaluating node package '${packageName}'" ''
      installPackage "${packageName}" "${src}"
      ${includeDependencies { inherit dependencies; }}
      cd ..
      ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
    '';

  pinpointDependencies = {dependencies, production}:
    let
      pinpointDependenciesFromPackageJSON = writeTextFile {
        name = "pinpointDependencies.js";
        text = ''
          var fs = require('fs');
          var path = require('path');

          function resolveDependencyVersion(location, name) {
              if(location == process.env['NIX_STORE']) {
                  return null;
              } else {
                  var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");

                  if(fs.existsSync(dependencyPackageJSON)) {
                      var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));

                      if(dependencyPackageObj.name == name) {
                          return dependencyPackageObj.version;
                      }
                  } else {
                      return resolveDependencyVersion(path.resolve(location, ".."), name);
                  }
              }
          }

          function replaceDependencies(dependencies) {
              if(typeof dependencies == "object" && dependencies !== null) {
                  for(var dependency in dependencies) {
                      var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);

                      if(resolvedVersion === null) {
                          process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
                      } else {
                          dependencies[dependency] = resolvedVersion;
                      }
                  }
              }
          }

          /* Read the package.json configuration */
          var packageObj = JSON.parse(fs.readFileSync('./package.json'));

          /* Pinpoint all dependencies */
          replaceDependencies(packageObj.dependencies);
          if(process.argv[2] == "development") {
              replaceDependencies(packageObj.devDependencies);
          }
          else {
              packageObj.devDependencies = {};
          }
          replaceDependencies(packageObj.optionalDependencies);
          replaceDependencies(packageObj.peerDependencies);

          /* Write the fixed package.json file */
          fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
        '';
      };
    in
    ''
      node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}

      ${lib.optionalString (dependencies != [])
        ''
          if [ -d node_modules ]
          then
              cd node_modules
              ${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
              cd ..
          fi
        ''}
    '';

  # Recursively traverses all dependencies of a package and pinpoints all
  # dependencies in the package.json file to the versions that are actually
  # being used.

  pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
    ''
      if [ -d "${packageName}" ]
      then
          cd "${packageName}"
          ${pinpointDependencies { inherit dependencies production; }}
          cd ..
          ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
      fi
    '';

  # Extract the Node.js source code which is used to compile packages with
  # native bindings
  nodeSources = runCommand "node-sources" {} ''
    tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
    mv node-* $out
  '';

  # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
  addIntegrityFieldsScript = writeTextFile {
    name = "addintegrityfields.js";
    text = ''
      var fs = require('fs');
      var path = require('path');

      function augmentDependencies(baseDir, dependencies) {
          for(var dependencyName in dependencies) {
              var dependency = dependencies[dependencyName];

              // Open package.json and augment metadata fields
              var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
              var packageJSONPath = path.join(packageJSONDir, "package.json");

              if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
                  console.log("Adding metadata fields to: "+packageJSONPath);
                  var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));

                  if(dependency.integrity) {
                      packageObj["_integrity"] = dependency.integrity;
                  } else {
                      packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
                  }

                  if(dependency.resolved) {
                      packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
                  } else {
                      packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
                  }

                  if(dependency.from !== undefined) { // Adopt from property if one has been provided
                      packageObj["_from"] = dependency.from;
                  }

                  fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
              }

              // Augment transitive dependencies
              if(dependency.dependencies !== undefined) {
                  augmentDependencies(packageJSONDir, dependency.dependencies);
              }
          }
      }

      if(fs.existsSync("./package-lock.json")) {
          var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));

          if(![1, 2].includes(packageLock.lockfileVersion)) {
              process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
              process.exit(1);
          }

          if(packageLock.dependencies !== undefined) {
              augmentDependencies(".", packageLock.dependencies);
          }
      }
    '';
  };

  # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
  reconstructPackageLock = writeTextFile {
    name = "reconstructpackagelock.js";
    text = ''
      var fs = require('fs');
      var path = require('path');

      var packageObj = JSON.parse(fs.readFileSync("package.json"));

      var lockObj = {
        name: packageObj.name,
        version: packageObj.version,
        lockfileVersion: 2,
        requires: true,
        packages: {
          "": {
            name: packageObj.name,
            version: packageObj.version,
            license: packageObj.license,
            bin: packageObj.bin,
            dependencies: packageObj.dependencies,
            engines: packageObj.engines,
            optionalDependencies: packageObj.optionalDependencies
          }
        },
        dependencies: {}
      };

      function augmentPackageJSON(filePath, packages, dependencies) {
          var packageJSON = path.join(filePath, "package.json");
          if(fs.existsSync(packageJSON)) {
              var packageObj = JSON.parse(fs.readFileSync(packageJSON));
              packages[filePath] = {
                  version: packageObj.version,
                  integrity: "sha1-000000000000000000000000000=",
                  dependencies: packageObj.dependencies,
                  engines: packageObj.engines,
                  optionalDependencies: packageObj.optionalDependencies
              };
              dependencies[packageObj.name] = {
                  version: packageObj.version,
                  integrity: "sha1-000000000000000000000000000=",
                  dependencies: {}
              };
              processDependencies(path.join(filePath, "node_modules"), packages, dependencies[packageObj.name].dependencies);
          }
      }

      function processDependencies(dir, packages, dependencies) {
          if(fs.existsSync(dir)) {
              var files = fs.readdirSync(dir);

              files.forEach(function(entry) {
                  var filePath = path.join(dir, entry);
                  var stats = fs.statSync(filePath);

                  if(stats.isDirectory()) {
                      if(entry.substr(0, 1) == "@") {
                          // When we encounter a namespace folder, augment all packages belonging to the scope
                          var pkgFiles = fs.readdirSync(filePath);

                          pkgFiles.forEach(function(entry) {
                              if(stats.isDirectory()) {
                                  var pkgFilePath = path.join(filePath, entry);
                                  augmentPackageJSON(pkgFilePath, packages, dependencies);
                              }
                          });
                      } else {
                          augmentPackageJSON(filePath, packages, dependencies);
                      }
                  }
              });
          }
      }

      processDependencies("node_modules", lockObj.packages, lockObj.dependencies);

      fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
    '';
  };

  # Script that links bins defined in package.json to the node_modules bin directory
  # NPM does not do this for top-level packages itself anymore as of v7
  linkBinsScript = writeTextFile {
    name = "linkbins.js";
    text = ''
      var fs = require('fs');
      var path = require('path');

      var packageObj = JSON.parse(fs.readFileSync("package.json"));

      var nodeModules = Array(packageObj.name.split("/").length).fill("..").join(path.sep);

      if(packageObj.bin !== undefined) {
          fs.mkdirSync(path.join(nodeModules, ".bin"))

          if(typeof packageObj.bin == "object") {
              Object.keys(packageObj.bin).forEach(function(exe) {
                  if(fs.existsSync(packageObj.bin[exe])) {
                      console.log("linking bin '" + exe + "'");
                      fs.symlinkSync(
                          path.join("..", packageObj.name, packageObj.bin[exe]),
                          path.join(nodeModules, ".bin", exe)
                      );
                  }
                  else {
                      console.log("skipping non-existent bin '" + exe + "'");
                  }
              })
          }
          else {
              if(fs.existsSync(packageObj.bin)) {
                  console.log("linking bin '" + packageObj.bin + "'");
                  fs.symlinkSync(
                      path.join("..", packageObj.name, packageObj.bin),
                      path.join(nodeModules, ".bin", packageObj.name.split("/").pop())
                  );
              }
              else {
                  console.log("skipping non-existent bin '" + packageObj.bin + "'");
              }
          }
      }
      else if(packageObj.directories !== undefined && packageObj.directories.bin !== undefined) {
          fs.mkdirSync(path.join(nodeModules, ".bin"))

          fs.readdirSync(packageObj.directories.bin).forEach(function(exe) {
              if(fs.existsSync(path.join(packageObj.directories.bin, exe))) {
                  console.log("linking bin '" + exe + "'");
                  fs.symlinkSync(
                      path.join("..", packageObj.name, packageObj.directories.bin, exe),
                      path.join(nodeModules, ".bin", exe)
                  );
              }
              else {
                  console.log("skipping non-existent bin '" + exe + "'");
              }
          })
      }
    '';
  };

  prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
    let
      forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
    in
    ''
      # Pinpoint the versions of all dependencies to the ones that are actually being used
      echo "pinpointing versions of dependencies..."
      source $pinpointDependenciesScriptPath

      # Patch the shebangs of the bundled modules to prevent them from
      # calling executables outside the Nix store as much as possible
      patchShebangs .

      # Deploy the Node.js package by running npm install. Since the
      # dependencies have been provided already by ourselves, it should not
      # attempt to install them again, which is good, because we want to make
      # it Nix's responsibility. If it needs to install any dependencies
      # anyway (e.g. because the dependency parameters are
      # incomplete/incorrect), it fails.
      #
      # The other responsibilities of NPM are kept -- version checks, build
      # steps, postprocessing etc.

      export HOME=$TMPDIR
      cd "${packageName}"
      runHook preRebuild

      ${lib.optionalString bypassCache ''
        ${lib.optionalString reconstructLock ''
          if [ -f package-lock.json ]
          then
              echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
              echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
              rm package-lock.json
          else
              echo "No package-lock.json file found, reconstructing..."
          fi

          node ${reconstructPackageLock}
        ''}

        node ${addIntegrityFieldsScript}
      ''}

      npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild

      runHook postRebuild

      if [ "''${dontNpmInstall-}" != "1" ]
      then
          # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
          rm -f npm-shrinkwrap.json

          npm ${forceOfflineFlag} --nodedir=${nodeSources} --no-bin-links --ignore-scripts ${npmFlags} ${lib.optionalString production "--production"} install
      fi

      # Link executables defined in package.json
      node ${linkBinsScript}
    '';

  # Builds and composes an NPM package including all its dependencies
  buildNodePackage =
    { name
    , packageName
    , version ? null
    , dependencies ? []
    , buildInputs ? []
    , production ? true
    , npmFlags ? ""
    , dontNpmInstall ? false
    , bypassCache ? false
    , reconstructLock ? false
    , preRebuild ? ""
    , dontStrip ? true
    , unpackPhase ? "true"
    , buildPhase ? "true"
    , meta ? {}
    , ... }@args:

    let
      extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
    in
    stdenv.mkDerivation ({
      name = "${name}${if version == null then "" else "-${version}"}";
      buildInputs = [ tarWrapper python nodejs ]
        ++ lib.optional (stdenv.isLinux) utillinux
        ++ lib.optional (stdenv.isDarwin) libtool
        ++ buildInputs;

      inherit nodejs;

      inherit dontStrip; # Stripping may fail a build for some package deployments
      inherit dontNpmInstall preRebuild unpackPhase buildPhase;

      compositionScript = composePackage args;
      pinpointDependenciesScript = pinpointDependenciesOfPackage args;

      passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];

      installPhase = ''
        source ${installPackage}

        # Create and enter a root node_modules/ folder
        mkdir -p $out/lib/node_modules
        cd $out/lib/node_modules

        # Compose the package and all its dependencies
        source $compositionScriptPath

        ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}

        # Create symlink to the deployed executable folder, if applicable
        if [ -d "$out/lib/node_modules/.bin" ]
        then
            ln -s $out/lib/node_modules/.bin $out/bin

            # Fixup all executables
            ls $out/bin/* | while read i
            do
                file="$(readlink -f "$i")"
                chmod u+rwx "$file"
                if isScript "$file"
                then
                    sed -i 's/\r$//' "$file" # convert crlf to lf
                fi
            done
        fi

        # Create symlinks to the deployed manual page folders, if applicable
        if [ -d "$out/lib/node_modules/${packageName}/man" ]
        then
            mkdir -p $out/share
            for dir in "$out/lib/node_modules/${packageName}/man/"*
            do
                mkdir -p $out/share/man/$(basename "$dir")
                for page in "$dir"/*
                do
                    ln -s $page $out/share/man/$(basename "$dir")
                done
            done
        fi

        # Run post install hook, if provided
        runHook postInstall
      '';

      meta = {
        # default to Node.js' platforms
        platforms = nodejs.meta.platforms;
      } // meta;
    } // extraArgs);

  # Builds a node environment (a node_modules folder and a set of binaries)
  buildNodeDependencies =
    { name
    , packageName
    , version ? null
    , src
    , dependencies ? []
    , buildInputs ? []
    , production ? true
    , npmFlags ? ""
    , dontNpmInstall ? false
    , bypassCache ? false
    , reconstructLock ? false
    , dontStrip ? true
    , unpackPhase ? "true"
    , buildPhase ? "true"
    , ... }@args:

    let
      extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
    in
    stdenv.mkDerivation ({
      name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";

      buildInputs = [ tarWrapper python nodejs ]
        ++ lib.optional (stdenv.isLinux) utillinux
        ++ lib.optional (stdenv.isDarwin) libtool
        ++ buildInputs;

      inherit dontStrip; # Stripping may fail a build for some package deployments
      inherit dontNpmInstall unpackPhase buildPhase;

      includeScript = includeDependencies { inherit dependencies; };
      pinpointDependenciesScript = pinpointDependenciesOfPackage args;

      passAsFile = [ "includeScript" "pinpointDependenciesScript" ];

      installPhase = ''
        source ${installPackage}

        mkdir -p $out/${packageName}
        cd $out/${packageName}

        source $includeScriptPath

        # Create fake package.json to make the npm commands work properly
        cp ${src}/package.json .
        chmod 644 package.json
        ${lib.optionalString bypassCache ''
          if [ -f ${src}/package-lock.json ]
          then
              cp ${src}/package-lock.json .
              chmod 644 package-lock.json
          fi
        ''}

        # Go to the parent folder to make sure that all packages are pinpointed
        cd ..
        ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}

        ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}

        # Expose the executables that were installed
        cd ..
        ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}

        mv ${packageName} lib
        ln -s $out/lib/node_modules/.bin $out/bin
      '';
    } // extraArgs);

  # Builds a development shell
  buildNodeShell =
    { name
    , packageName
    , version ? null
    , src
    , dependencies ? []
    , buildInputs ? []
    , production ? true
    , npmFlags ? ""
    , dontNpmInstall ? false
    , bypassCache ? false
    , reconstructLock ? false
    , dontStrip ? true
    , unpackPhase ? "true"
    , buildPhase ? "true"
    , ... }@args:

    let
      nodeDependencies = buildNodeDependencies args;
      extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
    in
    stdenv.mkDerivation ({
      name = "node-shell-${name}${if version == null then "" else "-${version}"}";

      buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
      buildCommand = ''
        mkdir -p $out/bin
        cat > $out/bin/shell <<EOF
        #! ${stdenv.shell} -e
        $shellHook
        exec ${stdenv.shell}
        EOF
        chmod +x $out/bin/shell
      '';

      # Provide the dependencies in a development shell through the NODE_PATH environment variable
      inherit nodeDependencies;
      shellHook = lib.optionalString (dependencies != []) ''
        export NODE_PATH=${nodeDependencies}/lib/node_modules
        export PATH="${nodeDependencies}/bin:$PATH"
      '';
    } // extraArgs);
in
{
  buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
  buildNodePackage = lib.makeOverridable buildNodePackage;
  buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
  buildNodeShell = lib.makeOverridable buildNodeShell;
}
@ -1,40 +0,0 @@
# This file has been generated by node2nix 1.11.1. Do not edit!

{nodeEnv, fetchurl, fetchgit, nix-gitignore, stdenv, lib, globalBuildInputs ? []}:

let
  sources = {};
  args = {
    name = "maildev";
    packageName = "maildev";
    version = "27.4.56";
    src = ../..;
    buildInputs = globalBuildInputs;
    meta = {
      description = "";
      license = "AGPL-3.0-or-later";
    };
    production = true;
    bypassCache = true;
    reconstructLock = false;
  };
in
{
  args = args;
  sources = sources;
  tarball = nodeEnv.buildNodeSourceDist args;
  package = nodeEnv.buildNodePackage args;
  shell = nodeEnv.buildNodeShell args;
  nodeDependencies = nodeEnv.buildNodeDependencies (lib.overrideExisting args {
    src = stdenv.mkDerivation {
      name = args.name + "-package-json";
      src = nix-gitignore.gitignoreSourcePure [
        "*"
        "!package.json"
        "!package-lock.json"
      ] args.src;
      dontBuild = true;
      installPhase = "mkdir -p $out; cp -r ./* $out;";
    };
  });
}
@ -1,17 +0,0 @@
# This file has been generated by node2nix 1.11.1. Do not edit!

{pkgs ? import <nixpkgs> {
    inherit system;
  }, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_21"}:

let
  nodeEnv = import ./node-env.nix {
    inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
    inherit pkgs nodejs;
    libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
  };
in
import ./node-packages.nix {
  inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
  inherit nodeEnv;
}
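The generated attributes are then built straight from this entry point; a sketch assuming an existing pkgs binding (not part of this commit):

  let nodeOutputs = import ./default.nix { inherit pkgs; };
  in { inherit (nodeOutputs) package shell nodeDependencies; }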
@ -1,17 +0,0 @@
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

{}: prev: final: rec {
  changelogJson = prev.runCommand "changelog.json" {
  } ''
    ln -s ${final.uniworxNodeDependencies}/lib/node_modules ./node_modules
    export PATH="${final.uniworxNodeDependencies}/bin:$PATH"

    changelog-parser ${../CHANGELOG.md} > $out
  '';

  jqChangelogJson = prev.writeShellScriptBin "jq-changelog" ''
    exec -- ${final.jq}/bin/jq $@ < ${changelogJson}
  '';
}
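Once the overlay is applied, the helper can be consumed from other derivations; a sketch (the pkgs binding and the jq query are assumptions, following changelog-parser's versions[].body output shape):

  releaseNotes = pkgs.runCommand "release-notes" {} ''
    ${pkgs.jqChangelogJson}/bin/jq-changelog -r '.versions[0].body' > $out
  '';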
@ -1,95 +0,0 @@
# SPDX-FileCopyrightText: 2022-2024 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor@kleen.consulting>, Steffen Jost <jost@cip.ifi.lmu.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

{ inputs, backendSource, gitRevision ? null, ... }: final: prev:

with prev.lib;

let
  haskellInputs = ["encoding" "memcached-binary" "conduit-resumablesink" "HaskellNet-SSL" "ldap-client" "serversession" "xss-sanitize" "colonnade" "minio-hs" "cryptoids" "zip-stream" "yesod" "cryptonite" "esqueleto"];
in {
  uniworx = final.haskell-nix.stackProject {
    src = prev.stdenv.mkDerivation {
      name = "uniworx-src";
      src = backendSource;

      phases = ["unpackPhase" "patchPhase" "installPhase"];

      patchPhase = ''
        substitute stack-flake.yaml stack.yaml \
          ${concatMapStringsSep " \\\n" (pkgName: "--replace @${pkgName}@ ${inputs."${pkgName}"}") haskellInputs}
      '';

      installPhase = ''
        mkdir -p $out
        cp -pr --reflink=auto ./. $out
      '';
    };
    compiler-nix-name = "ghc94";
    # stack-sha256 = "1n7z294ldv2rjkfj1vs3kqmnbp34m2scrmyrp5kwmga9vp86fd9z"; # produces errors gregor does not understand :(
    modules = [
      {
        packages = {
          encoding.src = inputs.encoding;
          memcached-binary.src = inputs.memcached-binary;
          conduit-resumablesink.src = inputs.conduit-resumablesink;
          HaskellNet-SSL.src = inputs.HaskellNet-SSL;
          ldap-client.src = inputs.ldap-client;
          serversession.src = "${inputs.serversession}/serversession";
          serversession-backend-acid-state.src = "${inputs.serversession}/serversession-backend-acid-state";
          xss-sanitize.src = inputs.xss-sanitize;
          colonnade.src = "${inputs.colonnade}/colonnade";
          minio-hs.src = inputs.minio-hs;
          cryptoids-class.src = "${inputs.cryptoids}/cryptoids-class";
          cryptoids-types.src = "${inputs.cryptoids}/cryptoids-types";
          cryptoids.src = "${inputs.cryptoids}/cryptoids";
          filepath-crypto.src = "${inputs.cryptoids}/filepath-crypto";
          uuid-crypto.src = "${inputs.cryptoids}/uuid-crypto";
          zip-stream.src = inputs.zip-stream;
          yesod.src = "${inputs.yesod}/yesod";
          yesod-core.src = "${inputs.yesod}/yesod-core";
          yesod-static.src = "${inputs.yesod}/yesod-static";
          yesod-persistent.src = "${inputs.yesod}/yesod-persistent";
          yesod-form.src = "${inputs.yesod}/yesod-form";
          yesod-auth.src = "${inputs.yesod}/yesod-auth";
          yesod-test.src = "${inputs.yesod}/yesod-test";
          cryptonite.src = inputs.cryptonite;
          esqueleto.src = inputs.esqueleto;
        };
      }
      {
        packages.uniworx = {
          postUnpack = ''
            ${final.xorg.lndir}/bin/lndir -silent ${prev.uniworxFrontend} $sourceRoot
            chmod a+w -R $sourceRoot
          '';
          preBuild = ''
            export TZDIR=${final.tzdata}/share/zoneinfo
            ${optionalString (gitRevision != null) ''
              export GIT_REVISION=${gitRevision}
            ''}
          '';
          components.library.build-tools = with final.pkgs; [ llvm_9 ];
          components.exes.uniworx.build-tools = with final.pkgs; [ llvm_9 ];
          components.exes.uniworxdb.build-tools = with final.pkgs; [ llvm_9 ];
          components.exes.uniworxload.build-tools = with final.pkgs; [ llvm_9 ];
          components.tests.yesod = {
            build-tools = with final.pkgs; [ llvm_9 final.uniworx.hsPkgs.hspec-discover ];
            testWrapper =
              let
                testWrapper = prev.writeScript "test-wrapper" (import ../develop.nix { inherit prev; pkgs = final; doDevelopEnv = false; } "$@");
                testWrapperWrapped = prev.runCommand "test-wrapper" { buildInputs = [final.makeWrapper]; } ''
                  makeWrapper ${testWrapper} $out \
                    --prefix PATH : ${final.postgresql_12}/bin \
                    --prefix PATH : ${final.minio}/bin \
                    --prefix PATH : ${final.memcached}/bin
                '';
              in singleton (toString testWrapperWrapped);
          };
          components.tests.hlint.build-tools = with final.pkgs; [ llvm_9 final.uniworx.hsPkgs.hlint-test ];
        };
      }
    ];
  };
}
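haskell-nix.stackProject exposes its results through haskell.nix's usual attribute tree, so downstream expressions pick out individual components rather than a whole package; a sketch under that assumption (not part of this commit):

  uniworxExe = final.uniworx.hsPkgs.uniworx.components.exes.uniworx;
  yesodTests = final.uniworx.hsPkgs.uniworx.components.tests.yesod;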
@ -1,10 +0,0 @@
# SPDX-FileCopyrightText: 2022-2023 Gregor Kleen <gregor@kleen.consulting>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

{ inputs, frontendSource, backendSource, gitRevision ? null }: final: prev: prev.lib.composeManyExtensions [
  (import ./node-dependencies.nix { inherit inputs; })
  (import ./well-known.nix { inherit frontendSource; })
  (import ./frontend.nix { inherit frontendSource; })
  (import ./backend.nix { inherit backendSource inputs gitRevision; })
] final prev
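A sketch of handing this parametrised overlay to a flake output (path and attribute names assumed; not part of this commit):

  overlays.default = import ./nix/overlays {
    inherit inputs;
    frontendSource = self;
    backendSource = self;
    gitRevision = self.rev or null;
  };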
@ -1,87 +0,0 @@
# SPDX-FileCopyrightText: 2022-2024 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor.kleen@ifi.lmu.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

{ frontendSource, ... }: final: prev:
let
  pname = "uniworx-frontend";
  # extract the version from package.json (ensuring these never get out of sync)
  version = (builtins.fromJSON (builtins.readFile ./../../package.json)).version;

  # grab our dependencies
  deps = prev.mkYarnModules {
    inherit pname version;
    packageJSON = ./../../package.json;
    yarnLock = ./../../yarn.lock;
    yarnNix = ./../../yarn.nix;
  };
  setupNodeDeps = ''
    ln -s ${final.uniworxNodeDependencies}/lib/node_modules ./node_modules
    export PATH="${final.uniworxNodeDependencies}/bin:$PATH"
  '';
in {
  uniworxFrontend = prev.stdenv.mkDerivation {
    inherit pname version;

    srcs = [frontendSource prev.uniworxWellKnown];
    sourceRoot = "source";

    phases = ["unpackPhase" "configurePhase" "checkPhase" "buildPhase" "installPhase"];

    # No build dependencies, all work has been done for you already by mkYarnModules
    nativeBuildInputs = [ ];
    buildInputs = [ ];

    postUnpack = ''
      ${final.xorg.lndir}/bin/lndir -silent ../uniworx-well-known $sourceRoot
    '';

    preBuild = setupNodeDeps;
    buildPhase = ''
      runHook preBuild

      webpack --progress

      runHook postBuild
    '';

    # Grab the dependencies from the above mkYarnModules derivation
    configurePhase = ''
      mkdir -p $out/bin
      ln -s ${deps}/node_modules $out
    '';

    preCheck = ''
      ${setupNodeDeps}
      export FONTCONFIG_FILE="${final.fontconfig.out}/etc/fonts/fonts.conf"
      export FONTCONFIG_PATH="${final.fontconfig.out}/etc/fonts/"
      export CHROME_BIN="${final.chromium}/bin/chromium-browser"
    '';
    checkPhase = ''
      runHook preCheck

      eslint frontend/src
      karma start --conf karma.conf.js

      runHook postCheck
    '';

    installPhase = ''
      mkdir -p $out $out/config
      cp -r --reflink=auto well-known static $out
      cp -r --reflink=auto config/webpack.yml $out/config
    '';

    passthru.check = final.uniworxFrontend.overrideAttrs (oldAttrs: {
      name = "${oldAttrs.name}-check";
      phases = ["unpackPhase" "buildPhase"];
      buildPhase = ''
        mkdir $out
        ( ${oldAttrs.checkPhase} ) | tee $out/test-stdout
      '';
    });

    # Skip the unpack step (mkDerivation will complain otherwise)
    dontUnpack = true;
  };
}
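passthru.check gives CI a derivation that re-runs only the unpack step plus the lint and karma commands from checkPhase, without producing release artefacts; with the overlay applied it is an ordinary attribute. A sketch (the pkgs binding is assumed):

  frontendCheck = pkgs.uniworxFrontend.check;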
@ -1,13 +0,0 @@
# SPDX-FileCopyrightText: 2022-2023 Gregor Kleen <gregor@kleen.consulting>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

{ ... }: final: prev: {
  uniworxNodeDependencies = prev.mkYarnModules {
    pname = "uniworx-node-dependencies";
    version = (builtins.fromJSON (builtins.readFile ./../../package.json)).version;
    packageJSON = ../../package.json;
    yarnLock = ../../yarn.lock;
    yarnNix = ../../yarn.nix;
  };
}
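mkYarnModules is the yarn2nix builder (assumed here to be brought into scope by another overlay), and its output exposes the installed tree under lib/node_modules plus a bin directory — the paths the frontend, well-known and changelog overlays above and below link into their build sandboxes. A sketch of reusing it the same way elsewhere (pkgs binding assumed):

  setupNodeModules = ''
    ln -s ${pkgs.uniworxNodeDependencies}/lib/node_modules ./node_modules
    export PATH="${pkgs.uniworxNodeDependencies}/bin:$PATH"
  '';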
@ -1,26 +0,0 @@
# SPDX-FileCopyrightText: 2022-2023 Gregor Kleen <gregor@kleen.consulting>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

{ frontendSource, ... }: final: prev: {
  uniworxWellKnown = prev.stdenv.mkDerivation {
    name = "uniworx-well-known";
    src = frontendSource;

    phases = ["unpackPhase" "buildPhase" "installPhase" "fixupPhase"];

    buildPhase = ''
      ln -s ${final.uniworxNodeDependencies}/lib/node_modules ./node_modules
      export PATH="${final.uniworxNodeDependencies}/bin:${prev.exiftool}/bin:$PATH"
      webpack --progress
    '';

    installPhase = ''
      mkdir -p $out
      cp -r --reflink=auto well-known $out/.nix-well-known
    '';

    outputHashMode = "recursive";
    outputHash = "sha256-18MfdmJX3q826Q4p2F3SnwS2fCjLN0LUpIV/jqUPH4I==";
  };
}
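outputHashMode and outputHash make uniworxWellKnown a fixed-output derivation: Nix verifies the webpack output against the recorded hash and can substitute it without rebuilding, at the cost of having to bump the hash whenever the emitted well-known files change. The usual workflow is to build once with a placeholder and copy the real hash out of the mismatch error; a sketch:

  outputHash = prev.lib.fakeHash;  # build once, then paste the sha256-... value Nix reports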
17463
package-lock.json
generated
File diff suppressed because it is too large
@ -1,3 +0,0 @@
SPDX-FileCopyrightText: 2022-2023 Felix Hamann <felix.hamann@campus.lmu.de>, Gregor Kleen <gregor@kleen.consulting>, Sarah Vaupel <sarah.vaupel@ifi.lmu.de>, Steffen Jost <jost@tcs.ifi.lmu.de>

SPDX-License-Identifier: AGPL-3.0-or-later
129
package.json
@ -1,126 +1,77 @@
{
  "name": "uni2work",
  "version": "27.4.56",
  "name": "fradrive",
  "version": "27.4.59",
  "description": "",
  "keywords": [],
  "author": "",
  "license": "AGPL-3.0-or-later",
  "scripts": {
    "start": "run-s frontend:build --parallel \"frontend:build:watch\" \"yesod:start\"",
    "test": "run-s frontend:test yesod:test i18n:test",
    "lint": "run-s frontend:lint yesod:lint",
    "build": "run-s frontend:build yesod:build",
    "yesod:db": "./db.sh",
    "yesod:start": "./start.sh",
    "yesod:lint": "./hlint.sh",
    "yesod:test": "./test.sh",
    "yesod:test:watch": "./test.sh --file-watch",
    "yesod:build": "./build.sh",
    "yesod:build:watch": "./build.sh --file-watch",
    "frontend:lint": "eslint frontend/src",
    "frontend:test": "karma start --conf karma.conf.js",
    "frontend:test:watch": "karma start --conf karma.conf.js --single-run false",
    "frontend:build": "webpack --progress",
    "frontend:build:watch": "webpack --watch --progress",
    "i18n:test": "./missing-translations.sh",
    "prerelease": "./is-clean.sh && npm run test",
    "release": "./release.sh",
    "postrelease": "git push --follow-tags",
    "parse-changelog": "changelog-parser ./CHANGELOG.md > changelog.json"
  },
  "husky": {
    "hooks": {
      "pre-commit": "lint-staged",
      "commit-msg": "commitlint --edit $HUSKY_GIT_PARAMS"
    }
  },
  "lint-staged": {
    "frontend/src/**/*.js": [
      "eslint",
      "git add"
    ]
  },
  "browserslist": [
    "defaults"
  ],
  "type": "module",
  "devDependencies": {
    "@babel/cli": "^7.23.4",
    "@babel/core": "^7.23.7",
    "@babel/eslint-parser": "^7.23.3",
    "@babel/plugin-proposal-class-properties": "^7.17.12",
    "@babel/plugin-proposal-decorators": "^7.23.7",
    "@babel/plugin-proposal-private-methods": "^7.18.6",
    "@babel/plugin-proposal-private-property-in-object": "^7.17.12",
    "@babel/plugin-transform-modules-commonjs": "^7.23.3",
    "@babel/plugin-transform-runtime": "^7.23.7",
    "@babel/preset-env": "^7.23.7",
    "@commitlint/cli": "^18.4.4",
    "@commitlint/config-conventional": "^18.4.4",
    "@fortawesome/fontawesome-pro": "^6.5.1",
    "autoprefixer": "^10.4.16",
    "babel-core": "^6.26.3",
    "@babel/cli": "^7.25.6",
    "@babel/core": "^7.25.2",
    "@babel/eslint-parser": "^7.25.1",
    "@babel/plugin-proposal-decorators": "^7.24.7",
    "@babel/plugin-syntax-dynamic-import": "^7.8.3",
    "@babel/plugin-transform-class-properties": "^7.25.4",
    "@babel/plugin-transform-modules-commonjs": "^7.24.8",
    "@babel/plugin-transform-private-methods": "^7.18.6",
    "@babel/plugin-transform-private-property-in-object": "^7.24.7",
    "@babel/plugin-transform-runtime": "^7.25.4",
    "@babel/preset-env": "^7.25.4",
    "@eslint/js": "^9.9.1",
    "@fortawesome/fontawesome-free": "^6.6.0",
    "autoprefixer": "^10.4.20",
    "axios": "^1.7.5",
    "babel-loader": "^9.1.3",
    "babel-plugin-syntax-dynamic-import": "^6.18.0",
    "babel-plugin-transform-decorators-legacy": "^1.3.5",
    "babel-preset-env": "^1.7.0",
    "babel-preset-es2015": "^6.24.1",
    "changelog-parser": "^3.0.1",
    "clean-webpack-plugin": "^4.0.0",
    "copy-webpack-plugin": "^11.0.0",
    "css-loader": "^6.8.1",
    "css-minimizer-webpack-plugin": "^5.0.1",
    "eslint": "^8.56.0",
    "favicons": "^7.1.5",
    "favicons-webpack-plugin": "^6.0.1",
    "copy-webpack-plugin": "^12.0.2",
    "css-loader": "^7.1.2",
    "css-minimizer-webpack-plugin": "^7.0.0",
    "eslint": "^9.9.1",
    "file-loader": "^6.2.0",
    "fs-extra": "^11.2.0",
    "glob": "^10.3.10",
    "html-webpack-plugin": "^5.6.0",
    "husky": "^8.0.3",
    "jasmine-core": "^5.1.1",
    "glob": "^11.0.0",
    "globals": "^15.9.0",
    "jasmine-core": "^5.2.0",
    "js-yaml": "^4.1.0",
    "karma": "^6.4.2",
    "karma": "^6.4.4",
    "karma-browserify": "^8.1.0",
    "karma-chrome-launcher": "^3.2.0",
    "karma-cli": "^2.0.0",
    "karma-jasmine": "^5.1.0",
    "karma-jasmine-html-reporter": "^2.1.0",
    "karma-mocha-reporter": "^2.2.5",
    "karma-webpack": "^5.0.0",
    "lint-staged": "^15.2.0",
    "mini-css-extract-plugin": "^2.7.6",
    "karma-webpack": "^5.0.1",
    "mini-css-extract-plugin": "^2.9.1",
    "npm-run-all": "^4.1.5",
    "null-loader": "^4.0.1",
    "postcss-loader": "^7.3.4",
    "postcss-preset-env": "^9.3.0",
    "postcss-loader": "^8.1.1",
    "postcss-preset-env": "^10.0.2",
    "real-favicon-webpack-plugin": "^0.2.3",
    "remove-files-webpack-plugin": "^1.5.0",
    "request": "^2.88.2",
    "request-promise": "^4.2.6",
    "resolve-url-loader": "^5.0.0",
    "sass": "^1.69.7",
    "sass-loader": "^13.3.3",
    "semver": "^7.5.4",
    "standard-version": "^9.5.0",
    "standard-version-updater-yaml": "^1.0.3",
    "style-loader": "^3.3.3",
    "terser-webpack-plugin": "^5.3.10",
    "tmp": "^0.2.1",
    "typeface-roboto": "1.1.13",
    "typeface-source-code-pro": "^1.1.13",
    "typeface-source-sans-pro": "1.1.13",
    "webpack": "^5.89.0",
    "sass": "^1.77.8",
    "sass-loader": "^16.0.1",
    "style-loader": "^4.0.0",
    "tmp": "^0.2.3",
    "webpack": "^5.94.0",
    "webpack-cli": "^5.1.4",
    "webpack-manifest-plugin": "^5.0.0"
  },
  "dependencies": {
    "@babel/runtime": "^7.23.7",
    "@babel/runtime": "^7.25.6",
    "@juggle/resize-observer": "^3.4.0",
    "core-js": "^3.35.0",
    "core-js": "^3.38.1",
    "css.escape": "^1.5.1",
    "js-cookie": "^3.0.5",
    "lodash.debounce": "^4.0.8",
    "lodash.defer": "^4.1.0",
    "lodash.throttle": "^4.1.1",
    "moment": "^2.30.1",
    "npm": "^10.8.3",
    "sodium-javascript": "^0.8.0",
    "toposort": "^2.0.2",
    "whatwg-fetch": "^3.6.20"

Some files were not shown because too many files have changed in this diff