Draft: Resolve "GHC und Dependencies auf die neuste Version bringen" #206
17
.babelrc
17
.babelrc
@ -1,17 +0,0 @@
|
||||
{
|
||||
"presets": [
|
||||
["env", {
|
||||
"useBuiltIns": "usage",
|
||||
"targets": { "node": "current" }
|
||||
}
|
||||
]
|
||||
],
|
||||
"plugins": [
|
||||
["@babel/plugin-proposal-decorators", { "legacy": true }],
|
||||
["@babel/plugin-proposal-class-properties", { "loose": true }],
|
||||
["@babel/plugin-proposal-private-methods", { "loose": true }],
|
||||
["@babel/plugin-proposal-private-property-in-object", { "loose": true }],
|
||||
["@babel/plugin-transform-modules-commonjs"],
|
||||
["@babel/transform-runtime"]
|
||||
]
|
||||
}
|
||||
@ -1 +0,0 @@
|
||||
**/*
|
||||
@ -1,3 +0,0 @@
|
||||
SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
|
||||
SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
@ -1,30 +0,0 @@
|
||||
{
|
||||
"env": {
|
||||
"browser": true,
|
||||
"es6": true,
|
||||
"jasmine": true
|
||||
},
|
||||
"extends": "eslint:recommended",
|
||||
"globals": {
|
||||
"Atomics": "readonly",
|
||||
"SharedArrayBuffer": "readonly",
|
||||
"flatpickr": "readonly",
|
||||
"$": "readonly"
|
||||
},
|
||||
"parser": "@babel/eslint-parser",
|
||||
"parserOptions": {
|
||||
"ecmaVersion": 2018,
|
||||
"requireConfigFile": false,
|
||||
"ecmaFeatures": {
|
||||
"legacyDecorators": true
|
||||
}
|
||||
},
|
||||
"rules": {
|
||||
"no-console": "off",
|
||||
"no-extra-semi": "off",
|
||||
"semi": ["error", "always"],
|
||||
"comma-dangle": ["error", "always-multiline"],
|
||||
"quotes": ["error", "single"],
|
||||
"no-var": "error"
|
||||
}
|
||||
}
|
||||
13
.gitignore
vendored
13
.gitignore
vendored
@ -1,5 +1,9 @@
|
||||
.Dockerfile
|
||||
dist*
|
||||
node_modules/
|
||||
assets/icons
|
||||
assets/favicons
|
||||
bin/
|
||||
*.hi
|
||||
*.o
|
||||
*.sqlite3
|
||||
@ -7,7 +11,13 @@ node_modules/
|
||||
*.sqlite3-wal
|
||||
.hsenv*
|
||||
cabal-dev/
|
||||
.cache/
|
||||
.stack/
|
||||
.stack-work/
|
||||
.dev-port-http
|
||||
.dev-port-https
|
||||
.bash_history
|
||||
.lesshst
|
||||
yesod-devel/
|
||||
.cabal-sandbox
|
||||
cabal.sandbox.config
|
||||
@ -34,6 +44,7 @@ test.log
|
||||
*.dump-splices
|
||||
/.stack-work.lock
|
||||
/.npmrc
|
||||
/.npm/
|
||||
/config/webpack.yml
|
||||
tunnel.log
|
||||
/static
|
||||
@ -51,4 +62,4 @@ tunnel.log
|
||||
**/result
|
||||
**/result-*
|
||||
.develop.cmd
|
||||
/.vscode
|
||||
/.vscode
|
||||
|
||||
870
.gitlab-ci.yml
870
.gitlab-ci.yml
@ -1,396 +1,588 @@
|
||||
# SPDX-FileCopyrightText: 2022-2023 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor@kleen.consulting>
|
||||
# SPDX-FileCopyrightText: 2022-2024 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor@kleen.consulting>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
### IMPORTANT NOTICE ###
|
||||
# Our pipeline consists of static and dynamic parts.
|
||||
#
|
||||
# This file only contains the static parts of our pipeline.
|
||||
# Dynamic jobs are defined in .gitlab-ci/frontend.yml and .gitlab-ci/backend.yml.
|
||||
# These are used as a template to generate downstream (child) pipelines during
|
||||
# the runtime of the upstream (parent) pipeline.
|
||||
###
|
||||
|
||||
|
||||
# workflow:
|
||||
# rules:
|
||||
# - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
# - if: $CI_MERGE_REQUEST_ID
|
||||
# - if: $CI_COMMIT_TAG =~ /^v/
|
||||
# - if: $CI_COMMIT_TAG =~ /^t/
|
||||
# - if: $CI_COMMIT_TAG =~ /^d/
|
||||
|
||||
|
||||
variables:
|
||||
IMAGE_BUILDER: quay.io/buildah/stable:latest
|
||||
FRONTEND_IMAGE_DEPENDENCIES:
|
||||
docker/frontend/Dockerfile
|
||||
package.json
|
||||
package-lock.json
|
||||
webpack.config.js
|
||||
BACKEND_IMAGE_DEPENDENCIES:
|
||||
docker/backend/Dockerfile
|
||||
package.yaml
|
||||
stack.yaml
|
||||
stack.yaml.lock
|
||||
|
||||
|
||||
default:
|
||||
image:
|
||||
name: registry.uniworx.de/uniworx/containers/nix-attic:latest
|
||||
name: ${CI_REGISTRY}/uniworx/containers/debian:12.5
|
||||
entrypoint: [""]
|
||||
docker:
|
||||
platform: x86_64
|
||||
artifacts:
|
||||
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
|
||||
expire_in: "1 day"
|
||||
retry: 2
|
||||
interruptible: true
|
||||
|
||||
variables:
|
||||
NIX_PATH: "nixpkgs=http://nixos.org/channels/nixos-21.05/nixexprs.tar.xz"
|
||||
AWS_SHARED_CREDENTIALS_FILE: "/etc/aws/credentials"
|
||||
|
||||
TRANSFER_METER_FREQUENCY: "2s"
|
||||
|
||||
NIX_CONFIG: |-
|
||||
extra-substituters = https://cache.iog.io
|
||||
extra-trusted-public-keys = hydra.iohk.io:f/Ea+s+dFdN+3Y/G+FDgSq+a5NEWhJGzdjvKNGv0/EQ=
|
||||
|
||||
stages:
|
||||
- frontend:build
|
||||
- backend:build
|
||||
- test
|
||||
- container:build
|
||||
- prepare release
|
||||
- setup
|
||||
- frontend
|
||||
- backend
|
||||
- release
|
||||
|
||||
node dependencies:
|
||||
stage: frontend:build
|
||||
script:
|
||||
- nix -L build -o result ".#uniworxNodeDependencies"
|
||||
- nix-store --export $(nix-store -qR result) | xz -T0 -2 > node-dependencies.nar.xz
|
||||
before_script: &nix-before
|
||||
- git config --global init.defaultBranch master
|
||||
- install -v -m 0700 -d ~/.ssh
|
||||
- install -v -T -m 0644 "${SSH_KNOWN_HOSTS}" ~/.ssh/known_hosts
|
||||
- install -v -T -m 0400 "${SSH_DEPLOY_KEY}" ~/.ssh/deploy && echo "IdentityFile ~/.ssh/deploy" >> ~/.ssh/config;
|
||||
- install -v -T -m 0644 "${FONTAWESOME_NPM_AUTH_FILE}" /etc/fontawesome-token
|
||||
- install -v -T -m 0644 "${NIX_NETRC}" /etc/nix/netrc
|
||||
artifacts:
|
||||
paths:
|
||||
- node-dependencies.nar.xz
|
||||
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
|
||||
expire_in: "1 day"
|
||||
retry: 2
|
||||
interruptible: true
|
||||
|
||||
well known:
|
||||
stage: frontend:build
|
||||
setup:dynamic:
|
||||
stage: setup
|
||||
before_script:
|
||||
- apt-get -y update
|
||||
- apt-get -y install git
|
||||
variables:
|
||||
GIT_DEPTH: 99999
|
||||
GIT_STRATEGY: clone
|
||||
GIT_LOG_COMMAND: 'git log --max-count=1 --date=format:%Y-%m-%dT%H-%M --pretty=format:%cd'
|
||||
script:
|
||||
- xzcat node-dependencies.nar.xz | nix-store --import
|
||||
- nix -L build -o result ".#uniworxWellKnown"
|
||||
- nix-store --export $(nix-store -qR result) | xz -T0 -2 > well-known.nar.xz
|
||||
before_script: *nix-before
|
||||
needs:
|
||||
- job: node dependencies
|
||||
artifacts: true
|
||||
- echo "${FRONTEND_IMAGE_DEPENDENCIES}"
|
||||
- echo "${BACKEND_IMAGE_DEPENDENCIES}"
|
||||
- FRONTEND_IMAGE_VERSION=`${GIT_LOG_COMMAND} ${FRONTEND_IMAGE_DEPENDENCIES} | tee frontend-image-version`
|
||||
- BACKEND_IMAGE_VERSION=`${GIT_LOG_COMMAND} ${BACKEND_IMAGE_DEPENDENCIES} | tee backend-image-version`
|
||||
- 'echo "FRONTEND_IMAGE_VERSION: ${FRONTEND_IMAGE_VERSION}, BACKEND_IMAGE_VERSION: ${BACKEND_IMAGE_VERSION}"'
|
||||
- cat .gitlab-ci/frontend.yml | .gitlab-ci/dynamci.pl FRONTEND_IMAGE_VERSION=${FRONTEND_IMAGE_VERSION} > frontend.yml
|
||||
- cat .gitlab-ci/backend.yml | .gitlab-ci/dynamci.pl BACKEND_IMAGE_VERSION=${BACKEND_IMAGE_VERSION} PARENT_PIPELINE_ID=${CI_PIPELINE_ID} > backend.yml
|
||||
artifacts:
|
||||
paths:
|
||||
- well-known.nar.xz
|
||||
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
|
||||
expire_in: "1 day"
|
||||
retry: 2
|
||||
- frontend-image-version
|
||||
- backend-image-version
|
||||
- frontend.yml
|
||||
- backend.yml
|
||||
rules:
|
||||
- when: always
|
||||
|
||||
setup:containers:frontend: &setup-container
|
||||
stage: setup
|
||||
needs:
|
||||
- setup:dynamic
|
||||
image: ${IMAGE_BUILDER}
|
||||
variables:
|
||||
IMAGE_TYPE: frontend
|
||||
before_script: &container-before
|
||||
- IMAGE_VERSION=`cat ${IMAGE_TYPE}-image-version`
|
||||
- IMAGE_TAG=${CI_COMMIT_REF_SLUG}/${IMAGE_TYPE}:${IMAGE_VERSION}
|
||||
- REGISTRY_DESTINATION=${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}/${IMAGE_TYPE}:${IMAGE_VERSION}
|
||||
script:
|
||||
- curl --request GET --header "PRIVATE-TOKEN:${REGISTRY_AUTH_TOKEN}" "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/registry/repositories?tags=true" > .gitlab-ci/.container-registry-contents.json
|
||||
- cat .gitlab-ci/.container-registry-contents.json
|
||||
- IMAGE_EXISTS=`grep \""${REGISTRY_DESTINATION}"\" .gitlab-ci/.container-registry-contents.json || :`
|
||||
- echo "${IMAGE_EXISTS}"
|
||||
- test "${CI_JOB_MANUAL}" = true && echo "Force rebuilding container."
|
||||
- >
|
||||
if [ -z "${IMAGE_EXISTS}" -o "${CI_JOB_MANUAL}" = true ] ; then
|
||||
echo "Building image ${REGISTRY_DESTINATION}..."
|
||||
buildah bud -t ${IMAGE_TAG} --build-arg PROJECT_DIR=${CI_PROJECT_DIR} --build-arg MOUNT_DIR=/mnt/${CI_PROJECT_DIR} --volume ${CI_PROJECT_DIR}:/mnt/${CI_PROJECT_DIR} --file docker/${IMAGE_TYPE}/Dockerfile
|
||||
buildah push --creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" ${IMAGE_TAG} ${REGISTRY_DESTINATION}
|
||||
else
|
||||
echo "Image ${IMAGE_TAG} already exists in the container registry. Skipping build."
|
||||
fi
|
||||
rules:
|
||||
- if: $CI_MERGE_REQUEST_ID
|
||||
when: never
|
||||
- when: always
|
||||
setup:containers:frontend:wait: &setup-container-wait
|
||||
stage: setup
|
||||
needs:
|
||||
- setup:dynamic
|
||||
before_script: *container-before
|
||||
variables:
|
||||
GIT_STRATEGY: none
|
||||
IMAGE_TYPE: frontend
|
||||
script:
|
||||
- apt-get -y update && apt-get -y install curl
|
||||
- >
|
||||
while ! curl --request GET --header "PRIVATE-TOKEN:${REGISTRY_AUTH_TOKEN}" "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/registry/repositories?tags=true" | grep \""${REGISTRY_DESTINATION}"\" ; do
|
||||
echo "Waiting for container ${IMAGE_TAG} to appear in the container registry..."
|
||||
sleep 5
|
||||
done
|
||||
- echo "Image ${IMAGE_TAG} has been found in the container registry. Proceeding with the pipeline."
|
||||
rules:
|
||||
- if: $CI_MERGE_REQUEST_ID
|
||||
when: always
|
||||
- when: never
|
||||
retry: 0
|
||||
interruptible: true
|
||||
timeout: 3h
|
||||
|
||||
setup:containers:backend:
|
||||
<<: *setup-container
|
||||
variables:
|
||||
IMAGE_TYPE: backend
|
||||
setup:containers:backend:wait:
|
||||
<<: *setup-container-wait
|
||||
variables:
|
||||
GIT_STRATEGY: none
|
||||
IMAGE_TYPE: backend
|
||||
|
||||
|
||||
frontend:
|
||||
stage: frontend:build
|
||||
script:
|
||||
- xzcat node-dependencies.nar.xz | nix-store --import
|
||||
- xzcat well-known.nar.xz | nix-store --import
|
||||
- nix -L build -o result ".#uniworxFrontend"
|
||||
- nix-store --export $(nix-store -qR result) | xz -T0 -2 > frontend.nar.xz
|
||||
before_script: *nix-before
|
||||
stage: frontend
|
||||
variables:
|
||||
GIT_STRATEGY: clone
|
||||
needs:
|
||||
- job: node dependencies
|
||||
artifacts: true
|
||||
- job: well known
|
||||
artifacts: true
|
||||
artifacts:
|
||||
paths:
|
||||
- frontend.nar.xz
|
||||
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
|
||||
expire_in: "1 day"
|
||||
retry: 2
|
||||
interruptible: true
|
||||
- setup:dynamic
|
||||
- job: setup:containers:frontend
|
||||
optional: true
|
||||
trigger:
|
||||
strategy: depend
|
||||
include:
|
||||
- artifact: frontend.yml
|
||||
job: setup:dynamic
|
||||
|
||||
uniworx:lib:uniworx:
|
||||
stage: backend:build
|
||||
script:
|
||||
- xzcat frontend.nar.xz | nix-store --import
|
||||
- nix -L build -o result ".#uniworx:lib:uniworx"
|
||||
- nix-store --export $(nix-store -qR result) | xz -T0 -2 > uniworx:lib:uniworx.nar.xz
|
||||
before_script: *nix-before
|
||||
needs:
|
||||
- job: node dependencies # transitive
|
||||
artifacts: false
|
||||
- job: well known # transitive
|
||||
artifacts: false
|
||||
- job: frontend
|
||||
artifacts: true
|
||||
artifacts:
|
||||
paths:
|
||||
- uniworx:lib:uniworx.nar.xz
|
||||
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
|
||||
expire_in: "1 day"
|
||||
retry: 2
|
||||
interruptible: true
|
||||
|
||||
uniworx:exe:uniworx:
|
||||
stage: backend:build
|
||||
script:
|
||||
- xzcat uniworx:lib:uniworx.nar.xz | nix-store --import
|
||||
- nix -L build -o result ".#uniworx:exe:uniworx"
|
||||
- nix-store --export $(nix-store -qR result) | xz -T0 -2 > uniworx:exe:uniworx.nar.xz
|
||||
before_script: *nix-before
|
||||
backend:dev: &backend
|
||||
stage: backend
|
||||
variables:
|
||||
GIT_STRATEGY: clone
|
||||
needs:
|
||||
- job: node dependencies # transitive
|
||||
artifacts: false
|
||||
- job: well known # transitive
|
||||
artifacts: false
|
||||
- job: frontend # tranitive
|
||||
artifacts: false
|
||||
- job: uniworx:lib:uniworx
|
||||
artifacts: true
|
||||
artifacts:
|
||||
paths:
|
||||
- uniworx:exe:uniworx.nar.xz
|
||||
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
|
||||
expire_in: "1 day"
|
||||
retry: 2
|
||||
interruptible: true
|
||||
- setup:dynamic
|
||||
- job: setup:containers:backend
|
||||
optional: true
|
||||
- frontend
|
||||
trigger:
|
||||
strategy: depend
|
||||
include:
|
||||
- artifact: backend.yml
|
||||
job: setup:dynamic
|
||||
|
||||
uniworx:exe:uniworxdb:
|
||||
stage: backend:build
|
||||
script:
|
||||
- xzcat uniworx:lib:uniworx.nar.xz | nix-store --import
|
||||
- nix -L build -o result ".#uniworx:exe:uniworxdb"
|
||||
- nix-store --export $(nix-store -qR result) | xz -T0 -2 > uniworx:exe:uniworxdb.nar.xz
|
||||
before_script: *nix-before
|
||||
needs:
|
||||
- job: node dependencies # transitive
|
||||
artifacts: false
|
||||
- job: well known # transitive
|
||||
artifacts: false
|
||||
- job: frontend # tranitive
|
||||
artifacts: false
|
||||
- job: uniworx:lib:uniworx
|
||||
artifacts: true
|
||||
artifacts:
|
||||
paths:
|
||||
- uniworx:exe:uniworxdb.nar.xz
|
||||
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
|
||||
expire_in: "1 day"
|
||||
retry: 2
|
||||
interruptible: true
|
||||
backend:prod:
|
||||
<<: *backend
|
||||
variables:
|
||||
PROD_BUILD: -prod
|
||||
|
||||
uniworx:exe:uniworxload:
|
||||
stage: backend:build
|
||||
script:
|
||||
- xzcat uniworx:lib:uniworx.nar.xz | nix-store --import
|
||||
- nix -L build -o result ".#uniworx:exe:uniworxload"
|
||||
- nix-store --export $(nix-store -qR result) | xz -T0 -2 > uniworx:exe:uniworxload.nar.xz
|
||||
before_script: *nix-before
|
||||
needs:
|
||||
- job: node dependencies # transitive
|
||||
artifacts: false
|
||||
- job: well known # transitive
|
||||
artifacts: false
|
||||
- job: frontend # tranitive
|
||||
artifacts: false
|
||||
- job: uniworx:lib:uniworx
|
||||
artifacts: true
|
||||
artifacts:
|
||||
paths:
|
||||
- uniworx:exe:uniworxload.nar.xz
|
||||
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
|
||||
expire_in: "1 day"
|
||||
retry: 2
|
||||
interruptible: true
|
||||
|
||||
check:
|
||||
stage: test
|
||||
script:
|
||||
- xzcat frontend.nar.xz | nix-store --import
|
||||
- xzcat uniworx:lib:uniworx.nar.xz | nix-store --import
|
||||
- nix -L flake check .
|
||||
before_script: *nix-before
|
||||
needs:
|
||||
- job: node dependencies # transitive
|
||||
artifacts: false
|
||||
- job: well known # transitive
|
||||
artifacts: false
|
||||
- job: frontend
|
||||
artifacts: true
|
||||
- job: uniworx:lib:uniworx
|
||||
artifacts: true
|
||||
retry: 2
|
||||
interruptible: true
|
||||
|
||||
container:
|
||||
stage: container:build
|
||||
script:
|
||||
- xzcat uniworx:exe:uniworx.nar.xz | nix-store --import
|
||||
- cp -pr --reflink=auto -L $(nix build --print-out-paths ".#uniworxDocker") uniworx.tar.gz
|
||||
before_script: *nix-before
|
||||
needs:
|
||||
- job: node dependencies # transitive
|
||||
artifacts: false
|
||||
- job: well known # transitive
|
||||
artifacts: false
|
||||
- job: frontend # tranitive
|
||||
artifacts: false
|
||||
- job: uniworx:lib:uniworx # transitive
|
||||
artifacts: false
|
||||
- job: uniworx:exe:uniworx
|
||||
artifacts: true
|
||||
- job: check # sanity
|
||||
artifacts: false
|
||||
artifacts:
|
||||
paths:
|
||||
- uniworx.tar.gz
|
||||
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
|
||||
expire_in: "1 day"
|
||||
retry: 2
|
||||
interruptible: true
|
||||
release:changelog:
|
||||
stage: release
|
||||
rules: &release-rules
|
||||
- if: $CI_COMMIT_TAG =~ /^v/
|
||||
test container:
|
||||
stage: container:build
|
||||
- if: $CI_COMMIT_TAG =~ /^v[0-9\.]+$/
|
||||
- if: $CI_COMMIT_TAG =~ /^v[0-9\.]+-test-.*$/
|
||||
script:
|
||||
- xzcat uniworx:exe:uniworx.nar.xz | nix-store --import
|
||||
- cp -pr --reflink=auto -L $(nix build --print-out-paths ".#uniworxTestDocker") uniworx.tar.gz
|
||||
before_script: *nix-before
|
||||
needs:
|
||||
- job: node dependencies # transitive
|
||||
artifacts: false
|
||||
- job: well known # transitive
|
||||
artifacts: false
|
||||
- job: frontend # tranitive
|
||||
artifacts: false
|
||||
- job: uniworx:lib:uniworx # transitive
|
||||
artifacts: false
|
||||
- job: uniworx:exe:uniworx
|
||||
artifacts: true
|
||||
- job: check # sanity
|
||||
artifacts: false
|
||||
artifacts:
|
||||
paths:
|
||||
- uniworx.tar.gz
|
||||
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
|
||||
expire_in: "1 day"
|
||||
retry: 2
|
||||
interruptible: true
|
||||
rules: &test-release-rules
|
||||
- if: $CI_COMMIT_TAG =~ /^t/
|
||||
|
||||
parse changelog:
|
||||
stage: prepare release
|
||||
needs:
|
||||
- job: node dependencies
|
||||
artifacts: true
|
||||
rules: *release-rules
|
||||
before_script: *nix-before
|
||||
script:
|
||||
- xzcat node-dependencies.nar.xz | nix-store --import
|
||||
- nix -L run ".#jqChangelogJson" -- -r '.versions[0].version' > .current-version
|
||||
- nix -L run ".#jqChangelogJson" -- -r '.versions[0].body' > .current-changelog.md
|
||||
- echo "Preparing release..."
|
||||
- ./.gitlab-ci/version.pl > .current-version
|
||||
- echo "VERSION=$(cat .current-version)" >> build.env
|
||||
- ./.gitlab-ci/version.pl -changelog CHANGELOG.md
|
||||
artifacts:
|
||||
reports:
|
||||
dotenv: build.env
|
||||
paths:
|
||||
- .current-version
|
||||
- .current-changelog.md
|
||||
- .changelog.md
|
||||
name: "changelog-${CI_COMMIT_SHORT_SHA}"
|
||||
expire_in: "1 day"
|
||||
retry: 2
|
||||
interruptible: true
|
||||
parse test changelog:
|
||||
stage: prepare release
|
||||
needs:
|
||||
- job: node dependencies
|
||||
artifacts: true
|
||||
rules: *test-release-rules
|
||||
before_script: *nix-before
|
||||
script:
|
||||
- xzcat node-dependencies.nar.xz | nix-store --import
|
||||
- nix -L run ".#jqChangelogJson" -- -r '.versions[0].version' > .current-version
|
||||
- nix -L run ".#jqChangelogJson" -- -r '.versions[0].body' > .current-changelog.md
|
||||
- echo "VERSION=$(cat .current-version)" >> build.env
|
||||
artifacts:
|
||||
reports:
|
||||
dotenv: build.env
|
||||
paths:
|
||||
- .current-version
|
||||
- .current-changelog.md
|
||||
name: "changelog-${CI_COMMIT_SHORT_SHA}"
|
||||
expire_in: "1 day"
|
||||
retry: 2
|
||||
interruptible: true
|
||||
|
||||
upload container:
|
||||
variables:
|
||||
GIT_STRATEGY: none
|
||||
release:container:
|
||||
stage: release
|
||||
image: quay.io/skopeo/stable:latest
|
||||
script:
|
||||
- skopeo --insecure-policy copy --dest-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" docker-archive://$(pwd)/uniworx.tar.gz docker://${CI_REGISTRY_IMAGE}:${VERSION}
|
||||
- skopeo --insecure-policy copy --src-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" --dest-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" docker://${CI_REGISTRY_IMAGE}:${VERSION} docker://${CI_REGISTRY_IMAGE}:latest
|
||||
needs:
|
||||
- job: node dependencies # transitive
|
||||
artifacts: false
|
||||
- job: well known # transitive
|
||||
artifacts: false
|
||||
- job: frontend # tranitive
|
||||
artifacts: false
|
||||
- job: uniworx:lib:uniworx # transitive
|
||||
artifacts: false
|
||||
- job: uniworx:exe:uniworx # transitive
|
||||
artifacts: false
|
||||
- job: container
|
||||
artifacts: true
|
||||
- job: parse changelog
|
||||
artifacts: true
|
||||
- job: check # sanity
|
||||
artifacts: false
|
||||
rules: *release-rules
|
||||
retry: 2
|
||||
upload test container:
|
||||
variables:
|
||||
GIT_STRATEGY: none
|
||||
stage: release
|
||||
image: quay.io/skopeo/stable:latest
|
||||
image: ${IMAGE_BUILDER}
|
||||
script:
|
||||
- skopeo --insecure-policy copy --dest-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" docker-archive://$(pwd)/uniworx.tar.gz docker://${CI_REGISTRY}/fradrive/fradrive/test:${CI_COMMIT_REF_NAME}
|
||||
- skopeo --insecure-policy copy --src-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" --dest-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" docker://${CI_REGISTRY}/fradrive/fradrive/test:${CI_COMMIT_REF_NAME} docker://${CI_REGISTRY}/fradrive/fradrive/test:latest
|
||||
- echo "Building container for release ${VERSION}..."
|
||||
- buildah bud --tag fradrive:${VERSION} --file docker/fradrive/Dockerfile
|
||||
- buildah add --chown uniworx:uniworx bin/uniworx /bin/uniworx
|
||||
- echo "Pushing container ${VERSION} to container registry..."
|
||||
- buildah push --creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" fradrive:${VERSION} ${CI_REGISTRY_IMAGE}/fradrive:${VERSION}
|
||||
needs:
|
||||
- job: node dependencies # transitive
|
||||
artifacts: false
|
||||
- job: well known # transitive
|
||||
artifacts: false
|
||||
- job: frontend # tranitive
|
||||
artifacts: false
|
||||
- job: uniworx:lib:uniworx # transitive
|
||||
artifacts: false
|
||||
- job: uniworx:exe:uniworx # transitive
|
||||
artifacts: false
|
||||
- job: test container
|
||||
artifacts: true
|
||||
- job: parse test changelog
|
||||
artifacts: true
|
||||
- job: check # sanity
|
||||
artifacts: false
|
||||
rules: *test-release-rules
|
||||
retry: 2
|
||||
- frontend # sanity
|
||||
- backend:prod # sanity
|
||||
- release:changelog
|
||||
|
||||
release:
|
||||
variables:
|
||||
GIT_STRATEGY: none
|
||||
release:release:
|
||||
stage: release
|
||||
image: registry.gitlab.com/gitlab-org/release-cli:latest
|
||||
rules: *release-rules
|
||||
script:
|
||||
- echo "Will create release ${VERSION}..."
|
||||
release:
|
||||
name: '$VERSION'
|
||||
tag_name: '$CI_COMMIT_TAG'
|
||||
description: .current-changelog.md
|
||||
needs:
|
||||
- job: check # sanity
|
||||
artifacts: false
|
||||
- job: parse changelog
|
||||
artifacts: true
|
||||
test release:
|
||||
variables:
|
||||
GIT_STRATEGY: none
|
||||
stage: release
|
||||
image: registry.gitlab.com/gitlab-org/release-cli:latest
|
||||
rules: *test-release-rules
|
||||
script:
|
||||
- echo "Will create test release ${VERSION}-test..."
|
||||
- echo "Creating release ${VERSION}"
|
||||
release:
|
||||
name: "${VERSION}-test"
|
||||
tag_name: '$CI_COMMIT_TAG'
|
||||
description: .current-changelog.md
|
||||
name: "${VERSION}"
|
||||
tag_name: "{$CI_COMMIT_TAG}"
|
||||
description: .changelog.md
|
||||
needs:
|
||||
- job: check # sanity
|
||||
artifacts: false
|
||||
- job: parse test changelog
|
||||
artifacts: true
|
||||
- frontend # sanity
|
||||
- backend:prod # sanity
|
||||
- release:changelog
|
||||
- release:container
|
||||
retry: 0
|
||||
|
||||
|
||||
|
||||
# frontend dependencies:
|
||||
# stage: setup
|
||||
# cache:
|
||||
# - &npm-cache
|
||||
# key: default-npm
|
||||
# paths: &npm-paths
|
||||
# - node_modules/
|
||||
# - .npm/
|
||||
# - .npmrc
|
||||
# script:
|
||||
# - make node_modules
|
||||
# artifacts:
|
||||
# paths: *npm-paths
|
||||
|
||||
# well-known:
|
||||
# stage: setup
|
||||
# script:
|
||||
# - make well-known
|
||||
# needs:
|
||||
# - job: frontend dependencies
|
||||
# artifacts: true
|
||||
# cache:
|
||||
# - &frontend-cache
|
||||
# key: default-frontend
|
||||
# paths:
|
||||
# - .well-known-cache
|
||||
# artifacts:
|
||||
# paths:
|
||||
# - well-known/
|
||||
# - .well-known-cache/
|
||||
|
||||
# # TODO: cache is always uploaded even if up-to-date; prevent re-upload when up-to-date
|
||||
# backend dependencies:
|
||||
# stage: setup
|
||||
# cache:
|
||||
# - &stack-cache
|
||||
# key: default-stack
|
||||
# paths:
|
||||
# - .stack/
|
||||
# - .stack-work/
|
||||
# script:
|
||||
# - make backend-dependencies-prod
|
||||
# artifacts:
|
||||
# paths:
|
||||
# - .stack/
|
||||
# - .stack-work/
|
||||
|
||||
# frontend build:
|
||||
# stage: build
|
||||
# cache:
|
||||
# - *frontend-cache
|
||||
# script:
|
||||
# - make frontend-build
|
||||
# needs:
|
||||
# - job: frontend dependencies
|
||||
# artifacts: true
|
||||
# - job: well-known
|
||||
# artifacts: true
|
||||
# artifacts:
|
||||
# paths:
|
||||
# - static/
|
||||
# - config/webpack.yml
|
||||
|
||||
# # TODO: .stack-work cache not working
|
||||
# backend build:
|
||||
# stage: build
|
||||
# cache:
|
||||
# - *stack-cache
|
||||
# script:
|
||||
# - make bin/uniworx
|
||||
# # - find .stack-work
|
||||
# # - cp $(stack path --dist-dir)/build/hlint/hlint bin/test-hlint
|
||||
# # - cp $(stack path --dist-dir)/build/yesod/yesod bin/test-yesod
|
||||
# needs:
|
||||
# - job: frontend dependencies # transitive
|
||||
# artifacts: false
|
||||
# - job: well-known
|
||||
# artifacts: true
|
||||
# - job: backend dependencies
|
||||
# artifacts: true
|
||||
# - job: frontend build
|
||||
# artifacts: true
|
||||
# artifacts:
|
||||
# paths:
|
||||
# - bin/
|
||||
# resource_group: ram
|
||||
|
||||
# # TODO: part of backend build; probably deprecated
|
||||
# # uniworxdb:
|
||||
# # stage: build
|
||||
# # script:
|
||||
# # - make bin/uniworxdb
|
||||
# # needs:
|
||||
# # # TODO: no frontend needed
|
||||
# # - job: frontend dependencies # transitive
|
||||
# # artifacts: false
|
||||
# # - job: frontend build # transitive
|
||||
# # artifacts: false
|
||||
# # artifacts:
|
||||
# # paths:
|
||||
# # - bin/uniworxdb
|
||||
|
||||
# # TODO: part of backend build; probably deprecated
|
||||
# # TODO: rewrite
|
||||
# # uniworx:exe:uniworxload:
|
||||
# # stage: build
|
||||
# # script:
|
||||
# # - xzcat uniworx:lib:uniworx.nar.xz | nix-store --import
|
||||
# # - nix -L build -o result ".#uniworx:exe:uniworxload"
|
||||
# # - nix-store --export $(nix-store -qR result) | xz -T0 -2 > uniworx:exe:uniworxload.nar.xz
|
||||
# # needs:
|
||||
# # - job: frontend dependencies # transitive
|
||||
# # artifacts: false
|
||||
# # - job: frontend build # transitive
|
||||
# # artifacts: false
|
||||
# # artifacts:
|
||||
# # paths:
|
||||
# # - uniworx:exe:uniworxload.nar.xz
|
||||
|
||||
# frontend lint:
|
||||
# stage: lint
|
||||
# script:
|
||||
# - make frontend-lint
|
||||
# cache:
|
||||
# - *frontend-cache
|
||||
# needs:
|
||||
# - job: frontend dependencies
|
||||
# artifacts: true
|
||||
# - job: well-known # TODO: is this really needed?
|
||||
# artifacts: true
|
||||
|
||||
# backend lint:
|
||||
# stage: lint
|
||||
# cache:
|
||||
# - *stack-cache
|
||||
# script:
|
||||
# # TODO: - make backend-lint-dev
|
||||
# - make backend-lint-prod
|
||||
# needs:
|
||||
# - job: backend dependencies
|
||||
# artifacts: true
|
||||
# - job: backend build
|
||||
# artifacts: true
|
||||
# - job: frontend build
|
||||
# artifacts: true
|
||||
# - job: well-known
|
||||
# artifacts: true
|
||||
|
||||
# frontend test:
|
||||
# stage: test
|
||||
# script:
|
||||
# - make frontend-test
|
||||
# cache: *frontend-cache
|
||||
# needs:
|
||||
# - job: frontend dependencies
|
||||
# artifacts: true
|
||||
# # TODO: configure report artifacts
|
||||
|
||||
# backend test:
|
||||
# stage: test
|
||||
# script:
|
||||
# - make backend-test-prod
|
||||
# cache: *stack-cache
|
||||
# needs:
|
||||
# - job: well-known
|
||||
# artifacts: true
|
||||
# - job: frontend build
|
||||
# artifacts: true
|
||||
# - job: backend dependencies
|
||||
# artifacts: true
|
||||
# - job: backend build
|
||||
# artifacts: true
|
||||
# # TODO: configure report artifacts
|
||||
|
||||
# # TODO: unify prod and test versions
|
||||
# # TODO: rewrite
|
||||
# container:
|
||||
# stage: container
|
||||
# script:
|
||||
# - xzcat uniworx:exe:uniworx.nar.xz | nix-store --import
|
||||
# - cp -pr --reflink=auto -L $(nix build --print-out-paths ".#uniworxDocker") uniworx.tar.gz
|
||||
# needs:
|
||||
# - job: frontend dependencies # transitive
|
||||
# artifacts: false
|
||||
# - job: frontend build # transitive
|
||||
# artifacts: false
|
||||
# - job: frontend test # sanity
|
||||
# artifacts: false
|
||||
# - job: backend test # sanity
|
||||
# artifacts: false
|
||||
# artifacts:
|
||||
# paths:
|
||||
# - uniworx.tar.gz
|
||||
# rules: &release-rules
|
||||
# - if: $CI_COMMIT_TAG =~ /^v/
|
||||
# # TODO: rewrite
|
||||
# test container:
|
||||
# stage: container
|
||||
# script:
|
||||
# - xzcat uniworx:exe:uniworx.nar.xz | nix-store --import
|
||||
# - cp -pr --reflink=auto -L $(nix build --print-out-paths ".#uniworxTestDocker") uniworx.tar.gz
|
||||
# needs:
|
||||
# - job: frontend dependencies # transitive
|
||||
# artifacts: false
|
||||
# - job: frontend build # transitive
|
||||
# artifacts: false
|
||||
# - job: frontend test # sanity
|
||||
# artifacts: false
|
||||
# - job: backend test # sanity
|
||||
# artifacts: false
|
||||
# artifacts:
|
||||
# paths:
|
||||
# - uniworx.tar.gz
|
||||
# rules: &test-release-rules
|
||||
# - if: $CI_COMMIT_TAG =~ /^t/
|
||||
|
||||
# # TODO: unify prod and test versions
|
||||
# # TODO: rewrite
|
||||
# parse changelog:
|
||||
# stage: prepare release
|
||||
# needs:
|
||||
# - job: frontend dependencies
|
||||
# artifacts: true
|
||||
# rules: *release-rules
|
||||
# script:
|
||||
# - xzcat node-dependencies.nar.xz | nix-store --import
|
||||
# - nix -L run ".#jqChangelogJson" -- -r '.versions[0].version' > .current-version
|
||||
# - nix -L run ".#jqChangelogJson" -- -r '.versions[0].body' > .current-changelog.md
|
||||
# - echo "VERSION=$(cat .current-version)" >> build.env
|
||||
# artifacts:
|
||||
# reports:
|
||||
# dotenv: build.env
|
||||
# paths:
|
||||
# - .current-version
|
||||
# - .current-changelog.md
|
||||
# name: "changelog-${CI_COMMIT_SHORT_SHA}"
|
||||
# expire_in: "1 day"
|
||||
# # TODO: rewrite
|
||||
# parse test changelog:
|
||||
# stage: prepare release
|
||||
# needs:
|
||||
# - job: frontend dependencies
|
||||
# artifacts: true
|
||||
# rules: *test-release-rules
|
||||
# script:
|
||||
# - xzcat node-dependencies.nar.xz | nix-store --import
|
||||
# - nix -L run ".#jqChangelogJson" -- -r '.versions[0].version' > .current-version
|
||||
# - nix -L run ".#jqChangelogJson" -- -r '.versions[0].body' > .current-changelog.md
|
||||
# - echo "VERSION=$(cat .current-version)" >> build.env
|
||||
# artifacts:
|
||||
# reports:
|
||||
# dotenv: build.env
|
||||
# paths:
|
||||
# - .current-version
|
||||
# - .current-changelog.md
|
||||
# name: "changelog-${CI_COMMIT_SHORT_SHA}"
|
||||
|
||||
# # TODO: unify prod and test versions
|
||||
# # TODO: rewrite
|
||||
# upload container:
|
||||
# variables:
|
||||
# GIT_STRATEGY: none
|
||||
# stage: release
|
||||
# image: quay.io/skopeo/stable:latest
|
||||
# script:
|
||||
# - skopeo --insecure-policy copy --dest-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" docker-archive://$(pwd)/uniworx.tar.gz docker://${CI_REGISTRY_IMAGE}:${VERSION}
|
||||
# - skopeo --insecure-policy copy --src-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" --dest-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" docker://${CI_REGISTRY_IMAGE}:${VERSION} docker://${CI_REGISTRY_IMAGE}:latest
|
||||
# needs:
|
||||
# - job: frontend dependencies # transitive
|
||||
# artifacts: false
|
||||
# - job: frontend build # transitive
|
||||
# artifacts: false
|
||||
# - job: container
|
||||
# artifacts: true
|
||||
# - job: parse changelog
|
||||
# artifacts: true
|
||||
# - job: frontend test # sanity
|
||||
# artifacts: false
|
||||
# - job: backend test # sanity
|
||||
# artifacts: false
|
||||
# rules: *release-rules
|
||||
# # TODO: rewrite
|
||||
# upload test container:
|
||||
# variables:
|
||||
# GIT_STRATEGY: none
|
||||
# stage: release
|
||||
# image: quay.io/skopeo/stable:latest
|
||||
# script:
|
||||
# - skopeo --insecure-policy copy --dest-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" docker-archive://$(pwd)/uniworx.tar.gz docker://${CI_REGISTRY}/fradrive/fradrive/test:${CI_COMMIT_REF_NAME}
|
||||
# - skopeo --insecure-policy copy --src-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" --dest-creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" docker://${CI_REGISTRY}/fradrive/fradrive/test:${CI_COMMIT_REF_NAME} docker://${CI_REGISTRY}/fradrive/fradrive/test:latest
|
||||
# needs:
|
||||
# - job: frontend dependencies # transitive
|
||||
# artifacts: false
|
||||
# - job: frontend build # transitive
|
||||
# artifacts: false
|
||||
# - job: test container
|
||||
# artifacts: true
|
||||
# - job: parse test changelog
|
||||
# artifacts: true
|
||||
# - job: frontend test # sanity
|
||||
# artifacts: false
|
||||
# - job: backend test # sanity
|
||||
# artifacts: false
|
||||
# rules: *test-release-rules
|
||||
|
||||
# # TODO: unify prod and test versions
|
||||
# # TODO: rewrite
|
||||
# release:
|
||||
# variables:
|
||||
# GIT_STRATEGY: none
|
||||
# stage: release
|
||||
# image: registry.gitlab.com/gitlab-org/release-cli:latest
|
||||
# rules: *release-rules
|
||||
# script:
|
||||
# - echo "Will create release ${VERSION}..."
|
||||
# release:
|
||||
# name: '$VERSION'
|
||||
# tag_name: '$CI_COMMIT_TAG'
|
||||
# description: .current-changelog.md
|
||||
# needs:
|
||||
# - job: frontend test # sanity
|
||||
# artifacts: false
|
||||
# - job: backend test # sanity
|
||||
# artifacts: false
|
||||
# - job: parse changelog
|
||||
# artifacts: true
|
||||
# retry: 0
|
||||
# # TODO: rewrite
|
||||
# test release:
|
||||
# variables:
|
||||
# GIT_STRATEGY: none
|
||||
# stage: release
|
||||
# image: registry.gitlab.com/gitlab-org/release-cli:latest
|
||||
# rules: *test-release-rules
|
||||
# script:
|
||||
# - echo "Will create test release ${VERSION}-test..."
|
||||
# release:
|
||||
# name: "${VERSION}-test"
|
||||
# tag_name: '$CI_COMMIT_TAG'
|
||||
# description: .current-changelog.md
|
||||
# needs:
|
||||
# - job: frontend test # sanity
|
||||
# artifacts: false
|
||||
# - job: backend test # sanity
|
||||
# artifacts: false
|
||||
# - job: parse test changelog
|
||||
# artifacts: true
|
||||
# retry: 0
|
||||
|
||||
77
.gitlab-ci/backend.yml
Normal file
77
.gitlab-ci/backend.yml
Normal file
@ -0,0 +1,77 @@
|
||||
# SPDX-FileCopyrightText: 2024 Sarah Vaupel <sarah.vaupel@uniworx.de>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
### IMPORTANT NOTICE ###
|
||||
# Our pipeline consists of static and dynamic parts.
|
||||
#
|
||||
# This file only contains the dynamic backend parts of our pipeline.
|
||||
# Static jobs are defined in .gitlab-ci.yml.
|
||||
#
|
||||
# The marker "#dyn#" (without quotes) will be replaced by concrete values.
|
||||
###
|
||||
|
||||
variables:
|
||||
BACKEND_IMAGE_VERSION: #dyn#
|
||||
PARENT_PIPELINE_ID: #dyn#
|
||||
|
||||
stages:
|
||||
- compile
|
||||
- lint
|
||||
- test
|
||||
- release
|
||||
|
||||
default:
|
||||
image:
|
||||
name: ${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}/backend:${BACKEND_IMAGE_VERSION}
|
||||
entrypoint: [""]
|
||||
docker:
|
||||
platform: x86_64
|
||||
before_script:
|
||||
- ./.gitlab-ci/pull-frontend-artifacts.pl "${REGISTRY_AUTH_TOKEN}" "${PARENT_PIPELINE_ID}"
|
||||
- unzip ./.artifacts.tmp/artifacts.zip
|
||||
artifacts:
|
||||
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
|
||||
expire_in: "1 day"
|
||||
retry: 2
|
||||
interruptible: true
|
||||
|
||||
|
||||
compile:
|
||||
stage: compile
|
||||
script:
|
||||
- make -- compile${PROD_BUILD}-backend IN_CI=true IN_CONTAINER=true
|
||||
artifacts:
|
||||
paths:
|
||||
- ${CI_PROJECT_DIR}/.stack-work/
|
||||
- bin/
|
||||
cache: &backend-cache
|
||||
|
||||
lint:
|
||||
stage: lint
|
||||
script:
|
||||
- make -- lint${PROD_BUILD}-backend IN_CI=true IN_CONTAINER=true
|
||||
cache: *backend-cache
|
||||
|
||||
test:
|
||||
stage: test
|
||||
needs:
|
||||
- compile
|
||||
script:
|
||||
- make -- test${PROD_BUILD}-backend IN_CI=true IN_CONTAINER=true
|
||||
cache: *backend-cache
|
||||
|
||||
container:
|
||||
stage: release
|
||||
image: ${IMAGE_BUILDER}
|
||||
needs:
|
||||
- compile
|
||||
- lint
|
||||
- test
|
||||
script:
|
||||
- IMAGE_TAG=`./.gitlab-ci/version.pl`
|
||||
- buildah bud -t ${IMAGE_TAG} --volume ${CI_PROJECT_DIR}/bin/:/tmp/uniworx-bin --file docker/fradrive/Dockerfile
|
||||
- buildah push --creds "${CI_REGISTRY_USER}:${CI_JOB_TOKEN}" ${IMAGE_TAG} ${CI_REGISTRY_IMAGE}/${IMAGE_TAG}
|
||||
rules:
|
||||
- if: '"${PROD_BUILD}" == "-prod"'
|
||||
when: always
|
||||
@ -1,25 +0,0 @@
|
||||
diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc
|
||||
index 514ab3bf9..25dab18bb 100644
|
||||
--- a/src/libstore/filetransfer.cc
|
||||
+++ b/src/libstore/filetransfer.cc
|
||||
@@ -696,6 +696,8 @@ struct curlFileTransfer : public FileTransfer
|
||||
std::string scheme = get(params, "scheme").value_or("");
|
||||
std::string endpoint = get(params, "endpoint").value_or("");
|
||||
|
||||
+ debug("enqueueFileTransfer: scheme: %s", scheme);
|
||||
+
|
||||
S3Helper s3Helper(profile, region, scheme, endpoint);
|
||||
|
||||
// FIXME: implement ETag
|
||||
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
|
||||
index 6bfbee044..ff406e5e4 100644
|
||||
--- a/src/libstore/s3-binary-cache-store.cc
|
||||
+++ b/src/libstore/s3-binary-cache-store.cc
|
||||
@@ -126,6 +126,7 @@ ref<Aws::Client::ClientConfiguration> S3Helper::makeConfig(const string & region
|
||||
initAWS();
|
||||
auto res = make_ref<Aws::Client::ClientConfiguration>();
|
||||
res->region = region;
|
||||
+ debug("configuring scheme %s", scheme);
|
||||
if (!scheme.empty()) {
|
||||
res->scheme = Aws::Http::SchemeMapper::FromString(scheme.c_str());
|
||||
}
|
||||
@ -1,3 +0,0 @@
|
||||
SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
|
||||
SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
43
.gitlab-ci/dynamci.pl
Executable file
43
.gitlab-ci/dynamci.pl
Executable file
@ -0,0 +1,43 @@
|
||||
#!/usr/bin/env perl
|
||||
|
||||
use strict;
|
||||
use warnings;
|
||||
|
||||
my %kv = ();
|
||||
|
||||
for(@ARGV) {
|
||||
if(!m#^\s*([^=\s]+)\s*=\s*(.*?)\s*$#) {
|
||||
die "$0: Bad parameter '$_': Not a key=value pair\n"
|
||||
}
|
||||
$kv{$1} = $2;
|
||||
}
|
||||
|
||||
my $invar = 0;
|
||||
|
||||
LOOP: while(my $line = <STDIN>) {
|
||||
if(1==$invar) {
|
||||
if($line=~m/^(\s+)(\S+)(\s*:\s*)(\S+)(.*)/) {
|
||||
my ($pre1, $key, $pre2, $var, $post) = ($1, $2, $3, $4, $5);
|
||||
if('#dyn#' eq $var) {
|
||||
if(not exists $kv{$key}) {
|
||||
die "$0: No value given for key '$key' in the parameters but it is defined in input!\n"
|
||||
}
|
||||
my $v = $kv{$key};
|
||||
delete $kv{$key};
|
||||
print "$pre1$key$pre2$v$post\n";
|
||||
next LOOP;
|
||||
}
|
||||
} elsif($line=~m/^[^#\t ]/) {
|
||||
$invar = 2
|
||||
}
|
||||
}
|
||||
if(0==$invar and $line=~m#^\s*variables\s*:\s*$#) {
|
||||
$invar = 1;
|
||||
}
|
||||
print $line;
|
||||
}
|
||||
|
||||
my @rem = sort keys %kv;
|
||||
|
||||
die "$0: Variables occur in parameter but not in input: @rem!\n" if @rem;
|
||||
|
||||
@ -1,13 +0,0 @@
|
||||
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
|
||||
index 6bfbee044..51d86c4e6 100644
|
||||
--- a/src/libstore/s3-binary-cache-store.cc
|
||||
+++ b/src/libstore/s3-binary-cache-store.cc
|
||||
@@ -209,7 +209,7 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual
|
||||
S3Helper s3Helper;
|
||||
|
||||
S3BinaryCacheStoreImpl(
|
||||
- const std::string & scheme,
|
||||
+ const std::string & uriScheme,
|
||||
const std::string & bucketName,
|
||||
const Params & params)
|
||||
: StoreConfig(params)
|
||||
@ -1,3 +0,0 @@
|
||||
SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
|
||||
SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
63
.gitlab-ci/frontend.yml
Normal file
63
.gitlab-ci/frontend.yml
Normal file
@ -0,0 +1,63 @@
|
||||
# SPDX-FileCopyrightText: 2024 Sarah Vaupel <sarah.vaupel@uniworx.de>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
### IMPORTANT NOTICE ###
|
||||
# Our pipeline consists of static and dynamic parts.
|
||||
#
|
||||
# This file only contains the dynamic frontend parts of our pipeline.
|
||||
# Static jobs are defined in .gitlab-ci.yml.
|
||||
#
|
||||
# The marker "#dyn#" (without quotes) will be replaced by concrete values.
|
||||
###
|
||||
|
||||
variables:
|
||||
FRONTEND_IMAGE_VERSION: #dyn#
|
||||
|
||||
stages:
|
||||
- compile
|
||||
- lint
|
||||
- test
|
||||
|
||||
default:
|
||||
image:
|
||||
name: ${CI_REGISTRY_IMAGE}/${CI_COMMIT_REF_SLUG}/frontend:${FRONTEND_IMAGE_VERSION}
|
||||
entrypoint: [""]
|
||||
docker:
|
||||
platform: x86_64
|
||||
artifacts:
|
||||
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
|
||||
expire_in: "1 day"
|
||||
retry: 2
|
||||
interruptible: true
|
||||
|
||||
|
||||
compile:
|
||||
stage: compile
|
||||
script:
|
||||
- make -- compile-frontend IN_CI=true IN_CONTAINER=true
|
||||
artifacts:
|
||||
paths:
|
||||
- ${CI_PROJECT_DIR}/node_modules
|
||||
- ${CI_PROJECT_DIR}/well-known
|
||||
- ${CI_PROJECT_DIR}/config/webpack.yml
|
||||
cache:
|
||||
- &frontend-cache
|
||||
key: default-frontend
|
||||
paths:
|
||||
- ${CI_PROJECT_DIR}/.npm/
|
||||
- ${CI_PROJECT_DIR}/.well-known-cache/
|
||||
|
||||
lint:
|
||||
stage: lint
|
||||
script:
|
||||
- make -- lint-frontend IN_CI=true IN_CONTAINER=true
|
||||
cache: *frontend-cache
|
||||
|
||||
test:
|
||||
stage: test
|
||||
needs:
|
||||
- compile
|
||||
script:
|
||||
- make -- test-frontend IN_CI=true IN_CONTAINER=true
|
||||
cache: *frontend-cache
|
||||
@ -1,11 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
#!/usr/bin/bash
|
||||
|
||||
export MC_HOST_minio=http://$(cat /minio-gitlab-runner-cache/accesskey):$(cat /minio-gitlab-runner-cache/secretkey)@minio-gitlab-runner-cache
|
||||
|
||||
mc mb --ignore-existing minio/nix-cache
|
||||
@ -1,8 +0,0 @@
|
||||
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
{ pkgs ? import <nixpkgs> {} }:
|
||||
pkgs.nixUnstable.overrideAttrs (oldAttrs: {
|
||||
patches = oldAttrs.patches or [] ++ [ ./fix-aws-scheme.patch ];
|
||||
})
|
||||
52
.gitlab-ci/pull-frontend-artifacts.pl
Executable file
52
.gitlab-ci/pull-frontend-artifacts.pl
Executable file
@ -0,0 +1,52 @@
|
||||
#!/usr/bin/env perl
|
||||
|
||||
use strict;
|
||||
use warnings;
|
||||
|
||||
my $dir = ".artifacts.tmp";
|
||||
my ($token, $id) = @ARGV;
|
||||
die "usage: $0 [token] [id]" unless defined $token and defined $id;
|
||||
die "id in bad format" unless $id=~m#^[0-9]+$#;
|
||||
|
||||
if(!-d $dir) {
|
||||
mkdir($dir) or die "Cannot create directory '$dir', because: $!\n";
|
||||
}
|
||||
|
||||
system(qq(curl --globoff --header "PRIVATE-TOKEN: $token" "https://gitlab.uniworx.de/api/v4/projects/5/pipelines/$id/bridges" > $dir/bridges));
|
||||
my $pips = pparse("$dir/bridges", {id=>qq#."downstream_pipeline"."id"#}, {name=>""});
|
||||
|
||||
my $fe = $pips->{frontend}{id};
|
||||
|
||||
die "No frontend pipeline found!" unless $fe;
|
||||
|
||||
system(qq(curl --globoff --header "PRIVATE-TOKEN: $token" "https://gitlab.uniworx.de/api/v4/projects/5/pipelines/$fe/jobs" > $dir/fe-jobs));
|
||||
my $arte = pparse("$dir/fe-jobs", {id=>""}, {name=>"", web_url=>"", artifacts=>""});
|
||||
|
||||
system(qq#curl --output $dir/artifacts.zip --location --header "PRIVATE-TOKEN: $token" "https://gitlab.uniworx.de/api/v4/projects/5/jobs/$arte->{compile}{id}/artifacts"#);
|
||||
|
||||
|
||||
sub pparse {
|
||||
my ($file, $numerical, $alpha) = @_;
|
||||
my %all = ();
|
||||
for my $k(keys %$numerical) {
|
||||
$all{$k} = $numerical->{$k} || qq#."$k"#;
|
||||
}
|
||||
for my $k(keys %$alpha) {
|
||||
$all{$k} = $alpha->{$k} || qq#."$k"#;
|
||||
}
|
||||
my $select = join ', ', map {qq#"$_": $all{$_}#} sort keys %all;
|
||||
my $cont = qx(cat $file | jq -c '.[] | {$select}');
|
||||
my @cont = split m/\R/, $cont;
|
||||
my %ret = ();
|
||||
for my $c(@cont) {
|
||||
my %block = ();
|
||||
for(keys %$numerical) {
|
||||
$block{$_} = $1 if $c=~m#"$_":([0-9]+)#;
|
||||
}
|
||||
for(keys %$alpha) {
|
||||
$block{$_} = $1 if $c=~m#"$_":"([^"]*)"#;
|
||||
}
|
||||
$ret{$block{name}} =\%block;
|
||||
}
|
||||
return \%ret
|
||||
}
|
||||
@ -1,14 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
#!/usr/bin/bash
|
||||
|
||||
set -eu
|
||||
set -f # disable globbing
|
||||
export IFS=' '
|
||||
|
||||
echo "Signing and uploading paths" $OUT_PATHS
|
||||
exec nix copy --to "s3://nix-cache?region=us-east-1&scheme=http&endpoint=minio-gitlab-runner-cache&secret-key=${NIX_CACHE_KEYFILE}" $OUT_PATHS
|
||||
617
.gitlab-ci/version.pl
Executable file
617
.gitlab-ci/version.pl
Executable file
@ -0,0 +1,617 @@
|
||||
#!/usr/bin/env perl
|
||||
|
||||
use strict;
|
||||
use warnings;
|
||||
|
||||
use Data::Dumper;
|
||||
|
||||
# Version changes:
|
||||
# v[x].[y].[z] -- Main version number
|
||||
# v[x].[y].[z]-test-[branchstring]-num -- test/branch/devel version number
|
||||
# on main/master: Biggest version so far, increment by occuring changes
|
||||
# on other branches: find version; be it branch string, old format or main version number;
|
||||
# increments from there. Increment version number, but on global conflict use new version number
|
||||
|
||||
# Actions and their results
|
||||
# chore -> +patch
|
||||
# feat -> +minor
|
||||
# fix -> +patch
|
||||
# [a-z]+! -> +major
|
||||
# perf -> +patch
|
||||
# refactor -> +patch
|
||||
# test -> +patch
|
||||
# style -> +patch
|
||||
# revert -> =
|
||||
# docs -> +patch
|
||||
# build -> =
|
||||
# ci -> =
|
||||
|
||||
# parameters with default values
|
||||
my %par = ();
|
||||
my %parKinds = (
|
||||
vcslog=>{
|
||||
arity=>1,
|
||||
def=>'git log --pretty=tformat:"%H :::: %d :::: %s"',
|
||||
help=>'set command which outputs the log information to be used; reads from STDIN if value is set to "-"',
|
||||
},
|
||||
vcstags=>{
|
||||
arity=>1,
|
||||
def=>'git tag',
|
||||
help=>'set command which outputs the used tags',
|
||||
},
|
||||
vcsbranch=>{
|
||||
arity=>1,
|
||||
def=>'git rev-parse --abbrev-ref HEAD',
|
||||
help=>'set command to find out the current branch name',
|
||||
},
|
||||
kind=>{
|
||||
arity=>1,
|
||||
def=>'v',
|
||||
help=>'set tag kind of version numbers; this option resets autokind to "". Implemented kinds: v: main version; t: test version',
|
||||
auto=>sub { $par{autokind}='' },
|
||||
},
|
||||
autokind=>{
|
||||
arity=>1,
|
||||
def=>'main=v,master=v,test=t,*=t',
|
||||
help=>'determine the tag kind from branch name instead of fixed value; use the first fitting glob',
|
||||
},
|
||||
change=>{
|
||||
arity=>1,
|
||||
def=>'chore=patch,feat=minor,feature=minor,fix=patch,BREAK=major,perf=patch,refactor=patch,test=patch,style=patch,revert=null,docs=patch,build=null,ci=null',
|
||||
help=>'how to react on which commit type; can be partially given. Actions are: "null", "major", "minor", "patch" or state "invalid" for removing this type',
|
||||
},
|
||||
changelog=>{
|
||||
arity=>1,
|
||||
def=>'',
|
||||
help=>'File to add the changelog to; no changelog is written if this parameter is empty.'
|
||||
},
|
||||
changelogout=>{
|
||||
arity=>1,
|
||||
def=>'',
|
||||
help=>'Use this file name to write the changelog to, but use "changelog" to read the old changelog. If not set for both versions the parameter changelog is used.',
|
||||
},
|
||||
vcsurl=>{
|
||||
arity=>1,
|
||||
def=>'',
|
||||
help=>'Repository URL for changelog; for example "https://gitlab.example.doc/proj/proj/"',
|
||||
},
|
||||
v=>{def=>0,arity=>0,help=>'verbose'},
|
||||
h=>{def=>0,arity=>0,help=>'help'},
|
||||
);
|
||||
|
||||
for my $k(keys %parKinds) {
|
||||
$par{$k} = $parKinds{$k}{def}
|
||||
}
|
||||
|
||||
#for my $p(@ARGV) {
|
||||
#
|
||||
#}
|
||||
{
|
||||
my $i = 0;
|
||||
while($i<@ARGV) {
|
||||
if($ARGV[$i]=~m#^-(.*)#) {
|
||||
my $key = $1;
|
||||
if(not exists $parKinds{$key}) {
|
||||
die "$0: Unknown parameter: -$key\n";
|
||||
}
|
||||
my $pk = $parKinds{$key};
|
||||
die "$0: Too few parameters for '-$key'\n" if $i+$pk->{arity}>@ARGV;
|
||||
my @par = @ARGV[$i+1..$i+$pk->{arity}];
|
||||
#warn "<< @par >>";
|
||||
$i++;
|
||||
$i += $pk->{arity};
|
||||
if($pk->{arity}) {
|
||||
$par{$key} = $par[0]
|
||||
} else {
|
||||
$par{$key}=1
|
||||
}
|
||||
if(exists $pk->{auto}) {
|
||||
$pk->{auto}->()
|
||||
}
|
||||
} else {
|
||||
die "$0: Bad parameter: $ARGV[$i]\n"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if($par{'h'}) {
|
||||
print "Usage: $0 [flags and options]\n\nAvailable options:\n";
|
||||
for my $k(sort keys %parKinds) {
|
||||
print " -$k\n $parKinds{$k}{help}\n";
|
||||
if($parKinds{$k}{arity}) {
|
||||
print " Default value: $parKinds{$k}{def}\n";
|
||||
} else {
|
||||
print " This is a flag and not an option\n";
|
||||
}
|
||||
print "\n";
|
||||
}
|
||||
exit 0
|
||||
}
|
||||
|
||||
if($par{autokind}) {
|
||||
my $branch = `$par{vcsbranch}`;
|
||||
my @rules = split /,/, $par{autokind};
|
||||
RULES: {
|
||||
for my $r(@rules) {
|
||||
if($r!~m#(.*)=(.*)#) {
|
||||
die "$0: Bad rule in autokind: $r\n";
|
||||
}
|
||||
my ($glob, $kind) = ($1, $2);
|
||||
if(globString($glob, $branch)) {
|
||||
$par{'kind'} = $kind;
|
||||
last RULES
|
||||
}
|
||||
}
|
||||
warn "$0: No autokind rule matches; leaving the kind unchanged.\n"
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if($par{'v'}) {
|
||||
print "VERBOSE: Parameters\n";
|
||||
for my $k(sort keys %par) {
|
||||
print " $k: $par{$k}\n"
|
||||
}
|
||||
}
|
||||
|
||||
my %typeReact = ();
|
||||
for my $as(split /,/, $par{change}) {
|
||||
if($as=~m#(.*)=(.*)#) {
|
||||
$typeReact{$1} = $2;
|
||||
} else {
|
||||
warn "$0: Unexpected change parameter: '$as'"
|
||||
}
|
||||
}
|
||||
|
||||
if($par{changelog} and not $par{vcsurl}) {
|
||||
die "Parameter 'changelog' given, but parameter 'vcsurl' is not. Please state the url of your repository for computation of a changelog.\n"
|
||||
}
|
||||
|
||||
#my @have = split /\n/, `$par{vcstags}`;
|
||||
#
|
||||
#my @keep = grep { $_ } map { m#^($par{kind})([0-9].*)# ? [$1,$2] : undef } @have;
|
||||
#
|
||||
#my @oldVersions = ();
|
||||
|
||||
sub globString {
|
||||
my ($glob, $string) = @_;
|
||||
my @glob = map { m#\*# ? '*' : $_ } $glob=~m#(\?|\*+|[^\?\*]+)#g;
|
||||
my %matchCache = ();
|
||||
my $match = undef;
|
||||
my $matchCore = sub {
|
||||
my ($i, $j) = @_;
|
||||
return 1 if $i==@glob and $j==length $string;
|
||||
return 0 if $i>=@glob or $j>=length $string;
|
||||
return $match->($i+1,$j+1) if '?' eq $glob[$i];
|
||||
if('*' eq $glob[$i]) {
|
||||
for my $jj($j..length($string)) {
|
||||
return 1 if $match->($i+1, $jj);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
return $match->($i+1, $j+length($glob[$i])) if
|
||||
$glob[$i] eq substr($string, $j, length($glob[$i]));
|
||||
return 0
|
||||
};
|
||||
$match = sub {
|
||||
my ($i, $j) = @_;
|
||||
my $ij = "$i $j";
|
||||
my $res = $matchCache{$ij};
|
||||
if(not defined $res) {
|
||||
$res = $matchCore->($i, $j);
|
||||
$matchCache{$ij} = $res;
|
||||
}
|
||||
$res
|
||||
};
|
||||
$match->(0,0);
|
||||
}
|
||||
|
||||
sub parseVersion {
|
||||
my $v = shift;
|
||||
if(not defined $v) {
|
||||
my $c = join " ", caller;
|
||||
warn "$0: internal error (parseVersion called on undef at $c)\n";
|
||||
return undef
|
||||
}
|
||||
my ($pre,$ma,$mi,$p,$sp,$brn,$brv) = ();
|
||||
if($v=~m#^([a-z]*)([0-9]+)$#) {
|
||||
$pre = $1;
|
||||
$ma = $2;
|
||||
} elsif($v=~m#^([a-z]*)([0-9]+)\.([0-9]+)$#) {
|
||||
$pre = $1;
|
||||
$ma = $2;
|
||||
$mi = $3
|
||||
} elsif($v=~m#^([a-z]*)([0-9]+)\.([0-9]+)\.([0-9]+)$#) {
|
||||
$pre = $1;
|
||||
$ma = $2;
|
||||
$mi = $3;
|
||||
$p = $4;
|
||||
} elsif($v=~m#^([a-z]*)([0-9]+)\.([0-9]+)\.([0-9]+)-test-([a-z]+)-([0-9\.]+)$#) {
|
||||
$pre = $1;
|
||||
$ma = $2;
|
||||
$mi = $3;
|
||||
$p = $4;
|
||||
$sp = $5;
|
||||
$brn = $6;
|
||||
$brv = $7;
|
||||
} elsif($v=~m#^([a-z]*)([0-9]+)\.([0-9]+)\.([0-9]+)-(.*)$#) {
|
||||
$pre = $1;
|
||||
$ma = $2;
|
||||
$mi = $3;
|
||||
$p = $4;
|
||||
$sp = $5;
|
||||
} else {
|
||||
warn "$0: unexpected old version number: $v\n" if $par{v};
|
||||
return undef
|
||||
}
|
||||
$pre = 'v' if '' eq $pre;
|
||||
return {
|
||||
prefix=>$pre,
|
||||
major=>$ma,
|
||||
minor=>$mi,
|
||||
patch=>$p,
|
||||
subpatch=>$sp,
|
||||
branchname=>$brn,
|
||||
branchversion=>$brv,
|
||||
}
|
||||
}
|
||||
|
||||
#@oldVersions = sort {
|
||||
# ($a->{major} // 0) <=> ($b->{major} // 0) ||
|
||||
# ($a->{minor} // 0) <=> ($b->{minor} // 0) ||
|
||||
# ($a->{patch} // 0) <=> ($b->{patch} // 0) ||
|
||||
# ($a->{subpatch} // '') <=> ($b->{subpatch} // '')
|
||||
#} @oldVersions;
|
||||
|
||||
sub vsCompare {
|
||||
my ($vp, $wp) = @_;
|
||||
my ($v, $w) = ($vp, $wp);
|
||||
my ($verr, $werr) = (0,0);
|
||||
unless(ref $v) {
|
||||
eval { $v = parseVersion($v) };
|
||||
$verr = 1 if $@ or not defined $v;
|
||||
}
|
||||
unless(ref $w) {
|
||||
eval { $w = parseVersion($w) };
|
||||
$werr = 1 if $@ or not defined $w;
|
||||
}
|
||||
if($verr and $werr) {
|
||||
return $vp cmp $wp;
|
||||
}
|
||||
if($verr) {
|
||||
return -1
|
||||
}
|
||||
if($werr) {
|
||||
return 1
|
||||
}
|
||||
#for($v, $w) {
|
||||
# $_ = parseVersion($_) unless ref $_;
|
||||
#}
|
||||
if('v' eq $v->{prefix} and 'v' eq $w->{prefix}) {
|
||||
return(
|
||||
($v->{major} // 0) <=> ($w->{major} // 0) ||
|
||||
($v->{minor} // 0) <=> ($w->{minor} // 0) ||
|
||||
($v->{patch} // 0) <=> ($w->{patch} // 0) ||
|
||||
($v->{branchname} // '') cmp ($w->{branchname} // '') ||
|
||||
($v->{branchversion} // '') <=> ($w->{branchversion} // '') ||
|
||||
($v->{subpatch} // '') cmp ($w->{subpatch} // '')
|
||||
)
|
||||
} elsif('v' eq $v->{prefix} and 'v' ne $w->{prefix}) {
|
||||
return 1;
|
||||
} elsif('v' ne $v->{prefix} and 'v' eq $w->{prefix}) {
|
||||
return -1;
|
||||
} else {
|
||||
return vsStringDebug($v) cmp vsStringDebug($w)
|
||||
}
|
||||
}
|
||||
|
||||
sub vsStringDebug {
|
||||
my $v = shift;
|
||||
my $ret =
|
||||
("[" . ($v->{prefix} // 'undef') . "]") .
|
||||
($v->{major} // 'undef') . "." .
|
||||
($v->{minor} // 'undef') . "." .
|
||||
($v->{patch} // 'undef');
|
||||
$ret .= "-[$v->{subpatch}]" if defined $v->{subpatch};
|
||||
$ret .= "-test-" . ($v->{branchname} // 'undef') . "-" . ($v->{branchversion} // 'undef');
|
||||
return $ret
|
||||
}
|
||||
|
||||
sub vsString {
|
||||
my $v = shift;
|
||||
my $ret =
|
||||
($v->{major} // 0) . "." .
|
||||
($v->{minor} // 0) . "." .
|
||||
($v->{patch} // 0);
|
||||
$ret .= "-$v->{subpatch}" if defined $v->{subpatch};
|
||||
return $ret
|
||||
}
|
||||
|
||||
sub vsJustVersion {
|
||||
my $v = shift;
|
||||
my $ret =
|
||||
($v->{major} // 0) . "." .
|
||||
($v->{minor} // 0) . "." .
|
||||
($v->{patch} // 0);
|
||||
return $ret
|
||||
}
|
||||
|
||||
sub vsTestVersion {
|
||||
my $v = shift;
|
||||
my $ret =
|
||||
'v' .
|
||||
($v->{major} // 0) . "." .
|
||||
($v->{minor} // 0) . "." .
|
||||
($v->{patch} // 0) . "-test-" .
|
||||
($v->{branchname} // 'a') .
|
||||
($v->{branchversion} // '0.0.0');
|
||||
return $ret
|
||||
}
|
||||
|
||||
#print vsStringDebug($_), "\n" for @oldVersions;
|
||||
|
||||
#print " << $_->[1] >>\n" for @keep;
|
||||
|
||||
my @versionsOrig = ();
|
||||
if('-' eq $par{vcslog}) {
|
||||
@versionsOrig = <STDIN>;
|
||||
chomp for @versionsOrig
|
||||
} else {
|
||||
@versionsOrig = split /\n/, `$par{vcslog}`;
|
||||
}
|
||||
my @versions = ();
|
||||
for my $v(@versionsOrig) {
|
||||
if($v=~m#^(.*?\S)\s*::::\s*(.*?)\s*::::\s*(.*)#) {
|
||||
push @versions, {
|
||||
hash => $1,
|
||||
meta => $2,
|
||||
subject => $3
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#print Data::Dumper::Dumper(\@versions);
|
||||
|
||||
my @change = ();
|
||||
my $tag = undef;
|
||||
|
||||
my @versionPast = ();
|
||||
|
||||
VERSION: for my $v(@versions) {
|
||||
#if($v->{meta}=~m#tag\s*:\s*\Q$par{kind}\E(.*)\)#) {
|
||||
# $tag=$1;
|
||||
# last VERSION
|
||||
#}
|
||||
if($v->{meta}=~m#tag\s*:\s*([vtd]b?[0-9\.]+(?:-.*)?)\)#) {
|
||||
$v->{version} = $1;
|
||||
push @versionPast, $v->{version}
|
||||
}
|
||||
next if $v->{subject}=~m#^\s*(?:Merge (?:branch|remote)|Revert )#;
|
||||
if($v->{subject}=~m#^\s*([a-z]+)\s*(!?)\s*#) {
|
||||
my ($type, $break) = ($1, $2);
|
||||
if(exists $typeReact{$type}) {
|
||||
my $react = $typeReact{$type};
|
||||
next VERSION if 'null' eq $react;
|
||||
my %h = %$v;
|
||||
$h{react} = $react;
|
||||
push @change, \%h
|
||||
} else {
|
||||
warn "$0: cannot react on commit message '$v->{subject}', type '$type' unknown\n" if $par{$v};
|
||||
}
|
||||
} else {
|
||||
warn "$0: commit message not parseable: $v->{subject}\n" if $par{$v};
|
||||
}
|
||||
}
|
||||
|
||||
#$tag = parseVersion($tag);
|
||||
|
||||
for my $r(reverse @change) {
|
||||
if('major' eq $r->{react}) {
|
||||
$tag->{major}++;
|
||||
$tag->{minor}=0;
|
||||
$tag->{patch}=0;
|
||||
$tag->{subpatch}=undef;
|
||||
} elsif('minor' eq $r->{react}) {
|
||||
$tag->{minor}++;
|
||||
$tag->{patch}=0;
|
||||
$tag->{subpatch}=undef;
|
||||
} elsif('patch' eq $r->{react}) {
|
||||
$tag->{patch}++;
|
||||
$tag->{subpatch}=undef;
|
||||
} else {
|
||||
die "$0: Cannot perform modification '$r->{react}' (probably internal error)"
|
||||
}
|
||||
}
|
||||
|
||||
#print Data::Dumper::Dumper(\@change, $tag);
|
||||
#for my $c(@change) {
|
||||
# print "==\n";
|
||||
# for my $k(sort keys %$c) {
|
||||
# print " $k: $c->{$k}\n"
|
||||
# }
|
||||
# print "\n"
|
||||
#}
|
||||
#
|
||||
#print "\n";
|
||||
#for my $v(@versionPast) {
|
||||
# my $vv = vsStringDebug(parseVersion($v));
|
||||
# print "VERSION $v --> $vv\n"
|
||||
#}
|
||||
|
||||
my @allVersions = split /\n/, `$par{vcstags}`;
|
||||
|
||||
my @sortAll = sort {vsCompare($b, $a)} @allVersions;
|
||||
my @sortSee = sort {vsCompare($b, $a)} @versionPast;
|
||||
#print "all: $sortAll[0] -- see: $sortSee[0]\n";
|
||||
#
|
||||
#print vsString($tag), "\n";
|
||||
|
||||
my $mainVersion = 'v' eq $par{kind};
|
||||
|
||||
my $highStart = $mainVersion ? $sortAll[0] : $sortSee[0];
|
||||
my $highSee = $sortSee[0];
|
||||
my %reactCollect = ();
|
||||
SEARCHVERSION: for my $v(@versions) {
|
||||
next unless $v->{version};
|
||||
next unless $v->{react};
|
||||
$reactCollect{$v->{react}} = 1;
|
||||
if($highSee eq $v->{version}) {
|
||||
last SEARCHVERSION;
|
||||
}
|
||||
}
|
||||
|
||||
sub justVersionInc {
|
||||
my ($v, $react) = @_;
|
||||
my $vv = parseVersion($v);
|
||||
$vv->{patch}++ if $react->{patch};
|
||||
do {$vv->{minor}++; $vv->{patch}=0} if $react->{minor};
|
||||
do {$vv->{major}++; $vv->{minor}=0; $vv->{patch}=0} if $react->{major};
|
||||
return vsJustVersion($vv);
|
||||
}
|
||||
|
||||
my $newVersion = undef;
|
||||
|
||||
if($mainVersion) {
|
||||
$newVersion = "v" . justVersionInc($highStart, \%reactCollect);
|
||||
} else {
|
||||
my $v = parseVersion($highStart);
|
||||
if(exists $v->{branchname}) {
|
||||
$v->{branchversion} = justVersionInc($v->{branchversion} // '0.0.0', \%reactCollect);
|
||||
} else {
|
||||
$v->{branchname} = 'a';
|
||||
$v->{branchversion} = '0.0.0';
|
||||
}
|
||||
$newVersion = vsTestVersion($v);
|
||||
}
|
||||
|
||||
my %allVersions = ();
|
||||
for(@allVersions) {
|
||||
$allVersions{$_} = 1
|
||||
}
|
||||
while(exists $allVersions{$newVersion}) {
|
||||
if($mainVersion) {
|
||||
die "$0: probably internal error (collision in main version)\n"
|
||||
}
|
||||
my $v = parseVersion($newVersion);
|
||||
$v->{branchname} //= 'a';
|
||||
$v->{branchname}++;
|
||||
$newVersion = vsTestVersion($v);
|
||||
}
|
||||
|
||||
print "$newVersion\n";
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# If we want a changelog
|
||||
if($par{changelog}) {
|
||||
#print "Changelog file: '$par{changelog}'\n";
|
||||
# TODO at the moment we only extend a changelog; starting with a fresh one is not supportet yet
|
||||
my $fh = undef;
|
||||
open($fh, '<', $par{changelog}) or die "Could not read changelog file '$par{changelog}', because: $!";
|
||||
my @changelog = <$fh>;
|
||||
close $fh;
|
||||
my %seen = ();
|
||||
my @sects = ([]);
|
||||
for(@changelog) {
|
||||
push @sects, [] if m/^## /;
|
||||
push @{$sects[-1]}, $_;
|
||||
if(m#/commit/([a-f0-9]+)\s*\)\s*\)\s*$#) {
|
||||
$seen{$1} = 1;
|
||||
}
|
||||
}
|
||||
my $head = shift @sects;
|
||||
#print Data::Dumper::Dumper($head);
|
||||
#print " << $sects[0][0] >>\n";
|
||||
if($sects[0][0]=~m/^##\s*\[([^\]\[]+)\]\(/ and $1 eq $newVersion) {
|
||||
shift @sects;
|
||||
}
|
||||
for my $s(@sects) {
|
||||
my $hh = $s->[0];
|
||||
chomp $hh;
|
||||
my $cnt = @$s;
|
||||
#print " $hh\n $cnt lines\n\n"
|
||||
}
|
||||
#print Data::Dumper::Dumper($versions[0]);
|
||||
for my $v(@versions) {
|
||||
#print Data::Dumper::Dumper($v);
|
||||
my $hash = $v->{hash};
|
||||
my $see = 'new';
|
||||
$see = 'old' if $seen{$hash};
|
||||
#print "$hash -> $see ($v->{subject})\n";
|
||||
}
|
||||
my $changelogout = $par{changelogout} || $par{changelog};
|
||||
my $changelogfh = undef;
|
||||
open($changelogfh, '>', $changelogout) or die "$0: Could not write '$changelogout', because: $!\n";
|
||||
my %extend = ();
|
||||
my %when = (
|
||||
'fix' => 'Bug Fixes',
|
||||
'hotfix' => 'Bug Fixes',
|
||||
'feat' => 'Features',
|
||||
'feature' => 'Features',
|
||||
);
|
||||
SELECTCHANGELOG: for my $v(@versions) {
|
||||
last SELECTCHANGELOG if $seen{$v->{hash}};
|
||||
next unless $v->{subject}=~m#^\s*([a-z]+)\s*(!?)\s*((?:\(.*?\))?)\s*:\s*(.*?)\s*$#i;
|
||||
my ($kind, $break, $context, $msg) = ($1, $2, $3, $4);
|
||||
my $where = $when{$kind};
|
||||
$where = 'BREAKING CHANGES' if '!' eq $break;
|
||||
next unless $where;
|
||||
my $short = substr $v->{hash}, 0, 7;
|
||||
my $contS = '';
|
||||
if($context=~m#\((.*)\)#) {
|
||||
$contS = "**$1:** ";
|
||||
}
|
||||
my $row = qq#* $contS$msg ([$short]($par{vcsurl}commit/$v->{hash}))#;
|
||||
push @{$extend{$where}}, {
|
||||
msg=>$msg,
|
||||
context=>$context,
|
||||
orig=>$v,
|
||||
row=>$row,
|
||||
};
|
||||
}
|
||||
#print Data::Dumper::Dumper(\%extend);
|
||||
my $preVersion = '';
|
||||
if($sects[0][0]=~m/^##\s*\[([^\]\[]+)\]\(/) {
|
||||
$preVersion = $1;
|
||||
$preVersion =~ s#^v?#v#;
|
||||
}
|
||||
my $today = do {
|
||||
my @time = localtime;
|
||||
my $year = $time[5]+1900;
|
||||
my $month = $time[4]+1;
|
||||
my $day = $time[3];
|
||||
sprintf("%04i-%02i-%02i", $year, $month, $day)
|
||||
};
|
||||
print $changelogfh qq!# Changelog
|
||||
|
||||
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
|
||||
|
||||
## [$newVersion]($par{vcsurl}/compare/$preVersion...$newVersion) ($today)
|
||||
|
||||
!;
|
||||
for my $variant('BREAKING CHANGES', 'Features', 'Bug Fixes') {
|
||||
my @all = map {$_->{row}} @{$extend{$variant}};
|
||||
next unless @all;
|
||||
my $msg = join "\n", @all;
|
||||
print $changelogfh qq/### $variant\n\n$msg\n\n/
|
||||
}
|
||||
for(@sects) {
|
||||
print $changelogfh $_ for @$_
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@ -1,14 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
#!/usr/bin/bash
|
||||
|
||||
mkdir -p $(dirname ${AWS_SHARED_CREDENTIALS_FILE})
|
||||
cat > ${AWS_SHARED_CREDENTIALS_FILE} <<EOF
|
||||
[default]
|
||||
aws_access_key_id = $(cat /minio-gitlab-runner-cache/accesskey)
|
||||
aws_secret_access_key = $(cat /minio-gitlab-runner-cache/secretkey)
|
||||
EOF
|
||||
12
.npmrc.gup
12
.npmrc.gup
@ -1,12 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
if command -V gup 1>&- 2>&-; then
|
||||
gup --always
|
||||
fi
|
||||
|
||||
cat >${1:-.npmrc} <<EOF
|
||||
@fortawesome:registry=https://npm.fontawesome.com/
|
||||
//npm.fontawesome.com/:_authToken=${FONTAWESOME_NPM_AUTH_TOKEN}
|
||||
EOF
|
||||
9
.reuse/dep5
Normal file
9
.reuse/dep5
Normal file
@ -0,0 +1,9 @@
|
||||
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: FraDrive
|
||||
Upstream-Contact: Sarah Vaupel <sarah.vaupel@uniworx.de>
|
||||
Source: https://gitlab.uniworx.de/fradrive/fradrive
|
||||
|
||||
Files: assets/fonts/fradrive/*
|
||||
Copyright: 2010 Google Corporation with Reserved Font Arimo, Tinos and Cousine
|
||||
Copyright: 2012 Red Hat, Inc. with Reserved Font Name Liberation
|
||||
License: OFL-1.1-RFN
|
||||
102
LICENSES/OFL-1.1-RFN.txt
Normal file
102
LICENSES/OFL-1.1-RFN.txt
Normal file
@ -0,0 +1,102 @@
|
||||
Digitized data copyright (c) 2010 Google Corporation
|
||||
with Reserved Font Arimo, Tinos and Cousine.
|
||||
Copyright (c) 2012 Red Hat, Inc.
|
||||
with Reserved Font Name Liberation.
|
||||
|
||||
This Font Software is licensed under the SIL Open Font License,
|
||||
Version 1.1.
|
||||
|
||||
This license is copied below, and is also available with a FAQ at:
|
||||
http://scripts.sil.org/OFL
|
||||
|
||||
SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
|
||||
|
||||
PREAMBLE The goals of the Open Font License (OFL) are to stimulate
|
||||
worldwide development of collaborative font projects, to support the font
|
||||
creation efforts of academic and linguistic communities, and to provide
|
||||
a free and open framework in which fonts may be shared and improved in
|
||||
partnership with others.
|
||||
|
||||
The OFL allows the licensed fonts to be used, studied, modified and
|
||||
redistributed freely as long as they are not sold by themselves.
|
||||
The fonts, including any derivative works, can be bundled, embedded,
|
||||
redistributed and/or sold with any software provided that any reserved
|
||||
names are not used by derivative works. The fonts and derivatives,
|
||||
however, cannot be released under any other type of license. The
|
||||
requirement for fonts to remain under this license does not apply to
|
||||
any document created using the fonts or their derivatives.
|
||||
|
||||
|
||||
|
||||
DEFINITIONS
|
||||
"Font Software" refers to the set of files released by the Copyright
|
||||
Holder(s) under this license and clearly marked as such.
|
||||
This may include source files, build scripts and documentation.
|
||||
|
||||
"Reserved Font Name" refers to any names specified as such after the
|
||||
copyright statement(s).
|
||||
|
||||
"Original Version" refers to the collection of Font Software components
|
||||
as distributed by the Copyright Holder(s).
|
||||
|
||||
"Modified Version" refers to any derivative made by adding to, deleting,
|
||||
or substituting — in part or in whole —
|
||||
any of the components of the Original Version, by changing formats or
|
||||
by porting the Font Software to a new environment.
|
||||
|
||||
"Author" refers to any designer, engineer, programmer, technical writer
|
||||
or other person who contributed to the Font Software.
|
||||
|
||||
|
||||
PERMISSION & CONDITIONS
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a
|
||||
copy of the Font Software, to use, study, copy, merge, embed, modify,
|
||||
redistribute, and sell modified and unmodified copies of the Font
|
||||
Software, subject to the following conditions:
|
||||
|
||||
1) Neither the Font Software nor any of its individual components, in
|
||||
Original or Modified Versions, may be sold by itself.
|
||||
|
||||
2) Original or Modified Versions of the Font Software may be bundled,
|
||||
redistributed and/or sold with any software, provided that each copy
|
||||
contains the above copyright notice and this license. These can be
|
||||
included either as stand-alone text files, human-readable headers or
|
||||
in the appropriate machine-readable metadata fields within text or
|
||||
binary files as long as those fields can be easily viewed by the user.
|
||||
|
||||
3) No Modified Version of the Font Software may use the Reserved Font
|
||||
Name(s) unless explicit written permission is granted by the
|
||||
corresponding Copyright Holder. This restriction only applies to the
|
||||
primary font name as presented to the users.
|
||||
|
||||
4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font
|
||||
Software shall not be used to promote, endorse or advertise any
|
||||
Modified Version, except to acknowledge the contribution(s) of the
|
||||
Copyright Holder(s) and the Author(s) or with their explicit written
|
||||
permission.
|
||||
|
||||
5) The Font Software, modified or unmodified, in part or in whole, must
|
||||
be distributed entirely under this license, and must not be distributed
|
||||
under any other license. The requirement for fonts to remain under
|
||||
this license does not apply to any document created using the Font
|
||||
Software.
|
||||
|
||||
|
||||
|
||||
TERMINATION
|
||||
This license becomes null and void if any of the above conditions are not met.
|
||||
|
||||
|
||||
|
||||
DISCLAIMER
|
||||
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
|
||||
OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
|
||||
COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
|
||||
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER
|
||||
DEALINGS IN THE FONT SOFTWARE.
|
||||
|
||||
274
Makefile
Normal file
274
Makefile
Normal file
@ -0,0 +1,274 @@
|
||||
SHELL=bash
|
||||
|
||||
export CONTAINER_COMMAND ?= podman
|
||||
export CONTAINER_INTERACTIVE
|
||||
export CONTAINER_PORTS
|
||||
export IN_CONTAINER ?= false
|
||||
export IN_CI ?= false
|
||||
export WATCH
|
||||
export db ?= -cf
|
||||
export DEV_PORT_HTTP
|
||||
export DEV_PORT_HTTPS
|
||||
|
||||
###########################
|
||||
##### GENERAL TARGETS #####
|
||||
|
||||
.PHONY: help
|
||||
help:
|
||||
@echo "Nothing to see here, go away"
|
||||
|
||||
.PHONY: all
|
||||
all:
|
||||
@echo "TODO"
|
||||
|
||||
.PHONY: clean
|
||||
clean:
|
||||
rm -rf node_modules .npm .cache assets/icons assets/favicons static well-known
|
||||
rm -rf .stack .stack-work .stack-work-build .stack-work-run .stack-work-test .stack-work-doc
|
||||
rm -rf bin/ .Dockerfile .dev-port-http .dev-port-https
|
||||
.PHONY: clean-all
|
||||
clean-all: clean
|
||||
$(CONTAINER_COMMAND) system prune --all --force --volumes
|
||||
$(CONTAINER_COMMAND) image prune --all --force
|
||||
$(CONTAINER_COMMAND) volume prune --force
|
||||
|
||||
.PHONY: release
|
||||
release:
|
||||
./.gitlab-ci/version.pl -changelog CHANGELOG.md
|
||||
git add CHANGELOG.md
|
||||
VERSION=`.gitlab-ci/version.pl`
|
||||
git tag ${VERSION}
|
||||
git commit -m "chore(release): ${VERSION}"
|
||||
git push
|
||||
|
||||
.PHONY: %-shell
|
||||
%-shell:
|
||||
MOUNT_DIR=/mnt/fradrive ; \
|
||||
FRADRIVE_SERVICE=$* ; \
|
||||
$(MAKE) -- --image-build FRADRIVE_SERVICE=$${FRADRIVE_SERVICE} ; \
|
||||
$(CONTAINER_COMMAND) run -it -v $(PWD):$${MOUNT_DIR} $(CONTAINER_PORTS) --env IN_CONTAINER=true --entrypoint /bin/bash --name fradrive.$${FRADRIVE_SERVICE}.interactive.$$(date +'%Y-%m-%dT%H-%M-%S') fradrive/$${FRADRIVE_SERVICE}
|
||||
|
||||
##### GENERAL TARGETS #####
|
||||
###########################
|
||||
|
||||
|
||||
############################################
|
||||
##### UNIFIED FRONTEND/BACKEND TARGETS #####
|
||||
|
||||
.PHONY: serve
|
||||
serve: CONTAINER_INTERACTIVE=-it
|
||||
serve:
|
||||
$(MAKE) serve-database &
|
||||
$(MAKE) serve-frontend &
|
||||
$(MAKE) serve-backend
|
||||
|
||||
.PHONY: compile
|
||||
compile: compile-frontend compile-backend
|
||||
|
||||
.PHONY: lint
|
||||
lint: lint-frontend lint-backend
|
||||
|
||||
.PHONY: test
|
||||
test: test-frontend test-backend i18n-check
|
||||
|
||||
##### UNIFIED FRONTEND/BACKEND TARGETS #####
|
||||
############################################
|
||||
|
||||
|
||||
############################
|
||||
##### FRONTEND TARGETS #####
|
||||
|
||||
.PHONY: %-frontend
|
||||
%-frontend: FRADRIVE_SERVICE=frontend
|
||||
%-frontend: --image-build --containerized---%-frontend;
|
||||
|
||||
.PHONY: --%-frontend
|
||||
--%-frontend: --containerized---frontend-dependencies-frontend;
|
||||
|
||||
.PHONY: --compile-frontend
|
||||
--compile-frontend: --frontend-dependencies
|
||||
npx -- webpack --progress $(WATCH)
|
||||
|
||||
.PHONY: serve-frontend
|
||||
serve-frontend: CONTAINER_INTERACTIVE=-it
|
||||
serve-frontend:
|
||||
$(MAKE) -- --containerized---compile-frontend WATCH=--watch
|
||||
|
||||
# .PHONY: --serve-frontend
|
||||
# --serve-frontend: WATCH=--watch
|
||||
# --serve-frontend: --compile-frontend;
|
||||
|
||||
.PHONY: --lint-frontend
|
||||
--lint-frontend: eslint.config.js
|
||||
npx -- eslint frontend/src $(FIX)
|
||||
@echo Hooray! There are no hints.
|
||||
|
||||
.PHONY: --test-frontend
|
||||
--test-frontend: karma.conf.cjs
|
||||
@echo Karma frontend tests are currently broken after npm update and have therefor been temporarily disabled.
|
||||
# npx -- karma start --conf karma.conf.cjs $(WATCH)
|
||||
|
||||
.PHONY: --frontend-dependencies
|
||||
--frontend-dependencies: node_modules package.json package-lock.json assets;
|
||||
|
||||
node_modules: package.json package-lock.json
|
||||
npm ci --cache .npm --prefer-offline
|
||||
|
||||
package-lock.json: package.json
|
||||
npm install --cache .npm --prefer-offline
|
||||
|
||||
assets: assets/favicons assets/icons
|
||||
assets/favicons:
|
||||
./utils/faviconize.pl assets/favicon.svg long assets/favicons
|
||||
assets/icons: node_modules
|
||||
./utils/renamer.pl node_modules/@fortawesome/fontawesome-free/svgs/solid utils/rename-fa.json assets/icons/fradrive
|
||||
./utils/renamer.pl node_modules/@fortawesome/fontawesome-free/svgs/regular utils/rename-fa.json assets/icons/fradrive
|
||||
|
||||
well-known: node_modules assets
|
||||
npx webpack --progress
|
||||
|
||||
##### FRONTEND TARGETS #####
|
||||
############################
|
||||
|
||||
|
||||
###########################
|
||||
##### BACKEND TARGETS #####
|
||||
|
||||
.PHONY: %-backend
|
||||
%-backend: FRADRIVE_SERVICE=backend
|
||||
%-backend: --image-build --containerized---%-dev-backend;
|
||||
|
||||
.PHONY: %-prod-backend
|
||||
%-prod-backend: FRADRIVE_SERVICE=backend
|
||||
%-prod-backend: --image-build --containerized---%-prod-backend;
|
||||
|
||||
.PHONY: --%-dev-backend
|
||||
--%-dev-backend: FRADRIVE_SERVICE=backend
|
||||
--%-dev-backend: stackopts=--flag uniworx:dev
|
||||
--%-dev-backend: --image-build --containerized---%-backend;
|
||||
|
||||
.PHONY: --%-prod-backend
|
||||
--%-prod-backend: FRADRIVE_SERVICE=backend
|
||||
--%-prod-backend: stackopts=--flag uniworx:-dev
|
||||
--%-prod-backend: --image-build --containerized-%-backend;
|
||||
|
||||
.PHONY: serve-backend
|
||||
serve-backend: CONTAINER_INTERACTIVE=-it
|
||||
serve-backend:
|
||||
DEV_PORT_HTTP=`docker/backend/dev_port.pl 3000 | tee .dev-port-http`; \
|
||||
DEV_PORT_HTTPS=`docker/backend/dev_port.pl 3443 | tee .dev-port-https`; \
|
||||
$(MAKE) -- --containerized---serve-dev-backend DEV_PORT_HTTP=$${DEV_PORT_HTTP} DEV_PORT_HTTPS=$${DEV_PORT_HTTPS}
|
||||
.PHONY: --serve-dev-backend
|
||||
--serve-dev-backend: start.sh
|
||||
DEV_PORT_HTTP=`cat .dev-port-http`; \
|
||||
DEV_PORT_HTTPS=`cat .dev-port-https`; \
|
||||
./start.sh
|
||||
|
||||
.PHONY: --compile-backend
|
||||
--compile-backend:
|
||||
stack build --fast --profile --library-profiling --executable-profiling --flag uniworx:-library-only --local-bin-path $$(pwd)/bin $(stackopts)
|
||||
|
||||
.PHONY: --lint-backend
|
||||
--lint-backend:
|
||||
stack build --test --fast --flag uniworx:library-only uniworx:test:hlint $(stackopts)
|
||||
|
||||
.PHONY: --test-backend
|
||||
--test-backend:
|
||||
stack build --test --coverage --fast --flag uniworx:library-only $(stackopts)
|
||||
|
||||
# TODO: maybe deprecated
|
||||
.PHONY: database
|
||||
database: .stack compile-backend-prod
|
||||
export SERVER_SESSION_ACID_FALLBACK=$${SERVER_SESSION_ACID_FALLBACK:-true}
|
||||
export AVSPASS=$${AVSPASS:-nopasswordset}
|
||||
stack exec uniworxdb -- $(db)
|
||||
|
||||
.PHONY: serve-database
|
||||
serve-database: CONTAINER_INTERACTIVE=-it
|
||||
serve-database: --containerized-database
|
||||
|
||||
.PHONY: database-%
|
||||
database-%:
|
||||
@echo "This target, intended to fill, clear, migrate, ... the database using uniworxdb from inside the backend container is yet to be implemented"
|
||||
exit 1
|
||||
|
||||
# .PHONY: .stack
|
||||
.stack: stack.yaml stack.yaml.lock
|
||||
.stack:
|
||||
$(MAKE) -- --image-run---.stack
|
||||
.PHONY: --.stack
|
||||
--.stack: stack.yaml stack.yaml.lock
|
||||
stack build --fast --only-dependencies $(stackopts)
|
||||
|
||||
# TODO: deprecated, remove
|
||||
# .stack-work.lock:
|
||||
# [ "${FLOCKER}" != "$0" ] && exec env FLOCKER="$0" flock -en .stack-work.lock "$0" "$@" || :
|
||||
|
||||
##### BACKEND TARGETS #####
|
||||
###########################
|
||||
|
||||
|
||||
#############################
|
||||
##### CONTAINER TARGETS #####
|
||||
|
||||
.PHONY: --containerized-database
|
||||
--containerized-database: FRADRIVE_SERVICE=database
|
||||
# port forwarding is disabled in --network=host mode; nevertheless it is stated here for documentation reasons
|
||||
--containerized-database: CONTAINER_PORTS=-p 5432:5432/tcp
|
||||
--containerized-database: --image-build
|
||||
if [ "$(IN_CONTAINER)" == "false" ] ; then \
|
||||
$(CONTAINER_COMMAND) run -it $(CONTAINER_PORTS) --name fradrive.$(FRADRIVE_SERVICE).$$(date +'%Y-%m-%dT%H-%M-%S') fradrive/$(FRADRIVE_SERVICE) ; \
|
||||
fi
|
||||
|
||||
.PHONY: --containerized-%-frontend
|
||||
--containerized-%-frontend: FRADRIVE_SERVICE=frontend
|
||||
#--containerized-%-frontend: --image-build --image-run-%-frontend;
|
||||
--containerized-%-frontend: --image-build
|
||||
$(MAKE) -- --image-run-$*-frontend
|
||||
|
||||
.PHONY: --containerized-%-backend
|
||||
--containerized-%-backend: FRADRIVE_SERVICE=backend
|
||||
#--containerized-%-backend: --image-build --image-run-%-backend;
|
||||
--containerized-%-backend: CONTAINER_PORTS=-p 127.0.0.1:$(DEV_PORT_HTTP):3000/tcp -p 127.0.0.1:$(DEV_PORT_HTTPS):3443/tcp
|
||||
--containerized-%-backend: --image-build
|
||||
$(MAKE) -- --image-run-$*-backend
|
||||
|
||||
.PHONY: image-rebuild
|
||||
image-rebuild-%:
|
||||
$(MAKE) -- --image-build FRADRIVE_SERVICE=$* NO_CACHE=--no-cache
|
||||
.PHONY: --image-build
|
||||
--image-build:
|
||||
rm -f .Dockerfile
|
||||
ln -s docker/$(FRADRIVE_SERVICE)/Dockerfile .Dockerfile
|
||||
MOUNT_DIR=/mnt/fradrive; \
|
||||
PROJECT_DIR=/mnt/fradrive; \
|
||||
if [ "$(IN_CI)" == "true" ] ; then \
|
||||
PROJECT_DIR=/fradrive; \
|
||||
fi; \
|
||||
if [ "$(IN_CONTAINER)" == "false" ] ; then \
|
||||
$(CONTAINER_COMMAND) build $(NO_CACHE) -v $(PWD):$${MOUNT_DIR} --env IN_CONTAINER=true --build-arg MOUNT_DIR=$${MOUNT_DIR} --build-arg PROJECT_DIR=$${PROJECT_DIR} --tag fradrive/$(FRADRIVE_SERVICE) --file $(PWD)/.Dockerfile ; \
|
||||
fi
|
||||
|
||||
.PHONY: --image-run-%
|
||||
--image-run-%:
|
||||
MOUNT_DIR=/mnt/fradrive; \
|
||||
if [ "$(IN_CONTAINER)" == "true" ] ; then \
|
||||
$(MAKE) -- $* ; \
|
||||
else \
|
||||
$(CONTAINER_COMMAND) run $(CONTAINER_INTERACTIVE) -v $(PWD):$${MOUNT_DIR} $(CONTAINER_PORTS) --env IN_CONTAINER=true --env FRADRIVE_MAKE_TARGET=$* --env WATCH=$(WATCH) --name fradrive.$(FRADRIVE_SERVICE).$$(date +'%Y-%m-%dT%H-%M-%S') fradrive/$(FRADRIVE_SERVICE) ; \
|
||||
fi
|
||||
|
||||
##### CONTAINER TARGETS #####
|
||||
#############################
|
||||
|
||||
|
||||
.PHONY: i18n-check
|
||||
i18n-check: --image-run---i18n-check
|
||||
.PHONY: --i18n-check
|
||||
--i18n-check:
|
||||
./missing-translations.sh
|
||||
@echo No missing translations.
|
||||
|
||||
%.lock:
|
||||
[ -e $@ ] || touch $@
|
||||
flock -en $@ true
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 10 KiB |
@ -1,3 +0,0 @@
|
||||
SPDX-FileCopyrightText: 2022 Fraport AG
|
||||
|
||||
SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 1.1 KiB |
@ -1,3 +0,0 @@
|
||||
SPDX-FileCopyrightText: 2022 Fraport AG
|
||||
|
||||
SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design
|
||||
@ -1,3 +0,0 @@
|
||||
SPDX-FileCopyrightText: 2022 Fraport AG
|
||||
|
||||
SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design
|
||||
16
assets/fonts/fradrive/AUTHORS
Normal file
16
assets/fonts/fradrive/AUTHORS
Normal file
@ -0,0 +1,16 @@
|
||||
AUTHORS
|
||||
|
||||
Current Contributors (sorted alphabetically):
|
||||
|
||||
- Vishal Vijayraghavan <vishalvvr at fedoraproject dot org>
|
||||
Project Owner/ Maintainer (Current)
|
||||
Red Hat, Inc.
|
||||
|
||||
Previous Contributors
|
||||
- Pravin Satpute <psatpute at redhat dot com>
|
||||
Project Owner/ Maintainer
|
||||
Red Hat, Inc.
|
||||
|
||||
- Steve Matteson
|
||||
Original Designer
|
||||
Ascender, Inc.
|
||||
79
assets/fonts/fradrive/ChangeLog
Normal file
79
assets/fonts/fradrive/ChangeLog
Normal file
@ -0,0 +1,79 @@
|
||||
* Thu Sep 30 2021 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
|
||||
- Releasing liberation-fonts 2.1.5 version
|
||||
- Resolves #40: More fixes to Superscript and subscript Numbers missing issues
|
||||
-- fixed inconsistent weight, missing glyphs and GSUB issues
|
||||
|
||||
* Tue May 04 2021 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
|
||||
- Releasing liberation-fonts 2.1.4 version
|
||||
- Resolves #40: Superscript and subscript Numbers missing
|
||||
- Resolves #24: Gender symbol are inconsistent in Sans
|
||||
|
||||
* Tue Feb 23 2021 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
|
||||
- Releasing liberation-fonts 2.1.3 version
|
||||
- Resolves #37: U+2262,2669,266C too wide
|
||||
- Resolves Bugzilla #1464310: Tilded G not works with Liberation Sans and Serif
|
||||
|
||||
* Mon Dec 21 2020 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
|
||||
- Releasing liberation-fonts 2.1.2 version
|
||||
- Resolves #25: liberation-fonts doesn't have proper <==> symbol
|
||||
- Resolves #33: Liberation Mono: U+20BF is too wide
|
||||
- Resolves #14: Liberation mono fonts are not recognized as monospace by fontconfig and cairo
|
||||
|
||||
* Wed Jun 03 2020 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
|
||||
- Releasing liberation-fonts 2.1.1 version
|
||||
- Few more fixes to Bugzilla #1072095: Liberation Sans renders most Latin combining characters incorrectly
|
||||
|
||||
* Mon Feb 10 2020 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
|
||||
- Releasing liberation-fonts 2.1.0 version
|
||||
- Updated release versioning scheme
|
||||
- Resolved Bugzilla #1072095: Liberation Sans renders most Latin combining characters incorrectly
|
||||
- Resolved Pagure issue-11: Combining diaerasis below does not work except U
|
||||
- Resolved GitHub issue-19: Incorrect glyph name mapped to unicode
|
||||
- Resolved Pagure issue-5: Incorrect glyph of Cent sign (U+00A2) in Sans and Mono style
|
||||
- Resolved Pagure issue-28 : U+25D2 and U+25D3 circle with lower / upper half black are backwards
|
||||
|
||||
* Mon Mar 4 2019 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
|
||||
- Releasing liberation-fonts 2.00.5 version
|
||||
- Resolved issue-10: Improving lowercase Cyrillic glyps localized for Macedonian and Serbian, Patch fix by Dimitrij Mijoski
|
||||
- Resolved #1014357: U+266B incorrect glyph with extra beam
|
||||
-- Added two new glyphs U+266C and U+2669
|
||||
- Resolved issue-13: COMBINING LONG SOLIDUS OVERLAY (U+0338) not centred on base character.
|
||||
- Validated Missing Points at Extrema, Non-integral coordinates, Wrong Direction issues for newly added and existing glyphs
|
||||
|
||||
* Mon Nov 05 2018 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
|
||||
- Releasing liberation-fonts 2.00.4 version, it includes few bug fixes and enhancements as follows:
|
||||
-- Added Bitcoin sign #1533798
|
||||
-- Fixed Incorrect lowercase Cyrillic BE for Macedonian language in liberation v2 (look like Greek delta), Patch fix by Dimitrij Mijoski #1574410
|
||||
-- Fixed Liberation Sans Mono Enhancement Request: Modification needed for "l" Character, Patch fix by Nikolaus Waxweiler #1574410
|
||||
|
||||
* Tue Sep 18 2018 Vishal Vijayraghavan <vishalvijayraghavan@gmail.com>
|
||||
- Resolved #1574410: Incorrect lowercase Cyrillic BE for Macedonian language in liberation v2 (look like Greek delta)
|
||||
- Patch fix by Dimitrij Mijoski: https://pagure.io/liberation-fonts/pull-request/21
|
||||
- Updated LiberationMono-Bold, LiberationMono-Regular, LiberationSans-Bold, LiberationSans-Regular, LiberationSerif-Bold, LiberationSerif-Regular
|
||||
|
||||
* Thu May 17 2018 Pravin Satpute <psatpute AT redhat DOT com> - 2.00.3
|
||||
- Releasing liberation-fonts 2.00.3 version, it includes fix for few bugs.
|
||||
- This release was pending from long time, will work on other open bugs
|
||||
post this release.
|
||||
|
||||
* Tue Oct 14 2014 Pravin Satpute <psatpute AT redhat DOT com>
|
||||
- Resolved #1096336: Liberation 2.00.x missing unicode hyphen (U+2010)
|
||||
- Added U+2007 character in Liberation Mono
|
||||
- Imported missing gpos tables from Arimo #1072095
|
||||
- Missing MIDDLE DOT (u+00B7) glyph for Liberation Sans Italic #1084493
|
||||
- Rendering of Unicode tie bars could be improved #1076190
|
||||
|
||||
* Thu Oct 04 2012 Pravin Satpute <psatpute AT redhat DOT com>
|
||||
- Resolved "Glyphs with multiple unicode encodings inhibit subsetting" #851790
|
||||
- Resolved #851791, #854601 and #851825
|
||||
- Following GASP table version as per Liberation old version. (Anti-aliasing disabled)
|
||||
- Added support for Serbian glyphs for wikipedia #657849
|
||||
- In Monospace fonts, isFixedPitch bit set via script for getting it recognized as Monospace in putty.exe
|
||||
|
||||
* Fri Jul 06 2012 Pravin Satpute <psatpute AT redhat DOT com>
|
||||
- Initial version of Liberation fonts based on croscore fonts version 1.21.0
|
||||
- Converted TTF files into SFD files to be open source.
|
||||
- Update Copyright and License file
|
||||
- set fsType bit to 0, Installable Embedding is allowed.
|
||||
- Absolute value in HHeadAscent/Descent values for maintaining Metric compatibility.
|
||||
|
||||
BIN
assets/fonts/fradrive/FRADriveMono-Bold.ttf
Normal file
BIN
assets/fonts/fradrive/FRADriveMono-Bold.ttf
Normal file
Binary file not shown.
BIN
assets/fonts/fradrive/FRADriveMono-BoldItalic.ttf
Normal file
BIN
assets/fonts/fradrive/FRADriveMono-BoldItalic.ttf
Normal file
Binary file not shown.
BIN
assets/fonts/fradrive/FRADriveMono-Italic.ttf
Normal file
BIN
assets/fonts/fradrive/FRADriveMono-Italic.ttf
Normal file
Binary file not shown.
BIN
assets/fonts/fradrive/FRADriveMono-Regular.ttf
Normal file
BIN
assets/fonts/fradrive/FRADriveMono-Regular.ttf
Normal file
Binary file not shown.
BIN
assets/fonts/fradrive/FRADriveSans-Bold.ttf
Normal file
BIN
assets/fonts/fradrive/FRADriveSans-Bold.ttf
Normal file
Binary file not shown.
BIN
assets/fonts/fradrive/FRADriveSans-BoldItalic.ttf
Normal file
BIN
assets/fonts/fradrive/FRADriveSans-BoldItalic.ttf
Normal file
Binary file not shown.
BIN
assets/fonts/fradrive/FRADriveSans-Italic.ttf
Normal file
BIN
assets/fonts/fradrive/FRADriveSans-Italic.ttf
Normal file
Binary file not shown.
BIN
assets/fonts/fradrive/FRADriveSans-Regular.ttf
Normal file
BIN
assets/fonts/fradrive/FRADriveSans-Regular.ttf
Normal file
Binary file not shown.
86
assets/fonts/fradrive/README.md
Normal file
86
assets/fonts/fradrive/README.md
Normal file
@ -0,0 +1,86 @@
|
||||
Liberation Fonts
|
||||
=================
|
||||
|
||||
The Liberation Fonts is a font collection which aims to provide document
|
||||
layout compatibility as usage of Times New Roman, Arial, Courier New.
|
||||
|
||||
|
||||
Requirements
|
||||
=================
|
||||
|
||||
* [fontforge](http://fontforge.sourceforge.net)
|
||||
* [python fonttools](https://pypi.org/project/fonttools/)
|
||||
|
||||
|
||||
Install
|
||||
============
|
||||
|
||||
1. Get source
|
||||
```
|
||||
$ git clone https://github.com/liberationfonts/liberation-fonts.git
|
||||
```
|
||||
|
||||
- Or downloading the tar.gz file from [releases](https://github.com/fontforge/fontforge/releases).
|
||||
|
||||
- Extract the tar file:
|
||||
```
|
||||
$ tar zxvf liberation-fonts-[VERSION].tar.gz
|
||||
```
|
||||
2. Build from the source
|
||||
```
|
||||
$ cd liberation-fonts or $ cd liberation-fonts-[VERSION]
|
||||
$ make
|
||||
```
|
||||
The binary font files will be available in 'liberation-fonts-ttf-[VERSION]' directory.
|
||||
|
||||
3. Install to system
|
||||
|
||||
Fedora Users :
|
||||
- One can manually install the fonts by copying the TTFs to `~/.fonts` for user wide usage
|
||||
- and/or to `/usr/share/fonts/liberation` for system-wide availability.
|
||||
- Then, run `fc-cache` to let that cached.
|
||||
|
||||
Other distributions :
|
||||
please check out corresponding documentation.
|
||||
|
||||
|
||||
Usage
|
||||
==========
|
||||
|
||||
Simply select preferred liberation font in applications and start using.
|
||||
|
||||
|
||||
License
|
||||
============
|
||||
|
||||
This Font Software is licensed under the SIL Open Font License,
|
||||
Version 1.1.
|
||||
|
||||
Please read file "LICENSE" for details.
|
||||
|
||||
|
||||
For Maintainers
|
||||
====================
|
||||
|
||||
1. Before packaging a new release based on a new source tarball, you have to update the version suffix in the Makefile:
|
||||
```
|
||||
VER = [NEW_VERSION]
|
||||
```
|
||||
2. After updating Makefile VER attribute, update all font metadata by executing:
|
||||
```
|
||||
$ make versionupdate
|
||||
```
|
||||
One can verify changes using ftinfo/otfinfo or fontforge itself.
|
||||
3. It is highly recommended that file 'ChangeLog' is updated to reflect changes.
|
||||
|
||||
4. Create a tarball with the following command:
|
||||
```
|
||||
$ make dist
|
||||
```
|
||||
The new versioned tarball will be available in the dist/ folder as `liberation-fonts-[NEW_VERSION].tar.gz.`
|
||||
5. Create github tag for that [NEW_VERSION] and upload dist tarball
|
||||
|
||||
Credits
|
||||
============
|
||||
|
||||
Please read file "AUTHORS" for list of contributors.
|
||||
4
assets/fonts/fradrive/TODO
Normal file
4
assets/fonts/fradrive/TODO
Normal file
@ -0,0 +1,4 @@
|
||||
Here are todo for next release
|
||||
1) Serbian glyph for wikipedia https://bugzilla.redhat.com/show_bug.cgi?id=657849
|
||||
2) Liberation Mono not recognizing as Mono in Windows application #861003
|
||||
- presently it is patch, we have to update zero width characters to fixed width
|
||||
@ -1,41 +0,0 @@
|
||||
{
|
||||
"name": "App",
|
||||
"icons": [
|
||||
{
|
||||
"src": "\/android-icon-36x36.png",
|
||||
"sizes": "36x36",
|
||||
"type": "image\/png",
|
||||
"density": "0.75"
|
||||
},
|
||||
{
|
||||
"src": "\/android-icon-48x48.png",
|
||||
"sizes": "48x48",
|
||||
"type": "image\/png",
|
||||
"density": "1.0"
|
||||
},
|
||||
{
|
||||
"src": "\/android-icon-72x72.png",
|
||||
"sizes": "72x72",
|
||||
"type": "image\/png",
|
||||
"density": "1.5"
|
||||
},
|
||||
{
|
||||
"src": "\/android-icon-96x96.png",
|
||||
"sizes": "96x96",
|
||||
"type": "image\/png",
|
||||
"density": "2.0"
|
||||
},
|
||||
{
|
||||
"src": "\/android-icon-144x144.png",
|
||||
"sizes": "144x144",
|
||||
"type": "image\/png",
|
||||
"density": "3.0"
|
||||
},
|
||||
{
|
||||
"src": "\/android-icon-192x192.png",
|
||||
"sizes": "192x192",
|
||||
"type": "image\/png",
|
||||
"density": "4.0"
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -1,3 +0,0 @@
|
||||
SPDX-FileCopyrightText: 2022 Steffen Jost <jost@cip.ifi.lmu.de>
|
||||
|
||||
SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
23
babel.config.cjs
Normal file
23
babel.config.cjs
Normal file
@ -0,0 +1,23 @@
|
||||
module.exports = (api) => {
|
||||
api.cache(true);
|
||||
|
||||
const presets = [
|
||||
[ '@babel/preset-env'
|
||||
]
|
||||
];
|
||||
|
||||
const plugins = [
|
||||
["@babel/plugin-proposal-decorators", { "legacy": true }],
|
||||
["@babel/plugin-syntax-dynamic-import"],
|
||||
["@babel/plugin-transform-class-properties", { "loose": true }],
|
||||
["@babel/plugin-transform-private-methods", { "loose": true }],
|
||||
["@babel/plugin-transform-private-property-in-object", { "loose": true }],
|
||||
["@babel/plugin-transform-modules-commonjs"],
|
||||
["@babel/transform-runtime"],
|
||||
];
|
||||
|
||||
return {
|
||||
presets,
|
||||
plugins,
|
||||
};
|
||||
}
|
||||
35
compose.yaml
Normal file
35
compose.yaml
Normal file
@ -0,0 +1,35 @@
|
||||
services:
|
||||
frontend:
|
||||
# image: registry.uniworx.de/fradrive/fradrive/frontend # TODO: reference to current branch required; how to do that here?
|
||||
# pull_policy: if_not_present
|
||||
build:
|
||||
dockerfile: ./docker/frontend/Dockerfile
|
||||
context: .
|
||||
environment:
|
||||
- PROJECT_DIR=/fradrive
|
||||
volumes:
|
||||
- &fradrive-mnt .:/tmp/fradrive
|
||||
backend:
|
||||
# image: registry.uniworx.de/fradrive/fradrive/backend
|
||||
# pull_policy: if_not_present
|
||||
build:
|
||||
dockerfile: ./docker/backend/Dockerfile
|
||||
context: ./
|
||||
volumes:
|
||||
- *fradrive-mnt
|
||||
depends_on:
|
||||
- frontend
|
||||
stdin_open: true
|
||||
database:
|
||||
# image: registry.uniworx.de/fradrive/fradrive/database
|
||||
# pull_policy: if_not_present
|
||||
build: ./docker/database
|
||||
ports:
|
||||
- "9876:5432"
|
||||
# privileged: true
|
||||
|
||||
# driver: local
|
||||
# driver_opts:
|
||||
# type: none
|
||||
# o: bind
|
||||
# device: ./
|
||||
@ -1,76 +0,0 @@
|
||||
{
|
||||
"masterPicture": "assets/favicon.svg",
|
||||
"design": {
|
||||
"desktop_browser": {},
|
||||
"ios": {
|
||||
"picture_aspect": "background_and_margin",
|
||||
"margin": "5%",
|
||||
"background_color": "#ffffff",
|
||||
"startup_image": {
|
||||
"background_color": "#ffffff"
|
||||
},
|
||||
"app_name": "FRADrive",
|
||||
"assets": {
|
||||
"ios6_and_prior_icons": false,
|
||||
"ios7_and_later_icons": true,
|
||||
"precomposed_icons": true,
|
||||
"declare_only_default_icon": true
|
||||
}
|
||||
},
|
||||
"windows": {
|
||||
"picture_aspect": "white_silhouette",
|
||||
"background_color": "#0a9342",
|
||||
"app_name": "FRADrive"
|
||||
},
|
||||
"firefox_app": {
|
||||
"picture_aspect": "circle",
|
||||
"keep_picture_in_circle": false,
|
||||
"circle_inner_margin": "5%",
|
||||
"background_color": "#ffffff",
|
||||
"overlay": false,
|
||||
"manifest": {
|
||||
"app_name": "FRADrive",
|
||||
"app_description": {
|
||||
"_i18n": true,
|
||||
"de-de-formal": "Ein webbasiertes Schulungsverwaltungssystem",
|
||||
"en-eu": "A web based training management system"
|
||||
},
|
||||
"developer_name": "Uni2work-Team",
|
||||
"developer_url": "https://uni2work.ifi.lmu.de/info",
|
||||
"display": "browser",
|
||||
"start_url": "/"
|
||||
}
|
||||
},
|
||||
"android_chrome": {
|
||||
"picture_aspect": "shadow",
|
||||
"manifest": {
|
||||
"name": "FRADrive",
|
||||
"display": "browser",
|
||||
"orientation": "portrait",
|
||||
"start_url": "/"
|
||||
},
|
||||
"assets": {
|
||||
"legacy_icon": true,
|
||||
"low_resolution_icons": false
|
||||
}
|
||||
},
|
||||
"safari_pinned_tab": {
|
||||
"picture_aspect": "silhouette",
|
||||
"theme_color": "#0a9342"
|
||||
},
|
||||
"coast": {
|
||||
"picture_aspect": "background_and_margin",
|
||||
"background_color": "#ffffff",
|
||||
"margin": "10%"
|
||||
},
|
||||
"open_graph": {
|
||||
"picture_aspect": "background_and_margin",
|
||||
"background_color": "#ffffff",
|
||||
"margin": "10%",
|
||||
"ratio": "square"
|
||||
}
|
||||
},
|
||||
"settings": {
|
||||
"html_code_file": true
|
||||
}
|
||||
}
|
||||
@ -1,3 +0,0 @@
|
||||
SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Steffen Jost <jost@tcs.ifi.lmu.de>
|
||||
|
||||
SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
5
config/i18n.json
Normal file
5
config/i18n.json
Normal file
@ -0,0 +1,5 @@
|
||||
{
|
||||
"_i18n": true,
|
||||
"de-de-formal": "Ein webbasiertes Schulungsverwaltungssystem",
|
||||
"en-eu": "A web-based training management system"
|
||||
}
|
||||
@ -123,7 +123,7 @@ auth-pw-hash:
|
||||
database:
|
||||
user: "_env:PGUSER:uniworx"
|
||||
password: "_env:PGPASS:uniworx"
|
||||
host: "_env:PGHOST:127.0.0.1"
|
||||
host: "_env:PGHOST:host.docker.internal"
|
||||
port: "_env:PGPORT:5432"
|
||||
# See config/test-settings.yml for an override during tests
|
||||
database: "_env:PGDATABASE:uniworx"
|
||||
|
||||
31
docker/backend/Dockerfile
Normal file
31
docker/backend/Dockerfile
Normal file
@ -0,0 +1,31 @@
|
||||
FROM debian:12.5
|
||||
|
||||
RUN apt-get -y update && apt-get -y install haskell-stack git
|
||||
RUN apt-get -y update && apt-get -y install alex g++ happy libghc-zlib-dev libpq-dev libsodium-dev locales locales-all pkg-config
|
||||
RUN apt-get -y update && apt-get -y install llvm
|
||||
|
||||
RUN apt-get -y update && apt-get -y install libbz2-dev libexpat1-dev
|
||||
|
||||
ENV LANG=en_US.UTF-8
|
||||
|
||||
# locally these two should be identical, so that compilation results are written out into the file dir.
|
||||
# in CI-pipelines these two should be different, so that the container caches the compilation results.
|
||||
ARG MOUNT_DIR=/mnt/fradrive
|
||||
ARG PROJECT_DIR=/fradrive
|
||||
|
||||
RUN mkdir -p "${PROJECT_DIR}"
|
||||
RUN if [ "${PROJECT_DIR}" != "${MOUNT_DIR}" ] ; then cp -r "${MOUNT_DIR}"/* "${PROJECT_DIR}" ; fi
|
||||
WORKDIR "${PROJECT_DIR}"
|
||||
ENV HOME="${PROJECT_DIR}"
|
||||
|
||||
RUN mkdir -p "${PROJECT_DIR}/.stack"
|
||||
ENV STACK_ROOT="${PROJECT_DIR}/.stack"
|
||||
|
||||
RUN make .stack STACK_ROOT=${STACK_ROOT} IN_CONTAINER=true
|
||||
RUN stack build yesod-bin
|
||||
|
||||
ENV FRADRIVE_MAKE_TARGET=serve-backend
|
||||
ENTRYPOINT make -- ${FRADRIVE_MAKE_TARGET} STACK_ROOT="${STACK_ROOT}" IN_CONTAINER=true
|
||||
|
||||
EXPOSE 3000/tcp
|
||||
EXPOSE 3443/tcp
|
||||
19
docker/backend/dev_port.pl
Executable file
19
docker/backend/dev_port.pl
Executable file
@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env perl
|
||||
|
||||
use strict;
|
||||
use warnings;
|
||||
|
||||
my $port = $ARGV[0];
|
||||
my $used_ports = `netstat -tulan`;
|
||||
|
||||
my %p=();
|
||||
my $addr=qr((?:\d+.\d+.\d+.\d+|[0-9a-f:]+));
|
||||
for(split m/\R/, $used_ports) {
|
||||
if(m#$addr:(\d+)\s+$addr:(?:\d+|\*)\s+#) {
|
||||
$p{$1}=1;
|
||||
}
|
||||
};
|
||||
|
||||
$port++ while $p{$port};
|
||||
|
||||
print $port
|
||||
18
docker/database/Dockerfile
Normal file
18
docker/database/Dockerfile
Normal file
@ -0,0 +1,18 @@
|
||||
FROM docker.io/postgres:12
|
||||
|
||||
# Allow for connecting to database without password authentication
|
||||
ENV POSTGRES_HOST_AUTH_METHOD=trust
|
||||
|
||||
RUN cp /mnt/fradrive/docker/database/schema.sql /schema.sql && chown postgres:postgres /schema.sql
|
||||
RUN cp /mnt/fradrive/docker/database/initdb.sh /etc/fradrive-db && chmod 755 /etc/fradrive-db
|
||||
|
||||
USER postgres
|
||||
|
||||
# postgresql.conf and postgres_hba.conf resulted in error (Invalid data directory or sth); using -o/--options in initdb.sh instead
|
||||
# COPY --chown=postgres:postgres --chmod=644 ./postgresql.conf /etc/postgresql/12/main/postgresql.conf
|
||||
# COPY --chown=postgres:postgres --chmod=644 ./pg_hba.conf /etc/postgresql/12/main/pg_hba.conf
|
||||
# ADD ./schema.sql /schema.sql
|
||||
# ADD --chmod=755 ./initdb.sh /etc/fradrive-db
|
||||
|
||||
ENTRYPOINT /etc/fradrive-db
|
||||
EXPOSE 5432/tcp
|
||||
14
docker/database/initdb.sh
Normal file
14
docker/database/initdb.sh
Normal file
@ -0,0 +1,14 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Init and start the postgres daemon
|
||||
initdb --no-locale
|
||||
pg_ctl start -w -o "-c listen_addresses=0.0.0.0 -c unix_socket_permissions=0700 -c max_connections=9990 -c shared_preload_libraries=pg_stat_statements -c session_preload_libraries=auto_explain -c auto_explain.log_min_duration=100ms" # COPY postgresql.conf and postgres_hba.conf resulted in error (Invalid data directory)
|
||||
POSTGRID=`cat /var/lib/postgresql/data/postmaster.pid | perl -le '<>=~m#(\d+)# and print $1'`
|
||||
|
||||
# Create uniworx and uniworx_test database
|
||||
psql -f /schema.sql postgres
|
||||
|
||||
# Wait for postgres daemon to terminate
|
||||
while [ -e /proc/$POSTGRID ]; do
|
||||
sleep 0.5;
|
||||
done
|
||||
1
docker/database/pg_hba.conf
Normal file
1
docker/database/pg_hba.conf
Normal file
@ -0,0 +1 @@
|
||||
local all all trust
|
||||
6
docker/database/postgresql.conf
Normal file
6
docker/database/postgresql.conf
Normal file
@ -0,0 +1,6 @@
|
||||
listen_addresses=0.0.0.0
|
||||
unix_socket_permissions=0700
|
||||
max_connections=9990
|
||||
shared_preload_libraries=pg_stat_statements
|
||||
session_preload_libraries=auto_explain
|
||||
auto_explain.log_min_duration=100ms
|
||||
5
docker/database/schema.sql
Normal file
5
docker/database/schema.sql
Normal file
@ -0,0 +1,5 @@
|
||||
CREATE USER uniworx WITH SUPERUSER;
|
||||
CREATE DATABASE uniworx_test;
|
||||
GRANT ALL ON DATABASE uniworx_test TO uniworx;
|
||||
CREATE DATABASE uniworx;
|
||||
GRANT ALL ON DATABASE uniworx TO uniworx;
|
||||
31
docker/fradrive/Dockerfile
Normal file
31
docker/fradrive/Dockerfile
Normal file
@ -0,0 +1,31 @@
|
||||
FROM debian:12.5
|
||||
|
||||
RUN apt-get -y update
|
||||
|
||||
# setup locales
|
||||
RUN apt-get -y install locales locales-all
|
||||
RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && \
|
||||
locale-gen
|
||||
ENV LANG=en_US.UTF-8 LANGUAGE=en_US:en LC_ALL=en_US.UTF-8
|
||||
|
||||
# Binary runtime dependencies
|
||||
# TODO: minimize texlive dependencies, switch to basic schemes where possible
|
||||
RUN apt-get -y install texlive-latex-recommended texlive-latex-extra texlive-luatex texlive-fonts-recommended texlive-fonts-extra texlive-lang-english texlive-lang-german
|
||||
|
||||
# Add uniworx user and directories
|
||||
RUN mkdir -p /var/lib
|
||||
RUN mkdir -p /var/log
|
||||
RUN groupadd -r uniworx
|
||||
RUN useradd -r -g uniworx -d /var/lib/uniworx -M uniworx --uid 999
|
||||
RUN mkdir -p /var/lib/uniworx && chown -R uniworx:uniworx /var/lib/uniworx
|
||||
RUN mkdir -p /var/log/uniworx && chown -R uniworx:uniworx /var/log/uniworx
|
||||
|
||||
# TODO: is this still needed?
|
||||
# RUN install -d -g uniworx -o uniworx -m 0750 /var/lib/uniworx
|
||||
# RUN install -d -g uniworx -o uniworx -m 0755 /var/log/uniworx
|
||||
RUN cp /tmp/uniworx-bin/uniworx /usr/bin/uniworx
|
||||
|
||||
USER uniworx
|
||||
ENTRYPOINT fradrive-entrypoint.sh
|
||||
EXPOSE 8080/tcp
|
||||
VOLUME /var/lib/uniworx /var/log
|
||||
19
docker/fradrive/fradrive-entrypoint.sh
Normal file
19
docker/fradrive/fradrive-entrypoint.sh
Normal file
@ -0,0 +1,19 @@
|
||||
#!/bin/bash -xe
|
||||
|
||||
cTime=$(date -Is)
|
||||
|
||||
# export LOGDEST=/var/log/uniworx/${cTime}.log # kubernetes prefers log via stdout
|
||||
|
||||
typeset -a configs
|
||||
|
||||
configDir=${CONFIG_DIR-/cfg}
|
||||
configs=()
|
||||
if [[ -d "${configDir}" ]]; then
|
||||
while IFS= read -d $'\0' cfg; do
|
||||
configs+=("${(q)cfg}")
|
||||
done < <(find "${configDir}" \( -name '*.yml' -o -name '*.yaml' \) -print0 | sort -rz)
|
||||
fi
|
||||
|
||||
cd /var/lib/uniworx
|
||||
|
||||
exec -- uniworx ${configs}
|
||||
32
docker/frontend/Dockerfile
Normal file
32
docker/frontend/Dockerfile
Normal file
@ -0,0 +1,32 @@
|
||||
FROM debian:12.5
|
||||
|
||||
# Basic dependencies
|
||||
RUN apt-get -y update && apt-get -y install curl npm
|
||||
|
||||
# Build and watch dependencies
|
||||
RUN apt-get -y update && apt-get -y install exiftool
|
||||
RUN apt-get -y update && apt-get -y install imagemagick
|
||||
|
||||
# Test dependencies
|
||||
RUN apt-get -y update && apt-get -y install chromium
|
||||
ENV CHROME_BIN=chromium
|
||||
|
||||
# TODO: use dotenv for npm version?
|
||||
RUN npm install -g n
|
||||
RUN n 20.17.0
|
||||
|
||||
# locally these two should be identical, so that compilation results are written out into the file dir.
|
||||
# in CI-pipelines these two should be different, so that the container caches the compilation results.
|
||||
ARG MOUNT_DIR=/mnt/fradrive
|
||||
ARG PROJECT_DIR=/fradrive
|
||||
RUN mkdir -p ${PROJECT_DIR}
|
||||
RUN if [ "${PROJECT_DIR}" != "${MOUNT_DIR}" ] ; then cp -r ${MOUNT_DIR}/* ${PROJECT_DIR} ; fi
|
||||
WORKDIR ${PROJECT_DIR}
|
||||
ENV HOME=${PROJECT_DIR}
|
||||
|
||||
#RUN make node_modules IN_CONTAINER=true
|
||||
#RUN make well-known IN_CONTAINER=true
|
||||
RUN make -- --frontend-dependencies
|
||||
|
||||
ENV FRADRIVE_MAKE_TARGET=watch-frontend
|
||||
ENTRYPOINT make -- ${FRADRIVE_MAKE_TARGET} IN_CONTAINER=true CHROME_BIN=${CHROME_BIN}
|
||||
33
docker/podman/Dockerfile
Normal file
33
docker/podman/Dockerfile
Normal file
@ -0,0 +1,33 @@
|
||||
# Debian-based podman daemon image for building docker images
|
||||
# inside docker containers (e.g. gitlab runners).
|
||||
#
|
||||
# Yoinked with love from:
|
||||
# https://www.redhat.com/sysadmin/podman-inside-container
|
||||
|
||||
FROM debian:12.5
|
||||
|
||||
RUN apt-get -y update
|
||||
|
||||
RUN apt-get -y install make podman podman-compose fuse-overlayfs
|
||||
|
||||
RUN useradd podman; \
|
||||
echo podman:10000:5000 > /etc/subuid; \
|
||||
echo podman:10000:5000 > /etc/subgid;
|
||||
|
||||
VOLUME /var/lib/containers
|
||||
VOLUME /home/podman/.local/share/containers
|
||||
|
||||
ADD https://raw.githubusercontent.com/containers/image_build/main/podman/containers.conf /etc/containers/containers.conf
|
||||
ADD https://raw.githubusercontent.com/containers/image_build/main/podman/podman-containers.conf /home/podman/.config/containers/containers.conf
|
||||
|
||||
RUN chown podman:podman -R /home/podman
|
||||
|
||||
# chmod containers.conf and adjust storage.conf to enable Fuse storage.
|
||||
# RUN chmod 644 /etc/containers/containers.conf; sed -i -e 's|^#mount_program|mount_program|g' -e '/additionalimage.*/a "/var/lib/shared",' -e 's|^mountopt[[:space:]]*=.*$|mountopt = "nodev,fsync=0"|g' /etc/containers/containers.conf
|
||||
# RUN echo -e '[storage]\ndriver="zfs"\nmount_program="zfs"\nadditionalimage=/var/lib/shared\nmountopt="nodev,fsync=0"' >> /etc/containers/containers.conf
|
||||
RUN chmod 644 /etc/containers/containers.conf
|
||||
RUN echo '[storage]\ndriver="overlay"\n[storage.options.overlay]\nforce_mask="private"\nmount_program="/usr/bin/fuse-overlayfs"\nmountopt="nodev"' >> /etc/containers/containers.conf
|
||||
RUN mkdir -p /root/.config/containers/ && echo '[storage]\ndriver="overlay"\n[storage.options.overlay]\nforce_mask="private"\nmount_program="/usr/bin/fuse-overlayfs"\nmountopt="nodev"' > /root/.config/containers/storage.conf
|
||||
RUN mkdir -p /var/lib/shared/overlay-images /var/lib/shared/overlay-layers /var/lib/shared/vfs-images /var/lib/shared/vfs-layers; touch /var/lib/shared/overlay-images/images.lock; touch /var/lib/shared/overlay-layers/layers.lock; touch /var/lib/shared/vfs-images/images.lock; touch /var/lib/shared/vfs-layers/layers.lock
|
||||
|
||||
ENV _CONTAINERS_USERNS_CONFIGURED=""
|
||||
33
eslint.config.js
Normal file
33
eslint.config.js
Normal file
@ -0,0 +1,33 @@
|
||||
import js from "@eslint/js";
|
||||
import globals from "globals";
|
||||
import babelParser from "@babel/eslint-parser";
|
||||
|
||||
export default [
|
||||
js.configs.recommended,
|
||||
{
|
||||
files: ["**/*.js"],
|
||||
plugins: {},
|
||||
languageOptions: {
|
||||
ecmaVersion: 2018,
|
||||
globals: {
|
||||
...globals.browser,
|
||||
...globals.es6,
|
||||
...globals.node,
|
||||
...globals.jasmine,
|
||||
Atomics: "readonly",
|
||||
SharedArrayBuffer: "readonly",
|
||||
flatpickr: "readonly",
|
||||
$: "readonly",
|
||||
},
|
||||
parser: babelParser,
|
||||
},
|
||||
rules: {
|
||||
"no-console": "off",
|
||||
"no-extra-semi": "off",
|
||||
"semi": ["error", "always"],
|
||||
"comma-dangle": ["error", "always-multiline"],
|
||||
"quotes": ["error", "single"],
|
||||
"no-var": "error",
|
||||
},
|
||||
},
|
||||
];
|
||||
618
flake.lock
618
flake.lock
@ -25,22 +25,22 @@
|
||||
"rev": "40393c938111ac78232dc2c7eec5edb4a22d03e8",
|
||||
"revCount": 62,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/HaskellNet-SSL.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/HaskellNet-SSL.git"
|
||||
},
|
||||
"original": {
|
||||
"ref": "uni2work",
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/HaskellNet-SSL.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/HaskellNet-SSL.git"
|
||||
}
|
||||
},
|
||||
"cabal-32": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1603716527,
|
||||
"narHash": "sha256-sDbrmur9Zfp4mPKohCD8IDZfXJ0Tjxpmr2R+kg5PpSY=",
|
||||
"narHash": "sha256-X0TFfdD4KZpwl0Zr6x+PLxUt/VyKQfX7ylXHdmZIL+w=",
|
||||
"owner": "haskell",
|
||||
"repo": "cabal",
|
||||
"rev": "94aaa8e4720081f9c75497e2735b90f6a819b08e",
|
||||
"rev": "48bf10787e27364730dd37a42b603cee8d6af7ee",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@ -53,11 +53,11 @@
|
||||
"cabal-34": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1622475795,
|
||||
"narHash": "sha256-chwTL304Cav+7p38d9mcb+egABWmxo2Aq+xgVBgEb/U=",
|
||||
"lastModified": 1645834128,
|
||||
"narHash": "sha256-wG3d+dOt14z8+ydz4SL7pwGfe7SiimxcD/LOuPCV6xM=",
|
||||
"owner": "haskell",
|
||||
"repo": "cabal",
|
||||
"rev": "b086c1995cdd616fc8d91f46a21e905cc50a1049",
|
||||
"rev": "5ff598c67f53f7c4f48e31d722ba37172230c462",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@ -67,6 +67,23 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"cabal-36": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1669081697,
|
||||
"narHash": "sha256-I5or+V7LZvMxfbYgZATU4awzkicBwwok4mVoje+sGmU=",
|
||||
"owner": "haskell",
|
||||
"repo": "cabal",
|
||||
"rev": "8fd619e33d34924a94e691c5fea2c42f0fc7f144",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "haskell",
|
||||
"ref": "3.6",
|
||||
"repo": "cabal",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"cardano-shell": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
@ -92,12 +109,12 @@
|
||||
"rev": "f8170266ab25b533576e96715bedffc5aa4f19fa",
|
||||
"revCount": 153,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/colonnade.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/colonnade.git"
|
||||
},
|
||||
"original": {
|
||||
"ref": "uni2work",
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/colonnade.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/colonnade.git"
|
||||
}
|
||||
},
|
||||
"conduit-resumablesink": {
|
||||
@ -109,12 +126,12 @@
|
||||
"rev": "cbea6159c2975d42f948525e03e12fc390da53c5",
|
||||
"revCount": 10,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/conduit-resumablesink.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/conduit-resumablesink.git"
|
||||
},
|
||||
"original": {
|
||||
"ref": "uni2work",
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/conduit-resumablesink.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/conduit-resumablesink.git"
|
||||
}
|
||||
},
|
||||
"cryptoids": {
|
||||
@ -126,29 +143,29 @@
|
||||
"rev": "130b0dcbf2b09ccdf387b50262f1efbbbf1819e3",
|
||||
"revCount": 44,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/cryptoids.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/cryptoids.git"
|
||||
},
|
||||
"original": {
|
||||
"ref": "uni2work",
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/cryptoids.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/cryptoids.git"
|
||||
}
|
||||
},
|
||||
"cryptonite": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1624444174,
|
||||
"narHash": "sha256-sDMA4ej1NIModAt7PQvcgIknI3KwfzcAp9YQUSe4CWw=",
|
||||
"lastModified": 1704764911,
|
||||
"narHash": "sha256-VuEWT2Bd4aSJyRcXpB+lsGDqxrTHB/uRvILzYWLNfxk=",
|
||||
"ref": "uni2work",
|
||||
"rev": "71a630edaf5f22c464e24fac8d9d310f4055ea1f",
|
||||
"revCount": 1202,
|
||||
"rev": "f78fca2504bb767d632a3bac8dbbc23367eff0e9",
|
||||
"revCount": 1220,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/cryptonite.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/cryptonite.git"
|
||||
},
|
||||
"original": {
|
||||
"ref": "uni2work",
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/cryptonite.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/cryptonite.git"
|
||||
}
|
||||
},
|
||||
"encoding": {
|
||||
@ -160,12 +177,12 @@
|
||||
"rev": "22fc3bb14841d8d50997aa47f1be3852e666f787",
|
||||
"revCount": 162,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/encoding.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/encoding.git"
|
||||
},
|
||||
"original": {
|
||||
"ref": "uni2work",
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/encoding.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/encoding.git"
|
||||
}
|
||||
},
|
||||
"esqueleto": {
|
||||
@ -177,12 +194,46 @@
|
||||
"rev": "e18dd125c5ea26fa4e88bed079b61d8c1365ee37",
|
||||
"revCount": 708,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/esqueleto.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/esqueleto.git"
|
||||
},
|
||||
"original": {
|
||||
"ref": "uni2work",
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/esqueleto.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/esqueleto.git"
|
||||
}
|
||||
},
|
||||
"flake-compat": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1672831974,
|
||||
"narHash": "sha256-z9k3MfslLjWQfnjBtEtJZdq3H7kyi2kQtUThfTgdRk0=",
|
||||
"owner": "input-output-hk",
|
||||
"repo": "flake-compat",
|
||||
"rev": "45f2638735f8cdc40fe302742b79f248d23eb368",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "input-output-hk",
|
||||
"ref": "hkm/gitlab-fix",
|
||||
"repo": "flake-compat",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-compat": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1672831974,
|
||||
"narHash": "sha256-z9k3MfslLjWQfnjBtEtJZdq3H7kyi2kQtUThfTgdRk0=",
|
||||
"owner": "input-output-hk",
|
||||
"repo": "flake-compat",
|
||||
"rev": "45f2638735f8cdc40fe302742b79f248d23eb368",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "input-output-hk",
|
||||
"ref": "hkm/gitlab-fix",
|
||||
"repo": "flake-compat",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-utils": {
|
||||
@ -190,11 +241,11 @@
|
||||
"systems": "systems"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1681202837,
|
||||
"narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=",
|
||||
"lastModified": 1705309234,
|
||||
"narHash": "sha256-uNRRNRKmJyCRC/8y1RqBkqWBLM034y4qN7EprSdmgyA=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "cfacdce06f30d2b68473a46042957675eebb3401",
|
||||
"rev": "1ef2e671c3b0c19053962c07dbda38332dcebf26",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@ -204,21 +255,6 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-utils_2": {
|
||||
"locked": {
|
||||
"lastModified": 1623875721,
|
||||
"narHash": "sha256-A8BU7bjS5GirpAUv4QA+QnJ4CceLHkcXdRp4xITDB0s=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "f7e004a55b120c02ecb6219596820fcd32ca8772",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"ghc-8.6.5-iohk": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
@ -236,14 +272,51 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"ghc98X": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1696643148,
|
||||
"narHash": "sha256-E02DfgISH7EvvNAu0BHiPvl1E5FGMDi0pWdNZtIBC9I=",
|
||||
"ref": "ghc-9.8",
|
||||
"rev": "443e870d977b1ab6fc05f47a9a17bc49296adbd6",
|
||||
"revCount": 61642,
|
||||
"submodules": true,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.haskell.org/ghc/ghc"
|
||||
},
|
||||
"original": {
|
||||
"ref": "ghc-9.8",
|
||||
"submodules": true,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.haskell.org/ghc/ghc"
|
||||
}
|
||||
},
|
||||
"ghc99": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1701580282,
|
||||
"narHash": "sha256-drA01r3JrXnkKyzI+owMZGxX0JameMzjK0W5jJE/+V4=",
|
||||
"ref": "refs/heads/master",
|
||||
"rev": "f5eb0f2982e9cf27515e892c4bdf634bcfb28459",
|
||||
"revCount": 62197,
|
||||
"submodules": true,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.haskell.org/ghc/ghc"
|
||||
},
|
||||
"original": {
|
||||
"submodules": true,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.haskell.org/ghc/ghc"
|
||||
}
|
||||
},
|
||||
"hackage": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1629940355,
|
||||
"narHash": "sha256-o9/U8R/JtyHIcxhMLaWYP+D/52B6LH/ikCyNZ7+mymI=",
|
||||
"lastModified": 1705796710,
|
||||
"narHash": "sha256-BdAqEqx6rdp8O8lu9yW1nXa8/da7+/QPgVjCJVEXyWw=",
|
||||
"owner": "input-output-hk",
|
||||
"repo": "hackage.nix",
|
||||
"rev": "429deb2a137084c011310bad92f4cecf244f2fc2",
|
||||
"rev": "31d4fed569912819adbf66b580489b45dc80a29a",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@ -257,26 +330,44 @@
|
||||
"HTTP": "HTTP",
|
||||
"cabal-32": "cabal-32",
|
||||
"cabal-34": "cabal-34",
|
||||
"cabal-36": "cabal-36",
|
||||
"cardano-shell": "cardano-shell",
|
||||
"flake-utils": "flake-utils_2",
|
||||
"flake-compat": "flake-compat",
|
||||
"ghc-8.6.5-iohk": "ghc-8.6.5-iohk",
|
||||
"ghc98X": "ghc98X",
|
||||
"ghc99": "ghc99",
|
||||
"hackage": "hackage",
|
||||
"hls-1.10": "hls-1.10",
|
||||
"hls-2.0": "hls-2.0",
|
||||
"hls-2.2": "hls-2.2",
|
||||
"hls-2.3": "hls-2.3",
|
||||
"hls-2.4": "hls-2.4",
|
||||
"hls-2.5": "hls-2.5",
|
||||
"hls-2.6": "hls-2.6",
|
||||
"hpc-coveralls": "hpc-coveralls",
|
||||
"nix-tools": "nix-tools",
|
||||
"nixpkgs": "nixpkgs",
|
||||
"hydra": "hydra",
|
||||
"iserv-proxy": "iserv-proxy",
|
||||
"nixpkgs": [
|
||||
"haskell-nix",
|
||||
"nixpkgs-unstable"
|
||||
],
|
||||
"nixpkgs-2003": "nixpkgs-2003",
|
||||
"nixpkgs-2009": "nixpkgs-2009",
|
||||
"nixpkgs-2105": "nixpkgs-2105",
|
||||
"nixpkgs-2111": "nixpkgs-2111",
|
||||
"nixpkgs-2205": "nixpkgs-2205",
|
||||
"nixpkgs-2211": "nixpkgs-2211",
|
||||
"nixpkgs-2305": "nixpkgs-2305",
|
||||
"nixpkgs-2311": "nixpkgs-2311",
|
||||
"nixpkgs-unstable": "nixpkgs-unstable",
|
||||
"old-ghc-nix": "old-ghc-nix",
|
||||
"stackage": "stackage"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1629940530,
|
||||
"narHash": "sha256-cf/bshYWloj+rJSmOcFokN6Rs1lpFkiECKpykN3JBH8=",
|
||||
"lastModified": 1705798224,
|
||||
"narHash": "sha256-/zJa0hC58vLD8PqTEQNeN9EJAQpbS+YluJhLVstgqY8=",
|
||||
"owner": "input-output-hk",
|
||||
"repo": "haskell.nix",
|
||||
"rev": "5fcd4faf98fc8ca8287e2c7bc1fff71dfd340f1f",
|
||||
"rev": "2a31673a97ed3efbae9835ea7334528d2bc4b6ab",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@ -285,6 +376,125 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"hls-1.10": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1680000865,
|
||||
"narHash": "sha256-rc7iiUAcrHxwRM/s0ErEsSPxOR3u8t7DvFeWlMycWgo=",
|
||||
"owner": "haskell",
|
||||
"repo": "haskell-language-server",
|
||||
"rev": "b08691db779f7a35ff322b71e72a12f6e3376fd9",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "haskell",
|
||||
"ref": "1.10.0.0",
|
||||
"repo": "haskell-language-server",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"hls-2.0": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1687698105,
|
||||
"narHash": "sha256-OHXlgRzs/kuJH8q7Sxh507H+0Rb8b7VOiPAjcY9sM1k=",
|
||||
"owner": "haskell",
|
||||
"repo": "haskell-language-server",
|
||||
"rev": "783905f211ac63edf982dd1889c671653327e441",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "haskell",
|
||||
"ref": "2.0.0.1",
|
||||
"repo": "haskell-language-server",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"hls-2.2": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1693064058,
|
||||
"narHash": "sha256-8DGIyz5GjuCFmohY6Fa79hHA/p1iIqubfJUTGQElbNk=",
|
||||
"owner": "haskell",
|
||||
"repo": "haskell-language-server",
|
||||
"rev": "b30f4b6cf5822f3112c35d14a0cba51f3fe23b85",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "haskell",
|
||||
"ref": "2.2.0.0",
|
||||
"repo": "haskell-language-server",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"hls-2.3": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1695910642,
|
||||
"narHash": "sha256-tR58doOs3DncFehHwCLczJgntyG/zlsSd7DgDgMPOkI=",
|
||||
"owner": "haskell",
|
||||
"repo": "haskell-language-server",
|
||||
"rev": "458ccdb55c9ea22cd5d13ec3051aaefb295321be",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "haskell",
|
||||
"ref": "2.3.0.0",
|
||||
"repo": "haskell-language-server",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"hls-2.4": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1699862708,
|
||||
"narHash": "sha256-YHXSkdz53zd0fYGIYOgLt6HrA0eaRJi9mXVqDgmvrjk=",
|
||||
"owner": "haskell",
|
||||
"repo": "haskell-language-server",
|
||||
"rev": "54507ef7e85fa8e9d0eb9a669832a3287ffccd57",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "haskell",
|
||||
"ref": "2.4.0.1",
|
||||
"repo": "haskell-language-server",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"hls-2.5": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1701080174,
|
||||
"narHash": "sha256-fyiR9TaHGJIIR0UmcCb73Xv9TJq3ht2ioxQ2mT7kVdc=",
|
||||
"owner": "haskell",
|
||||
"repo": "haskell-language-server",
|
||||
"rev": "27f8c3d3892e38edaef5bea3870161815c4d014c",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "haskell",
|
||||
"ref": "2.5.0.0",
|
||||
"repo": "haskell-language-server",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"hls-2.6": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1705325287,
|
||||
"narHash": "sha256-+P87oLdlPyMw8Mgoul7HMWdEvWP/fNlo8jyNtwME8E8=",
|
||||
"owner": "haskell",
|
||||
"repo": "haskell-language-server",
|
||||
"rev": "6e0b342fa0327e628610f2711f8c3e4eaaa08b1e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "haskell",
|
||||
"ref": "2.6.0.0",
|
||||
"repo": "haskell-language-server",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"hpc-coveralls": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
@ -301,6 +511,46 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"hydra": {
|
||||
"inputs": {
|
||||
"nix": "nix",
|
||||
"nixpkgs": [
|
||||
"haskell-nix",
|
||||
"hydra",
|
||||
"nix",
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1671755331,
|
||||
"narHash": "sha256-hXsgJj0Cy0ZiCiYdW2OdBz5WmFyOMKuw4zyxKpgUKm4=",
|
||||
"owner": "NixOS",
|
||||
"repo": "hydra",
|
||||
"rev": "f48f00ee6d5727ae3e488cbf9ce157460853fea8",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"id": "hydra",
|
||||
"type": "indirect"
|
||||
}
|
||||
},
|
||||
"iserv-proxy": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1691634696,
|
||||
"narHash": "sha256-MZH2NznKC/gbgBu8NgIibtSUZeJ00HTLJ0PlWKCBHb0=",
|
||||
"ref": "hkm/remote-iserv",
|
||||
"rev": "43a979272d9addc29fbffc2e8542c5d96e993d73",
|
||||
"revCount": 14,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.haskell.org/hamishmack/iserv-proxy.git"
|
||||
},
|
||||
"original": {
|
||||
"ref": "hkm/remote-iserv",
|
||||
"type": "git",
|
||||
"url": "https://gitlab.haskell.org/hamishmack/iserv-proxy.git"
|
||||
}
|
||||
},
|
||||
"ldap-client": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
@ -310,12 +560,44 @@
|
||||
"rev": "01afaf599ba6f8a9d804c269e91d3190b249d3f0",
|
||||
"revCount": 61,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/ldap-client.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/ldap-client.git"
|
||||
},
|
||||
"original": {
|
||||
"ref": "uni2work",
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/ldap-client.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/ldap-client.git"
|
||||
}
|
||||
},
|
||||
"lowdown-src": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1633514407,
|
||||
"narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=",
|
||||
"owner": "kristapsdz",
|
||||
"repo": "lowdown",
|
||||
"rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "kristapsdz",
|
||||
"repo": "lowdown",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"lowdown-src": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1633514407,
|
||||
"narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=",
|
||||
"owner": "kristapsdz",
|
||||
"repo": "lowdown",
|
||||
"rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "kristapsdz",
|
||||
"repo": "lowdown",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"memcached-binary": {
|
||||
@ -327,59 +609,64 @@
|
||||
"rev": "b7071df50bad3a251a544b984e4bf98fa09b8fae",
|
||||
"revCount": 28,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/memcached-binary.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/memcached-binary.git"
|
||||
},
|
||||
"original": {
|
||||
"ref": "uni2work",
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/memcached-binary.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/memcached-binary.git"
|
||||
}
|
||||
},
|
||||
"minio-hs": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1597069863,
|
||||
"narHash": "sha256-JmMajaLT4+zt+w2koDkaloFL8ugmrQBlcYKj+78qn9M=",
|
||||
"lastModified": 1705548354,
|
||||
"narHash": "sha256-wuJYScDu1hGlasE4rzUEi9ouvEiQYWcHF9jRngiQ3Z4=",
|
||||
"ref": "uni2work",
|
||||
"rev": "42103ab247057c04c8ce7a83d9d4c160713a3df1",
|
||||
"revCount": 197,
|
||||
"rev": "fafc203e1bace1998264d1ce4340fb801e877b51",
|
||||
"revCount": 223,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/minio-hs.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/minio-hs.git"
|
||||
},
|
||||
"original": {
|
||||
"ref": "uni2work",
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/minio-hs.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/minio-hs.git"
|
||||
}
|
||||
},
|
||||
"nix-tools": {
|
||||
"flake": false,
|
||||
"nix": {
|
||||
"inputs": {
|
||||
"lowdown-src": "lowdown-src",
|
||||
"nixpkgs": "nixpkgs",
|
||||
"nixpkgs-regression": "nixpkgs-regression"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1626997434,
|
||||
"narHash": "sha256-1judQmP298ao6cGUNxcGhcAXHOnA9qSLvWk/ZtoUL7w=",
|
||||
"owner": "input-output-hk",
|
||||
"repo": "nix-tools",
|
||||
"rev": "c8c5e6a6fbb12a73598d1a434984a36e880ce3cf",
|
||||
"lastModified": 1661606874,
|
||||
"narHash": "sha256-9+rpYzI+SmxJn+EbYxjGv68Ucp22bdFUSy/4LkHkkDQ=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nix",
|
||||
"rev": "11e45768b34fdafdcf019ddbd337afa16127ff0f",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "input-output-hk",
|
||||
"repo": "nix-tools",
|
||||
"owner": "NixOS",
|
||||
"ref": "2.11.0",
|
||||
"repo": "nix",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1624291665,
|
||||
"narHash": "sha256-kNkaoa3dai9WOi7fsPklCCWZ8hRAkXx0ZUhpYKShyUk=",
|
||||
"lastModified": 1657693803,
|
||||
"narHash": "sha256-G++2CJ9u0E7NNTAi9n5G8TdDmGJXcIjkJ3NF8cetQB8=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "3c6f3f84af60a8ed5b8a79cf3026b7630fcdefb8",
|
||||
"rev": "365e1b3a859281cf11b94f87231adeabbdd878a2",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixpkgs-20.09-darwin",
|
||||
"ref": "nixos-22.05-small",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
@ -400,29 +687,13 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-2009": {
|
||||
"locked": {
|
||||
"lastModified": 1624271064,
|
||||
"narHash": "sha256-qns/uRW7MR2EfVf6VEeLgCsCp7pIOjDeR44JzTF09MA=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "46d1c3f28ca991601a53e9a14fdd53fcd3dd8416",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixpkgs-20.09-darwin",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-2105": {
|
||||
"locked": {
|
||||
"lastModified": 1624291665,
|
||||
"narHash": "sha256-kNkaoa3dai9WOi7fsPklCCWZ8hRAkXx0ZUhpYKShyUk=",
|
||||
"lastModified": 1659914493,
|
||||
"narHash": "sha256-lkA5X3VNMKirvA+SUzvEhfA7XquWLci+CGi505YFAIs=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "3c6f3f84af60a8ed5b8a79cf3026b7630fcdefb8",
|
||||
"rev": "022caabb5f2265ad4006c1fa5b1ebe69fb0c3faf",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@ -432,50 +703,146 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-recent": {
|
||||
"nixpkgs-2111": {
|
||||
"locked": {
|
||||
"lastModified": 1669833724,
|
||||
"narHash": "sha256-/HEZNyGbnQecrgJnfE8d0WC5c1xuPSD2LUpB6YXlg4c=",
|
||||
"lastModified": 1659446231,
|
||||
"narHash": "sha256-hekabNdTdgR/iLsgce5TGWmfIDZ86qjPhxDg/8TlzhE=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "4d2b37a84fad1091b9de401eb450aae66f1a741e",
|
||||
"rev": "eabc38219184cc3e04a974fe31857d8e0eac098d",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "22.11",
|
||||
"ref": "nixpkgs-21.11-darwin",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-2205": {
|
||||
"locked": {
|
||||
"lastModified": 1685573264,
|
||||
"narHash": "sha256-Zffu01pONhs/pqH07cjlF10NnMDLok8ix5Uk4rhOnZQ=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "380be19fbd2d9079f677978361792cb25e8a3635",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixpkgs-22.05-darwin",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-2211": {
|
||||
"locked": {
|
||||
"lastModified": 1688392541,
|
||||
"narHash": "sha256-lHrKvEkCPTUO+7tPfjIcb7Trk6k31rz18vkyqmkeJfY=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "ea4c80b39be4c09702b0cb3b42eab59e2ba4f24b",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixpkgs-22.11-darwin",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-2305": {
|
||||
"locked": {
|
||||
"lastModified": 1701362232,
|
||||
"narHash": "sha256-GVdzxL0lhEadqs3hfRLuj+L1OJFGiL/L7gCcelgBlsw=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "d2332963662edffacfddfad59ff4f709dde80ffe",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixpkgs-23.05-darwin",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-2311": {
|
||||
"locked": {
|
||||
"lastModified": 1701386440,
|
||||
"narHash": "sha256-xI0uQ9E7JbmEy/v8kR9ZQan6389rHug+zOtZeZFiDJk=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "293822e55ec1872f715a66d0eda9e592dc14419f",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixpkgs-23.11-darwin",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-recent": {
|
||||
"locked": {
|
||||
"lastModified": 1659446231,
|
||||
"narHash": "sha256-hekabNdTdgR/iLsgce5TGWmfIDZ86qjPhxDg/8TlzhE=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "eabc38219184cc3e04a974fe31857d8e0eac098d",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixpkgs-21.11-darwin",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-regression": {
|
||||
"locked": {
|
||||
"lastModified": 1643052045,
|
||||
"narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-unstable": {
|
||||
"locked": {
|
||||
"lastModified": 1628785280,
|
||||
"narHash": "sha256-2B5eMrEr6O8ff2aQNeVxTB+9WrGE80OB4+oM6T7fOcc=",
|
||||
"lastModified": 1694822471,
|
||||
"narHash": "sha256-6fSDCj++lZVMZlyqOe9SIOL8tYSBz1bI8acwovRwoX8=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "6525bbc06a39f26750ad8ee0d40000ddfdc24acb",
|
||||
"rev": "47585496bcb13fb72e4a90daeea2f434e2501998",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixpkgs-unstable",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "47585496bcb13fb72e4a90daeea2f434e2501998",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs_2": {
|
||||
"locked": {
|
||||
"lastModified": 1622516815,
|
||||
"narHash": "sha256-ZjBd81a6J3TwtlBr3rHsZspYUwT9OdhDk+a/SgSEf7I=",
|
||||
"lastModified": 1701282334,
|
||||
"narHash": "sha256-MxCVrXY6v4QmfTwIysjjaX0XUhqBbxTWWB4HXtDYsdk=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "7e9b0dff974c89e070da1ad85713ff3c20b0ca97",
|
||||
"rev": "057f9aecfb71c4437d2b27d3323df7f93c010b7e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "21.05",
|
||||
"ref": "23.11",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
@ -483,16 +850,16 @@
|
||||
"old-ghc-nix": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1621819714,
|
||||
"narHash": "sha256-EJCnYQSWk7FRLwS0lZgTWIiQ6pcvDX1VuD6LGD4Uwzs=",
|
||||
"lastModified": 1631092763,
|
||||
"narHash": "sha256-sIKgO+z7tj4lw3u6oBZxqIhDrzSkvpHtv0Kki+lh9Fg=",
|
||||
"owner": "angerman",
|
||||
"repo": "old-ghc-nix",
|
||||
"rev": "f089a6f090cdb35fcf95f865fc6a31ba6b3ac4eb",
|
||||
"rev": "af48a7a7353e418119b6dfe3cd1463a657f342b8",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "angerman",
|
||||
"ref": "master2",
|
||||
"ref": "master",
|
||||
"repo": "old-ghc-nix",
|
||||
"type": "github"
|
||||
}
|
||||
@ -512,7 +879,6 @@
|
||||
"memcached-binary": "memcached-binary",
|
||||
"minio-hs": "minio-hs",
|
||||
"nixpkgs": "nixpkgs_2",
|
||||
"nixpkgs-recent": "nixpkgs-recent",
|
||||
"serversession": "serversession",
|
||||
"xss-sanitize": "xss-sanitize",
|
||||
"yesod": "yesod",
|
||||
@ -528,22 +894,22 @@
|
||||
"rev": "b9d76def10da1260c7f6aa82bda32111f37a952b",
|
||||
"revCount": 174,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/serversession.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/serversession.git"
|
||||
},
|
||||
"original": {
|
||||
"ref": "uni2work",
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/serversession.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/serversession.git"
|
||||
}
|
||||
},
|
||||
"stackage": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1629940472,
|
||||
"narHash": "sha256-LFrNC25OpNoS6dGG5om+UGP8YdYjp01Qm6cenbemaVg=",
|
||||
"lastModified": 1705795852,
|
||||
"narHash": "sha256-Po+1G5KgHVRbP/PzK3HgdI1ZS7XJtP63vJmpSZMvFV8=",
|
||||
"owner": "input-output-hk",
|
||||
"repo": "stackage.nix",
|
||||
"rev": "c50dd0527babf85818cc67a7099e532e453752db",
|
||||
"rev": "8adfc78e62d3dbc3498a03579a50f3cf70cd4328",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@ -576,29 +942,29 @@
|
||||
"rev": "dc928c3a456074b8777603bea20e81937321777f",
|
||||
"revCount": 114,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/xss-sanitize.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/xss-sanitize.git"
|
||||
},
|
||||
"original": {
|
||||
"ref": "uni2work",
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/xss-sanitize.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/xss-sanitize.git"
|
||||
}
|
||||
},
|
||||
"yesod": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1625061191,
|
||||
"narHash": "sha256-K0X2MwUStChml1DlJ7t4yBMDwrMe6j/780nJtSy9Hss=",
|
||||
"lastModified": 1705542497,
|
||||
"narHash": "sha256-DYri6G3LeL3Gu11K0gAcUOxMwyKrLVkNnb5oTjHKRro=",
|
||||
"ref": "uni2work",
|
||||
"rev": "a59f63e0336ee61f7a90b8778e9147305d3127bb",
|
||||
"revCount": 5053,
|
||||
"rev": "9f8d26371d4760f8985e7bbe00c3ac16be1301bc",
|
||||
"revCount": 5208,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/yesod.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/yesod.git"
|
||||
},
|
||||
"original": {
|
||||
"ref": "uni2work",
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/yesod.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/yesod.git"
|
||||
}
|
||||
},
|
||||
"zip-stream": {
|
||||
@ -610,12 +976,12 @@
|
||||
"rev": "843683d024f767de236f74d24a3348f69181a720",
|
||||
"revCount": 39,
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/zip-stream.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/zip-stream.git"
|
||||
},
|
||||
"original": {
|
||||
"ref": "uni2work",
|
||||
"type": "git",
|
||||
"url": "https://gitlab.ifi.lmu.de/uni2work/haskell/zip-stream.git"
|
||||
"url": "https://gitlab.uniworx.de/haskell/zip-stream.git"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
77
flake.nix
77
flake.nix
@ -1,4 +1,4 @@
|
||||
# SPDX-FileCopyrightText: 2022-2023 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor@kleen.consulting>
|
||||
# SPDX-FileCopyrightText: 2022-2024 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor@kleen.consulting>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
@ -12,14 +12,7 @@
|
||||
type = "github";
|
||||
owner = "NixOS";
|
||||
repo = "nixpkgs";
|
||||
# ref = "6525bbc06a39f26750ad8ee0d40000ddfdc24acb";
|
||||
ref = "21.05";
|
||||
};
|
||||
nixpkgs-recent = {
|
||||
type = "github";
|
||||
owner = "NixOS";
|
||||
repo = "nixpkgs";
|
||||
ref = "22.11";
|
||||
ref = "23.11";
|
||||
};
|
||||
flake-utils = {
|
||||
type = "github";
|
||||
@ -29,64 +22,64 @@
|
||||
};
|
||||
|
||||
encoding = {
|
||||
url = "git+https://gitlab.ifi.lmu.de/uni2work/haskell/encoding.git?ref=uni2work";
|
||||
url = "git+https://gitlab.uniworx.de/haskell/encoding.git?ref=uni2work";
|
||||
flake = false;
|
||||
};
|
||||
memcached-binary = {
|
||||
url = "git+https://gitlab.ifi.lmu.de/uni2work/haskell/memcached-binary.git?ref=uni2work";
|
||||
url = "git+https://gitlab.uniworx.de/haskell/memcached-binary.git?ref=uni2work";
|
||||
flake = false;
|
||||
};
|
||||
conduit-resumablesink = {
|
||||
url = "git+https://gitlab.ifi.lmu.de/uni2work/haskell/conduit-resumablesink.git?ref=uni2work";
|
||||
url = "git+https://gitlab.uniworx.de/haskell/conduit-resumablesink.git?ref=uni2work";
|
||||
flake = false;
|
||||
};
|
||||
HaskellNet-SSL = {
|
||||
url = "git+https://gitlab.ifi.lmu.de/uni2work/haskell/HaskellNet-SSL.git?ref=uni2work";
|
||||
url = "git+https://gitlab.uniworx.de/haskell/HaskellNet-SSL.git?ref=uni2work";
|
||||
flake = false;
|
||||
};
|
||||
ldap-client = {
|
||||
url = "git+https://gitlab.ifi.lmu.de/uni2work/haskell/ldap-client.git?ref=uni2work";
|
||||
url = "git+https://gitlab.uniworx.de/haskell/ldap-client.git?ref=uni2work";
|
||||
flake = false;
|
||||
};
|
||||
serversession = {
|
||||
url = "git+https://gitlab.ifi.lmu.de/uni2work/haskell/serversession.git?ref=uni2work";
|
||||
url = "git+https://gitlab.uniworx.de/haskell/serversession.git?ref=uni2work";
|
||||
flake = false;
|
||||
};
|
||||
xss-sanitize = {
|
||||
url = "git+https://gitlab.ifi.lmu.de/uni2work/haskell/xss-sanitize.git?ref=uni2work";
|
||||
url = "git+https://gitlab.uniworx.de/haskell/xss-sanitize.git?ref=uni2work";
|
||||
flake = false;
|
||||
};
|
||||
colonnade = {
|
||||
url = "git+https://gitlab.ifi.lmu.de/uni2work/haskell/colonnade.git?ref=uni2work";
|
||||
url = "git+https://gitlab.uniworx.de/haskell/colonnade.git?ref=uni2work";
|
||||
flake = false;
|
||||
};
|
||||
minio-hs = {
|
||||
url = "git+https://gitlab.ifi.lmu.de/uni2work/haskell/minio-hs.git?ref=uni2work";
|
||||
url = "git+https://gitlab.uniworx.de/haskell/minio-hs.git?ref=uni2work";
|
||||
flake = false;
|
||||
};
|
||||
cryptoids = {
|
||||
url = "git+https://gitlab.ifi.lmu.de/uni2work/haskell/cryptoids.git?ref=uni2work";
|
||||
url = "git+https://gitlab.uniworx.de/haskell/cryptoids.git?ref=uni2work";
|
||||
flake = false;
|
||||
};
|
||||
zip-stream = {
|
||||
url = "git+https://gitlab.ifi.lmu.de/uni2work/haskell/zip-stream.git?ref=uni2work";
|
||||
url = "git+https://gitlab.uniworx.de/haskell/zip-stream.git?ref=uni2work";
|
||||
flake = false;
|
||||
};
|
||||
yesod = {
|
||||
url = "git+https://gitlab.ifi.lmu.de/uni2work/haskell/yesod.git?ref=uni2work";
|
||||
url = "git+https://gitlab.uniworx.de/haskell/yesod.git?ref=uni2work";
|
||||
flake = false;
|
||||
};
|
||||
cryptonite = {
|
||||
url = "git+https://gitlab.ifi.lmu.de/uni2work/haskell/cryptonite.git?ref=uni2work";
|
||||
url = "git+https://gitlab.uniworx.de/haskell/cryptonite.git?ref=uni2work";
|
||||
flake = false;
|
||||
};
|
||||
esqueleto = {
|
||||
url = "git+https://gitlab.ifi.lmu.de/uni2work/haskell/esqueleto.git?ref=uni2work";
|
||||
url = "git+https://gitlab.uniworx.de/haskell/esqueleto.git?ref=uni2work";
|
||||
flake = false;
|
||||
};
|
||||
};
|
||||
|
||||
outputs = inputs@{ self, nixpkgs, nixpkgs-recent, flake-utils, haskell-nix, ... }: flake-utils.lib.eachSystem ["x86_64-linux"]
|
||||
outputs = inputs@{ self, nixpkgs, flake-utils, haskell-nix, ... }: flake-utils.lib.eachSystem ["x86_64-linux"]
|
||||
(system:
|
||||
let frontendSource = pkgs.lib.sourceByRegex ./. [
|
||||
"^(assets|frontend)(/.*)?$"
|
||||
@ -112,17 +105,19 @@
|
||||
|
||||
overlays = [
|
||||
(final: prev: let
|
||||
pkgs-recent = import nixpkgs-recent { inherit system; };
|
||||
pkgs = import nixpkgs { inherit system; };
|
||||
in {
|
||||
inherit (pkgs-recent) dockerTools node2nix glibcLocalesUtf8 tzdata chromium minio minio-client skopeo; inherit (pkgs-recent.stdenv) fetchurlBoot;
|
||||
inherit (pkgs-recent) gup dockerTools nodejs-14_x glibcLocalesUtf8 tzdata chromium minio minio-client skopeo;
|
||||
inherit (pkgs-recent.stdenv) fetchurlBoot make;
|
||||
inherit (pkgs-recent.coreutils) touch;
|
||||
stack = pkgs.symlinkJoin {
|
||||
inherit (pkgs-recent.stack) name;
|
||||
paths = [pkgs-recent.stack];
|
||||
nativeBuildInputs = [pkgs-recent.makeWrapper];
|
||||
inherit (pkgs.stack) name;
|
||||
paths = [pkgs.stack];
|
||||
nativeBuildInputs = [pkgs.makeWrapper];
|
||||
|
||||
postBuild = ''
|
||||
wrapProgram $out/bin/stack \
|
||||
--prefix PATH : "${prev.lib.makeBinPath [pkgs-recent.nix]}" \
|
||||
--prefix PATH : "${prev.lib.makeBinPath [pkgs.nix]}" \
|
||||
--add-flags "\
|
||||
--nix \
|
||||
--no-nix-pure \
|
||||
@ -133,36 +128,14 @@
|
||||
};
|
||||
})
|
||||
|
||||
(import ./nix/maildev)
|
||||
haskell-nix.overlay
|
||||
(import ./nix/uniworx { inherit inputs frontendSource backendSource; gitRevision = if self ? rev then self.rev else null; })
|
||||
(import ./nix/docker { inherit self; })
|
||||
(import ./nix/parse-changelog.nix {})
|
||||
];
|
||||
|
||||
haskellFlake = pkgs.uniworx.flake {};
|
||||
|
||||
inherit (pkgs.lib) recursiveUpdate;
|
||||
in {
|
||||
packages = haskellFlake.packages // {
|
||||
inherit (pkgs) uniworxNodeDependencies uniworxWellKnown uniworxFrontend uniworxTestDocker uniworxDocker changelogJson;
|
||||
};
|
||||
|
||||
apps = haskellFlake.apps // {
|
||||
calculateMaterializedSha = flake-utils.lib.mkApp { drv = pkgs.uniworx.stack-nix.passthru.calculateMaterializedSha; exePath = ""; };
|
||||
jqChangelogJson = flake-utils.lib.mkApp { drv = pkgs.jqChangelogJson; };
|
||||
};
|
||||
|
||||
checks = haskellFlake.checks // {
|
||||
uniworxFrontend = pkgs.uniworxFrontend.check;
|
||||
};
|
||||
|
||||
devShell = import ./shell.nix { pkgs = self.legacyPackages.${system}; nixpkgsPath = nixpkgs; };
|
||||
|
||||
legacyPackages = pkgs.lib.foldr (overlay: acc: acc // recursiveUpdate (overlay self.legacyPackages.${system} pkgs) pkgs) {} overlays;
|
||||
|
||||
defaultPackage = self.packages.${system}."uniworx:exe:uniworx";
|
||||
defaultApp = self.apps.${system}."uniworx:exe:uniworx";
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
@ -1,14 +1,18 @@
|
||||
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
// SPDX-FileCopyrightText: 2022-2024 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor.kleen@ifi.lmu.de>, David Mosbach <david.mosbach@uniworx.de>
|
||||
//
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
// SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design
|
||||
|
||||
@use "~@fortawesome/fontawesome-pro/scss/fontawesome" with ( $fa-font-path: "~@fortawesome/fontawesome-pro/webfonts" )
|
||||
//@use "~@fortawesome/fontawesome-pro/scss/fontawesome" with ( $fa-font-path: "~@fortawesome/fontawesome-pro/webfonts" )
|
||||
|
||||
@forward "~@fortawesome/fontawesome-pro/scss/fontawesome"
|
||||
//@forward "~@fortawesome/fontawesome-pro/scss/fontawesome"
|
||||
|
||||
@use "~@fortawesome/fontawesome-pro/scss/solid"
|
||||
//@use "~@fortawesome/fontawesome-pro/scss/solid"
|
||||
|
||||
@use "icons.scss"
|
||||
|
||||
$icons: '~/assets/icons/fradrive'
|
||||
|
||||
@function ico-content($ico)
|
||||
@return url('#{$icons}/#{$ico}.svg')
|
||||
|
||||
@use "~typeface-roboto" as roboto
|
||||
@use "~typeface-source-sans-pro" as source-sans-pro
|
||||
@use "~typeface-source-code-pro" as source-code-pro
|
||||
|
||||
@ -1,9 +1,10 @@
|
||||
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>,Steffen Jost <jost@cip.ifi.lmu.de>,Wolfgang Witt <Wolfgang.Witt@campus.lmu.de>
|
||||
// SPDX-FileCopyrightText: 2022-2024 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>,Steffen Jost <jost@cip.ifi.lmu.de>,Wolfgang Witt <Wolfgang.Witt@campus.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
|
||||
//
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
// SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design
|
||||
|
||||
@use "common" as *
|
||||
@use "icons"
|
||||
|
||||
\:root
|
||||
// THEME INDEPENDENT COLORS
|
||||
@ -27,9 +28,9 @@
|
||||
--color-fontsec: #5b5861
|
||||
|
||||
// FONTS
|
||||
--font-base: "Source Sans Pro", "Trebuchet MS", sans-serif
|
||||
--font-logo: "Roboto", var(--font-base)
|
||||
--font-monospace: "Source Code Pro", monospace
|
||||
--font-base: 'Arial', sans-serif
|
||||
--font-logo: var(--font-base)
|
||||
--font-monospace: 'Arial Mono', monospace
|
||||
|
||||
// DIMENSIONS
|
||||
--header-height: 100px
|
||||
@ -62,6 +63,7 @@ body
|
||||
--color-link: var(--color-font)
|
||||
--color-link-hover: var(--color-font)
|
||||
--color-lmu-box-border: var(--color-lightwhite)
|
||||
--filter-primary: invert(7%) sepia(83%) saturate(4889%) hue-rotate(241deg) brightness(106%) contrast(169%)
|
||||
|
||||
&.theme--lavender
|
||||
--color-primary: #584c9c
|
||||
@ -71,6 +73,7 @@ body
|
||||
--color-darker: #3c2765
|
||||
--color-link: var(--color-dark)
|
||||
--color-link-hover: var(--color-darker)
|
||||
--filter-primary: invert(28%) sepia(36%) saturate(1286%) hue-rotate(212deg) brightness(97%) contrast(83%)
|
||||
|
||||
&.theme--neutral-blue
|
||||
--color-primary: #3E606F
|
||||
@ -78,6 +81,7 @@ body
|
||||
--color-lighter: rgb(145, 159, 170)
|
||||
--color-dark: rgb(42, 74, 88)
|
||||
--color-darker: #193441
|
||||
--filter-primary: invert(35%) sepia(8%) saturate(2168%) hue-rotate(153deg) brightness(88%) contrast(80%)
|
||||
|
||||
&.theme--aberdeen-reds
|
||||
--color-primary: #820333
|
||||
@ -85,6 +89,7 @@ body
|
||||
--color-lighter: #F0433A
|
||||
--color-dark: #540032
|
||||
--color-darker: #2E112D
|
||||
--filter-primary: invert(12%) sepia(38%) saturate(6051%) hue-rotate(322deg) brightness(91%) contrast(110%)
|
||||
|
||||
&.theme--moss-green
|
||||
--color-primary: #5C996B
|
||||
@ -92,6 +97,7 @@ body
|
||||
--color-lighter: #99FFB2
|
||||
--color-dark: #3D6647
|
||||
--color-darker: #1F3324
|
||||
--filter-primary: invert(57%) sepia(19%) saturate(788%) hue-rotate(82deg) brightness(92%) contrast(87%)
|
||||
|
||||
&.theme--sky-love
|
||||
--color-primary: #87ABE5
|
||||
@ -101,6 +107,7 @@ body
|
||||
--color-darker: #6B7BC9
|
||||
--color-link: var(--color-lightblack)
|
||||
--color-link-hover: var(--color-darker)
|
||||
--filter-primary: invert(55%) sepia(47%) saturate(394%) hue-rotate(180deg) brightness(115%) contrast(80%)
|
||||
|
||||
// END THEMES
|
||||
|
||||
@ -263,6 +270,9 @@ button:not(.btn-link),
|
||||
&.btn-danger
|
||||
background-color: var(--color-error-dark)
|
||||
|
||||
span
|
||||
@extend .white-ico
|
||||
|
||||
.buttongroup
|
||||
display: grid
|
||||
grid: min-content / auto-flow max-content
|
||||
@ -450,9 +460,9 @@ input[type="button"].btn-info:not(.btn-link):hover,
|
||||
color: inherit
|
||||
|
||||
&::before
|
||||
@extend .fas
|
||||
@extend .ico-link
|
||||
|
||||
content: fa-content($fa-var-link)
|
||||
content:""
|
||||
margin-right: 0.25em
|
||||
|
||||
&.table__th-link::before
|
||||
@ -655,7 +665,6 @@ section
|
||||
margin: 0 auto 0.5rem
|
||||
|
||||
&::before
|
||||
@extend .fas
|
||||
|
||||
position: absolute
|
||||
display: flex
|
||||
@ -675,6 +684,13 @@ section
|
||||
&.notification--broad
|
||||
max-width: none
|
||||
|
||||
.fas
|
||||
position: absolute
|
||||
left: 20px
|
||||
top: 0
|
||||
height: 100%
|
||||
width: 60px
|
||||
|
||||
&:first-child
|
||||
margin-top: 0
|
||||
&:last-child
|
||||
@ -1271,6 +1287,9 @@ ul.breadcrumbs__list
|
||||
margin: 0 5px
|
||||
margin-top: 1px
|
||||
|
||||
span
|
||||
@extend .white-ico
|
||||
|
||||
a.breadcrumbs__home
|
||||
opacity: 0.5
|
||||
margin-right: 7px
|
||||
@ -1281,6 +1300,10 @@ a.breadcrumbs__home
|
||||
&:hover
|
||||
opacity: 1
|
||||
|
||||
i
|
||||
@extend .white-ico
|
||||
|
||||
|
||||
.breadcrumbs__last-item
|
||||
font-weight: 600
|
||||
opacity: 1
|
||||
|
||||
164
frontend/src/icons.scss
Normal file
164
frontend/src/icons.scss
Normal file
@ -0,0 +1,164 @@
|
||||
// SPDX-FileCopyrightText: 2024 David Mosbach <david.mosbach@uniworx.de>
|
||||
//
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
// SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design
|
||||
|
||||
|
||||
$ico-width: 30px;
|
||||
|
||||
$icons: new,
|
||||
ok,
|
||||
not-ok,
|
||||
warning,
|
||||
problem,
|
||||
visible,
|
||||
invisible,
|
||||
course-favourite-manual,
|
||||
course-favourite-automatic,
|
||||
course-favourite-off,
|
||||
enrol-true,
|
||||
enrol-false,
|
||||
planned,
|
||||
announce,
|
||||
exam,
|
||||
exam-register-true,
|
||||
exam-register-false,
|
||||
exam-auto-occurrence-nudge-up,
|
||||
exam-auto-occurrence-nudge-down,
|
||||
exam-auto-occurrence-ignore,
|
||||
exam-auto-occurrence-reconsider,
|
||||
comment-true,
|
||||
comment-false,
|
||||
link,
|
||||
file-donwload,
|
||||
file-upload,
|
||||
file-zip,
|
||||
file-csv,
|
||||
sft-question,
|
||||
sft-hint,
|
||||
sft-solution,
|
||||
sft-marking,
|
||||
email,
|
||||
register-template,
|
||||
no-correctors,
|
||||
remove-user,
|
||||
tooltip-default,
|
||||
notification-success,
|
||||
notification-info,
|
||||
notification-warning,
|
||||
notification-error,
|
||||
notification-nonactive,
|
||||
favourite,
|
||||
language,
|
||||
nav-container-close,
|
||||
page-action-children-close,
|
||||
menu-news,
|
||||
menu-help,
|
||||
menu-profile,
|
||||
menu-login,
|
||||
menu-logout,
|
||||
breadcrumbs-home,
|
||||
menu-extra,
|
||||
menu-course-list,
|
||||
menu-corrections,
|
||||
menu-exams,
|
||||
menu-admin,
|
||||
menu-lms,
|
||||
menu-qualification,
|
||||
page-action-primary-expand,
|
||||
page-action-secondary,
|
||||
breadcrumb-separator,
|
||||
file-upload-session,
|
||||
standalone-field-error,
|
||||
file-user,
|
||||
notification,
|
||||
notification-sent,
|
||||
no-notification,
|
||||
personal-identification,
|
||||
menu-workflows,
|
||||
video,
|
||||
submission-user-duplicate,
|
||||
submission-no-users,
|
||||
reset,
|
||||
blocked,
|
||||
certificate,
|
||||
print-center,
|
||||
letter,
|
||||
at,
|
||||
supervisor,
|
||||
supervisor-foreign,
|
||||
waiting-for-user,
|
||||
expired,
|
||||
locked,
|
||||
unlocked,
|
||||
trash,
|
||||
reset-tries,
|
||||
company,
|
||||
edit,
|
||||
user-edit,
|
||||
placeholder,
|
||||
loading;
|
||||
|
||||
|
||||
@each $name in $icons {
|
||||
.ico-#{$name} {
|
||||
background-image: url('../../assets/icons/fradrive/#{$name}.svg');
|
||||
background-size: contain;
|
||||
background-repeat: no-repeat;
|
||||
background-position: center;
|
||||
aspect-ratio: 1/1;
|
||||
min-width: 1em;
|
||||
font-size: inherit;
|
||||
display: inline-block;
|
||||
vertical-align: middle;
|
||||
}
|
||||
}
|
||||
|
||||
.white-ico {
|
||||
filter: invert(100%) sepia(20%) saturate(901%) hue-rotate(47deg) brightness(106%) contrast(101%);
|
||||
}
|
||||
|
||||
.fw-ico {
|
||||
width: $ico-width;
|
||||
}
|
||||
|
||||
.small-ico {
|
||||
font-size: 1em;
|
||||
}
|
||||
|
||||
.medium-ico {
|
||||
font-size: 1.5em;
|
||||
}
|
||||
|
||||
.large-ico {
|
||||
font-size: 2em;
|
||||
}
|
||||
|
||||
.ico-spin {
|
||||
animation-name: ico-spin;
|
||||
animation-delay: 0s;
|
||||
animation-duration: 3s;
|
||||
animation-direction: normal;
|
||||
animation-iteration-count: infinite;
|
||||
animation-timing-function: linear;
|
||||
}
|
||||
|
||||
@media (prefers-reduced-motion: reduce) {
|
||||
.ico-spin {
|
||||
animation-delay: -1ms;
|
||||
animation-duration: 1ms;
|
||||
animation-iteration-count: 1;
|
||||
transition-delay: 0s;
|
||||
transition-duration: 0s;
|
||||
}
|
||||
}
|
||||
|
||||
@keyframes ico-spin {
|
||||
0% {
|
||||
transform: rotate(0deg);
|
||||
}
|
||||
100% {
|
||||
transform: rotate(360deg);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,8 +1,8 @@
|
||||
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Johannes Eder <ederj@cip.ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>
|
||||
// SPDX-FileCopyrightText: 2022-2024 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor.kleen@ifi.lmu.de>,Johannes Eder <ederj@cip.ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>
|
||||
//
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
/* global global:writable */
|
||||
/* global:writable */
|
||||
|
||||
import semver from 'semver';
|
||||
import sodium from 'sodium-javascript';
|
||||
@ -365,7 +365,7 @@ export class StorageManager {
|
||||
}
|
||||
|
||||
addHistoryListener(listener, options=this._options, ...args) {
|
||||
const modified_listener = (function(event, ...listener_args) { // eslint-disable-line no-unused-vars
|
||||
const modified_listener = (function(event, ...listener_args) {
|
||||
|
||||
// do not propagate popstate events with empty state
|
||||
if(event.state === null)
|
||||
@ -498,13 +498,11 @@ function encrypt(plaintext, key) {
|
||||
if (!plaintext) return '';
|
||||
if (!key) throw new Error('Cannot encrypt plaintext without a valid key!');
|
||||
|
||||
/* eslint-disable no-undef */
|
||||
// TODO use const if possible
|
||||
let plaintextB = Buffer.from(plaintext);
|
||||
let cipherB = Buffer.alloc(plaintextB.length + sodium.crypto_secretbox_MACBYTES);
|
||||
let nonceB = Buffer.alloc(sodium.crypto_secretbox_NONCEBYTES);
|
||||
let keyB = Buffer.from(key);
|
||||
/* eslint-enable no-undef */
|
||||
|
||||
sodium.crypto_secretbox_easy(cipherB, plaintextB, nonceB, keyB);
|
||||
|
||||
@ -520,13 +518,11 @@ function decrypt(ciphertext, key) {
|
||||
if (!ciphertext) return '';
|
||||
if (!key) throw new Error('Cannot decrypt ciphertext without a valid key!');
|
||||
|
||||
/* eslint-disable no-undef */
|
||||
// TODO use const if possible
|
||||
let cipherB = Buffer.from(ciphertext);
|
||||
let plaintextB = Buffer.alloc(cipherB.length - sodium.crypto_secretbox_MACBYTES);
|
||||
let nonceB = Buffer.alloc(sodium.crypto_secretbox_NONCEBYTES);
|
||||
let keyB = Buffer.from(key);
|
||||
/* eslint-enable no-undef */
|
||||
|
||||
sodium.crypto_secretbox_open_easy(plaintextB, cipherB, nonceB, keyB);
|
||||
|
||||
|
||||
@ -171,7 +171,7 @@ export class Alerts {
|
||||
}
|
||||
};
|
||||
|
||||
_createAlertElement(type, content, icon = 'info-circle') {
|
||||
_createAlertElement(type, content, icon = 'notification-info') {
|
||||
const alertElement = document.createElement('div');
|
||||
alertElement.classList.add(ALERT_CLASS, 'alert-' + type);
|
||||
|
||||
@ -179,7 +179,7 @@ export class Alerts {
|
||||
alertCloser.classList.add(ALERT_CLOSER_CLASS);
|
||||
|
||||
const alertIcon = document.createElement('div');
|
||||
alertIcon.classList.add(ALERT_ICON_CLASS, 'fas', 'fa-' + icon);
|
||||
alertIcon.classList.add(ALERT_ICON_CLASS, 'ico-' + icon);
|
||||
|
||||
const alertContent = document.createElement('div');
|
||||
alertContent.classList.add(ALERT_CONTENT_CLASS);
|
||||
|
||||
@ -1,9 +1,10 @@
|
||||
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
// SPDX-FileCopyrightText: 2022-2024 Gregor Kleen <gregor.kleen@ifi.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
|
||||
//
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
// SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design
|
||||
|
||||
@use "../../common" as *
|
||||
@use '../../icons'
|
||||
|
||||
.alerts
|
||||
position: fixed
|
||||
@ -24,9 +25,9 @@
|
||||
cursor: pointer
|
||||
|
||||
&::before
|
||||
@extend .fas
|
||||
@extend .ico-nav-container-close
|
||||
|
||||
content: fa-content($fa-var-chevron-up)
|
||||
content: ""
|
||||
position: absolute
|
||||
left: 50%
|
||||
top: 0
|
||||
@ -111,7 +112,7 @@
|
||||
.alert__icon
|
||||
text-align: right
|
||||
position: absolute
|
||||
left: 0px
|
||||
left: 8px
|
||||
bottom: 0
|
||||
width: 50px
|
||||
height: 100%
|
||||
@ -152,9 +153,9 @@
|
||||
color: white
|
||||
|
||||
&::before
|
||||
@extend .fas
|
||||
@extend .ico-not-ok
|
||||
|
||||
content: fa-content($fa-var-times)
|
||||
content: ""
|
||||
position: absolute
|
||||
top: 50%
|
||||
left: 50%
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>
|
||||
// SPDX-FileCopyrightText: 2022-2024 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
|
||||
//
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
@ -301,7 +301,7 @@ export class ExamCorrect {
|
||||
users: [user],
|
||||
status: STATUS.LOADING,
|
||||
};
|
||||
if (results && results !== {}) rowInfo.results = results;
|
||||
if (results && results != {}) rowInfo.results = results;
|
||||
if (result !== undefined) rowInfo.result = result;
|
||||
this._addRow(rowInfo);
|
||||
|
||||
@ -461,7 +461,7 @@ export class ExamCorrect {
|
||||
for (let [k, v] of Object.entries(newEntry.results)) {
|
||||
const resultCell = row.cells.item(this._cIndices.get(k));
|
||||
if (v === null) {
|
||||
resultCell.innerHTML = '<i class="fas fa-fw fa-trash"></i>';
|
||||
resultCell.innerHTML = '<i class="fas fw-ico ico-trash"></i>';
|
||||
resultCell.classList.remove('exam-correct--result-unconfirmed');
|
||||
} else if (v && v.result !== undefined && v.result !== null) {
|
||||
resultCell.innerHTML = v.result;
|
||||
@ -499,7 +499,7 @@ export class ExamCorrect {
|
||||
else
|
||||
html = examResult.status;
|
||||
} else if (examResult === null) {
|
||||
html = '<i class="fas fa-fw fa-trash"></i>';
|
||||
html = '<i class="fas fw-ico ico-trash"></i>';
|
||||
}
|
||||
|
||||
return html;
|
||||
@ -598,7 +598,7 @@ export class ExamCorrect {
|
||||
const partCell = document.createElement('TD');
|
||||
|
||||
if (partResult === null) {
|
||||
partCell.innerHTML = '<i class="fas fa-fw fa-trash"></i>';
|
||||
partCell.innerHTML = '<i class="fas fw-ico ico-trash"></i>';
|
||||
} else {
|
||||
partCell.innerHTML = partResult;
|
||||
}
|
||||
@ -683,10 +683,10 @@ function userToHTML(user) {
|
||||
}
|
||||
|
||||
function setStatus(elem, status) {
|
||||
const successClasses = ['fas', 'fa-fw', 'fa-check', 'exam-correct--success'];
|
||||
const ambiguousClasses = ['fas', 'fa-fw', 'fa-question', 'exam-correct--ambiguous'];
|
||||
const errorClasses = ['fas', 'fa-fw', 'fa-times', 'exam-correct--error'];
|
||||
const loadingClasses = ['fas', 'fa-fw', 'fa-spinner-third', 'fa-spin'];
|
||||
const successClasses = ['fas', 'fw-ico', 'ico-ok', 'exam-correct--success'];
|
||||
const ambiguousClasses = ['fas', 'fw-ico', 'ico-menu-help', 'exam-correct--ambiguous'];
|
||||
const errorClasses = ['fas', 'fw-ico', 'ico-not-ok', 'exam-correct--error'];
|
||||
const loadingClasses = ['fas', 'fw-ico', 'ico-loading', 'ico-spin'];
|
||||
|
||||
elem.classList.remove(...successClasses, ...ambiguousClasses, ...errorClasses, ...loadingClasses);
|
||||
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>
|
||||
// SPDX-FileCopyrightText: 2022-2024 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
|
||||
//
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
@ -104,8 +104,8 @@ export class HideColumns {
|
||||
|
||||
const hider = document.createElement('span');
|
||||
|
||||
const hiderIcon = document.createElement('i');
|
||||
hiderIcon.classList.add('fas', 'fa-fw');
|
||||
const hiderIcon = document.createElement('span');
|
||||
hiderIcon.classList.add('fas');
|
||||
hider.appendChild(hiderIcon);
|
||||
|
||||
const hiderContent = document.createElement('span');
|
||||
@ -240,8 +240,8 @@ export class HideColumns {
|
||||
|
||||
updateHiderIcon(hider, hidden) {
|
||||
Array.from(hider.getElementsByClassName('fas')).forEach(hiderIcon => {
|
||||
hiderIcon.classList.remove(hidden ? 'fa-eye' : 'fa-eye-slash');
|
||||
hiderIcon.classList.add(hidden ? 'fa-eye-slash' : 'fa-eye');
|
||||
hiderIcon.classList.remove(hidden ? 'ico-visible' : 'ico-invisible');
|
||||
hiderIcon.classList.add(hidden ? 'ico-invisible' : 'ico-visible');
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@ -1,8 +1,10 @@
|
||||
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
// SPDX-FileCopyrightText: 2022-2024 Gregor Kleen <gregor.kleen@ifi.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
|
||||
//
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
// SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design
|
||||
|
||||
@use "../../icons.scss"
|
||||
|
||||
.table-hider
|
||||
background-color: #fff
|
||||
color: var(--color-link)
|
||||
@ -16,6 +18,9 @@
|
||||
transform-origin: top
|
||||
z-index: 1
|
||||
|
||||
.fas
|
||||
filter: var(--filter-primary)
|
||||
|
||||
&:hover
|
||||
background-color: var(--color-grey-light)
|
||||
|
||||
@ -66,5 +71,9 @@
|
||||
&:empty
|
||||
margin: 0
|
||||
|
||||
.fas
|
||||
@extend .white-ico
|
||||
|
||||
|
||||
.hide-columns--hidden-cell
|
||||
display: none
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>
|
||||
// SPDX-FileCopyrightText: 2022-2024 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
|
||||
//
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
@ -42,7 +42,7 @@ export class Password {
|
||||
this._wrapperEl.appendChild(this._toggleContainerEl);
|
||||
|
||||
this._iconEl = document.createElement('i');
|
||||
this._iconEl.classList.add('fas', 'fa-fw');
|
||||
this._iconEl.classList.add('fas');
|
||||
this._toggleContainerEl.appendChild(this._iconEl);
|
||||
|
||||
parentEl.insertBefore(this._wrapperEl, siblingEl);
|
||||
@ -91,7 +91,7 @@ export class Password {
|
||||
|
||||
updateVisibleIcon(visible) {
|
||||
function visibleClass(visible) {
|
||||
return 'fa-' + (visible ? 'eye' : 'eye-slash');
|
||||
return `ico-${visible ? '' : 'in'}visible`;
|
||||
}
|
||||
|
||||
this._iconEl.classList.remove(visibleClass(!visible));
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
//
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
/* global global:writable */
|
||||
/* global:writable */
|
||||
|
||||
import { Utility } from '../../core/utility';
|
||||
import { Datepicker } from '../form/datepicker';
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
// SPDX-FileCopyrightText: 2022-2024 Gregor Kleen <gregor.kleen@ifi.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
|
||||
//
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
// SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design
|
||||
@ -98,9 +98,10 @@ div.modal__trigger
|
||||
z-index: 20
|
||||
|
||||
&::before
|
||||
@extend .fas
|
||||
@extend .ico-not-ok
|
||||
@extend .white-ico
|
||||
|
||||
content: fa-content($fa-var-times)
|
||||
content: ""
|
||||
color: white
|
||||
|
||||
.modal__content
|
||||
|
||||
@ -1,8 +1,10 @@
|
||||
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
// SPDX-FileCopyrightText: 2022-2024 Gregor Kleen <gregor.kleen@ifi.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
|
||||
//
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
// SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design
|
||||
|
||||
@use '../../icons.scss'
|
||||
|
||||
.navbar-container
|
||||
position: relative
|
||||
|
||||
@ -170,6 +172,9 @@
|
||||
transition: opacity 0.2s ease
|
||||
margin-bottom: 7px
|
||||
|
||||
span
|
||||
@extend .white-ico
|
||||
|
||||
.navbar__link-label
|
||||
transition: opacity .2s ease
|
||||
padding: 2px 4px
|
||||
@ -253,6 +258,11 @@
|
||||
.navbar__link-wrapper
|
||||
color: var(--color-dark)
|
||||
|
||||
.navbar__link-icon
|
||||
.white-ico
|
||||
filter: var(--filter-primary)
|
||||
|
||||
|
||||
.navbar__list-item--active .navbar__link-wrapper
|
||||
color: var(--color-dark)
|
||||
|
||||
@ -263,6 +273,7 @@
|
||||
.navbar__link-icon
|
||||
opacity: 1
|
||||
|
||||
|
||||
// sticky state
|
||||
.navbar--sticky
|
||||
height: var(--header-height-collapsed)
|
||||
|
||||
@ -1,9 +1,10 @@
|
||||
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
// SPDX-FileCopyrightText: 2022-2024 Gregor Kleen <gregor.kleen@ifi.lmu.de>,David Mosbach <david.mosbach@uniworx.de>
|
||||
//
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
// SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design
|
||||
|
||||
@use "../../common" as *
|
||||
@use "../../icons.scss"
|
||||
|
||||
.tooltip
|
||||
position: relative
|
||||
@ -63,11 +64,19 @@
|
||||
.table__th &
|
||||
color: white
|
||||
|
||||
.fas
|
||||
@extend .white-ico
|
||||
|
||||
.tooltip.tooltip__inline
|
||||
.tooltip__handle
|
||||
height: 1.0rem
|
||||
line-height: 1.0rem
|
||||
font-size: 1.0rem
|
||||
.fas
|
||||
filter: var(--filter-primary)
|
||||
.table__th & .fas
|
||||
@extend .white-ico
|
||||
|
||||
|
||||
.tooltip__content
|
||||
position: absolute
|
||||
@ -112,6 +121,11 @@
|
||||
left: unset
|
||||
top: unset
|
||||
transform: unset
|
||||
.tooltip__handle
|
||||
.fas
|
||||
filter: var(--filter-primary)
|
||||
.table__th & .fas
|
||||
@extend .white-ico
|
||||
|
||||
.tooltip--spread
|
||||
width: 100%
|
||||
@ -123,10 +137,10 @@
|
||||
padding: 4px 17px 4px 4px
|
||||
|
||||
&::after
|
||||
@extend .fas
|
||||
@extend .fa-fw
|
||||
@extend .ico-notification-nonactive
|
||||
@extend .fw-ico
|
||||
|
||||
content: '\f129'
|
||||
content: ''
|
||||
position: absolute
|
||||
right: 2px
|
||||
top: 6px
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
// SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>
|
||||
// SPDX-FileCopyrightText: 2022-2024 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor.kleen@ifi.lmu.de>, Sarah Vaupel <vaupel.sarah@campus.lmu.de>
|
||||
//
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
@ -20,7 +20,7 @@ module.exports = function(config) {
|
||||
logLevel: config.LOG_WARN, //config.LOG_DISABLE, config.LOG_ERROR, config.LOG_INFO, config.LOG_DEBUG
|
||||
|
||||
//list of frameworks you want to use, only jasmine is installed automatically
|
||||
frameworks: ['jasmine'],
|
||||
frameworks: ['jasmine', 'browserify'],
|
||||
//list of browsers to launch and capture
|
||||
browsers: ['ChromeHeadless'],
|
||||
//list of reporters to use
|
||||
@ -56,8 +56,11 @@ module.exports = function(config) {
|
||||
},
|
||||
preprocessors: {
|
||||
//add webpack as preprocessor to support require() in test-suits .js files
|
||||
'./frontend/src/**/*.js': ['webpack']
|
||||
'./frontend/src/**/*.js': ['browserify']
|
||||
},
|
||||
plugins: [
|
||||
'karma-browserify'
|
||||
],
|
||||
webpackMiddleware: {
|
||||
//turn off webpack bash output when run the tests
|
||||
noInfo: true,
|
||||
@ -1,15 +0,0 @@
|
||||
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
final: prev:
|
||||
let
|
||||
awsPatch = prev.fetchurl {
|
||||
url = "https://github.com/gkleen/nix/commit/fd67a0f927ec0711eba59714939ff939fc95db38.diff";
|
||||
hash = "sha256-1dJ9zGQvYu5b47O2NjdggSSinlGQDcqBwXoZcKUGfYQ=";
|
||||
};
|
||||
in {
|
||||
nixUnstable = prev.nixUnstable.overrideAttrs (oldAttrs: {
|
||||
patches = oldAttrs.patches or [] ++ [ awsPatch ];
|
||||
});
|
||||
}
|
||||
217
nix/develop.nix
217
nix/develop.nix
@ -1,217 +0,0 @@
|
||||
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
{ pkgs
|
||||
, prev ? pkgs
|
||||
, doPortOffset ? true
|
||||
, doDevelopEnv ? true
|
||||
}:
|
||||
|
||||
with prev.lib;
|
||||
|
||||
let
|
||||
withDevelop = action: ''
|
||||
#!${pkgs.zsh}/bin/zsh -e
|
||||
|
||||
${optionalString doDevelopEnv ''
|
||||
basePath=$(pwd)
|
||||
exec 4<>''${basePath}/.develop.env
|
||||
|
||||
flockRes=
|
||||
set +e
|
||||
${pkgs.util-linux}/bin/flock -en 4; flockRes=$?
|
||||
set -e
|
||||
if [[ ''${flockRes} -ne 0 ]]; then
|
||||
echo "Could not take exclusive lock; is another develop running?" >&2
|
||||
exit ''${flockRes}
|
||||
fi
|
||||
''}
|
||||
|
||||
cleanup() {
|
||||
set +e -x
|
||||
type cleanup_postgres &>/dev/null && cleanup_postgres
|
||||
type cleanup_widget_memcached &>/dev/null && cleanup_widget_memcached
|
||||
type cleanup_session_memcached &>/dev/null && cleanup_session_memcached
|
||||
type cleanup_cache_memcached &>/dev/null && cleanup_cache_memcached
|
||||
type cleanup_minio &>/dev/null && cleanup_minio
|
||||
type cleanup_maildev &>/dev/null && cleanup_maildev
|
||||
|
||||
${optionalString doDevelopEnv ''
|
||||
[ -f "''${basePath}/.develop.env" ] && rm -vf "''${basePath}/.develop.env"
|
||||
''}
|
||||
set +x
|
||||
}
|
||||
|
||||
trap cleanup EXIT
|
||||
|
||||
export PORT_OFFSET=${if doPortOffset then "$(((16#$(echo \"fradrive $(whoami)\" | sha256sum | head -c 16)) % 1000))" else "0"}
|
||||
|
||||
if [[ -z "$PGHOST" ]]; then
|
||||
set -xe
|
||||
|
||||
pgDir=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} postgresql.XXXXXX)
|
||||
pgSockDir=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} postgresql.sock.XXXXXX)
|
||||
pgLogFile=$(mktemp --tmpdir=''${XDG_RUNTIME_DIR} postgresql.XXXXXX.log)
|
||||
initdb --no-locale -D ''${pgDir}
|
||||
pg_ctl start -D ''${pgDir} -l ''${pgLogFile} -w -o "-k ''${pgSockDir} -c listen_addresses=''' -c hba_file='${postgresHba}' -c unix_socket_permissions=0700 -c max_connections=9990 -c shared_preload_libraries=pg_stat_statements -c auto_explain.log_min_duration=100ms"
|
||||
psql -h ''${pgSockDir} -f ${postgresSchema} postgres
|
||||
printf "Postgres logfile is %s\nPostgres socket directory is %s\n" ''${pgLogFile} ''${pgSockDir}
|
||||
|
||||
export PGHOST=''${pgSockDir}
|
||||
export PGLOG=''${pgLogFile}
|
||||
|
||||
cleanup_postgres() {
|
||||
set +e -x
|
||||
pg_ctl stop -D ''${pgDir}
|
||||
rm -rvf ''${pgDir} ''${pgSockDir} ''${pgLogFile}
|
||||
set +x
|
||||
}
|
||||
|
||||
set +xe
|
||||
fi
|
||||
|
||||
if [[ -z "$WIDGET_MEMCACHED_HOST" ]]; then
|
||||
set -xe
|
||||
|
||||
memcached -l localhost -p $(($PORT_OFFSET + 11211)) &>/dev/null &
|
||||
widget_memcached_pid=$!
|
||||
|
||||
export WIDGET_MEMCACHED_HOST=localhost
|
||||
export WIDGET_MEMCACHED_PORT=$(($PORT_OFFSET + 11211))
|
||||
|
||||
cleanup_widget_memcached() {
|
||||
[[ -n "$widget_memcached_pid" ]] && kill $widget_memcached_pid
|
||||
}
|
||||
|
||||
set +xe
|
||||
fi
|
||||
|
||||
if [[ -z "$SESSION_MEMCACHED_HOST" ]]; then
|
||||
set -xe
|
||||
|
||||
memcached -l localhost -p $(($PORT_OFFSET + 11212)) &>/dev/null &
|
||||
session_memcached_pid=$!
|
||||
|
||||
export SESSION_MEMCACHED_HOST=localhost
|
||||
export SESSION_MEMCACHED_PORT=$(($PORT_OFFSET + 11212))
|
||||
|
||||
cleanup_session_memcached() {
|
||||
[[ -n "$session_memcached_pid" ]] && kill $session_memcached_pid
|
||||
}
|
||||
|
||||
set +xe
|
||||
fi
|
||||
|
||||
if [[ -z "$MEMCACHED_HOST" ]]; then
|
||||
set -xe
|
||||
|
||||
memcached -l localhost -p $(($PORT_OFFSET + 11213)) &>/dev/null &
|
||||
memcached_pid=$!
|
||||
|
||||
export MEMCACHED_HOST=localhost
|
||||
export MEMCACHED_PORT=$(($PORT_OFFSET + 11212))
|
||||
|
||||
cleanup_session_memcached() {
|
||||
[[ -n "$memcached_pid" ]] && kill $memcached_pid
|
||||
}
|
||||
|
||||
set +xe
|
||||
fi
|
||||
|
||||
if [[ -z "$UPLOAD_S3_HOST" ]]; then
|
||||
set -xe
|
||||
|
||||
cleanup_minio() {
|
||||
[[ -n "$minio_pid" ]] && kill $minio_pid
|
||||
[[ -n "''${MINIO_DIR}" ]] && rm -rvf ''${MINIO_DIR}
|
||||
[[ -n "''${MINIO_LOGFILE}" ]] && rm -rvf ''${MINIO_LOGFILE}
|
||||
}
|
||||
|
||||
export MINIO_DIR=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} minio.XXXXXX)
|
||||
export MINIO_LOGFILE=$(mktemp --tmpdir=''${XDG_RUNTIME_DIR} minio.XXXXXX.log)
|
||||
export MINIO_ACCESS_KEY=$(${pkgs.pwgen}/bin/pwgen -s 16 1)
|
||||
export MINIO_SECRET_KEY=$(${pkgs.pwgen}/bin/pwgen -s 32 1)
|
||||
|
||||
minio server --address localhost:$(($PORT_OFFSET + 9000)) ''${MINIO_DIR} &>''${MINIO_LOGFILE} &
|
||||
minio_pid=$!
|
||||
|
||||
export UPLOAD_S3_HOST=localhost
|
||||
export UPLOAD_S3_PORT=$(($PORT_OFFSET + 9000))
|
||||
export UPLOAD_S3_SSL=false
|
||||
export UPLOAD_S3_KEY_ID=''${MINIO_ACCESS_KEY}
|
||||
export UPLOAD_S3_KEY=''${MINIO_SECRET_KEY}
|
||||
|
||||
sleep 1
|
||||
|
||||
set +xe
|
||||
fi
|
||||
|
||||
${optionalString (pkgs.nodePackages ? "maildev") ''
|
||||
if [[ -z "$SMTPHOST" ]]; then
|
||||
set -xe
|
||||
|
||||
cleanup_maildev() {
|
||||
[[ -n "$maildev_pid" ]] && kill $maildev_pid
|
||||
}
|
||||
|
||||
TMPDIR=''${XDG_RUNTIME_DIR} ${pkgs.nodePackages.maildev}/bin/maildev --smtp $(($PORT_OFFSET + 1025)) --web $(($PORT_OFFSET + 8080)) --ip localhost --web-ip localhost &>/dev/null &
|
||||
maildev_pid=$!
|
||||
|
||||
export SMTPHOST=localhost
|
||||
export SMTPPORT=$(($PORT_OFFSET + 1025))
|
||||
export SMTPSSL=none
|
||||
|
||||
set +xe
|
||||
fi
|
||||
''}
|
||||
|
||||
${optionalString doDevelopEnv ''
|
||||
set -xe
|
||||
|
||||
cat >&4 <<EOF
|
||||
PORT_OFFSET=''${PORT_OFFSET}
|
||||
|
||||
PGHOST=''${pgSockDir}
|
||||
PGLOG=''${pgLogFile}
|
||||
|
||||
WIDGET_MEMCACHED_HOST=localhost
|
||||
WIDGET_MEMCACHED_PORT=$(($PORT_OFFSET + 11211))
|
||||
|
||||
SESSION_MEMCACHED_HOST=localhost
|
||||
SESSION_MEMCACHED_PORT=$(($PORT_OFFSET + 11212))
|
||||
|
||||
MEMCACHED_HOST=localhost
|
||||
MEMCACHED_PORT=$(($PORT_OFFSET + 11212))
|
||||
|
||||
MINIO_DIR=''${MINIO_DIR}
|
||||
MINIO_LOGFILE=''${MINIO_LOGFILE}
|
||||
UPLOAD_S3_HOST=localhost
|
||||
UPLOAD_S3_PORT=$(($PORT_OFFSET + 9000))
|
||||
UPLOAD_S3_SSL=false
|
||||
UPLOAD_S3_KEY_ID=''${MINIO_ACCESS_KEY}
|
||||
UPLOAD_S3_KEY=''${MINIO_SECRET_KEY}
|
||||
|
||||
SMTPHOST=''${SMTPHOST}
|
||||
SMTPPORT=''${SMTPPORT}
|
||||
SMTPSSL=''${SMTPSSL}
|
||||
EOF
|
||||
|
||||
set +xe
|
||||
''}
|
||||
|
||||
${action}
|
||||
'';
|
||||
|
||||
postgresSchema = prev.writeText "schema.sql" ''
|
||||
CREATE USER uniworx WITH SUPERUSER;
|
||||
CREATE DATABASE uniworx_test;
|
||||
GRANT ALL ON DATABASE uniworx_test TO uniworx;
|
||||
CREATE DATABASE uniworx;
|
||||
GRANT ALL ON DATABASE uniworx TO uniworx;
|
||||
'';
|
||||
|
||||
postgresHba = prev.writeText "hba_file" ''
|
||||
local all all trust
|
||||
'';
|
||||
in withDevelop
|
||||
@ -1,116 +0,0 @@
|
||||
# SPDX-FileCopyrightText: 2022-2023 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor@kleen.consulting>, Steffen Jost <jost@tcs.ifi.lmu.de>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
{ self }: final: prev:
|
||||
|
||||
with prev.lib;
|
||||
|
||||
let
|
||||
created =
|
||||
let
|
||||
fromDate = builtins.readFile (prev.runCommand "date" { nativeBuildInputs = with final; [ coreutils ]; } ''
|
||||
printf '%s' $(date -Is -d '@${toString self.lastModified}') > $out
|
||||
'');
|
||||
in if self ? lastModified then fromDate else "1970-01-01T00:00:01Z";
|
||||
|
||||
mkUniworxDocker = { isTest }: prev.dockerTools.buildImage {
|
||||
name = "uniworx${optionalString isTest "-test"}";
|
||||
tag =
|
||||
let
|
||||
versionFile = if isTest then ./test-version.json else ./version.json;
|
||||
in (builtins.fromJSON (prev.lib.readFile versionFile)).version;
|
||||
inherit created;
|
||||
|
||||
contents = with final; [
|
||||
uniworx.uniworx.components.exes.uniworx
|
||||
prev.dockerTools.binSh findutils coreutils
|
||||
iana-etc
|
||||
# for PDF creation with Pandoc and LuaTeX
|
||||
#cups # needed for interface with print center -- did not work as intended, requires lpd running
|
||||
busybox # should provide a working lpr -- to be tested
|
||||
htop
|
||||
pdftk # for encrypting pdfs
|
||||
#texlive.combined.scheme-medium # too large for container in LMU build environment.
|
||||
(texlive.combine {
|
||||
inherit (texlive) scheme-basic
|
||||
babel-german babel-english booktabs textpos
|
||||
enumitem eurosym koma-script parskip xcolor dejavu
|
||||
# required fro LuaTeX
|
||||
luatexbase lualatex-math unicode-math selnolig
|
||||
;
|
||||
})
|
||||
# just for manual testing within the pod, may be removef for production?
|
||||
curl wget netcat openldap
|
||||
unixtools.netstat htop gnugrep
|
||||
locale
|
||||
];
|
||||
|
||||
runAsRoot = ''
|
||||
#!${final.stdenv.shell}
|
||||
|
||||
${prev.dockerTools.shadowSetup}
|
||||
|
||||
mkdir -p /var/lib
|
||||
|
||||
groupadd -r uniworx
|
||||
useradd -r -g uniworx -d /var/lib/uniworx -M uniworx --uid 999
|
||||
install -d -g uniworx -o uniworx -m 0750 /var/lib/uniworx
|
||||
|
||||
mkdir -p /var/log
|
||||
install -d -g uniworx -o uniworx -m 0755 /var/log/uniworx
|
||||
|
||||
# just to see how to create directories here
|
||||
mkdir -p /testdir
|
||||
'';
|
||||
|
||||
config =
|
||||
let
|
||||
entrypoint = prev.writeScriptBin "uniworx-entrypoint" ''
|
||||
#!${final.zsh}/bin/zsh -xe
|
||||
|
||||
cTime=$(date -Is)
|
||||
|
||||
# export LOGDEST=/var/log/uniworx/''${cTime}.log # kubernetes prefers log via stdout
|
||||
typeset -a configs
|
||||
configs=()
|
||||
configDir=''${CONFIG_DIR-/cfg}
|
||||
if [[ -d "''${configDir}" ]]; then
|
||||
while IFS= read -d $'\0' cfg; do
|
||||
configs+=("''${(q)cfg}")
|
||||
done < <(find "''${configDir}" \( -name '*.yml' -o -name '*.yaml' \) -print0 | sort -rz)
|
||||
fi
|
||||
configs+=('${uniworxConfig}')
|
||||
cd /var/lib/uniworx
|
||||
exec -- uniworx ''${configs}
|
||||
'';
|
||||
postgresSchema = prev.writeText "schema.sql" ''
|
||||
CREATE USER uniworx WITH SUPERUSER;
|
||||
CREATE DATABASE uniworx;
|
||||
GRANT ALL ON DATABASE uniworx TO uniworx;
|
||||
'';
|
||||
|
||||
postgresHba = prev.writeText "hba_file" ''
|
||||
local all all trust
|
||||
'';
|
||||
uniworxConfig = prev.writeText "uni2work.yml" ''
|
||||
port: 8080
|
||||
approot: "_env:APPROOT:http://localhost:8080"
|
||||
'';
|
||||
in {
|
||||
Cmd = [ "${entrypoint}/bin/uniworx-entrypoint" ];
|
||||
User = "uniworx:uniworx";
|
||||
ExposedPorts = {
|
||||
"8080/tcp" = {};
|
||||
};
|
||||
Volumes = {
|
||||
"/var/lib/uniworx" = {};
|
||||
"/var/log" = {};
|
||||
};
|
||||
};
|
||||
};
|
||||
in
|
||||
mapAttrs (_name: mkUniworxDocker) {
|
||||
uniworxTestDocker = { isTest = true; };
|
||||
uniworxDocker = { isTest = false; };
|
||||
}
|
||||
@ -1,3 +0,0 @@
|
||||
{
|
||||
"version": "27.4.18"
|
||||
}
|
||||
@ -1,3 +0,0 @@
|
||||
SPDX-FileCopyrightText: 2022-2023 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor.kleen@ifi.lmu.de>, Steffen Jost <jost@tcs.ifi.lmu.de>
|
||||
|
||||
SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
@ -1,3 +0,0 @@
|
||||
{
|
||||
"version": "27.4.59"
|
||||
}
|
||||
@ -1,3 +0,0 @@
|
||||
SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Steffen Jost <jost@tcs.ifi.lmu.de>
|
||||
|
||||
SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
@ -1,19 +0,0 @@
|
||||
# SPDX-FileCopyrightText: 2022-2023 Gregor Kleen <gregor@kleen.consulting>, Sarah Vaupel <sarah.vaupel@ifi.lmu.de>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
{pkgs ? import <nixpkgs> {
|
||||
inherit system;
|
||||
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-14_x"}:
|
||||
|
||||
let
|
||||
nodeEnv = import ./node-env.nix {
|
||||
inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
|
||||
inherit pkgs nodejs;
|
||||
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
|
||||
};
|
||||
in
|
||||
import ./node-packages.nix {
|
||||
inherit (pkgs) fetchurl fetchurlBoot nix-gitignore stdenv lib fetchgit;
|
||||
inherit nodeEnv;
|
||||
}
|
||||
@ -1,689 +0,0 @@
|
||||
# This file originates from node2nix
|
||||
|
||||
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
|
||||
|
||||
let
|
||||
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
|
||||
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
|
||||
|
||||
python = if nodejs ? python then nodejs.python else python2;
|
||||
|
||||
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
|
||||
tarWrapper = runCommand "tarWrapper" {} ''
|
||||
mkdir -p $out/bin
|
||||
|
||||
cat > $out/bin/tar <<EOF
|
||||
#! ${stdenv.shell} -e
|
||||
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
|
||||
EOF
|
||||
|
||||
chmod +x $out/bin/tar
|
||||
'';
|
||||
|
||||
# Function that generates a TGZ file from a NPM project
|
||||
buildNodeSourceDist =
|
||||
{ name, version, src, ... }:
|
||||
|
||||
stdenv.mkDerivation {
|
||||
name = "node-tarball-${name}-${version}";
|
||||
inherit src;
|
||||
buildInputs = [ nodejs ];
|
||||
buildPhase = ''
|
||||
export HOME=$TMPDIR
|
||||
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
|
||||
'';
|
||||
installPhase = ''
|
||||
mkdir -p $out/tarballs
|
||||
mv $tgzFile $out/tarballs
|
||||
mkdir -p $out/nix-support
|
||||
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
|
||||
'';
|
||||
};
|
||||
|
||||
# Common shell logic
|
||||
installPackage = writeShellScript "install-package" ''
|
||||
installPackage() {
|
||||
local packageName=$1 src=$2
|
||||
|
||||
local strippedName
|
||||
|
||||
local DIR=$PWD
|
||||
cd $TMPDIR
|
||||
|
||||
unpackFile $src
|
||||
|
||||
# Make the base dir in which the target dependency resides first
|
||||
mkdir -p "$(dirname "$DIR/$packageName")"
|
||||
|
||||
if [ -f "$src" ]
|
||||
then
|
||||
# Figure out what directory has been unpacked
|
||||
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
|
||||
|
||||
# Restore write permissions to make building work
|
||||
find "$packageDir" -type d -exec chmod u+x {} \;
|
||||
chmod -R u+w "$packageDir"
|
||||
|
||||
# Move the extracted tarball into the output folder
|
||||
mv "$packageDir" "$DIR/$packageName"
|
||||
elif [ -d "$src" ]
|
||||
then
|
||||
# Get a stripped name (without hash) of the source directory.
|
||||
# On old nixpkgs it's already set internally.
|
||||
if [ -z "$strippedName" ]
|
||||
then
|
||||
strippedName="$(stripHash $src)"
|
||||
fi
|
||||
|
||||
# Restore write permissions to make building work
|
||||
chmod -R u+w "$strippedName"
|
||||
|
||||
# Move the extracted directory into the output folder
|
||||
mv "$strippedName" "$DIR/$packageName"
|
||||
fi
|
||||
|
||||
# Change to the package directory to install dependencies
|
||||
cd "$DIR/$packageName"
|
||||
}
|
||||
'';
|
||||
|
||||
# Bundle the dependencies of the package
|
||||
#
|
||||
# Only include dependencies if they don't exist. They may also be bundled in the package.
|
||||
includeDependencies = {dependencies}:
|
||||
lib.optionalString (dependencies != []) (
|
||||
''
|
||||
mkdir -p node_modules
|
||||
cd node_modules
|
||||
''
|
||||
+ (lib.concatMapStrings (dependency:
|
||||
''
|
||||
if [ ! -e "${dependency.packageName}" ]; then
|
||||
${composePackage dependency}
|
||||
fi
|
||||
''
|
||||
) dependencies)
|
||||
+ ''
|
||||
cd ..
|
||||
''
|
||||
);
|
||||
|
||||
# Recursively composes the dependencies of a package
|
||||
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
|
||||
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
|
||||
installPackage "${packageName}" "${src}"
|
||||
${includeDependencies { inherit dependencies; }}
|
||||
cd ..
|
||||
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
|
||||
'';
|
||||
|
||||
pinpointDependencies = {dependencies, production}:
|
||||
let
|
||||
pinpointDependenciesFromPackageJSON = writeTextFile {
|
||||
name = "pinpointDependencies.js";
|
||||
text = ''
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
|
||||
function resolveDependencyVersion(location, name) {
|
||||
if(location == process.env['NIX_STORE']) {
|
||||
return null;
|
||||
} else {
|
||||
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
|
||||
|
||||
if(fs.existsSync(dependencyPackageJSON)) {
|
||||
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
|
||||
|
||||
if(dependencyPackageObj.name == name) {
|
||||
return dependencyPackageObj.version;
|
||||
}
|
||||
} else {
|
||||
return resolveDependencyVersion(path.resolve(location, ".."), name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function replaceDependencies(dependencies) {
|
||||
if(typeof dependencies == "object" && dependencies !== null) {
|
||||
for(var dependency in dependencies) {
|
||||
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
|
||||
|
||||
if(resolvedVersion === null) {
|
||||
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
|
||||
} else {
|
||||
dependencies[dependency] = resolvedVersion;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Read the package.json configuration */
|
||||
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
|
||||
|
||||
/* Pinpoint all dependencies */
|
||||
replaceDependencies(packageObj.dependencies);
|
||||
if(process.argv[2] == "development") {
|
||||
replaceDependencies(packageObj.devDependencies);
|
||||
}
|
||||
else {
|
||||
packageObj.devDependencies = {};
|
||||
}
|
||||
replaceDependencies(packageObj.optionalDependencies);
|
||||
replaceDependencies(packageObj.peerDependencies);
|
||||
|
||||
/* Write the fixed package.json file */
|
||||
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
|
||||
'';
|
||||
};
|
||||
in
|
||||
''
|
||||
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
|
||||
|
||||
${lib.optionalString (dependencies != [])
|
||||
''
|
||||
if [ -d node_modules ]
|
||||
then
|
||||
cd node_modules
|
||||
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
|
||||
cd ..
|
||||
fi
|
||||
''}
|
||||
'';
|
||||
|
||||
# Recursively traverses all dependencies of a package and pinpoints all
|
||||
# dependencies in the package.json file to the versions that are actually
|
||||
# being used.
|
||||
|
||||
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
|
||||
''
|
||||
if [ -d "${packageName}" ]
|
||||
then
|
||||
cd "${packageName}"
|
||||
${pinpointDependencies { inherit dependencies production; }}
|
||||
cd ..
|
||||
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
|
||||
fi
|
||||
'';
|
||||
|
||||
# Extract the Node.js source code which is used to compile packages with
|
||||
# native bindings
|
||||
nodeSources = runCommand "node-sources" {} ''
|
||||
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
|
||||
mv node-* $out
|
||||
'';
|
||||
|
||||
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
|
||||
addIntegrityFieldsScript = writeTextFile {
|
||||
name = "addintegrityfields.js";
|
||||
text = ''
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
|
||||
function augmentDependencies(baseDir, dependencies) {
|
||||
for(var dependencyName in dependencies) {
|
||||
var dependency = dependencies[dependencyName];
|
||||
|
||||
// Open package.json and augment metadata fields
|
||||
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
|
||||
var packageJSONPath = path.join(packageJSONDir, "package.json");
|
||||
|
||||
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
|
||||
console.log("Adding metadata fields to: "+packageJSONPath);
|
||||
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
|
||||
|
||||
if(dependency.integrity) {
|
||||
packageObj["_integrity"] = dependency.integrity;
|
||||
} else {
|
||||
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
|
||||
}
|
||||
|
||||
if(dependency.resolved) {
|
||||
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
|
||||
} else {
|
||||
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
|
||||
}
|
||||
|
||||
if(dependency.from !== undefined) { // Adopt from property if one has been provided
|
||||
packageObj["_from"] = dependency.from;
|
||||
}
|
||||
|
||||
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
|
||||
}
|
||||
|
||||
// Augment transitive dependencies
|
||||
if(dependency.dependencies !== undefined) {
|
||||
augmentDependencies(packageJSONDir, dependency.dependencies);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(fs.existsSync("./package-lock.json")) {
|
||||
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
|
||||
|
||||
if(![1, 2].includes(packageLock.lockfileVersion)) {
|
||||
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if(packageLock.dependencies !== undefined) {
|
||||
augmentDependencies(".", packageLock.dependencies);
|
||||
}
|
||||
}
|
||||
'';
|
||||
};
|
||||
|
||||
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
|
||||
reconstructPackageLock = writeTextFile {
|
||||
name = "reconstructpackagelock.js";
|
||||
text = ''
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
|
||||
var packageObj = JSON.parse(fs.readFileSync("package.json"));
|
||||
|
||||
var lockObj = {
|
||||
name: packageObj.name,
|
||||
version: packageObj.version,
|
||||
lockfileVersion: 2,
|
||||
requires: true,
|
||||
packages: {
|
||||
"": {
|
||||
name: packageObj.name,
|
||||
version: packageObj.version,
|
||||
license: packageObj.license,
|
||||
bin: packageObj.bin,
|
||||
dependencies: packageObj.dependencies,
|
||||
engines: packageObj.engines,
|
||||
optionalDependencies: packageObj.optionalDependencies
|
||||
}
|
||||
},
|
||||
dependencies: {}
|
||||
};
|
||||
|
||||
function augmentPackageJSON(filePath, packages, dependencies) {
|
||||
var packageJSON = path.join(filePath, "package.json");
|
||||
if(fs.existsSync(packageJSON)) {
|
||||
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
|
||||
packages[filePath] = {
|
||||
version: packageObj.version,
|
||||
integrity: "sha1-000000000000000000000000000=",
|
||||
dependencies: packageObj.dependencies,
|
||||
engines: packageObj.engines,
|
||||
optionalDependencies: packageObj.optionalDependencies
|
||||
};
|
||||
dependencies[packageObj.name] = {
|
||||
version: packageObj.version,
|
||||
integrity: "sha1-000000000000000000000000000=",
|
||||
dependencies: {}
|
||||
};
|
||||
processDependencies(path.join(filePath, "node_modules"), packages, dependencies[packageObj.name].dependencies);
|
||||
}
|
||||
}
|
||||
|
||||
function processDependencies(dir, packages, dependencies) {
|
||||
if(fs.existsSync(dir)) {
|
||||
var files = fs.readdirSync(dir);
|
||||
|
||||
files.forEach(function(entry) {
|
||||
var filePath = path.join(dir, entry);
|
||||
var stats = fs.statSync(filePath);
|
||||
|
||||
if(stats.isDirectory()) {
|
||||
if(entry.substr(0, 1) == "@") {
|
||||
// When we encounter a namespace folder, augment all packages belonging to the scope
|
||||
var pkgFiles = fs.readdirSync(filePath);
|
||||
|
||||
pkgFiles.forEach(function(entry) {
|
||||
if(stats.isDirectory()) {
|
||||
var pkgFilePath = path.join(filePath, entry);
|
||||
augmentPackageJSON(pkgFilePath, packages, dependencies);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
augmentPackageJSON(filePath, packages, dependencies);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
processDependencies("node_modules", lockObj.packages, lockObj.dependencies);
|
||||
|
||||
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
|
||||
'';
|
||||
};
|
||||
|
||||
# Script that links bins defined in package.json to the node_modules bin directory
|
||||
# NPM does not do this for top-level packages itself anymore as of v7
|
||||
linkBinsScript = writeTextFile {
|
||||
name = "linkbins.js";
|
||||
text = ''
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
|
||||
var packageObj = JSON.parse(fs.readFileSync("package.json"));
|
||||
|
||||
var nodeModules = Array(packageObj.name.split("/").length).fill("..").join(path.sep);
|
||||
|
||||
if(packageObj.bin !== undefined) {
|
||||
fs.mkdirSync(path.join(nodeModules, ".bin"))
|
||||
|
||||
if(typeof packageObj.bin == "object") {
|
||||
Object.keys(packageObj.bin).forEach(function(exe) {
|
||||
if(fs.existsSync(packageObj.bin[exe])) {
|
||||
console.log("linking bin '" + exe + "'");
|
||||
fs.symlinkSync(
|
||||
path.join("..", packageObj.name, packageObj.bin[exe]),
|
||||
path.join(nodeModules, ".bin", exe)
|
||||
);
|
||||
}
|
||||
else {
|
||||
console.log("skipping non-existent bin '" + exe + "'");
|
||||
}
|
||||
})
|
||||
}
|
||||
else {
|
||||
if(fs.existsSync(packageObj.bin)) {
|
||||
console.log("linking bin '" + packageObj.bin + "'");
|
||||
fs.symlinkSync(
|
||||
path.join("..", packageObj.name, packageObj.bin),
|
||||
path.join(nodeModules, ".bin", packageObj.name.split("/").pop())
|
||||
);
|
||||
}
|
||||
else {
|
||||
console.log("skipping non-existent bin '" + packageObj.bin + "'");
|
||||
}
|
||||
}
|
||||
}
|
||||
else if(packageObj.directories !== undefined && packageObj.directories.bin !== undefined) {
|
||||
fs.mkdirSync(path.join(nodeModules, ".bin"))
|
||||
|
||||
fs.readdirSync(packageObj.directories.bin).forEach(function(exe) {
|
||||
if(fs.existsSync(path.join(packageObj.directories.bin, exe))) {
|
||||
console.log("linking bin '" + exe + "'");
|
||||
fs.symlinkSync(
|
||||
path.join("..", packageObj.name, packageObj.directories.bin, exe),
|
||||
path.join(nodeModules, ".bin", exe)
|
||||
);
|
||||
}
|
||||
else {
|
||||
console.log("skipping non-existent bin '" + exe + "'");
|
||||
}
|
||||
})
|
||||
}
|
||||
'';
|
||||
};
|
||||
|
||||
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
|
||||
let
|
||||
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
|
||||
in
|
||||
''
|
||||
# Pinpoint the versions of all dependencies to the ones that are actually being used
|
||||
echo "pinpointing versions of dependencies..."
|
||||
source $pinpointDependenciesScriptPath
|
||||
|
||||
# Patch the shebangs of the bundled modules to prevent them from
|
||||
# calling executables outside the Nix store as much as possible
|
||||
patchShebangs .
|
||||
|
||||
# Deploy the Node.js package by running npm install. Since the
|
||||
# dependencies have been provided already by ourselves, it should not
|
||||
# attempt to install them again, which is good, because we want to make
|
||||
# it Nix's responsibility. If it needs to install any dependencies
|
||||
# anyway (e.g. because the dependency parameters are
|
||||
# incomplete/incorrect), it fails.
|
||||
#
|
||||
# The other responsibilities of NPM are kept -- version checks, build
|
||||
# steps, postprocessing etc.
|
||||
|
||||
export HOME=$TMPDIR
|
||||
cd "${packageName}"
|
||||
runHook preRebuild
|
||||
|
||||
${lib.optionalString bypassCache ''
|
||||
${lib.optionalString reconstructLock ''
|
||||
if [ -f package-lock.json ]
|
||||
then
|
||||
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
|
||||
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
|
||||
rm package-lock.json
|
||||
else
|
||||
echo "No package-lock.json file found, reconstructing..."
|
||||
fi
|
||||
|
||||
node ${reconstructPackageLock}
|
||||
''}
|
||||
|
||||
node ${addIntegrityFieldsScript}
|
||||
''}
|
||||
|
||||
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
|
||||
|
||||
runHook postRebuild
|
||||
|
||||
if [ "''${dontNpmInstall-}" != "1" ]
|
||||
then
|
||||
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
|
||||
rm -f npm-shrinkwrap.json
|
||||
|
||||
npm ${forceOfflineFlag} --nodedir=${nodeSources} --no-bin-links --ignore-scripts ${npmFlags} ${lib.optionalString production "--production"} install
|
||||
fi
|
||||
|
||||
# Link executables defined in package.json
|
||||
node ${linkBinsScript}
|
||||
'';
|
||||
|
||||
# Builds and composes an NPM package including all its dependencies
|
||||
buildNodePackage =
|
||||
{ name
|
||||
, packageName
|
||||
, version ? null
|
||||
, dependencies ? []
|
||||
, buildInputs ? []
|
||||
, production ? true
|
||||
, npmFlags ? ""
|
||||
, dontNpmInstall ? false
|
||||
, bypassCache ? false
|
||||
, reconstructLock ? false
|
||||
, preRebuild ? ""
|
||||
, dontStrip ? true
|
||||
, unpackPhase ? "true"
|
||||
, buildPhase ? "true"
|
||||
, meta ? {}
|
||||
, ... }@args:
|
||||
|
||||
let
|
||||
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
|
||||
in
|
||||
stdenv.mkDerivation ({
|
||||
name = "${name}${if version == null then "" else "-${version}"}";
|
||||
buildInputs = [ tarWrapper python nodejs ]
|
||||
++ lib.optional (stdenv.isLinux) utillinux
|
||||
++ lib.optional (stdenv.isDarwin) libtool
|
||||
++ buildInputs;
|
||||
|
||||
inherit nodejs;
|
||||
|
||||
inherit dontStrip; # Stripping may fail a build for some package deployments
|
||||
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
|
||||
|
||||
compositionScript = composePackage args;
|
||||
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
|
||||
|
||||
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
|
||||
|
||||
installPhase = ''
|
||||
source ${installPackage}
|
||||
|
||||
# Create and enter a root node_modules/ folder
|
||||
mkdir -p $out/lib/node_modules
|
||||
cd $out/lib/node_modules
|
||||
|
||||
# Compose the package and all its dependencies
|
||||
source $compositionScriptPath
|
||||
|
||||
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
|
||||
|
||||
# Create symlink to the deployed executable folder, if applicable
|
||||
if [ -d "$out/lib/node_modules/.bin" ]
|
||||
then
|
||||
ln -s $out/lib/node_modules/.bin $out/bin
|
||||
|
||||
# Fixup all executables
|
||||
ls $out/bin/* | while read i
|
||||
do
|
||||
file="$(readlink -f "$i")"
|
||||
chmod u+rwx "$file"
|
||||
if isScript "$file"
|
||||
then
|
||||
sed -i 's/\r$//' "$file" # convert crlf to lf
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
# Create symlinks to the deployed manual page folders, if applicable
|
||||
if [ -d "$out/lib/node_modules/${packageName}/man" ]
|
||||
then
|
||||
mkdir -p $out/share
|
||||
for dir in "$out/lib/node_modules/${packageName}/man/"*
|
||||
do
|
||||
mkdir -p $out/share/man/$(basename "$dir")
|
||||
for page in "$dir"/*
|
||||
do
|
||||
ln -s $page $out/share/man/$(basename "$dir")
|
||||
done
|
||||
done
|
||||
fi
|
||||
|
||||
# Run post install hook, if provided
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
meta = {
|
||||
# default to Node.js' platforms
|
||||
platforms = nodejs.meta.platforms;
|
||||
} // meta;
|
||||
} // extraArgs);
|
||||
|
||||
# Builds a node environment (a node_modules folder and a set of binaries)
|
||||
buildNodeDependencies =
|
||||
{ name
|
||||
, packageName
|
||||
, version ? null
|
||||
, src
|
||||
, dependencies ? []
|
||||
, buildInputs ? []
|
||||
, production ? true
|
||||
, npmFlags ? ""
|
||||
, dontNpmInstall ? false
|
||||
, bypassCache ? false
|
||||
, reconstructLock ? false
|
||||
, dontStrip ? true
|
||||
, unpackPhase ? "true"
|
||||
, buildPhase ? "true"
|
||||
, ... }@args:
|
||||
|
||||
let
|
||||
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
|
||||
in
|
||||
stdenv.mkDerivation ({
|
||||
name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";
|
||||
|
||||
buildInputs = [ tarWrapper python nodejs ]
|
||||
++ lib.optional (stdenv.isLinux) utillinux
|
||||
++ lib.optional (stdenv.isDarwin) libtool
|
||||
++ buildInputs;
|
||||
|
||||
inherit dontStrip; # Stripping may fail a build for some package deployments
|
||||
inherit dontNpmInstall unpackPhase buildPhase;
|
||||
|
||||
includeScript = includeDependencies { inherit dependencies; };
|
||||
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
|
||||
|
||||
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
|
||||
|
||||
installPhase = ''
|
||||
source ${installPackage}
|
||||
|
||||
mkdir -p $out/${packageName}
|
||||
cd $out/${packageName}
|
||||
|
||||
source $includeScriptPath
|
||||
|
||||
# Create fake package.json to make the npm commands work properly
|
||||
cp ${src}/package.json .
|
||||
chmod 644 package.json
|
||||
${lib.optionalString bypassCache ''
|
||||
if [ -f ${src}/package-lock.json ]
|
||||
then
|
||||
cp ${src}/package-lock.json .
|
||||
chmod 644 package-lock.json
|
||||
fi
|
||||
''}
|
||||
|
||||
# Go to the parent folder to make sure that all packages are pinpointed
|
||||
cd ..
|
||||
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
|
||||
|
||||
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
|
||||
|
||||
# Expose the executables that were installed
|
||||
cd ..
|
||||
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
|
||||
|
||||
mv ${packageName} lib
|
||||
ln -s $out/lib/node_modules/.bin $out/bin
|
||||
'';
|
||||
} // extraArgs);
|
||||
|
||||
# Builds a development shell
|
||||
buildNodeShell =
|
||||
{ name
|
||||
, packageName
|
||||
, version ? null
|
||||
, src
|
||||
, dependencies ? []
|
||||
, buildInputs ? []
|
||||
, production ? true
|
||||
, npmFlags ? ""
|
||||
, dontNpmInstall ? false
|
||||
, bypassCache ? false
|
||||
, reconstructLock ? false
|
||||
, dontStrip ? true
|
||||
, unpackPhase ? "true"
|
||||
, buildPhase ? "true"
|
||||
, ... }@args:
|
||||
|
||||
let
|
||||
nodeDependencies = buildNodeDependencies args;
|
||||
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
|
||||
in
|
||||
stdenv.mkDerivation ({
|
||||
name = "node-shell-${name}${if version == null then "" else "-${version}"}";
|
||||
|
||||
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
|
||||
buildCommand = ''
|
||||
mkdir -p $out/bin
|
||||
cat > $out/bin/shell <<EOF
|
||||
#! ${stdenv.shell} -e
|
||||
$shellHook
|
||||
exec ${stdenv.shell}
|
||||
EOF
|
||||
chmod +x $out/bin/shell
|
||||
'';
|
||||
|
||||
# Provide the dependencies in a development shell through the NODE_PATH environment variable
|
||||
inherit nodeDependencies;
|
||||
shellHook = lib.optionalString (dependencies != []) ''
|
||||
export NODE_PATH=${nodeDependencies}/lib/node_modules
|
||||
export PATH="${nodeDependencies}/bin:$PATH"
|
||||
'';
|
||||
} // extraArgs);
|
||||
in
|
||||
{
|
||||
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
|
||||
buildNodePackage = lib.makeOverridable buildNodePackage;
|
||||
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
|
||||
buildNodeShell = lib.makeOverridable buildNodeShell;
|
||||
}
|
||||
@ -1,3 +0,0 @@
|
||||
SPDX-FileCopyrightText: 2022-2023 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor@kleen.consulting>
|
||||
|
||||
SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
File diff suppressed because it is too large
Load Diff
@ -1,3 +0,0 @@
|
||||
SPDX-FileCopyrightText: 2022-2023 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor@kleen.consulting>
|
||||
|
||||
SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
@ -1,8 +0,0 @@
|
||||
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
final: prev:
|
||||
{
|
||||
nodePackages = (import ./node2nix.nix { pkgs = final; inherit (final.config) system; }) // prev.nodePackages;
|
||||
}
|
||||
@ -1,571 +0,0 @@
|
||||
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
# This file originates from node2nix
|
||||
|
||||
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile}:
|
||||
|
||||
let
|
||||
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
|
||||
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
|
||||
|
||||
python = if nodejs ? python then nodejs.python else python2;
|
||||
|
||||
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
|
||||
tarWrapper = runCommand "tarWrapper" {} ''
|
||||
mkdir -p $out/bin
|
||||
|
||||
cat > $out/bin/tar <<EOF
|
||||
#! ${stdenv.shell} -e
|
||||
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
|
||||
EOF
|
||||
|
||||
chmod +x $out/bin/tar
|
||||
'';
|
||||
|
||||
# Function that generates a TGZ file from a NPM project
|
||||
buildNodeSourceDist =
|
||||
{ name, version, src, ... }:
|
||||
|
||||
stdenv.mkDerivation {
|
||||
name = "node-tarball-${name}-${version}";
|
||||
inherit src;
|
||||
buildInputs = [ nodejs ];
|
||||
buildPhase = ''
|
||||
export HOME=$TMPDIR
|
||||
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
|
||||
'';
|
||||
installPhase = ''
|
||||
mkdir -p $out/tarballs
|
||||
mv $tgzFile $out/tarballs
|
||||
mkdir -p $out/nix-support
|
||||
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
|
||||
'';
|
||||
};
|
||||
|
||||
includeDependencies = {dependencies}:
|
||||
lib.optionalString (dependencies != [])
|
||||
(lib.concatMapStrings (dependency:
|
||||
''
|
||||
# Bundle the dependencies of the package
|
||||
mkdir -p node_modules
|
||||
cd node_modules
|
||||
|
||||
# Only include dependencies if they don't exist. They may also be bundled in the package.
|
||||
if [ ! -e "${dependency.name}" ]
|
||||
then
|
||||
${composePackage dependency}
|
||||
fi
|
||||
|
||||
cd ..
|
||||
''
|
||||
) dependencies);
|
||||
|
||||
# Recursively composes the dependencies of a package
|
||||
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
|
||||
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
|
||||
DIR=$(pwd)
|
||||
cd $TMPDIR
|
||||
|
||||
unpackFile ${src}
|
||||
|
||||
# Make the base dir in which the target dependency resides first
|
||||
mkdir -p "$(dirname "$DIR/${packageName}")"
|
||||
|
||||
if [ -f "${src}" ]
|
||||
then
|
||||
# Figure out what directory has been unpacked
|
||||
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
|
||||
|
||||
# Restore write permissions to make building work
|
||||
find "$packageDir" -type d -exec chmod u+x {} \;
|
||||
chmod -R u+w "$packageDir"
|
||||
|
||||
# Move the extracted tarball into the output folder
|
||||
mv "$packageDir" "$DIR/${packageName}"
|
||||
elif [ -d "${src}" ]
|
||||
then
|
||||
# Get a stripped name (without hash) of the source directory.
|
||||
# On old nixpkgs it's already set internally.
|
||||
if [ -z "$strippedName" ]
|
||||
then
|
||||
strippedName="$(stripHash ${src})"
|
||||
fi
|
||||
|
||||
# Restore write permissions to make building work
|
||||
chmod -R u+w "$strippedName"
|
||||
|
||||
# Move the extracted directory into the output folder
|
||||
mv "$strippedName" "$DIR/${packageName}"
|
||||
fi
|
||||
|
||||
# Unset the stripped name to not confuse the next unpack step
|
||||
unset strippedName
|
||||
|
||||
# Include the dependencies of the package
|
||||
cd "$DIR/${packageName}"
|
||||
${includeDependencies { inherit dependencies; }}
|
||||
cd ..
|
||||
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
|
||||
'';
|
||||
|
||||
pinpointDependencies = {dependencies, production}:
|
||||
let
|
||||
pinpointDependenciesFromPackageJSON = writeTextFile {
|
||||
name = "pinpointDependencies.js";
|
||||
text = ''
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
|
||||
function resolveDependencyVersion(location, name) {
|
||||
if(location == process.env['NIX_STORE']) {
|
||||
return null;
|
||||
} else {
|
||||
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
|
||||
|
||||
if(fs.existsSync(dependencyPackageJSON)) {
|
||||
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
|
||||
|
||||
if(dependencyPackageObj.name == name) {
|
||||
return dependencyPackageObj.version;
|
||||
}
|
||||
} else {
|
||||
return resolveDependencyVersion(path.resolve(location, ".."), name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function replaceDependencies(dependencies) {
|
||||
if(typeof dependencies == "object" && dependencies !== null) {
|
||||
for(var dependency in dependencies) {
|
||||
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
|
||||
|
||||
if(resolvedVersion === null) {
|
||||
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
|
||||
} else {
|
||||
dependencies[dependency] = resolvedVersion;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Read the package.json configuration */
|
||||
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
|
||||
|
||||
/* Pinpoint all dependencies */
|
||||
replaceDependencies(packageObj.dependencies);
|
||||
if(process.argv[2] == "development") {
|
||||
replaceDependencies(packageObj.devDependencies);
|
||||
}
|
||||
replaceDependencies(packageObj.optionalDependencies);
|
||||
|
||||
/* Write the fixed package.json file */
|
||||
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
|
||||
'';
|
||||
};
|
||||
in
|
||||
''
|
||||
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
|
||||
|
||||
${lib.optionalString (dependencies != [])
|
||||
''
|
||||
if [ -d node_modules ]
|
||||
then
|
||||
cd node_modules
|
||||
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
|
||||
cd ..
|
||||
fi
|
||||
''}
|
||||
'';
|
||||
|
||||
# Recursively traverses all dependencies of a package and pinpoints all
|
||||
# dependencies in the package.json file to the versions that are actually
|
||||
# being used.
|
||||
|
||||
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
|
||||
''
|
||||
if [ -d "${packageName}" ]
|
||||
then
|
||||
cd "${packageName}"
|
||||
${pinpointDependencies { inherit dependencies production; }}
|
||||
cd ..
|
||||
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
|
||||
fi
|
||||
'';
|
||||
|
||||
# Extract the Node.js source code which is used to compile packages with
|
||||
# native bindings
|
||||
nodeSources = runCommand "node-sources" {} ''
|
||||
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
|
||||
mv node-* $out
|
||||
'';
|
||||
|
||||
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
|
||||
addIntegrityFieldsScript = writeTextFile {
|
||||
name = "addintegrityfields.js";
|
||||
text = ''
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
|
||||
function augmentDependencies(baseDir, dependencies) {
|
||||
for(var dependencyName in dependencies) {
|
||||
var dependency = dependencies[dependencyName];
|
||||
|
||||
// Open package.json and augment metadata fields
|
||||
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
|
||||
var packageJSONPath = path.join(packageJSONDir, "package.json");
|
||||
|
||||
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
|
||||
console.log("Adding metadata fields to: "+packageJSONPath);
|
||||
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
|
||||
|
||||
if(dependency.integrity) {
|
||||
packageObj["_integrity"] = dependency.integrity;
|
||||
} else {
|
||||
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
|
||||
}
|
||||
|
||||
if(dependency.resolved) {
|
||||
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
|
||||
} else {
|
||||
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
|
||||
}
|
||||
|
||||
if(dependency.from !== undefined) { // Adopt from property if one has been provided
|
||||
packageObj["_from"] = dependency.from;
|
||||
}
|
||||
|
||||
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
|
||||
}
|
||||
|
||||
// Augment transitive dependencies
|
||||
if(dependency.dependencies !== undefined) {
|
||||
augmentDependencies(packageJSONDir, dependency.dependencies);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(fs.existsSync("./package-lock.json")) {
|
||||
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
|
||||
|
||||
if(![1, 2].includes(packageLock.lockfileVersion)) {
|
||||
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if(packageLock.dependencies !== undefined) {
|
||||
augmentDependencies(".", packageLock.dependencies);
|
||||
}
|
||||
}
|
||||
'';
|
||||
};
|
||||
|
||||
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
|
||||
reconstructPackageLock = writeTextFile {
|
||||
name = "addintegrityfields.js";
|
||||
text = ''
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
|
||||
var packageObj = JSON.parse(fs.readFileSync("package.json"));
|
||||
|
||||
var lockObj = {
|
||||
name: packageObj.name,
|
||||
version: packageObj.version,
|
||||
lockfileVersion: 1,
|
||||
requires: true,
|
||||
dependencies: {}
|
||||
};
|
||||
|
||||
function augmentPackageJSON(filePath, dependencies) {
|
||||
var packageJSON = path.join(filePath, "package.json");
|
||||
if(fs.existsSync(packageJSON)) {
|
||||
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
|
||||
dependencies[packageObj.name] = {
|
||||
version: packageObj.version,
|
||||
integrity: "sha1-000000000000000000000000000=",
|
||||
dependencies: {}
|
||||
};
|
||||
processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
|
||||
}
|
||||
}
|
||||
|
||||
function processDependencies(dir, dependencies) {
|
||||
if(fs.existsSync(dir)) {
|
||||
var files = fs.readdirSync(dir);
|
||||
|
||||
files.forEach(function(entry) {
|
||||
var filePath = path.join(dir, entry);
|
||||
var stats = fs.statSync(filePath);
|
||||
|
||||
if(stats.isDirectory()) {
|
||||
if(entry.substr(0, 1) == "@") {
|
||||
// When we encounter a namespace folder, augment all packages belonging to the scope
|
||||
var pkgFiles = fs.readdirSync(filePath);
|
||||
|
||||
pkgFiles.forEach(function(entry) {
|
||||
if(stats.isDirectory()) {
|
||||
var pkgFilePath = path.join(filePath, entry);
|
||||
augmentPackageJSON(pkgFilePath, dependencies);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
augmentPackageJSON(filePath, dependencies);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
processDependencies("node_modules", lockObj.dependencies);
|
||||
|
||||
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
|
||||
'';
|
||||
};
|
||||
|
||||
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
|
||||
let
|
||||
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
|
||||
in
|
||||
''
|
||||
# Pinpoint the versions of all dependencies to the ones that are actually being used
|
||||
echo "pinpointing versions of dependencies..."
|
||||
source $pinpointDependenciesScriptPath
|
||||
|
||||
# Patch the shebangs of the bundled modules to prevent them from
|
||||
# calling executables outside the Nix store as much as possible
|
||||
patchShebangs .
|
||||
|
||||
# Deploy the Node.js package by running npm install. Since the
|
||||
# dependencies have been provided already by ourselves, it should not
|
||||
# attempt to install them again, which is good, because we want to make
|
||||
# it Nix's responsibility. If it needs to install any dependencies
|
||||
# anyway (e.g. because the dependency parameters are
|
||||
# incomplete/incorrect), it fails.
|
||||
#
|
||||
# The other responsibilities of NPM are kept -- version checks, build
|
||||
# steps, postprocessing etc.
|
||||
|
||||
export HOME=$TMPDIR
|
||||
cd "${packageName}"
|
||||
runHook preRebuild
|
||||
|
||||
${lib.optionalString bypassCache ''
|
||||
${lib.optionalString reconstructLock ''
|
||||
if [ -f package-lock.json ]
|
||||
then
|
||||
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
|
||||
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
|
||||
rm package-lock.json
|
||||
else
|
||||
echo "No package-lock.json file found, reconstructing..."
|
||||
fi
|
||||
|
||||
node ${reconstructPackageLock}
|
||||
''}
|
||||
|
||||
node ${addIntegrityFieldsScript}
|
||||
''}
|
||||
|
||||
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
|
||||
|
||||
if [ "''${dontNpmInstall-}" != "1" ]
|
||||
then
|
||||
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
|
||||
rm -f npm-shrinkwrap.json
|
||||
|
||||
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install
|
||||
fi
|
||||
'';
|
||||
|
||||
# Builds and composes an NPM package including all its dependencies
|
||||
buildNodePackage =
|
||||
{ name
|
||||
, packageName
|
||||
, version
|
||||
, dependencies ? []
|
||||
, buildInputs ? []
|
||||
, production ? true
|
||||
, npmFlags ? ""
|
||||
, dontNpmInstall ? false
|
||||
, bypassCache ? false
|
||||
, reconstructLock ? false
|
||||
, preRebuild ? ""
|
||||
, dontStrip ? true
|
||||
, unpackPhase ? "true"
|
||||
, buildPhase ? "true"
|
||||
, ... }@args:
|
||||
|
||||
let
|
||||
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" ];
|
||||
in
|
||||
stdenv.mkDerivation ({
|
||||
name = "node_${name}-${version}";
|
||||
buildInputs = [ tarWrapper python nodejs ]
|
||||
++ lib.optional (stdenv.isLinux) utillinux
|
||||
++ lib.optional (stdenv.isDarwin) libtool
|
||||
++ buildInputs;
|
||||
|
||||
inherit nodejs;
|
||||
|
||||
inherit dontStrip; # Stripping may fail a build for some package deployments
|
||||
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
|
||||
|
||||
compositionScript = composePackage args;
|
||||
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
|
||||
|
||||
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
|
||||
|
||||
installPhase = ''
|
||||
# Create and enter a root node_modules/ folder
|
||||
mkdir -p $out/lib/node_modules
|
||||
cd $out/lib/node_modules
|
||||
|
||||
# Compose the package and all its dependencies
|
||||
source $compositionScriptPath
|
||||
|
||||
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
|
||||
|
||||
# Create symlink to the deployed executable folder, if applicable
|
||||
if [ -d "$out/lib/node_modules/.bin" ]
|
||||
then
|
||||
ln -s $out/lib/node_modules/.bin $out/bin
|
||||
fi
|
||||
|
||||
# Create symlinks to the deployed manual page folders, if applicable
|
||||
if [ -d "$out/lib/node_modules/${packageName}/man" ]
|
||||
then
|
||||
mkdir -p $out/share
|
||||
for dir in "$out/lib/node_modules/${packageName}/man/"*
|
||||
do
|
||||
mkdir -p $out/share/man/$(basename "$dir")
|
||||
for page in "$dir"/*
|
||||
do
|
||||
ln -s $page $out/share/man/$(basename "$dir")
|
||||
done
|
||||
done
|
||||
fi
|
||||
|
||||
# Run post install hook, if provided
|
||||
runHook postInstall
|
||||
'';
|
||||
} // extraArgs);
|
||||
|
||||
# Builds a node environment (a node_modules folder and a set of binaries)
|
||||
buildNodeDependencies =
|
||||
{ name
|
||||
, packageName
|
||||
, version
|
||||
, src
|
||||
, dependencies ? []
|
||||
, buildInputs ? []
|
||||
, production ? true
|
||||
, npmFlags ? ""
|
||||
, dontNpmInstall ? false
|
||||
, bypassCache ? false
|
||||
, reconstructLock ? false
|
||||
, dontStrip ? true
|
||||
, unpackPhase ? "true"
|
||||
, buildPhase ? "true"
|
||||
, ... }@args:
|
||||
|
||||
let
|
||||
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
|
||||
in
|
||||
stdenv.mkDerivation ({
|
||||
name = "node-dependencies-${name}-${version}";
|
||||
|
||||
buildInputs = [ tarWrapper python nodejs ]
|
||||
++ lib.optional (stdenv.isLinux) utillinux
|
||||
++ lib.optional (stdenv.isDarwin) libtool
|
||||
++ buildInputs;
|
||||
|
||||
inherit dontStrip; # Stripping may fail a build for some package deployments
|
||||
inherit dontNpmInstall unpackPhase buildPhase;
|
||||
|
||||
includeScript = includeDependencies { inherit dependencies; };
|
||||
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
|
||||
|
||||
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out/${packageName}
|
||||
cd $out/${packageName}
|
||||
|
||||
source $includeScriptPath
|
||||
|
||||
# Create fake package.json to make the npm commands work properly
|
||||
cp ${src}/package.json .
|
||||
chmod 644 package.json
|
||||
${lib.optionalString bypassCache ''
|
||||
if [ -f ${src}/package-lock.json ]
|
||||
then
|
||||
cp ${src}/package-lock.json .
|
||||
fi
|
||||
''}
|
||||
|
||||
# Go to the parent folder to make sure that all packages are pinpointed
|
||||
cd ..
|
||||
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
|
||||
|
||||
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
|
||||
|
||||
# Expose the executables that were installed
|
||||
cd ..
|
||||
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
|
||||
|
||||
mv ${packageName} lib
|
||||
ln -s $out/lib/node_modules/.bin $out/bin
|
||||
'';
|
||||
} // extraArgs);
|
||||
|
||||
# Builds a development shell
|
||||
buildNodeShell =
|
||||
{ name
|
||||
, packageName
|
||||
, version
|
||||
, src
|
||||
, dependencies ? []
|
||||
, buildInputs ? []
|
||||
, production ? true
|
||||
, npmFlags ? ""
|
||||
, dontNpmInstall ? false
|
||||
, bypassCache ? false
|
||||
, reconstructLock ? false
|
||||
, dontStrip ? true
|
||||
, unpackPhase ? "true"
|
||||
, buildPhase ? "true"
|
||||
, ... }@args:
|
||||
|
||||
let
|
||||
nodeDependencies = buildNodeDependencies args;
|
||||
in
|
||||
stdenv.mkDerivation {
|
||||
name = "node-shell-${name}-${version}";
|
||||
|
||||
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
|
||||
buildCommand = ''
|
||||
mkdir -p $out/bin
|
||||
cat > $out/bin/shell <<EOF
|
||||
#! ${stdenv.shell} -e
|
||||
$shellHook
|
||||
exec ${stdenv.shell}
|
||||
EOF
|
||||
chmod +x $out/bin/shell
|
||||
'';
|
||||
|
||||
# Provide the dependencies in a development shell through the NODE_PATH environment variable
|
||||
inherit nodeDependencies;
|
||||
shellHook = lib.optionalString (dependencies != []) ''
|
||||
export NODE_PATH=${nodeDependencies}/lib/node_modules
|
||||
export PATH="${nodeDependencies}/bin:$PATH"
|
||||
'';
|
||||
};
|
||||
in
|
||||
{
|
||||
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
|
||||
buildNodePackage = lib.makeOverridable buildNodePackage;
|
||||
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
|
||||
buildNodeShell = lib.makeOverridable buildNodeShell;
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@ -1,21 +0,0 @@
|
||||
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
# This file has been generated by node2nix 1.9.0. Do not edit!
|
||||
|
||||
{pkgs ? import <nixpkgs> {
|
||||
inherit system;
|
||||
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-14_x"}:
|
||||
|
||||
let
|
||||
nodeEnv = import ./node-env.nix {
|
||||
inherit (pkgs) stdenv lib python2 runCommand writeTextFile;
|
||||
inherit pkgs nodejs;
|
||||
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
|
||||
};
|
||||
in
|
||||
import ./node-packages.nix {
|
||||
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
|
||||
inherit nodeEnv;
|
||||
}
|
||||
@ -1,17 +0,0 @@
|
||||
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
{}: prev: final: rec {
|
||||
changelogJson = prev.runCommand "changelog.json" {
|
||||
} ''
|
||||
ln -s ${final.uniworxNodeDependencies}/lib/node_modules ./node_modules
|
||||
export PATH="${final.uniworxNodeDependencies}/bin:$PATH"
|
||||
|
||||
changelog-parser ${../CHANGELOG.md} > $out
|
||||
'';
|
||||
|
||||
jqChangelogJson = prev.writeShellScriptBin "jq-changelog" ''
|
||||
exec -- ${final.jq}/bin/jq $@ < ${changelogJson}
|
||||
'';
|
||||
}
|
||||
@ -1,95 +0,0 @@
|
||||
# SPDX-FileCopyrightText: 2022-2023 Gregor Kleen <gregor@kleen.consulting>, Steffen Jost <jost@cip.ifi.lmu.de>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
{ inputs, backendSource, gitRevision ? null, ... }: final: prev:
|
||||
|
||||
with prev.lib;
|
||||
|
||||
let
|
||||
haskellInputs = ["encoding" "memcached-binary" "conduit-resumablesink" "HaskellNet-SSL" "ldap-client" "serversession" "xss-sanitize" "colonnade" "minio-hs" "cryptoids" "zip-stream" "yesod" "cryptonite" "esqueleto"];
|
||||
in {
|
||||
uniworx = final.haskell-nix.stackProject {
|
||||
src = prev.stdenv.mkDerivation {
|
||||
name = "uniworx-src";
|
||||
src = backendSource;
|
||||
|
||||
phases = ["unpackPhase" "patchPhase" "installPhase"];
|
||||
|
||||
patchPhase = ''
|
||||
substitute stack-flake.yaml stack.yaml \
|
||||
${concatMapStringsSep " \\\n" (pkgName: "--replace @${pkgName}@ ${inputs."${pkgName}"}") haskellInputs}
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out
|
||||
cp -pr --reflink=auto ./. $out
|
||||
'';
|
||||
};
|
||||
compiler-nix-name = "ghc8104";
|
||||
# stack-sha256 = "1n7z294ldv2rjkfj1vs3kqmnbp34m2scrmyrp5kwmga9vp86fd9z"; # produces errors gregor does not understand :(
|
||||
modules = [
|
||||
{
|
||||
packages = {
|
||||
encoding.src = inputs.encoding;
|
||||
memcached-binary.src = inputs.memcached-binary;
|
||||
conduit-resumablesink.src = inputs.conduit-resumablesink;
|
||||
HaskellNet-SSL.src = inputs.HaskellNet-SSL;
|
||||
ldap-client.src = inputs.ldap-client;
|
||||
serversession.src = "${inputs.serversession}/serversession";
|
||||
serversession-backend-acid-state.src = "${inputs.serversession}/serversession-backend-acid-state";
|
||||
xss-sanitize.src = inputs.xss-sanitize;
|
||||
colonnade.src = "${inputs.colonnade}/colonnade";
|
||||
minio-hs.src = inputs.minio-hs;
|
||||
cryptoids-class.src = "${inputs.cryptoids}/cryptoids-class";
|
||||
cryptoids-types.src = "${inputs.cryptoids}/cryptoids-types";
|
||||
cryptoids.src = "${inputs.cryptoids}/cryptoids";
|
||||
filepath-crypto.src = "${inputs.cryptoids}/filepath-crypto";
|
||||
uuid-crypto.src = "${inputs.cryptoids}/uuid-crypto";
|
||||
zip-stream.src = inputs.zip-stream;
|
||||
yesod.src = "${inputs.yesod}/yesod";
|
||||
yesod-core.src = "${inputs.yesod}/yesod-core";
|
||||
yesod-static.src = "${inputs.yesod}/yesod-static";
|
||||
yesod-persistent.src = "${inputs.yesod}/yesod-persistent";
|
||||
yesod-form.src = "${inputs.yesod}/yesod-form";
|
||||
yesod-auth.src = "${inputs.yesod}/yesod-auth";
|
||||
yesod-test.src = "${inputs.yesod}/yesod-test";
|
||||
cryptonite.src = inputs.cryptonite;
|
||||
esqueleto.src = inputs.esqueleto;
|
||||
};
|
||||
}
|
||||
{
|
||||
packages.uniworx = {
|
||||
postUnpack = ''
|
||||
${final.xorg.lndir}/bin/lndir -silent ${prev.uniworxFrontend} $sourceRoot
|
||||
chmod a+w -R $sourceRoot
|
||||
'';
|
||||
preBuild = ''
|
||||
export TZDIR=${final.tzdata}/share/zoneinfo
|
||||
${optionalString (gitRevision != null) ''
|
||||
export GIT_REVISION=${gitRevision}
|
||||
''}
|
||||
'';
|
||||
components.library.build-tools = with final.pkgs; [ llvm_9 ];
|
||||
components.exes.uniworx.build-tools = with final.pkgs; [ llvm_9 ];
|
||||
components.exes.uniworxdb.build-tools = with final.pkgs; [ llvm_9 ];
|
||||
components.exes.uniworxload.build-tools = with final.pkgs; [ llvm_9 ];
|
||||
components.tests.yesod = {
|
||||
build-tools = with final.pkgs; [ llvm_9 final.uniworx.hsPkgs.hspec-discover ];
|
||||
testWrapper =
|
||||
let
|
||||
testWrapper = prev.writeScript "test-wrapper" (import ../develop.nix { inherit prev; pkgs = final; doDevelopEnv = false; } "$@");
|
||||
testWrapperWrapped = prev.runCommand "test-wrapper" { buildInputs = [final.makeWrapper]; } ''
|
||||
makeWrapper ${testWrapper} $out \
|
||||
--prefix PATH : ${final.postgresql_12}/bin \
|
||||
--prefix PATH : ${final.minio}/bin \
|
||||
--prefix PATH : ${final.memcached}/bin
|
||||
'';
|
||||
in singleton (toString testWrapperWrapped);
|
||||
};
|
||||
components.tests.hlint.build-tools = with final.pkgs; [ llvm_9 final.uniworx.hsPkgs.hlint-test ];
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
}
|
||||
@ -1,10 +0,0 @@
|
||||
# SPDX-FileCopyrightText: 2022-2023 Gregor Kleen <gregor@kleen.consulting>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
{ inputs, frontendSource, backendSource, gitRevision ? null }: final: prev: prev.lib.composeManyExtensions [
|
||||
(import ./node-dependencies.nix { inherit inputs; })
|
||||
(import ./well-known.nix { inherit frontendSource; })
|
||||
(import ./frontend.nix { inherit frontendSource; })
|
||||
(import ./backend.nix { inherit backendSource inputs gitRevision; })
|
||||
] final prev
|
||||
@ -1,62 +0,0 @@
|
||||
# SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
{ frontendSource, ... }: final: prev:
|
||||
let
|
||||
setupNodeDeps = ''
|
||||
ln -s ${final.uniworxNodeDependencies}/lib/node_modules ./node_modules
|
||||
export PATH="${final.uniworxNodeDependencies}/bin:$PATH"
|
||||
'';
|
||||
in {
|
||||
uniworxFrontend = prev.stdenv.mkDerivation {
|
||||
name = "uniworx-frontend";
|
||||
srcs = [frontendSource prev.uniworxWellKnown];
|
||||
sourceRoot = "source";
|
||||
|
||||
phases = ["unpackPhase" "checkPhase" "buildPhase" "installPhase"];
|
||||
|
||||
postUnpack = ''
|
||||
${final.xorg.lndir}/bin/lndir -silent ../uniworx-well-known $sourceRoot
|
||||
'';
|
||||
|
||||
preBuild = setupNodeDeps;
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
|
||||
webpack --progress
|
||||
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
preCheck = ''
|
||||
${setupNodeDeps}
|
||||
export FONTCONFIG_FILE="${final.fontconfig.out}/etc/fonts/fonts.conf"
|
||||
export FONTCONFIG_PATH="${final.fontconfig.out}/etc/fonts/"
|
||||
export CHROME_BIN="${final.chromium}/bin/chromium-browser"
|
||||
'';
|
||||
checkPhase = ''
|
||||
runHook preCheck
|
||||
|
||||
eslint frontend/src
|
||||
karma start --conf karma.conf.js
|
||||
|
||||
runHook postCheck
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out $out/config
|
||||
cp -r --reflink=auto well-known static $out
|
||||
cp -r --reflink=auto config/webpack.yml $out/config
|
||||
'';
|
||||
|
||||
passthru.check = final.uniworxFrontend.overrideAttrs (oldAttrs: {
|
||||
name = "${oldAttrs.name}-check";
|
||||
phases = ["unpackPhase" "buildPhase"];
|
||||
buildPhase = ''
|
||||
mkdir $out
|
||||
( ${oldAttrs.checkPhase} ) | tee $out/test-stdout
|
||||
'';
|
||||
});
|
||||
};
|
||||
}
|
||||
@ -1,7 +0,0 @@
|
||||
# SPDX-FileCopyrightText: 2022-2023 Gregor Kleen <gregor@kleen.consulting>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
{ ... }: final: prev: {
|
||||
uniworxNodeDependencies = (prev.callPackage ../frontend {}).nodeDependencies;
|
||||
}
|
||||
@ -1,26 +0,0 @@
|
||||
# SPDX-FileCopyrightText: 2022-2023 Gregor Kleen <gregor@kleen.consulting>
|
||||
#
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
{ frontendSource, ... }: final: prev: {
|
||||
uniworxWellKnown = prev.stdenv.mkDerivation {
|
||||
name = "uniworx-well-known";
|
||||
src = frontendSource;
|
||||
|
||||
phases = ["unpackPhase" "buildPhase" "installPhase" "fixupPhase"];
|
||||
|
||||
buildPhase = ''
|
||||
ln -s ${final.uniworxNodeDependencies}/lib/node_modules ./node_modules
|
||||
export PATH="${final.uniworxNodeDependencies}/bin:${prev.exiftool}/bin:$PATH"
|
||||
webpack --progress
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out
|
||||
cp -r --reflink=auto well-known $out/.nix-well-known
|
||||
'';
|
||||
|
||||
outputHashMode = "recursive";
|
||||
outputHash = "sha256-18MfdmJX3q826Q4p2F3SnwS2fCjLN0LUpIV/jqUPH4I==";
|
||||
};
|
||||
}
|
||||
26013
package-lock.json
generated
26013
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user