chore(merge): merge branch 'master' into add_company_admins to update develop

This commit is contained in:
Steffen Jost 2021-09-27 15:30:02 +02:00
commit 650a2abebd
39 changed files with 19024 additions and 646 deletions

4
..develop.cmd Executable file
View File

@ -0,0 +1,4 @@
#!/usr/bin/env bash
echo "Develop running... Press Enter to terminate"
read

2
.gitignore vendored
View File

@ -39,6 +39,7 @@ tunnel.log
/static
/well-known
/.well-known-cache
/.nix-well-known
/**/tmp-*
/testdata/bigAlloc_*.csv
/sessions
@ -49,3 +50,4 @@ tunnel.log
.develop.env
**/result
**/result-*
.develop.cmd

View File

@ -6,383 +6,324 @@ workflow:
default:
image:
name: fpco/stack-build:lts-17.15
name: registry.gitlab.com/fradrive/fradrive/nix-unstable:0.1.0
variables:
STACK_ROOT: "${CI_PROJECT_DIR}/.stack"
CHROME_BIN: "/usr/bin/chromium-browser"
POSTGRES_DB: uniworx_test
POSTGRES_USER: uniworx
POSTGRES_PASSWORD: uniworx
MINIO_ACCESS_KEY: gOel7KvadwNKgjjy
MINIO_SECRET_KEY: ugO5pkEla7F0JW9MdPwLi4MWLT5ZbqAL
UPLOAD_S3_HOST: localhost
UPLOAD_S3_PORT: 9000
UPLOAD_S3_KEY_ID: gOel7KvadwNKgjjy
UPLOAD_S3_KEY: ugO5pkEla7F0JW9MdPwLi4MWLT5ZbqAL
N_PREFIX: "${HOME}/.n"
PACKAGE_REGISTRY_URL: "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/uni2work"
NIX_PATH: "nixpkgs=http://nixos.org/channels/nixos-21.05/nixexprs.tar.xz"
AWS_SHARED_CREDENTIALS_FILE: "/etc/aws/credentials"
TRANSFER_METER_FREQUENCY: "2s"
stages:
- setup
- frontend:build
- yesod:build
- lint
- backend:build
- test
- docker
- prepare release
- upload packages
- release
# - deploy
npm install:
cache:
- &npm-cache
key: default-npm
paths:
- .npm
- node_modules
stage: setup
script:
- ./.npmrc.gup
- npm ci --cache .npm --prefer-offline
before_script: &npm
- rm -rvf /etc/apt/sources.list /etc/apt/sources.list.d
- install -v -T -m 0644 ${APT_SOURCES_LIST} /etc/apt/sources.list
- apt-get update -y
- npm install -g n
- n 14.8.0
- export PATH="${N_PREFIX}/bin:$PATH"
- npm install -g npm
- hash -r
- apt-get -y install openssh-client exiftool
- install -v -m 0700 -d ~/.ssh
- install -v -T -m 0644 ${SSH_KNOWN_HOSTS} ~/.ssh/known_hosts
- install -v -T -m 0400 ${SSH_DEPLOY_KEY} ~/.ssh/deploy && echo "IdentityFile ~/.ssh/deploy" >> ~/.ssh/config;
artifacts:
paths:
- node_modules/
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
expire_in: "1 day"
retry: 2
interruptible: true
frontend:build:
cache:
- &frontend-cache
key: default-frontend
paths:
- .well-known-cache
node dependencies:
stage: frontend:build
script:
- npm run frontend:build
before_script: *npm
needs:
- job: npm install
artifacts: true
artifacts:
paths:
- static
- well-known
- config/webpack.yml
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
expire_in: "1 day"
retry: 2
interruptible: true
frontend:lint:
stage: lint
script:
- npm run frontend:lint
before_script: *npm
needs:
- job: npm install
artifacts: true
retry: 2
interruptible: true
yesod:build:dev:
cache:
- &stack-dev-cache
key: default-stack-dev
paths:
- .stack
- .stack-work
stage: yesod:build
script:
- stack build --test --copy-bins --local-bin-path $(pwd)/bin --fast --flag uniworx:-library-only --flag uniworx:dev --flag uniworx:pedantic --no-strip --no-run-tests
- cp $(stack path --dist-dir)/build/hlint/hlint bin/test-hlint
- cp $(stack path --dist-dir)/build/yesod/yesod bin/test-yesod
needs:
- job: npm install # transitive
artifacts: false
- job: frontend:build
artifacts: true
before_script: &haskell
- rm -rvf /etc/apt/sources.list /etc/apt/sources.list.d
- install -v -T -m 0644 ${APT_SOURCES_LIST} /etc/apt/sources.list
- apt-get update -y
- apt-get install -y --no-install-recommends locales-all openssh-client git-restore-mtime
- wget http://newmirror.rz.ifi.lmu.de/ubuntu/archive/pool/main/libs/libsodium/libsodium-dev_1.0.18-1_amd64.deb http://newmirror.rz.ifi.lmu.de/ubuntu/archive/pool/main/libs/libsodium/libsodium23_1.0.18-1_amd64.deb
- apt install ./libsodium23_1.0.18-1_amd64.deb ./libsodium-dev_1.0.18-1_amd64.deb
- rm -v libsodium23_1.0.18-1_amd64.deb libsodium-dev_1.0.18-1_amd64.deb
- source .gitlab-ci/construct-flake-url.sh
- nix -L build -o result "${FLAKE}#uniworxNodeDependencies"
- nix-store --export $(nix-store -qR result) | xz -T0 -2 > node-dependencies.nar.xz
before_script: &nix-before
- mkdir -p /etc/nix
- install -m 0644 .gitlab-ci/nix-bootstrap.conf /etc/nix/nix.conf
- .gitlab-ci/write-minio-creds.sh
- .gitlab-ci/make-minio-bucket.sh
- cp -pr --reflink=auto -L .gitlab-ci/upload-to-cache.sh /etc/nix/upload-to-cache.sh
- install -m 0644 .gitlab-ci/nix.conf /etc/nix/nix.conf
- git config --global init.defaultBranch master
- install -v -m 0700 -d ~/.ssh
- install -v -T -m 0644 ${SSH_KNOWN_HOSTS} ~/.ssh/known_hosts
- install -v -T -m 0400 ${SSH_DEPLOY_KEY} ~/.ssh/deploy && echo "IdentityFile ~/.ssh/deploy" >> ~/.ssh/config;
- stack install happy
- export PATH="${HOME}/.local/bin:$PATH"
- hash -r
- git restore-mtime
- install -v -T -m 0644 "${SSH_KNOWN_HOSTS}" ~/.ssh/known_hosts
- install -v -T -m 0400 "${SSH_DEPLOY_KEY}" ~/.ssh/deploy && echo "IdentityFile ~/.ssh/deploy" >> ~/.ssh/config;
- install -v -T -m 0644 "${FONTAWESOME_NPM_AUTH_FILE}" /etc/fontawesome-token
- |
if [ -n "${REGISTRY_AUTHFILE}" ]; then
mkdir -p ~/.docker
install -v -T -m 0400 "${REGISTRY_AUTHFILE}" ~/.docker/config.json
fi
artifacts:
paths:
- bin/
- node-dependencies.nar.xz
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
expire_in: "1 day"
rules:
- if: $CI_COMMIT_REF_NAME =~ /(^v[0-9].*)|((^|\/)profile($|\/))/
when: manual
allow_failure: true
- when: on_success
retry: 2
interruptible: true
yesod:build:
cache:
- &stack-cache
key: default-stack
paths:
- .stack
- .stack-work
stage: yesod:build
well known:
stage: frontend:build
script:
- stack build --test --copy-bins --local-bin-path $(pwd)/bin --flag uniworx:-library-only --flag uniworx:-dev --flag uniworx:pedantic --no-strip --no-run-tests
- cp $(stack path --dist-dir)/build/hlint/hlint bin/test-hlint
- cp $(stack path --dist-dir)/build/yesod/yesod bin/test-yesod
- xzcat node-dependencies.nar.xz | nix-store --import
- source .gitlab-ci/construct-flake-url.sh
- nix -L build -o result "${FLAKE}#uniworxWellKnown"
- nix-store --export $(nix-store -qR result) | xz -T0 -2 > well-known.nar.xz
before_script: *nix-before
needs:
- job: npm install # transitive
artifacts: false
- job: frontend:build
- job: node dependencies
artifacts: true
before_script: *haskell
artifacts:
paths:
- bin/
- well-known.nar.xz
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
expire_in: "1 day"
rules:
- if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/
when: on_success
- when: manual
allow_failure: true
retry: 2
interruptible: true
resource_group: ram
yesod:build:profile:
cache:
- &stack-profile-cache
key: default-stack-profile
paths:
- .stack
- .stack-work
stage: yesod:build
frontend:
stage: frontend:build
script:
- stack build --profile --copy-bins --local-bin-path $(pwd)/bin --flag uniworx:-library-only --flag uniworx:-dev --flag uniworx:pedantic --no-strip
- xzcat node-dependencies.nar.xz | nix-store --import
- xzcat well-known.nar.xz | nix-store --import
- source .gitlab-ci/construct-flake-url.sh
- nix -L build -o result "${FLAKE}#uniworxFrontend"
- nix-store --export $(nix-store -qR result) | xz -T0 -2 > frontend.nar.xz
before_script: *nix-before
needs:
- job: npm install # transitive
artifacts: false
- job: frontend:build
- job: node dependencies
artifacts: true
before_script: *haskell
artifacts:
paths:
- bin/
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
expire_in: "1 day"
rules:
- if: $CI_COMMIT_REF_NAME =~ /(^|\/)profile($|\/)/
when: on_success
- when: manual
allow_failure: true
retry: 2
interruptible: true
resource_group: ram
yesod:test:yesod:
stage: test
services: &test-services
- name: postgres:10.10
alias: postgres
- name: minio/minio:RELEASE.2020-08-27T05-16-20Z
alias: minio
command: ["minio", "server", "/data"]
needs:
- job: npm install # transitive
artifacts: false
- job: frontend:build #transitive
artifacts: false
- job: yesod:build
- job: well known
artifacts: true
rules:
- if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/
when: on_success
script:
- bin/test-yesod
artifacts:
paths:
- frontend.nar.xz
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
expire_in: "1 day"
retry: 2
interruptible: true
yesod:test:yesod:dev:
stage: test
services: *test-services
uniworx:lib:uniworx:
stage: backend:build
script:
- xzcat frontend.nar.xz | nix-store --import
- source .gitlab-ci/construct-flake-url.sh
- nix -L build -o result "${FLAKE}#uniworx:lib:uniworx"
- nix-store --export $(nix-store -qR result) | xz -T0 -2 > uniworx:lib:uniworx.nar.xz
before_script: *nix-before
needs:
- job: npm install # transitive
- job: node dependencies # transitive
artifacts: false
- job: frontend:build #transitive
- job: well known # transitive
artifacts: false
- job: yesod:build:dev
- job: frontend
artifacts: true
rules:
- if: $CI_COMMIT_REF_NAME =~ /(^v[0-9].*)|((^|\/)profile($|\/))/
when: manual
allow_failure: true
- when: on_success
script:
- bin/test-yesod
retry: 2
interruptible: true
yesod:test:hlint:
stage: lint
cache: &hlint_cache
key: hlint
paths:
- .stack
- .stack-work
needs:
- job: npm install # transitive
artifacts: false
- job: frontend:build #transitive
artifacts: false
- job: yesod:build
artifacts: false
rules:
- if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/
when: on_success
before_script: *haskell
script:
- stack install hlint
- stack exec -- hlint --cc src > gl-code-quality-report.json || jq . gl-code-quality-report.json
artifacts:
paths:
- gl-code-quality-report.json
- uniworx:lib:uniworx.nar.xz
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
expire_in: "1 day"
reports:
codequality: gl-code-quality-report.json
retry: 2
interruptible: true
yesod:test:hlint:dev:
stage: lint
cache: *hlint_cache
needs:
- job: npm install # transitive
artifacts: false
- job: frontend:build #transitive
artifacts: false
- job: yesod:build:dev
artifacts: false
rules:
- if: $CI_COMMIT_REF_NAME =~ /(^v[0-9].*)|((^|\/)profile($|\/))/
when: manual
allow_failure: true
- when: on_success
before_script: *haskell
uniworx:exe:uniworx:
stage: backend:build
script:
- stack install hlint
- stack exec -- hlint --cc src > gl-code-quality-report.json || jq . gl-code-quality-report.json
- xzcat uniworx:lib:uniworx.nar.xz | nix-store --import
- source .gitlab-ci/construct-flake-url.sh
- nix -L build -o result "${FLAKE}#uniworx:exe:uniworx"
- nix-store --export $(nix-store -qR result) | xz -T0 -2 > uniworx:exe:uniworx.nar.xz
before_script: *nix-before
needs:
- job: node dependencies # transitive
artifacts: false
- job: well known # transitive
artifacts: false
- job: frontend # transitive
artifacts: false
- job: uniworx:lib:uniworx
artifacts: true
artifacts:
paths:
- gl-code-quality-report.json
- uniworx:exe:uniworx.nar.xz
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
expire_in: "1 day"
reports:
codequality: gl-code-quality-report.json
retry: 2
interruptible: true
frontend:test:
uniworx:exe:uniworx-wflint:
stage: backend:build
script:
- xzcat uniworx:lib:uniworx.nar.xz | nix-store --import
- source .gitlab-ci/construct-flake-url.sh
- nix -L build -o result "${FLAKE}#uniworx:exe:uniworx-wflint"
- nix-store --export $(nix-store -qR result) | xz -T0 -2 > uniworx:exe:uniworx-wflint.nar.xz
before_script: *nix-before
needs:
- job: node dependencies # transitive
artifacts: false
- job: well known # transitive
artifacts: false
- job: frontend # transitive
artifacts: false
- job: uniworx:lib:uniworx
artifacts: true
artifacts:
paths:
- uniworx:exe:uniworx-wflint.nar.xz
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
expire_in: "1 day"
retry: 2
interruptible: true
uniworx:exe:uniworxdb:
stage: backend:build
script:
- xzcat uniworx:lib:uniworx.nar.xz | nix-store --import
- source .gitlab-ci/construct-flake-url.sh
- nix -L build -o result "${FLAKE}#uniworx:exe:uniworxdb"
- nix-store --export $(nix-store -qR result) | xz -T0 -2 > uniworx:exe:uniworxdb.nar.xz
before_script: *nix-before
needs:
- job: node dependencies # transitive
artifacts: false
- job: well known # transitive
artifacts: false
- job: frontend # transitive
artifacts: false
- job: uniworx:lib:uniworx
artifacts: true
artifacts:
paths:
- uniworx:exe:uniworxdb.nar.xz
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
expire_in: "1 day"
retry: 2
interruptible: true
uniworx:exe:uniworxload:
stage: backend:build
script:
- xzcat uniworx:lib:uniworx.nar.xz | nix-store --import
- source .gitlab-ci/construct-flake-url.sh
- nix -L build -o result "${FLAKE}#uniworx:exe:uniworxload"
- nix-store --export $(nix-store -qR result) | xz -T0 -2 > uniworx:exe:uniworxload.nar.xz
before_script: *nix-before
needs:
- job: node dependencies # transitive
artifacts: false
- job: well known # transitive
artifacts: false
- job: frontend # transitive
artifacts: false
- job: uniworx:lib:uniworx
artifacts: true
artifacts:
paths:
- uniworx:exe:uniworxload.nar.xz
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
expire_in: "1 day"
retry: 2
interruptible: true
check:
stage: test
script:
- npm run frontend:test
- xzcat frontend.nar.xz | nix-store --import
- xzcat uniworx:lib:uniworx.nar.xz | nix-store --import
- source .gitlab-ci/construct-flake-url.sh
- nix -L flake check ${FLAKE}
before_script: *nix-before
needs:
- job: npm install
- job: node dependencies # transitive
artifacts: false
- job: well known # transitive
artifacts: false
- job: frontend
artifacts: true
- job: uniworx:lib:uniworx
artifacts: true
before_script:
- rm -rvf /etc/apt/sources.list /etc/apt/sources.list.d
- install -v -T -m 0644 ${APT_SOURCES_LIST} /etc/apt/sources.list
- apt-get update -y
- npm install -g n
- n 14.8.0
- export PATH="${N_PREFIX}/bin:$PATH"
- npm install -g npm
- hash -r
- apt-get install -y --no-install-recommends chromium-browser
retry: 2
interruptible: true
parse-changelog:
demo docker:
stage: docker
script:
- xzcat uniworx:exe:uniworx.nar.xz | nix-store --import
- xzcat uniworx:exe:uniworxdb.nar.xz | nix-store --import
- source .gitlab-ci/construct-flake-url.sh
- nix -L build -o result "${FLAKE}#uniworxDemoDocker"
- cp -pr --reflink=auto -L result uniworx-demo.tar.gz
before_script: *nix-before
needs:
- job: node dependencies # transitive
artifacts: false
- job: well known # transitive
artifacts: false
- job: frontend # transitive
artifacts: false
- job: uniworx:lib:uniworx # transitive
artifacts: false
- job: uniworx:exe:uniworx
artifacts: true
- job: uniworx:exe:uniworxdb
artifacts: true
- job: check # sanity
artifacts: false
artifacts:
paths:
- uniworx-demo.tar.gz
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
expire_in: "1 day"
retry: 2
interruptible: true
rules:
- if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/
when: manual
allow_failure: true
docker:
stage: docker
script:
- xzcat uniworx:exe:uniworx.nar.xz | nix-store --import
- source .gitlab-ci/construct-flake-url.sh
- nix -L build -o result "${FLAKE}#uniworxDocker"
- cp -pr --reflink=auto -L result uniworx.tar.gz
before_script: *nix-before
needs:
- job: node dependencies # transitive
artifacts: false
- job: well known # transitive
artifacts: false
- job: frontend # transitive
artifacts: false
- job: uniworx:lib:uniworx # transitive
artifacts: false
- job: uniworx:exe:uniworx
artifacts: true
- job: check # sanity
artifacts: false
artifacts:
paths:
- uniworx.tar.gz
name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}"
expire_in: "1 day"
retry: 2
interruptible: true
rules:
- if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/
when: manual
allow_failure: true
parse changelog:
stage: prepare release
needs:
- job: npm install
- job: node dependencies
artifacts: true
rules:
- if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/
when: on_success
before_script:
- rm -rvf /etc/apt/sources.list /etc/apt/sources.list.d
- install -v -T -m 0644 ${APT_SOURCES_LIST} /etc/apt/sources.list
- apt-get update -y
- apt-get install -y --no-install-recommends jq
before_script: *nix-before
script:
- npm run parse-changelog
- |
jq -r '.versions[0].version' changelog.json > .current-version
- |
jq -r '.versions[0].body' changelog.json > .current-changelog.md
- xzcat node-dependencies.nar.xz | nix-store --import
- source .gitlab-ci/construct-flake-url.sh
- nix -L run "${FLAKE}#jqChangelogJson" -- -r '.versions[0].version' > .current-version
- nix -L run "${FLAKE}#jqChangelogJson" -- -r '.versions[0].body' > .current-changelog.md
- echo "VERSION=$(cat .current-version)" >> build.env
artifacts:
reports:
dotenv: build.env
paths:
- .current-version
- .current-changelog.md
@ -391,103 +332,69 @@ parse-changelog:
retry: 2
interruptible: true
upload:
variables:
GIT_STRATEGY: none
stage: upload packages
image: curlimages/curl:latest
upload docker:
stage: docker
script:
- xzcat uniworx:exe:uniworx.nar.xz | nix-store --import
- source .gitlab-ci/construct-flake-url.sh
- nix -L run "${FLAKE}#pushUniworxDocker"
before_script: *nix-before
needs:
- job: npm install # transitive
- job: node dependencies # transitive
artifacts: false
- job: frontend:build # transitive
- job: well known # transitive
artifacts: false
- job: yesod:build
- job: frontend # transitive
artifacts: false
- job: uniworx:lib:uniworx # transitive
artifacts: false
- job: uniworx:exe:uniworx
artifacts: true
- job: parse-changelog
artifacts: true
- job: frontend:lint # validation
artifacts: false
- job: frontend:test # validation
artifacts: false
- job: yesod:test:hlint # validation
artifacts: false
- job: yesod:test:yesod # validation
- job: check # sanity
artifacts: false
rules:
- if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/
when: on_success
before_script:
- export VERSION="$(cat .current-version)"
upload demo docker:
stage: docker
script:
- |
curl --header "JOB-TOKEN: ${CI_JOB_TOKEN}" --upload-file bin/uniworx ${PACKAGE_REGISTRY_URL}/${VERSION}/uniworx
- |
curl --header "JOB-TOKEN: ${CI_JOB_TOKEN}" --upload-file bin/uniworxdb ${PACKAGE_REGISTRY_URL}/${VERSION}/uniworxdb
- |
curl --header "JOB-TOKEN: ${CI_JOB_TOKEN}" --upload-file bin/uniworxload ${PACKAGE_REGISTRY_URL}/${VERSION}/uniworxload
- |
curl --header "JOB-TOKEN: ${CI_JOB_TOKEN}" --upload-file bin/uniworx-wflint ${PACKAGE_REGISTRY_URL}/${VERSION}/uniworx-wflint
- xzcat uniworx:exe:uniworx.nar.xz | nix-store --import
- xzcat uniworx:exe:uniworxdb.nar.xz | nix-store --import
- source .gitlab-ci/construct-flake-url.sh
- nix -L run "${FLAKE}#pushUniworxDemoDocker"
before_script: *nix-before
needs:
- job: node dependencies # transitive
artifacts: false
- job: well known # transitive
artifacts: false
- job: frontend # transitive
artifacts: false
- job: uniworx:lib:uniworx # transitive
artifacts: false
- job: uniworx:exe:uniworx
artifacts: true
- job: uniworx:exe:uniworxdb
artifacts: true
- job: check # sanity
artifacts: false
rules:
- if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/
when: on_success
release:
variables:
GIT_STRATEGY: none
stage: release
image: registry.gitlab.com/gitlab-org/release-cli:latest
needs:
- job: upload
artifacts: false
- job: npm install # transitive
artifacts: false
- job: frontend:build # transitive
artifacts: false
- job: yesod:build # transitive
artifacts: false
- job: parse-changelog
artifacts: true
- job: frontend:lint # validation
artifacts: false
- job: frontend:test # validation
artifacts: false
- job: yesod:test:hlint # validation
artifacts: false
- job: yesod:test:yesod # validation
artifacts: false
rules:
- if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/
when: on_success
before_script:
- export VERSION="$(cat .current-version)"
script:
- |
release-cli create --name "${VERSION}" --tag-name $CI_COMMIT_TAG --description .current-changelog.md \
--assets-link "{\"name\":\"uniworx\",\"url\":\"${PACKAGE_REGISTRY_URL}/${VERSION}/uniworx\",\"filepath\":\"/uniworx\"}" \
--assets-link "{\"name\":\"uniworxdb\",\"url\":\"${PACKAGE_REGISTRY_URL}/${VERSION}/uniworxdb\",\"filepath\":\"/uniworxdb\"}" \
--assets-link "{\"name\":\"uniworxload\",\"url\":\"${PACKAGE_REGISTRY_URL}/${VERSION}/uniworxload\",\"filepath\":\"/uniworxload\"}" \
--assets-link "{\"name\":\"uniworx-wflint\",\"url\":\"${PACKAGE_REGISTRY_URL}/${VERSION}/uniworx-wflint\",\"filepath\":\"/uniworx-wflint\"}"
# deploy:uniworx3:
# cache: {}
# stage: deploy
# variables:
# GIT_STRATEGY: none
# script:
# - zip -qj - bin/uniworx bin/uniworxdb | ssh root@uniworx3.ifi.lmu.de /root/bin/accept_uni2work
# needs:
# - yesod:build
# - frontend:test # For sanity
# before_script:
# - rm -rvf /etc/apt/sources.list /etc/apt/sources.list.d
# - install -v -T -m 0644 ${APT_SOURCES_LIST} /etc/apt/sources.list
# - apt-get update -y
# - apt-get install -y --no-install-recommends openssh-client
# - install -v -m 0700 -d ~/.ssh
# - install -v -T -m 0644 ${SSH_KNOWN_HOSTS} ~/.ssh/known_hosts
# - install -v -T -m 0400 ${SSH_PRIVATE_KEY_UNIWORX3} ~/.ssh/uniworx3; echo "IdentityFile ~/.ssh/uniworx3" >> ~/.ssh/config;
# dependencies:
# - yesod:build
# only:
# variables:
# - $CI_COMMIT_REF_NAME =~ /^v[0-9].*/
# resource_group: uniworx3
- release-cli create --name "${VERSION}" --tag-name $CI_COMMIT_TAG --description .current-changelog.md
needs:
- job: check # sanity
artifacts: false
- job: parse changelog
artifacts: true

View File

@ -0,0 +1,6 @@
if [ -n "${CI_COMMIT_TAG}" ]; then
ref="refs/tags/${CI_COMMIT_TAG}"
else
ref="refs/heads/${CI_COMMIT_BRANCH}"
fi
export FLAKE="git+${CI_REPOSITORY_URL}?rev=${CI_COMMIT_SHA}&ref=${ref}"

View File

@ -0,0 +1,25 @@
diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc
index 514ab3bf9..25dab18bb 100644
--- a/src/libstore/filetransfer.cc
+++ b/src/libstore/filetransfer.cc
@@ -696,6 +696,8 @@ struct curlFileTransfer : public FileTransfer
std::string scheme = get(params, "scheme").value_or("");
std::string endpoint = get(params, "endpoint").value_or("");
+ debug("enqueueFileTransfer: scheme: %s", scheme);
+
S3Helper s3Helper(profile, region, scheme, endpoint);
// FIXME: implement ETag
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
index 6bfbee044..ff406e5e4 100644
--- a/src/libstore/s3-binary-cache-store.cc
+++ b/src/libstore/s3-binary-cache-store.cc
@@ -126,6 +126,7 @@ ref<Aws::Client::ClientConfiguration> S3Helper::makeConfig(const string & region
initAWS();
auto res = make_ref<Aws::Client::ClientConfiguration>();
res->region = region;
+ debug("configuring scheme %s", scheme);
if (!scheme.empty()) {
res->scheme = Aws::Http::SchemeMapper::FromString(scheme.c_str());
}

View File

@ -0,0 +1,13 @@
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
index 6bfbee044..51d86c4e6 100644
--- a/src/libstore/s3-binary-cache-store.cc
+++ b/src/libstore/s3-binary-cache-store.cc
@@ -209,7 +209,7 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual
S3Helper s3Helper;
S3BinaryCacheStoreImpl(
- const std::string & scheme,
+ const std::string & uriScheme,
const std::string & bucketName,
const Params & params)
: StoreConfig(params)

View File

@ -0,0 +1,5 @@
#!/usr/bin/bash
export MC_HOST_minio=http://$(cat /minio-gitlab-runner-cache/accesskey):$(cat /minio-gitlab-runner-cache/secretkey)@minio-gitlab-runner-cache
mc mb --ignore-existing minio/nix-cache

View File

@ -0,0 +1,5 @@
sandbox = true
experimental-features = nix-command flakes ca-references
substituters = https://hydra.iohk.io https://cache.nixos.org/
trusted-public-keys = hydra.iohk.io:f/Ea+s+dFdN+3Y/G+FDgSq+a5NEWhJGzdjvKNGv0/EQ= cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=
require-sigs = true

6
.gitlab-ci/nix.conf Normal file
View File

@ -0,0 +1,6 @@
sandbox = true
experimental-features = nix-command flakes ca-references
substituters = https://hydra.iohk.io https://cache.nixos.org/ s3://nix-cache?scheme=http&endpoint=minio-gitlab-runner-cache
trusted-public-keys = hydra.iohk.io:f/Ea+s+dFdN+3Y/G+FDgSq+a5NEWhJGzdjvKNGv0/EQ= cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= minio-gitlab-runner-cache:ZN5neq93MHqpnieHSBlxGgWvCL0WKrZ2S9QS+U5Bnro=
require-sigs = true
post-build-hook = /etc/nix/upload-to-cache.sh

View File

@ -0,0 +1,4 @@
{ pkgs ? import <nixpkgs> {} }:
pkgs.nixUnstable.overrideAttrs (oldAttrs: {
patches = oldAttrs.patches or [] ++ [ ./fix-aws-scheme.patch ];
})

8
.gitlab-ci/upload-to-cache.sh Executable file
View File

@ -0,0 +1,8 @@
#!/usr/bin/bash
set -eu
set -f # disable globbing
export IFS=' '
echo "Signing and uploading paths" $OUT_PATHS
exec nix copy --to "s3://nix-cache?region=us-east-1&scheme=http&endpoint=minio-gitlab-runner-cache&secret-key=${NIX_CACHE_KEYFILE}" $OUT_PATHS

View File

@ -0,0 +1,8 @@
#!/usr/bin/bash
mkdir -p $(dirname ${AWS_SHARED_CREDENTIALS_FILE})
cat > ${AWS_SHARED_CREDENTIALS_FILE} <<EOF
[default]
aws_access_key_id = $(cat /minio-gitlab-runner-cache/accesskey)
aws_secret_access_key = $(cat /minio-gitlab-runner-cache/secretkey)
EOF

View File

@ -18,6 +18,10 @@ module.exports = {
{
filename: 'package.yaml',
updater: standardVersionUpdaterYaml
},
{
filename: 'nix/docker/version.json',
type: 'json'
}
],
commitUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/{{hash}}',

View File

@ -2,6 +2,30 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
## [25.21.12](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v25.21.11...v25.21.12) (2021-09-23)
## [25.21.11](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v25.21.10...v25.21.11) (2021-09-23)
## [25.21.10](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v25.21.9...v25.21.10) (2021-09-23)
## [25.21.9](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v25.21.8...v25.21.9) (2021-09-23)
## [25.21.8](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v25.21.7...v25.21.8) (2021-09-22)
## [25.21.7](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v25.21.6...v25.21.7) (2021-09-22)
## [25.21.6](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v25.21.5...v25.21.6) (2021-09-21)
## [25.21.5](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v25.21.4...v25.21.5) (2021-09-21)
## [25.21.4](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v25.21.3...v25.21.4) (2021-09-21)
## [25.21.3](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v25.21.2...v25.21.3) (2021-09-20)
## [25.21.2](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v25.21.1...v25.21.2) (2021-09-20)
## [25.21.1](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v25.21.0...v25.21.1) (2021-09-20)
## [25.21.0](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v25.20.2...v25.21.0) (2021-08-20)

View File

@ -1,5 +1,206 @@
{
"nodes": {
"HTTP": {
"flake": false,
"locked": {
"lastModified": 1451647621,
"narHash": "sha256-oHIyw3x0iKBexEo49YeUDV1k74ZtyYKGR2gNJXXRxts=",
"owner": "phadej",
"repo": "HTTP",
"rev": "9bc0996d412fef1787449d841277ef663ad9a915",
"type": "github"
},
"original": {
"owner": "phadej",
"repo": "HTTP",
"type": "github"
}
},
"HaskellNet-SSL": {
"flake": false,
"locked": {
"lastModified": 1582382981,
"narHash": "sha256-fjw+bKaGrGw9uTUd6fWZCIz24uuSTA1VIeijZ+zSq/M=",
"ref": "uni2work",
"rev": "40393c938111ac78232dc2c7eec5edb4a22d03e8",
"revCount": 62,
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/HaskellNet-SSL.git"
},
"original": {
"ref": "uni2work",
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/HaskellNet-SSL.git"
}
},
"cabal-32": {
"flake": false,
"locked": {
"lastModified": 1603716527,
"narHash": "sha256-sDbrmur9Zfp4mPKohCD8IDZfXJ0Tjxpmr2R+kg5PpSY=",
"owner": "haskell",
"repo": "cabal",
"rev": "94aaa8e4720081f9c75497e2735b90f6a819b08e",
"type": "github"
},
"original": {
"owner": "haskell",
"ref": "3.2",
"repo": "cabal",
"type": "github"
}
},
"cabal-34": {
"flake": false,
"locked": {
"lastModified": 1622475795,
"narHash": "sha256-chwTL304Cav+7p38d9mcb+egABWmxo2Aq+xgVBgEb/U=",
"owner": "haskell",
"repo": "cabal",
"rev": "b086c1995cdd616fc8d91f46a21e905cc50a1049",
"type": "github"
},
"original": {
"owner": "haskell",
"ref": "3.4",
"repo": "cabal",
"type": "github"
}
},
"cardano-shell": {
"flake": false,
"locked": {
"lastModified": 1608537748,
"narHash": "sha256-PulY1GfiMgKVnBci3ex4ptk2UNYMXqGjJOxcPy2KYT4=",
"owner": "input-output-hk",
"repo": "cardano-shell",
"rev": "9392c75087cb9a3d453998f4230930dea3a95725",
"type": "github"
},
"original": {
"owner": "input-output-hk",
"repo": "cardano-shell",
"type": "github"
}
},
"colonnade": {
"flake": false,
"locked": {
"lastModified": 1592144408,
"narHash": "sha256-bQSOjbn56ZEjzqkC/oqZ331FdxF7tbhJndtXeRqWcq8=",
"ref": "uni2work",
"rev": "f8170266ab25b533576e96715bedffc5aa4f19fa",
"revCount": 153,
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/colonnade.git"
},
"original": {
"ref": "uni2work",
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/colonnade.git"
}
},
"conduit-resumablesink": {
"flake": false,
"locked": {
"lastModified": 1533667275,
"narHash": "sha256-+TR0tYRk1WFQRrFs6oO80jdlfY7OTyB7th7Hi/tDQMw=",
"ref": "uni2work",
"rev": "cbea6159c2975d42f948525e03e12fc390da53c5",
"revCount": 10,
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/conduit-resumablesink.git"
},
"original": {
"ref": "uni2work",
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/conduit-resumablesink.git"
}
},
"cryptoids": {
"flake": false,
"locked": {
"lastModified": 1602592869,
"narHash": "sha256-uhbIaVFXtdWmeDJl0ZDyJnE2Ul3bV324KkaK+ix6USA=",
"ref": "uni2work",
"rev": "130b0dcbf2b09ccdf387b50262f1efbbbf1819e3",
"revCount": 44,
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/cryptoids.git"
},
"original": {
"ref": "uni2work",
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/cryptoids.git"
}
},
"cryptonite": {
"flake": false,
"locked": {
"lastModified": 1624444174,
"narHash": "sha256-sDMA4ej1NIModAt7PQvcgIknI3KwfzcAp9YQUSe4CWw=",
"ref": "uni2work",
"rev": "71a630edaf5f22c464e24fac8d9d310f4055ea1f",
"revCount": 1202,
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/cryptonite.git"
},
"original": {
"ref": "uni2work",
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/cryptonite.git"
}
},
"docker-nixpkgs": {
"flake": false,
"locked": {
"lastModified": 1631525111,
"narHash": "sha256-dJKw280B1Hp0f6tSkBtkY0nrMT0DR/bmerrS3cQdBjI=",
"owner": "nix-community",
"repo": "docker-nixpkgs",
"rev": "df7f64139a0b2384fb2448dc2a406815f8d03867",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "docker-nixpkgs",
"type": "github"
}
},
"encoding": {
"flake": false,
"locked": {
"lastModified": 1582400874,
"narHash": "sha256-ukQw4tvjtvqR8HEPgPTSmvtTc6WeXEs3IfCLLNy+YJc=",
"ref": "uni2work",
"rev": "22fc3bb14841d8d50997aa47f1be3852e666f787",
"revCount": 162,
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/encoding.git"
},
"original": {
"ref": "uni2work",
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/encoding.git"
}
},
"esqueleto": {
"flake": false,
"locked": {
"lastModified": 1625584503,
"narHash": "sha256-kgNcNKODv4xkmS6+MKDAVDpbL/IQ5Imucst0uV4HzYU=",
"ref": "uni2work",
"rev": "b9987d94af9d7403eded8ca75ad761eb7fc06e4c",
"revCount": 697,
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/esqueleto.git"
},
"original": {
"ref": "uni2work",
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/esqueleto.git"
}
},
"flake-utils": {
"locked": {
"lastModified": 1623875721,
@ -11,31 +212,397 @@
},
"original": {
"owner": "numtide",
"ref": "master",
"repo": "flake-utils",
"type": "github"
}
},
"fontawesome-token": {
"flake": false,
"locked": {
"narHash": "sha256-XABvCxD/7zXRyyR01dr5IvK+8A0VoibiVUS5ZAU+f+o=",
"path": "/etc/fontawesome-token",
"type": "path"
},
"original": {
"path": "/etc/fontawesome-token",
"type": "path"
}
},
"ghc-8.6.5-iohk": {
"flake": false,
"locked": {
"lastModified": 1600920045,
"narHash": "sha256-DO6kxJz248djebZLpSzTGD6s8WRpNI9BTwUeOf5RwY8=",
"owner": "input-output-hk",
"repo": "ghc",
"rev": "95713a6ecce4551240da7c96b6176f980af75cae",
"type": "github"
},
"original": {
"owner": "input-output-hk",
"ref": "release/8.6.5-iohk",
"repo": "ghc",
"type": "github"
}
},
"hackage": {
"flake": false,
"locked": {
"lastModified": 1629940355,
"narHash": "sha256-o9/U8R/JtyHIcxhMLaWYP+D/52B6LH/ikCyNZ7+mymI=",
"owner": "input-output-hk",
"repo": "hackage.nix",
"rev": "429deb2a137084c011310bad92f4cecf244f2fc2",
"type": "github"
},
"original": {
"owner": "input-output-hk",
"repo": "hackage.nix",
"type": "github"
}
},
"haskell-nix": {
"inputs": {
"HTTP": "HTTP",
"cabal-32": "cabal-32",
"cabal-34": "cabal-34",
"cardano-shell": "cardano-shell",
"flake-utils": "flake-utils",
"ghc-8.6.5-iohk": "ghc-8.6.5-iohk",
"hackage": "hackage",
"hpc-coveralls": "hpc-coveralls",
"nix-tools": "nix-tools",
"nixpkgs": "nixpkgs",
"nixpkgs-2003": "nixpkgs-2003",
"nixpkgs-2009": "nixpkgs-2009",
"nixpkgs-2105": "nixpkgs-2105",
"nixpkgs-unstable": "nixpkgs-unstable",
"old-ghc-nix": "old-ghc-nix",
"stackage": "stackage"
},
"locked": {
"lastModified": 1629940530,
"narHash": "sha256-cf/bshYWloj+rJSmOcFokN6Rs1lpFkiECKpykN3JBH8=",
"owner": "input-output-hk",
"repo": "haskell.nix",
"rev": "5fcd4faf98fc8ca8287e2c7bc1fff71dfd340f1f",
"type": "github"
},
"original": {
"owner": "input-output-hk",
"repo": "haskell.nix",
"type": "github"
}
},
"hpc-coveralls": {
"flake": false,
"locked": {
"lastModified": 1607498076,
"narHash": "sha256-8uqsEtivphgZWYeUo5RDUhp6bO9j2vaaProQxHBltQk=",
"owner": "sevanspowell",
"repo": "hpc-coveralls",
"rev": "14df0f7d229f4cd2e79f8eabb1a740097fdfa430",
"type": "github"
},
"original": {
"owner": "sevanspowell",
"repo": "hpc-coveralls",
"type": "github"
}
},
"ldap-client": {
"flake": false,
"locked": {
"lastModified": 1582401733,
"narHash": "sha256-StLj8BnQqbl8fnE+xWlSOScVRGnl19cv8d1CBZ29O0k=",
"ref": "uni2work",
"rev": "01afaf599ba6f8a9d804c269e91d3190b249d3f0",
"revCount": 61,
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/ldap-client.git"
},
"original": {
"ref": "uni2work",
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/ldap-client.git"
}
},
"memcached-binary": {
"flake": false,
"locked": {
"lastModified": 1582403725,
"narHash": "sha256-40BNhNNYC/B4u16dKwBrkk5hmhncq805YC4y1aGFRqs=",
"ref": "uni2work",
"rev": "b7071df50bad3a251a544b984e4bf98fa09b8fae",
"revCount": 28,
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/memcached-binary.git"
},
"original": {
"ref": "uni2work",
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/memcached-binary.git"
}
},
"minio-hs": {
"flake": false,
"locked": {
"lastModified": 1597069863,
"narHash": "sha256-JmMajaLT4+zt+w2koDkaloFL8ugmrQBlcYKj+78qn9M=",
"ref": "uni2work",
"rev": "42103ab247057c04c8ce7a83d9d4c160713a3df1",
"revCount": 197,
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/minio-hs.git"
},
"original": {
"ref": "uni2work",
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/minio-hs.git"
}
},
"nix-tools": {
"flake": false,
"locked": {
"lastModified": 1626997434,
"narHash": "sha256-1judQmP298ao6cGUNxcGhcAXHOnA9qSLvWk/ZtoUL7w=",
"owner": "input-output-hk",
"repo": "nix-tools",
"rev": "c8c5e6a6fbb12a73598d1a434984a36e880ce3cf",
"type": "github"
},
"original": {
"owner": "input-output-hk",
"repo": "nix-tools",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1624788075,
"narHash": "sha256-xzO2aL5gGejNvey2jKGnbnFXbo99pdytlY5FF/IhvAE=",
"lastModified": 1624291665,
"narHash": "sha256-kNkaoa3dai9WOi7fsPklCCWZ8hRAkXx0ZUhpYKShyUk=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "4ba70da807359ed01d662763a96c7b442762e5ef",
"rev": "3c6f3f84af60a8ed5b8a79cf3026b7630fcdefb8",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "master",
"ref": "nixpkgs-20.09-darwin",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-2003": {
"locked": {
"lastModified": 1620055814,
"narHash": "sha256-8LEHoYSJiL901bTMVatq+rf8y7QtWuZhwwpKE2fyaRY=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "1db42b7fe3878f3f5f7a4f2dc210772fd080e205",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-20.03-darwin",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-2009": {
"locked": {
"lastModified": 1624271064,
"narHash": "sha256-qns/uRW7MR2EfVf6VEeLgCsCp7pIOjDeR44JzTF09MA=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "46d1c3f28ca991601a53e9a14fdd53fcd3dd8416",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-20.09-darwin",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-2105": {
"locked": {
"lastModified": 1624291665,
"narHash": "sha256-kNkaoa3dai9WOi7fsPklCCWZ8hRAkXx0ZUhpYKShyUk=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "3c6f3f84af60a8ed5b8a79cf3026b7630fcdefb8",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-21.05-darwin",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-unstable": {
"locked": {
"lastModified": 1628785280,
"narHash": "sha256-2B5eMrEr6O8ff2aQNeVxTB+9WrGE80OB4+oM6T7fOcc=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "6525bbc06a39f26750ad8ee0d40000ddfdc24acb",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"old-ghc-nix": {
"flake": false,
"locked": {
"lastModified": 1621819714,
"narHash": "sha256-EJCnYQSWk7FRLwS0lZgTWIiQ6pcvDX1VuD6LGD4Uwzs=",
"owner": "angerman",
"repo": "old-ghc-nix",
"rev": "f089a6f090cdb35fcf95f865fc6a31ba6b3ac4eb",
"type": "github"
},
"original": {
"owner": "angerman",
"ref": "master2",
"repo": "old-ghc-nix",
"type": "github"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
"HaskellNet-SSL": "HaskellNet-SSL",
"colonnade": "colonnade",
"conduit-resumablesink": "conduit-resumablesink",
"cryptoids": "cryptoids",
"cryptonite": "cryptonite",
"docker-nixpkgs": "docker-nixpkgs",
"encoding": "encoding",
"esqueleto": "esqueleto",
"flake-utils": [
"haskell-nix",
"flake-utils"
],
"fontawesome-token": "fontawesome-token",
"haskell-nix": "haskell-nix",
"ldap-client": "ldap-client",
"memcached-binary": "memcached-binary",
"minio-hs": "minio-hs",
"nixpkgs": [
"haskell-nix",
"nixpkgs-unstable"
],
"serversession": "serversession",
"tail-DateTime": "tail-DateTime",
"xss-sanitize": "xss-sanitize",
"yesod": "yesod",
"zip-stream": "zip-stream"
}
},
"serversession": {
"flake": false,
"locked": {
"lastModified": 1624794082,
"narHash": "sha256-mixhAsgOq0iiAhOcZfFSyl8CwS7utaKeZBz/Lix16hk=",
"ref": "uni2work",
"rev": "b9d76def10da1260c7f6aa82bda32111f37a952b",
"revCount": 174,
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/serversession.git"
},
"original": {
"ref": "uni2work",
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/serversession.git"
}
},
"stackage": {
"flake": false,
"locked": {
"lastModified": 1629940472,
"narHash": "sha256-LFrNC25OpNoS6dGG5om+UGP8YdYjp01Qm6cenbemaVg=",
"owner": "input-output-hk",
"repo": "stackage.nix",
"rev": "c50dd0527babf85818cc67a7099e532e453752db",
"type": "github"
},
"original": {
"owner": "input-output-hk",
"repo": "stackage.nix",
"type": "github"
}
},
"tail-DateTime": {
"flake": false,
"locked": {
"lastModified": 1576249828,
"narHash": "sha256-90okKfi5Nf5xw+IFsPXzdykdboLMbOYHmUVyNZS65Go=",
"ref": "uni2work",
"rev": "ef1237569ff22667acb64a9dfd64682ee55817eb",
"revCount": 132,
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/tail.DateTime.git"
},
"original": {
"ref": "uni2work",
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/tail.DateTime.git"
}
},
"xss-sanitize": {
"flake": false,
"locked": {
"lastModified": 1602593438,
"narHash": "sha256-2jAHm79x8f+ygGHq9gipF3h8+eMT7eCxVMOGMc/brKE=",
"ref": "uni2work",
"rev": "dc928c3a456074b8777603bea20e81937321777f",
"revCount": 114,
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/xss-sanitize.git"
},
"original": {
"ref": "uni2work",
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/xss-sanitize.git"
}
},
"yesod": {
"flake": false,
"locked": {
"lastModified": 1625061191,
"narHash": "sha256-K0X2MwUStChml1DlJ7t4yBMDwrMe6j/780nJtSy9Hss=",
"ref": "uni2work",
"rev": "a59f63e0336ee61f7a90b8778e9147305d3127bb",
"revCount": 5053,
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/yesod.git"
},
"original": {
"ref": "uni2work",
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/yesod.git"
}
},
"zip-stream": {
"flake": false,
"locked": {
"lastModified": 1599734754,
"narHash": "sha256-gqIlgj2dxVkqEMafuVL+dqZPoV+WLpOJwz1NhsKnH64=",
"ref": "uni2work",
"rev": "843683d024f767de236f74d24a3348f69181a720",
"revCount": 39,
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/zip-stream.git"
},
"original": {
"ref": "uni2work",
"type": "git",
"url": "ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/zip-stream.git"
}
}
},

153
flake.nix
View File

@ -1,32 +1,145 @@
{
inputs = {
nixpkgs = {
type = "github";
owner = "NixOS";
repo = "nixpkgs";
ref = "master";
};
flake-utils = {
type = "github";
owner = "numtide";
repo = "flake-utils";
ref = "master";
};
inputs.haskell-nix.url = "github:input-output-hk/haskell.nix";
inputs.nixpkgs.follows = "haskell-nix/nixpkgs-unstable";
inputs.flake-utils.follows = "haskell-nix/flake-utils";
inputs.docker-nixpkgs = {
url = "github:nix-community/docker-nixpkgs";
flake = false;
};
outputs = { self, nixpkgs, flake-utils, ... }: flake-utils.lib.eachDefaultSystem
inputs.encoding = {
url = "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/encoding.git?ref=uni2work";
flake = false;
};
inputs.memcached-binary = {
url = "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/memcached-binary.git?ref=uni2work";
flake = false;
};
inputs.conduit-resumablesink = {
url = "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/conduit-resumablesink.git?ref=uni2work";
flake = false;
};
inputs.HaskellNet-SSL = {
url = "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/HaskellNet-SSL.git?ref=uni2work";
flake = false;
};
inputs.ldap-client = {
url = "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/ldap-client.git?ref=uni2work";
flake = false;
};
inputs.serversession = {
url = "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/serversession.git?ref=uni2work";
flake = false;
};
inputs.xss-sanitize = {
url = "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/xss-sanitize.git?ref=uni2work";
flake = false;
};
inputs.colonnade = {
url = "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/colonnade.git?ref=uni2work";
flake = false;
};
inputs.minio-hs = {
url = "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/minio-hs.git?ref=uni2work";
flake = false;
};
inputs.cryptoids = {
url = "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/cryptoids.git?ref=uni2work";
flake = false;
};
inputs.zip-stream = {
url = "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/zip-stream.git?ref=uni2work";
flake = false;
};
inputs.yesod = {
url = "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/yesod.git?ref=uni2work";
flake = false;
};
inputs.cryptonite = {
url = "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/cryptonite.git?ref=uni2work";
flake = false;
};
inputs.esqueleto = {
url = "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/esqueleto.git?ref=uni2work";
flake = false;
};
inputs.tail-DateTime = {
url = "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/tail.DateTime.git?ref=uni2work";
flake = false;
};
inputs.fontawesome-token = {
url = "path:/etc/fontawesome-token";
flake = false;
};
outputs = inputs@{ self, nixpkgs, flake-utils, haskell-nix, docker-nixpkgs, ... }: flake-utils.lib.eachSystem ["x86_64-linux"]
(system:
let pkgs = import nixpkgs {
inherit system;
let frontendSource = pkgs.lib.sourceByRegex ./. [
"^(assets|frontend)(/.*)?$"
"^config(/(favicon\.json|robots\.txt))?$"
"^(webpack|postcss)\.config\.js$"
"^karma\.conf\.js$"
"^(package|jsconfig|\.eslintrc)\.json$"
"^\.babelrc$"
];
backendSource = pkgs.lib.sourceByRegex ./. [
"^(\.hlint|package|stack-flake)\.yaml$"
"^stack\.yaml\.lock$"
"^(assets|app|hlint|load|messages|models|src|templates|test|testdata|wflint)(/.*)?$"
"^config(/(archive-types|mimetypes|personalised-sheet-files-collate|settings\.yml|submission-blacklist|test-settings\.yml|video-types|wordlist\.txt))?$"
"^routes$"
"^testdata(/.*)?$"
];
pkgs = import nixpkgs {
inherit system overlays;
config.allowUnfree = true;
};
overlay = import ./nix/maildev;
overlays = [
# remove once https://github.com/NixOS/nix/pull/5266 has landed in nixpkgs used here (see flake.lock)
(import ./nix/aws-patch.nix)
# end remove
inherit (pkgs.lib) recursiveUpdate;
(import "${docker-nixpkgs}/overlay.nix")
(import ./nix/maildev)
haskell-nix.overlay
(import ./nix/uniworx { inherit inputs frontendSource backendSource; })
(import ./nix/docker { inherit self; })
(import ./nix/parse-changelog.nix {})
];
haskellFlake = pkgs.uniworx.flake {};
mkPushDocker = imageName: dockerImage: pkgs.writeScriptBin "push-${dockerImage.imageName}" ''
#!${pkgs.zsh}/bin/zsh -xe
target=''${1-docker://registry.gitlab.com/fradrive/fradrive/${imageName}:${dockerImage.imageTag}}
[[ -n "''${1}" ]] && shift
${pkgs.skopeo}/bin/skopeo ''${@} --insecure-policy copy docker-archive://${dockerImage} ''${target}
'';
in {
devShell = import ./shell.nix { pkgs = self.legacyPackages.${system}; };
packages = haskellFlake.packages // {
inherit (pkgs) uniworxNodeDependencies uniworxWellKnown uniworxFrontend uniworxDemoDocker uniworxDocker ciDocker changelogJson;
};
apps = haskellFlake.apps // {
pushUniworxDemoDocker = flake-utils.lib.mkApp { drv = mkPushDocker "uniworx-demo" pkgs.uniworxDemoDocker; };
pushUniworxDocker = flake-utils.lib.mkApp { drv = mkPushDocker "uniworx" pkgs.uniworxDocker; };
pushCIDocker = flake-utils.lib.mkApp { drv = mkPushDocker "nix-unstable" pkgs.ciDocker; };
calculateMaterializedSha = flake-utils.lib.mkApp { drv = pkgs.uniworx.stack-nix.passthru.calculateMaterializedSha; exePath = ""; };
jqChangelogJson = flake-utils.lib.mkApp { drv = pkgs.jqChangelogJson; };
};
checks = haskellFlake.checks // {
uniworxFrontend = pkgs.uniworxFrontend.check;
};
devShell = import ./shell.nix { inherit pkgs; };
legacyPackages = recursiveUpdate (overlay self.legacyPackages.${system} pkgs) pkgs;
legacyPackages = pkgs;
defaultPackage = self.packages.${system}."uniworx:exe:uniworx";
defaultApp = self.apps.${system}."uniworx:exe:uniworx";
}
);
}

View File

@ -15,4 +15,4 @@ if [[ -d .stack-work-doc ]]; then
trap move-back EXIT
fi
stack hoogle -- ${@:-server --local --port $((${PORT_OFFSET:-0} + 8081))}
stack hoogle -- ${@:-server --local --port $((${PORT_OFFSET:-$(((16#$(echo \"fradrive $(whoami)\" | sha256sum | head -c 16)) % 1000))} + 8081))}

11
nix/aws-patch.nix Normal file
View File

@ -0,0 +1,11 @@
final: prev:
let
awsPatch = prev.fetchurl {
url = "https://github.com/gkleen/nix/commit/fd67a0f927ec0711eba59714939ff939fc95db38.diff";
hash = "sha256-1dJ9zGQvYu5b47O2NjdggSSinlGQDcqBwXoZcKUGfYQ=";
};
in {
nixUnstable = prev.nixUnstable.overrideAttrs (oldAttrs: {
patches = oldAttrs.patches or [] ++ [ awsPatch ];
});
}

213
nix/develop.nix Normal file
View File

@ -0,0 +1,213 @@
{ pkgs
, prev ? pkgs
, doPortOffset ? true
, doDevelopEnv ? true
}:
with prev.lib;
let
withDevelop = action: ''
#!${pkgs.zsh}/bin/zsh -e
${optionalString doDevelopEnv ''
basePath=$(pwd)
exec 4<>''${basePath}/.develop.env
flockRes=
set +e
${pkgs.util-linux}/bin/flock -en 4; flockRes=$?
set -e
if [[ ''${flockRes} -ne 0 ]]; then
echo "Could not take exclusive lock; is another develop running?" >&2
exit ''${flockRes}
fi
''}
cleanup() {
set +e -x
type cleanup_postgres &>/dev/null && cleanup_postgres
type cleanup_widget_memcached &>/dev/null && cleanup_widget_memcached
type cleanup_session_memcached &>/dev/null && cleanup_session_memcached
type cleanup_cache_memcached &>/dev/null && cleanup_cache_memcached
type cleanup_minio &>/dev/null && cleanup_minio
type cleanup_maildev &>/dev/null && cleanup_maildev
${optionalString doDevelopEnv ''
[ -f "''${basePath}/.develop.env" ] && rm -vf "''${basePath}/.develop.env"
''}
set +x
}
trap cleanup EXIT
export PORT_OFFSET=${if doPortOffset then "$(((16#$(echo \"fradrive $(whoami)\" | sha256sum | head -c 16)) % 1000))" else "0"}
if [[ -z "$PGHOST" ]]; then
set -xe
pgDir=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} postgresql.XXXXXX)
pgSockDir=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} postgresql.sock.XXXXXX)
pgLogFile=$(mktemp --tmpdir=''${XDG_RUNTIME_DIR} postgresql.XXXXXX.log)
initdb --no-locale -D ''${pgDir}
pg_ctl start -D ''${pgDir} -l ''${pgLogFile} -w -o "-k ''${pgSockDir} -c listen_addresses=''' -c hba_file='${postgresHba}' -c unix_socket_permissions=0700 -c max_connections=9990 -c shared_preload_libraries=pg_stat_statements -c auto_explain.log_min_duration=100ms"
psql -h ''${pgSockDir} -f ${postgresSchema} postgres
printf "Postgres logfile is %s\nPostgres socket directory is %s\n" ''${pgLogFile} ''${pgSockDir}
export PGHOST=''${pgSockDir}
export PGLOG=''${pgLogFile}
cleanup_postgres() {
set +e -x
pg_ctl stop -D ''${pgDir}
rm -rvf ''${pgDir} ''${pgSockDir} ''${pgLogFile}
set +x
}
set +xe
fi
if [[ -z "$WIDGET_MEMCACHED_HOST" ]]; then
set -xe
memcached -l localhost -p $(($PORT_OFFSET + 11211)) &>/dev/null &
widget_memcached_pid=$!
export WIDGET_MEMCACHED_HOST=localhost
export WIDGET_MEMCACHED_PORT=$(($PORT_OFFSET + 11211))
cleanup_widget_memcached() {
[[ -n "$widget_memcached_pid" ]] && kill $widget_memcached_pid
}
set +xe
fi
if [[ -z "$SESSION_MEMCACHED_HOST" ]]; then
set -xe
memcached -l localhost -p $(($PORT_OFFSET + 11212)) &>/dev/null &
session_memcached_pid=$!
export SESSION_MEMCACHED_HOST=localhost
export SESSION_MEMCACHED_PORT=$(($PORT_OFFSET + 11212))
cleanup_session_memcached() {
[[ -n "$session_memcached_pid" ]] && kill $session_memcached_pid
}
set +xe
fi
if [[ -z "$MEMCACHED_HOST" ]]; then
set -xe
memcached -l localhost -p $(($PORT_OFFSET + 11213)) &>/dev/null &
memcached_pid=$!
export MEMCACHED_HOST=localhost
export MEMCACHED_PORT=$(($PORT_OFFSET + 11212))
cleanup_session_memcached() {
[[ -n "$memcached_pid" ]] && kill $memcached_pid
}
set +xe
fi
if [[ -z "$UPLOAD_S3_HOST" ]]; then
set -xe
cleanup_minio() {
[[ -n "$minio_pid" ]] && kill $minio_pid
[[ -n "''${MINIO_DIR}" ]] && rm -rvf ''${MINIO_DIR}
[[ -n "''${MINIO_LOGFILE}" ]] && rm -rvf ''${MINIO_LOGFILE}
}
export MINIO_DIR=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} minio.XXXXXX)
export MINIO_LOGFILE=$(mktemp --tmpdir=''${XDG_RUNTIME_DIR} minio.XXXXXX.log)
export MINIO_ACCESS_KEY=$(${pkgs.pwgen}/bin/pwgen -s 16 1)
export MINIO_SECRET_KEY=$(${pkgs.pwgen}/bin/pwgen -s 32 1)
minio server --address localhost:$(($PORT_OFFSET + 9000)) ''${MINIO_DIR} &>''${MINIO_LOGFILE} &
minio_pid=$!
export UPLOAD_S3_HOST=localhost
export UPLOAD_S3_PORT=$(($PORT_OFFSET + 9000))
export UPLOAD_S3_SSL=false
export UPLOAD_S3_KEY_ID=''${MINIO_ACCESS_KEY}
export UPLOAD_S3_KEY=''${MINIO_SECRET_KEY}
sleep 1
set +xe
fi
${optionalString (pkgs.nodePackages ? "maildev") ''
if [[ -z "$SMTPHOST" ]]; then
set -xe
cleanup_maildev() {
[[ -n "$maildev_pid" ]] && kill $maildev_pid
}
TMPDIR=''${XDG_RUNTIME_DIR} ${pkgs.nodePackages.maildev}/bin/maildev --smtp $(($PORT_OFFSET + 1025)) --web $(($PORT_OFFSET + 8080)) --ip localhost --web-ip localhost &>/dev/null &
maildev_pid=$!
export SMTPHOST=localhost
export SMTPPORT=$(($PORT_OFFSET + 1025))
export SMTPSSL=none
set +xe
fi
''}
${optionalString doDevelopEnv ''
  set -xe
  # Persist the effective development environment to fd 4 (.develop.env,
  # opened and flock'ed at startup) so external tooling can source the
  # same connection parameters.
  # BUG FIX: MEMCACHED_PORT is 11213 — the port the cache memcached is
  # actually started on — not 11212, which is the session memcached.
  cat >&4 <<EOF
PORT_OFFSET=''${PORT_OFFSET}
PGHOST=''${pgSockDir}
PGLOG=''${pgLogFile}
WIDGET_MEMCACHED_HOST=localhost
WIDGET_MEMCACHED_PORT=$(($PORT_OFFSET + 11211))
SESSION_MEMCACHED_HOST=localhost
SESSION_MEMCACHED_PORT=$(($PORT_OFFSET + 11212))
MEMCACHED_HOST=localhost
MEMCACHED_PORT=$(($PORT_OFFSET + 11213))
MINIO_DIR=''${MINIO_DIR}
MINIO_LOGFILE=''${MINIO_LOGFILE}
UPLOAD_S3_HOST=localhost
UPLOAD_S3_PORT=$(($PORT_OFFSET + 9000))
UPLOAD_S3_SSL=false
UPLOAD_S3_KEY_ID=''${MINIO_ACCESS_KEY}
UPLOAD_S3_KEY=''${MINIO_SECRET_KEY}
SMTPHOST=''${SMTPHOST}
SMTPPORT=''${SMTPPORT}
SMTPSSL=''${SMTPSSL}
EOF
  set +xe
''}
${action}
'';
postgresSchema = prev.writeText "schema.sql" ''
CREATE USER uniworx WITH SUPERUSER;
CREATE DATABASE uniworx_test;
GRANT ALL ON DATABASE uniworx_test TO uniworx;
CREATE DATABASE uniworx;
GRANT ALL ON DATABASE uniworx TO uniworx;
'';
postgresHba = prev.writeText "hba_file" ''
local all all trust
'';
in withDevelop

View File

@ -0,0 +1,3 @@
{
"version": "0.1.0"
}

147
nix/docker/default.nix Normal file
View File

@ -0,0 +1,147 @@
{ self }: final: prev:
with prev.lib;
let
created =
let
fromDate = builtins.readFile (prev.runCommand "date" { nativeBuildInputs = with final; [ coreutils ]; } ''
printf '%s' $(date -Is -d '@${toString self.lastModified}') > $out
'');
in if self ? lastModified then fromDate else "1970-01-01T00:00:01Z";
mkUniworxDocker = { isDemo }: prev.dockerTools.buildImage {
name = "uniworx${optionalString isDemo "-demo"}";
tag =
let
versionFile = if isDemo then ./demo-version.json else ./version.json;
in (builtins.fromJSON (prev.lib.readFile versionFile)).version;
inherit created;
contents = with final; [
uniworx.uniworx.components.exes.uniworx
prev.dockerTools.binSh findutils coreutils
] ++ optionals isDemo [ postgresql_12 memcached uniworx.uniworx.components.exes.uniworxdb ];
runAsRoot = ''
#!${final.stdenv.shell}
${prev.dockerTools.shadowSetup}
mkdir -p /var/lib
groupadd -r uniworx
useradd -r -g uniworx -d /var/lib/uniworx -M uniworx
install -d -g uniworx -o uniworx -m 0750 /var/lib/uniworx
mkdir -p /var/log
install -d -g uniworx -o uniworx -m 0755 /var/log/uniworx
${optionalString isDemo ''
install -d -g uniworx -o uniworx -m 0750 /var/lib/postgres
install -d -g uniworx -o uniworx -m 0750 /var/lib/memcached
install -d -g uniworx -o uniworx -m 0755 /var/log/postgres
install -d -g uniworx -o uniworx -m 0755 /var/log/memcached
mkdir -p /run
install -d -g uniworx -o uniworx -m 0755 /run/postgres
''}
'';
config =
let
entrypoint = prev.writeScriptBin "uniworx-entrypoint" ''
#!${final.zsh}/bin/zsh -xe
cTime=$(date -Is)
${optionalString isDemo ''
pgDir=/var/lib/postgres
pgSockDir=/run/postgres
pgLogFile=/var/log/postgres/''${cTime}.log
export PGHOST=''${pgSockDir}
export PGLOG=''${pgLogFile}
pgNew=
if [[ -n "$(find ''${pgDir} -maxdepth 0 -type d -empty 2>/dev/null)" ]]; then
pgNew=1
fi
[[ -z "''${pgNew}" ]] || initdb --no-locale --encoding=UTF8 --username postgres --pgdata ''${pgDir}
pg_ctl start -D ''${pgDir} -l ''${pgLogFile} -w -o "-k ''${pgSockDir} -c listen_addresses= -c hba_file=${postgresHba} -c unix_socket_permissions=0777 -c max_connections=9990 -c shared_preload_libraries=pg_stat_statements -c auto_explain.log_min_duration=100ms"
[[ -z "''${pgNew}" ]] || psql -f ${postgresSchema} postgres postgres
( cd /var/lib/memcached; memcached -p 11212 ) &>/var/log/memcached/''${cTime}.log &
export SESSION_MEMCACHED_HOST=localhost
export SESSION_MEMCACHED_PORT=11212
''}
export LOGDEST=/var/log/uniworx/''${cTime}.log
typeset -a configs
configs=()
configDir=''${CONFIG_DIR-/cfg}
if [[ -d "''${configDir}" ]]; then
while IFS= read -d $'\0' cfg; do
configs+=("''${(q)cfg}")
done < <(find "''${configDir}" \( -name '*.yml' -o -name '*.yaml' \) -print0 | sort -rz)
fi
configs+=('${uniworxConfig}')
cd /var/lib/uniworx
${optionalString isDemo ''
[[ -z "''${pgNew}" ]] || uniworxdb -f ''${configs}
''}
exec -- uniworx ''${configs}
'';
postgresSchema = prev.writeText "schema.sql" ''
CREATE USER uniworx WITH SUPERUSER;
CREATE DATABASE uniworx;
GRANT ALL ON DATABASE uniworx TO uniworx;
'';
postgresHba = prev.writeText "hba_file" ''
local all all trust
'';
uniworxConfig = prev.writeText "uni2work.yml" ''
port: 8080
approot: "_env:APPROOT:http://localhost:8080"
'';
in {
Cmd = [ "${entrypoint}/bin/uniworx-entrypoint" ];
User = "uniworx:uniworx";
ExposedPorts = {
"8080/tcp" = {};
};
Volumes = {
"/var/lib/uniworx" = {};
"/var/log" = {};
} // optionalAttrs isDemo {
"/var/lib/postgres" = {};
};
};
};
in
{
ciDocker = prev.dockerTools.buildImageWithNixDb rec {
name = "uniworx-ci";
inherit created;
tag = (builtins.fromJSON (prev.lib.readFile ./ci-version.json)).version;
fromImage = prev.docker-nixpkgs.nix-unstable;
contents = with final; [
bash coreutils
minio-client
xz
];
runAsRoot = ''
#!${final.stdenv.shell}
${final.coreutils}/bin/install -v -m 0777 -d /var/tmp
'';
};
} // mapAttrs (_name: mkUniworxDocker) {
uniworxDemoDocker = { isDemo = true; };
uniworxDocker = { isDemo = false; };
}

View File

@ -0,0 +1,3 @@
{
"version": "0.3.0"
}

3
nix/docker/version.json Normal file
View File

@ -0,0 +1,3 @@
{
"version": "25.21.12"
}

17
nix/frontend/default.nix Normal file
View File

@ -0,0 +1,17 @@
# This file has been generated by node2nix 1.9.0. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-14_x"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile;
inherit pkgs nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
in
import ./node-packages.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit nodeEnv;
}

567
nix/frontend/node-env.nix Normal file
View File

@ -0,0 +1,567 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
includeDependencies = {dependencies}:
lib.optionalString (dependencies != [])
(lib.concatMapStrings (dependency:
''
# Bundle the dependencies of the package
mkdir -p node_modules
cd node_modules
# Only include dependencies if they don't exist. They may also be bundled in the package.
if [ ! -e "${dependency.name}" ]
then
${composePackage dependency}
fi
cd ..
''
) dependencies);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
DIR=$(pwd)
cd $TMPDIR
unpackFile ${src}
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/${packageName}")"
if [ -f "${src}" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/${packageName}"
elif [ -d "${src}" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash ${src})"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/${packageName}"
fi
# Unset the stripped name to not confuse the next unpack step
unset strippedName
# Include the dependencies of the package
cd "$DIR/${packageName}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
replaceDependencies(packageObj.optionalDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
#
# Walks package-lock.json (lock file versions 1 and 2 only) and copies each
# dependency's integrity / resolved / from metadata into the corresponding
# node_modules/<name>/package.json, recursing into transitive dependencies.
addIntegrityFieldsScript = writeTextFile {
  name = "addintegrityfields.js";
  text = ''
    var fs = require('fs');
    var path = require('path');
    function augmentDependencies(baseDir, dependencies) {
      for(var dependencyName in dependencies) {
        var dependency = dependencies[dependencyName];
        // Open package.json and augment metadata fields
        var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
        var packageJSONPath = path.join(packageJSONDir, "package.json");
        if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
          console.log("Adding metadata fields to: "+packageJSONPath);
          var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
          if(dependency.integrity) {
            packageObj["_integrity"] = dependency.integrity;
          } else {
            packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
          }
          if(dependency.resolved) {
            packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
          } else {
            packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
          }
          if(dependency.from !== undefined) { // Adopt from property if one has been provided
            packageObj["_from"] = dependency.from;
          }
          fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
        }
        // Augment transitive dependencies
        if(dependency.dependencies !== undefined) {
          augmentDependencies(packageJSONDir, dependency.dependencies);
        }
      }
    }
    if(fs.existsSync("./package-lock.json")) {
      var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
      if(![1, 2].includes(packageLock.lockfileVersion)) {
        process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
        process.exit(1);
      }
      if(packageLock.dependencies !== undefined) {
        augmentDependencies(".", packageLock.dependencies);
      }
    }
  '';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
#
# The dummy integrity values are fine here: the lock file only has to be
# structurally valid so that npm trusts the already-composed node_modules
# tree instead of reaching for the network.
reconstructPackageLock = writeTextFile {
  # Fixed: the derivation was previously misnamed "addintegrityfields.js",
  # copy-pasted from the script above.
  name = "reconstructpackagelock.js";
  text = ''
    var fs = require('fs');
    var path = require('path');
    var packageObj = JSON.parse(fs.readFileSync("package.json"));
    var lockObj = {
      name: packageObj.name,
      version: packageObj.version,
      lockfileVersion: 1,
      requires: true,
      dependencies: {}
    };
    /* Record a deployed package in the lock object and recurse into its
       own node_modules/ folder for transitive dependencies. */
    function augmentPackageJSON(filePath, dependencies) {
      var packageJSON = path.join(filePath, "package.json");
      if(fs.existsSync(packageJSON)) {
        var packageObj = JSON.parse(fs.readFileSync(packageJSON));
        dependencies[packageObj.name] = {
          version: packageObj.version,
          integrity: "sha1-000000000000000000000000000=",
          dependencies: {}
        };
        processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
      }
    }
    function processDependencies(dir, dependencies) {
      if(fs.existsSync(dir)) {
        var files = fs.readdirSync(dir);
        files.forEach(function(entry) {
          var filePath = path.join(dir, entry);
          var stats = fs.statSync(filePath);
          if(stats.isDirectory()) {
            if(entry.substr(0, 1) == "@") {
              // When we encounter a namespace folder, augment all packages belonging to the scope
              var pkgFiles = fs.readdirSync(filePath);
              pkgFiles.forEach(function(entry) {
                /* Fixed: stat the scoped entry itself instead of reusing the
                   outer `stats` (which describes the scope folder and is
                   always a directory here), so stray files inside a scope
                   folder are no longer treated as packages. */
                var pkgFilePath = path.join(filePath, entry);
                if(fs.statSync(pkgFilePath).isDirectory()) {
                  augmentPackageJSON(pkgFilePath, dependencies);
                }
              });
            } else {
              augmentPackageJSON(filePath, dependencies);
            }
          }
        });
      }
    }
    processDependencies("node_modules", lockObj.dependencies);
    fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
  '';
};
# Builds the shell fragment that pinpoints dependency versions, patches
# shebangs, optionally reconstructs/augments the lock file, and finally
# runs `npm rebuild` / `npm install` inside the composed node_modules tree.
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
  let
    # With the cache bypassed npm can run fully offline; otherwise point it
    # at an unreachable registry so any attempted download fails loudly.
    forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
  in
  ''
    # Pinpoint the versions of all dependencies to the ones that are actually being used
    echo "pinpointing versions of dependencies..."
    source $pinpointDependenciesScriptPath
    # Patch the shebangs of the bundled modules to prevent them from
    # calling executables outside the Nix store as much as possible
    patchShebangs .
    # Deploy the Node.js package by running npm install. Since the
    # dependencies have been provided already by ourselves, it should not
    # attempt to install them again, which is good, because we want to make
    # it Nix's responsibility. If it needs to install any dependencies
    # anyway (e.g. because the dependency parameters are
    # incomplete/incorrect), it fails.
    #
    # The other responsibilities of NPM are kept -- version checks, build
    # steps, postprocessing etc.
    export HOME=$TMPDIR
    cd "${packageName}"
    runHook preRebuild
    ${lib.optionalString bypassCache ''
      ${lib.optionalString reconstructLock ''
        if [ -f package-lock.json ]
        then
          echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
          echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
          rm package-lock.json
        else
          echo "No package-lock.json file found, reconstructing..."
        fi
        node ${reconstructPackageLock}
      ''}
      node ${addIntegrityFieldsScript}
    ''}
    npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
    if [ "''${dontNpmInstall-}" != "1" ]
    then
      # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
      rm -f npm-shrinkwrap.json
      npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install
    fi
  '';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
  { name                    # derivation name component
  , packageName             # name npm knows the package by (may be scoped)
  , version
  , dependencies ? []       # node2nix-generated dependency closure
  , buildInputs ? []
  , production ? true       # skip development dependencies when true
  , npmFlags ? ""
  , dontNpmInstall ? false
  , bypassCache ? false
  , reconstructLock ? false
  , preRebuild ? ""
  , dontStrip ? true
  , unpackPhase ? "true"
  , buildPhase ? "true"
  , ... }@args:
  let
    # Arguments handled explicitly above are stripped before the remainder
    # is forwarded to mkDerivation verbatim.
    extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" ];
  in
  stdenv.mkDerivation ({
    name = "node_${name}-${version}";
    buildInputs = [ tarWrapper python nodejs ]
      ++ lib.optional (stdenv.isLinux) utillinux
      ++ lib.optional (stdenv.isDarwin) libtool
      ++ buildInputs;
    inherit nodejs;
    inherit dontStrip; # Stripping may fail a build for some package deployments
    inherit dontNpmInstall preRebuild unpackPhase buildPhase;
    # Large generated scripts are passed as files (passAsFile) rather than
    # environment variables to avoid hitting environment size limits.
    compositionScript = composePackage args;
    pinpointDependenciesScript = pinpointDependenciesOfPackage args;
    passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
    installPhase = ''
      # Create and enter a root node_modules/ folder
      mkdir -p $out/lib/node_modules
      cd $out/lib/node_modules
      # Compose the package and all its dependencies
      source $compositionScriptPath
      ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
      # Create symlink to the deployed executable folder, if applicable
      if [ -d "$out/lib/node_modules/.bin" ]
      then
        ln -s $out/lib/node_modules/.bin $out/bin
      fi
      # Create symlinks to the deployed manual page folders, if applicable
      if [ -d "$out/lib/node_modules/${packageName}/man" ]
      then
        mkdir -p $out/share
        for dir in "$out/lib/node_modules/${packageName}/man/"*
        do
          mkdir -p $out/share/man/$(basename "$dir")
          for page in "$dir"/*
          do
            ln -s $page $out/share/man/$(basename "$dir")
          done
        done
      fi
      # Run post install hook, if provided
      runHook postInstall
    '';
  } // extraArgs);
# Builds a node environment (a node_modules folder and a set of binaries)
buildNodeDependencies =
  { name
  , packageName
  , version
  , src                     # source providing package.json (and lock file)
  , dependencies ? []
  , buildInputs ? []
  , production ? true
  , npmFlags ? ""
  , dontNpmInstall ? false
  , bypassCache ? false
  , reconstructLock ? false
  , dontStrip ? true
  , unpackPhase ? "true"
  , buildPhase ? "true"
  , ... }@args:
  let
    # NOTE(review): unlike buildNodePackage this only strips three
    # attributes; dontStrip/dontNpmInstall etc. thus reappear via
    # `// extraArgs` with the same values as the `inherit`s below —
    # harmless but inconsistent; confirm against upstream node2nix.
    extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
  in
  stdenv.mkDerivation ({
    name = "node-dependencies-${name}-${version}";
    buildInputs = [ tarWrapper python nodejs ]
      ++ lib.optional (stdenv.isLinux) utillinux
      ++ lib.optional (stdenv.isDarwin) libtool
      ++ buildInputs;
    inherit dontStrip; # Stripping may fail a build for some package deployments
    inherit dontNpmInstall unpackPhase buildPhase;
    # includeScript materialises the dependency closure; the pinpoint script
    # rewrites package.json version ranges to the exact deployed versions.
    includeScript = includeDependencies { inherit dependencies; };
    pinpointDependenciesScript = pinpointDependenciesOfPackage args;
    passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
    installPhase = ''
      mkdir -p $out/${packageName}
      cd $out/${packageName}
      source $includeScriptPath
      # Create fake package.json to make the npm commands work properly
      cp ${src}/package.json .
      chmod 644 package.json
      ${lib.optionalString bypassCache ''
        if [ -f ${src}/package-lock.json ]
        then
          cp ${src}/package-lock.json .
        fi
      ''}
      # Go to the parent folder to make sure that all packages are pinpointed
      cd ..
      ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
      ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
      # Expose the executables that were installed
      cd ..
      ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
      mv ${packageName} lib
      ln -s $out/lib/node_modules/.bin $out/bin
    '';
  } // extraArgs);
# Builds a development shell
buildNodeShell =
  { name
  , packageName
  , version
  , src
  , dependencies ? []
  , buildInputs ? []
  , production ? true
  , npmFlags ? ""
  , dontNpmInstall ? false
  , bypassCache ? false
  , reconstructLock ? false
  , dontStrip ? true
  , unpackPhase ? "true"
  , buildPhase ? "true"
  , ... }@args:
  let
    # All dependency handling is delegated to buildNodeDependencies; the
    # shell merely exposes the resulting node_modules via NODE_PATH/PATH.
    nodeDependencies = buildNodeDependencies args;
  in
  stdenv.mkDerivation {
    name = "node-shell-${name}-${version}";
    buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
    # $out/bin/shell starts an interactive shell with shellHook applied.
    # (Heredoc lines share the string's indentation, which Nix strips, so
    # the EOF terminator ends up at column 0 as bash requires.)
    buildCommand = ''
      mkdir -p $out/bin
      cat > $out/bin/shell <<EOF
      #! ${stdenv.shell} -e
      $shellHook
      exec ${stdenv.shell}
      EOF
      chmod +x $out/bin/shell
    '';
    # Provide the dependencies in a development shell through the NODE_PATH environment variable
    inherit nodeDependencies;
    shellHook = lib.optionalString (dependencies != []) ''
      export NODE_PATH=${nodeDependencies}/lib/node_modules
      export PATH="${nodeDependencies}/bin:$PATH"
    '';
  };
in
{
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
buildNodePackage = lib.makeOverridable buildNodePackage;
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
buildNodeShell = lib.makeOverridable buildNodeShell;
}

16604
nix/frontend/node-packages.nix Normal file

File diff suppressed because it is too large Load Diff

13
nix/parse-changelog.nix Normal file
View File

@ -0,0 +1,13 @@
{}: prev: final: rec {
  # CHANGELOG.md rendered to JSON once at build time, using the pinned
  # changelog-parser from the uniworxNodeDependencies closure.
  changelogJson = prev.runCommand "changelog.json" {
  } ''
    ln -s ${final.uniworxNodeDependencies}/lib/node_modules ./node_modules
    export PATH="${final.uniworxNodeDependencies}/bin:$PATH"
    changelog-parser ${../CHANGELOG.md} > $out
  '';
  # Convenience wrapper: `jq-changelog <jq-args…>` queries the pre-rendered
  # changelog JSON.  Fixed: forward arguments as "$@" (quoted) — the
  # previous unquoted `$@` word-split arguments containing whitespace,
  # e.g. jq filter expressions with spaces.
  jqChangelogJson = prev.writeShellScriptBin "jq-changelog" ''
    exec -- ${final.jq}/bin/jq "$@" < ${changelogJson}
  '';
}

89
nix/uniworx/backend.nix Normal file
View File

@ -0,0 +1,89 @@
{ inputs, backendSource, ... }: final: prev:
with prev.lib;
let
  # Haskell packages whose sources are pinned as flake inputs; each entry
  # corresponds to an @name@ placeholder in stack-flake.yaml that is
  # substituted with the input's store path below.
  haskellInputs = ["encoding" "memcached-binary" "conduit-resumablesink" "HaskellNet-SSL" "ldap-client" "serversession" "xss-sanitize" "colonnade" "minio-hs" "cryptoids" "zip-stream" "yesod" "cryptonite" "esqueleto"];
in {
  uniworx = final.haskell-nix.stackProject {
    # Patched copy of the backend source: stack-flake.yaml is turned into
    # stack.yaml with all @pkg@ placeholders replaced by store paths.
    src = prev.stdenv.mkDerivation {
      name = "uniworx-src";
      src = backendSource;
      phases = ["unpackPhase" "patchPhase" "installPhase"];
      patchPhase = ''
        substitute stack-flake.yaml stack.yaml \
          ${concatMapStringsSep " \\\n" (pkgName: "--replace @${pkgName}@ ${inputs."${pkgName}"}") haskellInputs}
      '';
      installPhase = ''
        mkdir -p $out
        cp -pr --reflink=auto ./. $out
      '';
    };
    compiler-nix-name = "ghc8104";
    # stack-sha256 = "1n7z294ldv2rjkfj1vs3kqmnbp34m2scrmyrp5kwmga9vp86fd9z"; # produces errors gregor does not understand :(
    modules = [
      {
        # Map each extra-dep onto its flake-input source (with sub-paths
        # for multi-package repositories like serversession/cryptoids/yesod).
        packages = {
          encoding.src = inputs.encoding;
          memcached-binary.src = inputs.memcached-binary;
          conduit-resumablesink.src = inputs.conduit-resumablesink;
          HaskellNet-SSL.src = inputs.HaskellNet-SSL;
          ldap-client.src = inputs.ldap-client;
          serversession.src = "${inputs.serversession}/serversession";
          serversession-backend-acid-state.src = "${inputs.serversession}/serversession-backend-acid-state";
          xss-sanitize.src = inputs.xss-sanitize;
          colonnade.src = "${inputs.colonnade}/colonnade";
          minio-hs.src = inputs.minio-hs;
          cryptoids-class.src = "${inputs.cryptoids}/cryptoids-class";
          cryptoids-types.src = "${inputs.cryptoids}/cryptoids-types";
          cryptoids.src = "${inputs.cryptoids}/cryptoids";
          filepath-crypto.src = "${inputs.cryptoids}/filepath-crypto";
          uuid-crypto.src = "${inputs.cryptoids}/uuid-crypto";
          zip-stream.src = inputs.zip-stream;
          yesod.src = "${inputs.yesod}/yesod";
          yesod-core.src = "${inputs.yesod}/yesod-core";
          yesod-static.src = "${inputs.yesod}/yesod-static";
          yesod-persistent.src = "${inputs.yesod}/yesod-persistent";
          yesod-form.src = "${inputs.yesod}/yesod-form";
          yesod-auth.src = "${inputs.yesod}/yesod-auth";
          yesod-test.src = "${inputs.yesod}/yesod-test";
          cryptonite.src = inputs.cryptonite;
          esqueleto.src = inputs.esqueleto;
        };
      }
      {
        packages.uniworx = {
          # Overlay the pre-built frontend assets into the backend source
          # tree before the Haskell build runs.
          postUnpack = ''
            ${final.xorg.lndir}/bin/lndir -silent ${prev.uniworxFrontend} $sourceRoot
            chmod a+w -R $sourceRoot
          '';
          preBuild = ''
            export TZDIR=${final.tzdata}/share/zoneinfo
          '';
          # llvm_9 is needed as a build tool (the project compiles with
          # -fllvm) for the library and every executable.
          components.library.build-tools = with final.pkgs; [ llvm_9 ];
          components.exes.uniworx.build-tools = with final.pkgs; [ llvm_9 ];
          components.exes.uniworxdb.build-tools = with final.pkgs; [ llvm_9 ];
          components.exes.uniworxload.build-tools = with final.pkgs; [ llvm_9 ];
          components.exes.uniworx-wflint.build-tools = with final.pkgs; [ llvm_9 ];
          components.tests.yesod = {
            build-tools = with final.pkgs; [ llvm_9 final.uniworx.hsPkgs.hspec-discover ];
            # Run the test binary inside the develop environment, with
            # postgres/minio/memcached made available on PATH via makeWrapper.
            testWrapper =
              let
                testWrapper = prev.writeScript "test-wrapper" (import ../develop.nix { inherit prev; pkgs = final; doDevelopEnv = false; } "$@");
                testWrapperWrapped = prev.runCommand "test-wrapper" { buildInputs = [final.makeWrapper]; } ''
                  makeWrapper ${testWrapper} $out \
                    --prefix PATH : ${final.postgresql_12}/bin \
                    --prefix PATH : ${final.minio}/bin \
                    --prefix PATH : ${final.memcached}/bin
                '';
              in singleton (toString testWrapperWrapped);
          };
          components.tests.hlint.build-tools = with final.pkgs; [ llvm_9 final.uniworx.hsPkgs.hlint-test ];
        };
      }
    ];
  };
}

6
nix/uniworx/default.nix Normal file
View File

@ -0,0 +1,6 @@
# Entry point composing all uniworx overlays; order matters: node
# dependencies first, then well-known assets, frontend and backend.
{ inputs, frontendSource, backendSource }: final: prev:
  let
    overlays = [
      (import ./node-dependencies.nix { inherit inputs; })
      (import ./well-known.nix { inherit frontendSource; })
      (import ./frontend.nix { inherit frontendSource; })
      (import ./backend.nix { inherit backendSource inputs; })
    ];
  in prev.lib.composeManyExtensions overlays final prev

58
nix/uniworx/frontend.nix Normal file
View File

@ -0,0 +1,58 @@
{ frontendSource, ... }: final: prev:
let
  # Make the pinned node_modules (and its .bin utilities like webpack,
  # eslint, karma) available in the build sandbox.
  setupNodeDeps = ''
    ln -s ${final.uniworxNodeDependencies}/lib/node_modules ./node_modules
    export PATH="${final.uniworxNodeDependencies}/bin:$PATH"
  '';
in {
  # Webpack build of the frontend assets (well-known/, static/, webpack.yml).
  uniworxFrontend = prev.stdenv.mkDerivation {
    name = "uniworx-frontend";
    # Second source: the fixed-output well-known derivation is linked into
    # the tree so webpack reuses the pre-generated favicons.
    srcs = [frontendSource prev.uniworxWellKnown];
    sourceRoot = "source";
    # checkPhase runs before buildPhase (lint/tests against the raw sources).
    # NOTE(review): with phases listed explicitly, checkPhase runs
    # unconditionally regardless of doCheck — confirm that is intended.
    phases = ["unpackPhase" "checkPhase" "buildPhase" "installPhase"];
    postUnpack = ''
      ${final.xorg.lndir}/bin/lndir -silent ../uniworx-well-known $sourceRoot
    '';
    preBuild = setupNodeDeps;
    buildPhase = ''
      runHook preBuild
      webpack --progress
      runHook postBuild
    '';
    # Headless chromium for karma needs fontconfig configured explicitly.
    preCheck = ''
      ${setupNodeDeps}
      export FONTCONFIG_FILE="${final.fontconfig.out}/etc/fonts/fonts.conf"
      export FONTCONFIG_PATH="${final.fontconfig.out}/etc/fonts/"
      export CHROME_BIN="${final.chromium}/bin/chromium-browser"
    '';
    checkPhase = ''
      runHook preCheck
      eslint frontend/src
      karma start --conf karma.conf.js
      runHook postCheck
    '';
    installPhase = ''
      mkdir -p $out $out/config
      cp -r --reflink=auto well-known static $out
      cp -r --reflink=auto config/webpack.yml $out/config
    '';
    # `uniworxFrontend.check`: variant that only runs the checks and
    # captures their stdout, without building/installing the assets.
    passthru.check = final.uniworxFrontend.overrideAttrs (oldAttrs: {
      name = "${oldAttrs.name}-check";
      phases = ["unpackPhase" "buildPhase"];
      buildPhase = ''
        mkdir $out
        ( ${oldAttrs.checkPhase} ) | tee $out/test-stdout
      '';
    });
  };
}

View File

@ -0,0 +1,15 @@
{ inputs, ... }: final: prev: {
  # Node dependency closure generated by node2nix (../frontend), with two
  # source overrides: tail.datetime comes from a flake input, and the
  # FontAwesome Pro tarball is fetched from the private npm registry using
  # a bearer token supplied as a flake input.
  uniworxNodeDependencies = (prev.callPackage ../frontend {}).nodeDependencies.override (oldArgs: {
    dependencies =
      let
        srcOverrides = {
          "tail.datetime" = inputs.tail-DateTime;
          "@fortawesome/fontawesome-pro" = prev.fetchurl {
            url = "https://npm.fontawesome.com/@fortawesome/fontawesome-pro/-/5.14.0/fontawesome-pro-5.14.0.tgz";
            # NOTE(review): writeText puts the Authorization header (with the
            # token) into a world-readable file in the Nix store — confirm
            # this exposure is acceptable for this token.
            curlOpts = "-H @${prev.writeText "headers.txt" "Authorization: Bearer ${builtins.readFile inputs.fontawesome-token}"}";
            hash = "sha256-jGvPrTKKL0rCWRZUEnJEmrOdHyQYs3M5709B1hjmFw4=";
          };
        };
      # Keep every dependency unchanged unless an override exists for its name.
      in map (dep: dep // { src = srcOverrides."${dep.packageName}" or dep.src; }) oldArgs.dependencies;
  });
}

View File

@ -0,0 +1,23 @@
{ frontendSource, ... }: final: prev: {
  # Fixed-output derivation producing the generated well-known/ assets
  # (favicons etc.).  Declaring outputHash allows network access during the
  # build (needed by webpack's favicon generation); the hash must be bumped
  # whenever the generated output changes.
  uniworxWellKnown = prev.stdenv.mkDerivation {
    name = "uniworx-well-known";
    src = frontendSource;
    phases = ["unpackPhase" "buildPhase" "installPhase" "fixupPhase"];
    buildPhase = ''
      ln -s ${final.uniworxNodeDependencies}/lib/node_modules ./node_modules
      export PATH="${final.uniworxNodeDependencies}/bin:${prev.exiftool}/bin:$PATH"
      webpack --progress
    '';
    # Installed as .nix-well-known: webpack.config.js treats the presence
    # of that directory as "favicons already generated by nix".
    installPhase = ''
      mkdir -p $out
      cp -r --reflink=auto well-known $out/.nix-well-known
    '';
    outputHashMode = "recursive";
    outputHashAlgo = "sha256";
    outputHash = "tDaffdAT5EGPKdDJ2ovo9XSGdV48W3Efqe+iBmakh6g=";
  };
}

2
package-lock.json generated
View File

@ -1,6 +1,6 @@
{
"name": "uni2work",
"version": "25.21.0",
"version": "25.21.12",
"lockfileVersion": 1,
"requires": true,
"dependencies": {

View File

@ -1,6 +1,6 @@
{
"name": "uni2work",
"version": "25.21.0",
"version": "25.21.12",
"description": "",
"keywords": [],
"author": "",
@ -122,7 +122,7 @@
"moment": "^2.27.0",
"npm": "^6.14.8",
"sodium-javascript": "^0.5.6",
"tail.datetime": "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/tail.DateTime.git#master",
"tail.datetime": "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/tail.DateTime.git#uni2work",
"toposort": "^2.0.2",
"whatwg-fetch": "^3.4.0"
}

View File

@ -1,5 +1,5 @@
name: uniworx
version: 25.21.0
version: 25.21.12
dependencies:
- base
- yesod
@ -253,6 +253,8 @@ when:
else:
ghc-options:
- -O -fllvm
data-files:
- testdata/**
library:
source-dirs: src
executables:
@ -276,6 +278,7 @@ executables:
- uniworx
other-modules:
- Database.Fill
- Paths_uniworx
when:
- condition: flag(library-only)
buildable: false

213
shell.nix
View File

@ -5,209 +5,19 @@ let
haskellPackages = pkgs.haskellPackages;
postgresSchema = pkgs.writeText "schema.sql" ''
CREATE USER uniworx WITH SUPERUSER;
CREATE DATABASE uniworx_test;
GRANT ALL ON DATABASE uniworx_test TO uniworx;
CREATE DATABASE uniworx;
GRANT ALL ON DATABASE uniworx TO uniworx;
'';
postgresHba = pkgs.writeText "hba_file" ''
local all all trust
'';
develop = pkgs.writeScriptBin "develop" ''
#!${pkgs.zsh}/bin/zsh -e
basePath=$(pwd)
exec 4<>''${basePath}/.develop.env
flockRes=
set +e
${pkgs.util-linux}/bin/flock -en 4; flockRes=$?
set -e
if [[ ''${flockRes} -ne 0 ]]; then
echo "Could not take exclusive lock; is another develop running?" >&2
exit ''${flockRes}
fi
cleanup() {
set +e -x
type cleanup_postgres &>/dev/null && cleanup_postgres
type cleanup_widget_memcached &>/dev/null && cleanup_widget_memcached
type cleanup_session_memcached &>/dev/null && cleanup_session_memcached
type cleanup_cache_memcached &>/dev/null && cleanup_cache_memcached
type cleanup_minio &>/dev/null && cleanup_minio
type cleanup_maildev &>/dev/null && cleanup_maildev
[ -f "''${basePath}/.develop.env" ] && rm -vf "''${basePath}/.develop.env"
set +x
}
trap cleanup EXIT
export PORT_OFFSET=$(((16#$(echo "fradrive $(whoami)" | sha256sum | head -c 16)) % 1000))
if [[ -z "$PGHOST" ]]; then
set -xe
pgDir=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} postgresql.XXXXXX)
pgSockDir=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} postgresql.sock.XXXXXX)
pgLogFile=$(mktemp --tmpdir=''${XDG_RUNTIME_DIR} postgresql.XXXXXX.log)
initdb --no-locale -D ''${pgDir}
pg_ctl start -D ''${pgDir} -l ''${pgLogFile} -w -o "-k ''${pgSockDir} -c listen_addresses=''' -c hba_file='${postgresHba}' -c unix_socket_permissions=0700 -c max_connections=9990 -c shared_preload_libraries=pg_stat_statements -c auto_explain.log_min_duration=100ms"
psql -h ''${pgSockDir} -f ${postgresSchema} postgres
printf "Postgres logfile is %s\nPostgres socket directory is %s\n" ''${pgLogFile} ''${pgSockDir}
export PGHOST=''${pgSockDir}
export PGLOG=''${pgLogFile}
cleanup_postgres() {
set +e -x
pg_ctl stop -D ''${pgDir}
rm -rvf ''${pgDir} ''${pgSockDir} ''${pgLogFile}
set +x
}
set +xe
fi
if [[ -z "$WIDGET_MEMCACHED_HOST" ]]; then
set -xe
memcached -l localhost -p $(($PORT_OFFSET + 11211)) &>/dev/null &
widget_memcached_pid=$!
export WIDGET_MEMCACHED_HOST=localhost
export WIDGET_MEMCACHED_PORT=$(($PORT_OFFSET + 11211))
cleanup_widget_memcached() {
[[ -n "$widget_memcached_pid" ]] && kill $widget_memcached_pid
}
set +xe
fi
if [[ -z "$SESSION_MEMCACHED_HOST" ]]; then
set -xe
memcached -l localhost -p $(($PORT_OFFSET + 11212)) &>/dev/null &
session_memcached_pid=$!
export SESSION_MEMCACHED_HOST=localhost
export SESSION_MEMCACHED_PORT=$(($PORT_OFFSET + 11212))
cleanup_session_memcached() {
[[ -n "$session_memcached_pid" ]] && kill $session_memcached_pid
}
set +xe
fi
if [[ -z "$MEMCACHED_HOST" ]]; then
set -xe
memcached -l localhost -p $(($PORT_OFFSET + 11213)) &>/dev/null &
memcached_pid=$!
export MEMCACHED_HOST=localhost
export MEMCACHED_PORT=$(($PORT_OFFSET + 11212))
cleanup_session_memcached() {
[[ -n "$memcached_pid" ]] && kill $memcached_pid
}
set +xe
fi
if [[ -z "$UPLOAD_S3_HOST" ]]; then
set -xe
cleanup_minio() {
[[ -n "$minio_pid" ]] && kill $minio_pid
[[ -n "''${MINIO_DIR}" ]] && rm -rvf ''${MINIO_DIR}
[[ -n "''${MINIO_LOGFILE}" ]] && rm -rvf ''${MINIO_LOGFILE}
}
export MINIO_DIR=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} minio.XXXXXX)
export MINIO_LOGFILE=$(mktemp --tmpdir=''${XDG_RUNTIME_DIR} minio.XXXXXX.log)
export MINIO_ACCESS_KEY=$(${pkgs.pwgen}/bin/pwgen -s 16 1)
export MINIO_SECRET_KEY=$(${pkgs.pwgen}/bin/pwgen -s 32 1)
minio server --address localhost:$(($PORT_OFFSET + 9000)) ''${MINIO_DIR} &>''${MINIO_LOGFILE} &
minio_pid=$!
export UPLOAD_S3_HOST=localhost
export UPLOAD_S3_PORT=$(($PORT_OFFSET + 9000))
export UPLOAD_S3_SSL=false
export UPLOAD_S3_KEY_ID=''${MINIO_ACCESS_KEY}
export UPLOAD_S3_KEY=''${MINIO_SECRET_KEY}
sleep 1
set +xe
fi
${optionalString (pkgs.nodePackages ? "maildev") ''
if [[ -z "$SMTPHOST" ]]; then
set -xe
cleanup_maildev() {
[[ -n "$maildev_pid" ]] && kill $maildev_pid
}
TMPDIR=''${XDG_RUNTIME_DIR} ${pkgs.nodePackages.maildev}/bin/maildev --smtp $(($PORT_OFFSET + 1025)) --web $(($PORT_OFFSET + 8080)) --ip localhost --web-ip localhost &>/dev/null &
maildev_pid=$!
export SMTPHOST=localhost
export SMTPPORT=$(($PORT_OFFSET + 1025))
export SMTPSSL=none
set +xe
develop = pkgs.writeScriptBin "develop" (import ./nix/develop.nix { inherit pkgs; } ''
if [ -x .develop.cmd ]; then
./.develop.cmd
else
if [ -n "$ZSH_VERSION" ]; then
autoload -U +X compinit && compinit
autoload -U +X bashcompinit && bashcompinit
fi
''}
eval "$(stack --bash-completion-script stack)"
set -xe
cat >&4 <<EOF
PORT_OFFSET=''${PORT_OFFSET}
PGHOST=''${pgSockDir}
PGLOG=''${pgLogFile}
WIDGET_MEMCACHED_HOST=localhost
WIDGET_MEMCACHED_PORT=$(($PORT_OFFSET + 11211))
SESSION_MEMCACHED_HOST=localhost
SESSION_MEMCACHED_PORT=$(($PORT_OFFSET + 11212))
MEMCACHED_HOST=localhost
MEMCACHED_PORT=$(($PORT_OFFSET + 11212))
MINIO_DIR=''${MINIO_DIR}
MINIO_LOGFILE=''${MINIO_LOGFILE}
UPLOAD_S3_HOST=localhost
UPLOAD_S3_PORT=$(($PORT_OFFSET + 9000))
UPLOAD_S3_SSL=false
UPLOAD_S3_KEY_ID=''${MINIO_ACCESS_KEY}
UPLOAD_S3_KEY=''${MINIO_SECRET_KEY}
SMTPHOST=''${SMTPHOST}
SMTPPORT=''${SMTPPORT}
SMTPSSL=''${SMTPSSL}
EOF
set +xe
if [ -n "$ZSH_VERSION" ]; then
autoload -U +X compinit && compinit
autoload -U +X bashcompinit && bashcompinit
$(getent passwd $USER | cut -d: -f 7)
fi
eval "$(stack --bash-completion-script stack)"
$(getent passwd $USER | cut -d: -f 7)
'';
'');
inDevelop = pkgs.writeScriptBin "in-develop" ''
#!${pkgs.zsh}/bin/zsh -e
@ -260,6 +70,5 @@ let
'';
in pkgs.mkShell {
name = "uni2work";
nativeBuildInputs = [develop inDevelop killallUni2work diffRunning] ++ (with pkgs; [ nodejs-14_x postgresql_12 openldap google-chrome exiftool memcached minio minio-client gup ]) ++ (with pkgs.haskellPackages; [ stack yesod-bin hlint cabal-install weeder profiteur ]);
nativeBuildInputs = [develop inDevelop killallUni2work diffRunning] ++ (with pkgs; [ nodejs-14_x postgresql_12 openldap google-chrome exiftool memcached minio minio-client gup skopeo ]) ++ (with pkgs.haskellPackages; [ stack yesod-bin hlint cabal-install weeder profiteur ]);
}

80
stack-flake.yaml Normal file
View File

@ -0,0 +1,80 @@
flags:
aeson:
cffi: true
rebuild-ghc-options: true
ghc-options:
"$everything": -fno-prof-auto
nix:
packages: []
pure: false
shell-file: ./stack.nix
add-gc-roots: true
extra-package-dbs: []
packages:
- .
extra-deps:
- @encoding@
- @memcached-binary@
- @conduit-resumablesink@
- @HaskellNet-SSL@
- @ldap-client@
- @serversession@/serversession
- @serversession@/serversession-backend-acid-state
- @xss-sanitize@
- @colonnade@/colonnade
- @minio-hs@
- @cryptoids@/cryptoids-class
- @cryptoids@/cryptoids-types
- @cryptoids@/cryptoids
- @cryptoids@/filepath-crypto
- @cryptoids@/uuid-crypto
- @zip-stream@
- @yesod@/yesod-core
- @yesod@/yesod-static
- @yesod@/yesod-persistent
- @yesod@/yesod-form
- @yesod@/yesod-auth
- @yesod@/yesod-test
- @yesod@/yesod
- @cryptonite@
- @esqueleto@
- git: https://github.com/jtdaugherty/HaskellNet.git
commit: 5aa1f3b009253b02c4822005ac59ee208a10a347
- git: https://github.com/gkleen/FastCDC.git
commit: 7326e2931454282df9081105dad812845db5c530
subdirs:
- gearhash
- fastcdc
- classy-prelude-yesod-1.5.0@sha256:8f7e183bdfd6d2ea9674284c4f285294ab086aff60d9be4e5d7d2f3c1a2b05b7,1330
- acid-state-0.16.0.1@sha256:d43f6ee0b23338758156c500290c4405d769abefeb98e9bc112780dae09ece6f,6207
# - commonmark-0.1.1.2@sha256:c06ab05f0f224ab7982502a96e17952823a9b6dae8505fb35194b0baa9e2a975,3278
# - commonmark-extensions-0.2.0.4@sha256:6a437bcfa3c757af4262b71336513619990eafb5cfdc33e57a499c93ad225608,3184
# - commonmark-pandoc-0.2.0.1@sha256:529c6e2c6cabf61558b66a28123eafc1d90d3324be29819f59f024e430312c1f,1105
- normaldistribution-1.1.0.3@sha256:2615b784c4112cbf6ffa0e2b55b76790290a9b9dff18a05d8c89aa374b213477,2160
# - pandoc-2.11.1.1@sha256:33a2092e86dc6c8cb9041b5b9bb12faa08c8508954815c79f1a4dfe240f5b499,39390
# - citeproc-0.1.1.1@sha256:c03ab98aeceda7770eec3eb76f5e95a98e4908ba7c1cb3b43ff482e876b17456,5395
- pkcs7-1.0.0.1@sha256:b26e5181868667abbde3ce17f9a61cf705eb695da073cdf82e1f9dfd6cc11176,3594
# - prometheus-metrics-ghc-1.0.1.1@sha256:d378a7186a967140fe0e09d325fe5e3bfd7b77a1123934b40f81fdfed2eacbdc,1233
- system-locale-0.3.0.0@sha256:13b3982403d8ac8cc6138e68802be8d8e7cf7ebc4cbc7e47e99e3c0dd1be066a,1529
- token-bucket-0.1.0.1@sha256:d8e85f2fc373939975e7ace7907baee177531ab6e43df94e330a2357e64a2d11,1899
- tz-0.1.3.5@sha256:fb17ca50a7d943e511c0ca70342dc83f66aa2532de2745632f1f5f9b1ad783c4,5086
- unidecode-0.1.0.4@sha256:99581ee1ea334a4596a09ae3642e007808457c66893b587e965b31f15cbf8c4d,1144
# - wai-middleware-prometheus-1.0.0@sha256:1625792914fb2139f005685be8ce519111451cfb854816e430fbf54af46238b4,1314
- hlint-test-0.1.0.0@sha256:e427c0593433205fc629fb05b74c6b1deb1de72d1571f26142de008f0d5ee7a9,1814
- network-arbitrary-0.7.0.0@sha256:0cd381c80ae20c16048936edcdb018b1d9fbe2b6ac8c44e908df403a5c6d7cd5,2520
# - process-extras-0.7.4@sha256:4e79289131415796c181889c4a226ebab7fc3b0d27b164f65e1aad123ae9b9e3,1759
# - ListLike-4.7.4@sha256:613b2967df738010e8f6f6b7c47d615f6fe42081f68eba7f946d5de7552aa8a4,3778
- saltine-0.2.0.0@sha256:2232a285ef326b0942bbcbfa6f465933a020f27e19552213e688fe371d66dddd,5198
- persistent-postgresql-2.13.0.3@sha256:43384bf8ed9c931c673e6abb763c8811113d1b7004095faaae1eb42e2cd52d8f,3601
resolver: lts-18.0
allow-newer: true

View File

@ -37,13 +37,13 @@ import System.Directory (getModificationTime)
import System.FilePath.Glob (glob)
testdataDir :: FilePath
testdataDir = "testdata"
testdataFile :: MonadIO m => FilePath -> m FilePath
testdataFile = liftIO . getDataFileName . ("testdata" </>)
insertFile :: ( HasFileReference fRef, PersistRecordBackend fRef SqlBackend ) => FileReferenceResidual fRef -> FilePath -> DB (Key fRef)
insertFile residual fileTitle = do
let fileContent = Just . C.sourceFile $ testdataDir </> fileTitle
filepath <- testdataFile fileTitle
let fileContent = Just $ C.sourceFile filepath
fileModified <- liftIO getCurrentTime
sinkFile' File{..} residual >>= insert
@ -625,8 +625,8 @@ fillDb = do
, tutorialRoom = Just $ case tw of
1 -> "A380"
2 -> "B747"
3 -> "A320"
_ -> "CRJ9"
3 -> "MD11"
_ -> "B777"
, tutorialRoomHidden = False
, tutorialTime = Occurrences
{ occurrencesScheduled = Set.empty
@ -1412,8 +1412,6 @@ fillDb = do
return . pure . AllocationPriorityNumericRecord matr . fromList $ sortOn Down prios
_other -> return mempty
liftIO . LBS.writeFile (testdataDir </> "bigAlloc_numeric.csv") $ Csv.encode numericPriorities
ordinalPriorities <- do
manyUsers' <- shuffleM manyUsers
flip foldMapM manyUsers' $ \uid -> do
@ -1423,7 +1421,10 @@ fillDb = do
return . pure $ Csv.Only matr
_other -> return mempty
liftIO . LBS.writeFile (testdataDir </> "bigAlloc_ordinal.csv") $ Csv.encode ordinalPriorities
liftIO . handle (\(_ :: IOException) -> return ()) $ do
haveTestdata <- doesDirectoryExist "testdata"
LBS.writeFile (bool id ("testdata" </>) haveTestdata "bigAlloc_numeric.csv") $ Csv.encode numericPriorities
LBS.writeFile (bool id ("testdata" </>) haveTestdata "bigAlloc_ordinal.csv") $ Csv.encode ordinalPriorities
forM_ universeF $ \changelogItem -> do
let ptn = "templates/i18n/changelog/" <> unpack (toPathPiece changelogItem) <> ".*"

View File

@ -24,20 +24,23 @@ const webpackVersion = require('webpack/package.json').version.split('.').slice(
const packageVersion = require('./package.json').version;
async function webpackConfig() {
const faviconNixDirectory = path.resolve(__dirname, '.nix-well-known');
let faviconApiVersion = undefined;
try {
const faviconApiChangelog = await request({
method: 'GET',
uri: 'https://realfavicongenerator.net/api/versions',
headers: {
'Accept': '*/*'
},
json: true
});
faviconApiVersion = faviconApiChangelog.filter(vObj => vObj.relevance.automated_update).slice(-1)[0].version;
} catch(e) {
console.error(e);
if (!fs.existsSync(faviconNixDirectory)) {
try {
const faviconApiChangelog = await request({
method: 'GET',
uri: 'https://realfavicongenerator.net/api/versions',
headers: {
'Accept': '*/*'
},
json: true
});
faviconApiVersion = faviconApiChangelog.filter(vObj => vObj.relevance.automated_update).slice(-1)[0].version;
} catch(e) {
console.error(e);
}
}
return {
@ -219,7 +222,16 @@ async function webpackConfig() {
const cacheDirectory = path.resolve(__dirname, '.well-known-cache', `${cacheDigest}-${lang}`);
if (fs.existsSync(cacheDirectory) && (!faviconApiVersion || faviconApiVersion === cachedVersion)) {
if (fs.existsSync(faviconNixDirectory)) {
console.log("Using favicons generated by nix");
return [
new CopyPlugin({
patterns: [
{ from: path.resolve(faviconNixDirectory, lang), to: path.resolve(__dirname, 'well-known', lang) }
]
})
];
} else if (fs.existsSync(cacheDirectory) && (!faviconApiVersion || faviconApiVersion === cachedVersion)) {
console.log(`Using cached well-known from ${cacheDirectory} for ${lang}`);
return [
new CopyPlugin({