diff --git a/.gitignore b/.gitignore index 39a52e97b..9fff5192f 100644 --- a/.gitignore +++ b/.gitignore @@ -39,6 +39,7 @@ tunnel.log /static /well-known /.well-known-cache +/.nix-well-known /**/tmp-* /testdata/bigAlloc_*.csv /sessions diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 176f7d6ed..02ee9c3f8 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -6,382 +6,252 @@ workflow: default: image: - name: fpco/stack-build:lts-17.15 - + name: registry.gitlab.com/fradrive/fradrive/nix-unstable:hcdba12p7ajl9xylgmw4kfn6p9c185xy + variables: - STACK_ROOT: "${CI_PROJECT_DIR}/.stack" - CHROME_BIN: "/usr/bin/chromium-browser" - POSTGRES_DB: uniworx_test - POSTGRES_USER: uniworx - POSTGRES_PASSWORD: uniworx - MINIO_ACCESS_KEY: gOel7KvadwNKgjjy - MINIO_SECRET_KEY: ugO5pkEla7F0JW9MdPwLi4MWLT5ZbqAL - UPLOAD_S3_HOST: localhost - UPLOAD_S3_PORT: 9000 - UPLOAD_S3_KEY_ID: gOel7KvadwNKgjjy - UPLOAD_S3_KEY: ugO5pkEla7F0JW9MdPwLi4MWLT5ZbqAL - N_PREFIX: "${HOME}/.n" - PACKAGE_REGISTRY_URL: "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/uni2work" + NIX_PATH: "nixpkgs=http://nixos.org/channels/nixos-21.05/nixexprs.tar.xz" + AWS_SHARED_CREDENTIALS_FILE: "/etc/aws/credentials" stages: - - setup - frontend:build - - yesod:build - - lint + - backend:build - test + - docker:build - prepare release - - upload packages + - upload - release -# - deploy - -npm install: - cache: - - &npm-cache - key: default-npm - paths: - - .npm - - node_modules - - stage: setup - script: - - ./.npmrc.gup - - npm ci --cache .npm --prefer-offline - before_script: &npm - - rm -rvf /etc/apt/sources.list /etc/apt/sources.list.d - - install -v -T -m 0644 ${APT_SOURCES_LIST} /etc/apt/sources.list - - apt-get update -y - - npm install -g n - - n 14.8.0 - - export PATH="${N_PREFIX}/bin:$PATH" - - npm install -g npm - - hash -r - - apt-get -y install openssh-client exiftool - - install -v -m 0700 -d ~/.ssh - - install -v -T -m 0644 ${SSH_KNOWN_HOSTS} ~/.ssh/known_hosts - - install -v -T -m 0400 ${SSH_DEPLOY_KEY} ~/.ssh/deploy && echo "IdentityFile ~/.ssh/deploy" >> ~/.ssh/config; - artifacts: - paths: - - node_modules/ - name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}" - expire_in: "1 day" - retry: 2 - interruptible: true - -frontend:build: - cache: - - &frontend-cache - key: default-frontend - paths: - - .well-known-cache +node dependencies: stage: frontend:build script: - - npm run frontend:build - before_script: *npm - needs: - - job: npm install - artifacts: true - artifacts: - paths: - - static - - well-known - - config/webpack.yml - name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}" - expire_in: "1 day" - retry: 2 - interruptible: true - -frontend:lint: - stage: lint - script: - - npm run frontend:lint - before_script: *npm - needs: - - job: npm install - artifacts: true - retry: 2 - interruptible: true - -yesod:build:dev: - cache: - - &stack-dev-cache - key: default-stack-dev - paths: - - .stack - - .stack-work - - stage: yesod:build - script: - - stack build --test --copy-bins --local-bin-path $(pwd)/bin --fast --flag uniworx:-library-only --flag uniworx:dev --flag uniworx:pedantic --no-strip --no-run-tests - - cp $(stack path --dist-dir)/build/hlint/hlint bin/test-hlint - - cp $(stack path --dist-dir)/build/yesod/yesod bin/test-yesod - needs: - - job: npm install # transitive - artifacts: false - - job: frontend:build - artifacts: true - before_script: &haskell - - rm -rvf /etc/apt/sources.list /etc/apt/sources.list.d - - install -v -T -m 0644 ${APT_SOURCES_LIST} /etc/apt/sources.list - - apt-get update -y - - apt-get install -y 
--no-install-recommends locales-all openssh-client git-restore-mtime - - wget http://newmirror.rz.ifi.lmu.de/ubuntu/archive/pool/main/libs/libsodium/libsodium-dev_1.0.18-1_amd64.deb http://newmirror.rz.ifi.lmu.de/ubuntu/archive/pool/main/libs/libsodium/libsodium23_1.0.18-1_amd64.deb - - apt install ./libsodium23_1.0.18-1_amd64.deb ./libsodium-dev_1.0.18-1_amd64.deb - - rm -v libsodium23_1.0.18-1_amd64.deb libsodium-dev_1.0.18-1_amd64.deb + - nix -L build -o result .#uniworxNodeDependencies + - nix-store --export $(nix-store -qR result) | xz > node-dependencies.nar.xz + before_script: &nix-before + - mkdir -p /etc/nix + - install -m 0644 .gitlab-ci/nix-bootstrap.conf /etc/nix/nix.conf + - .gitlab-ci/write-minio-creds.sh + - .gitlab-ci/make-minio-bucket.sh + - cp -pr --reflink=auto -L .gitlab-ci/upload-to-cache.sh /etc/nix/upload-to-cache.sh + - install -m 0644 .gitlab-ci/nix.conf /etc/nix/nix.conf + - git config --global init.defaultBranch master - install -v -m 0700 -d ~/.ssh - - install -v -T -m 0644 ${SSH_KNOWN_HOSTS} ~/.ssh/known_hosts - - install -v -T -m 0400 ${SSH_DEPLOY_KEY} ~/.ssh/deploy && echo "IdentityFile ~/.ssh/deploy" >> ~/.ssh/config; - - stack install happy - - export PATH="${HOME}/.local/bin:$PATH" - - hash -r - - git restore-mtime + - install -v -T -m 0644 "${SSH_KNOWN_HOSTS}" ~/.ssh/known_hosts + - install -v -T -m 0400 "${SSH_DEPLOY_KEY}" ~/.ssh/deploy && echo "IdentityFile ~/.ssh/deploy" >> ~/.ssh/config; + - install -v -T -m 0644 "${FONTAWESOME_NPM_AUTH_FILE}" /etc/fontawesome-token + - | + if [ -n "${REGISTRY_AUTHFILE}" ]; then + mkdir -p ~/.docker + install -v -T -m 0400 "${REGISTRY_AUTHFILE}" ~/.docker/config.json + fi artifacts: paths: - - bin/ + - node-dependencies.nar.xz name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}" expire_in: "1 day" - - rules: - - if: $CI_COMMIT_REF_NAME =~ /(^v[0-9].*)|((^|\/)profile($|\/))/ - when: manual - allow_failure: true - - when: on_success - retry: 2 interruptible: true - -yesod:build: - cache: - - &stack-cache - key: default-stack - paths: - - .stack - - .stack-work - - stage: yesod:build + +well known: + stage: frontend:build script: - - stack build --test --copy-bins --local-bin-path $(pwd)/bin --flag uniworx:-library-only --flag uniworx:-dev --flag uniworx:pedantic --no-strip --no-run-tests - - cp $(stack path --dist-dir)/build/hlint/hlint bin/test-hlint - - cp $(stack path --dist-dir)/build/yesod/yesod bin/test-yesod + - xzcat node-dependencies.nar.xz | nix-store --import + - nix -L build -o result .#uniworxWellKnown + - nix-store --export $(nix-store -qR result) | xz > well-known.nar.xz + before_script: *nix-before needs: - - job: npm install # transitive - artifacts: false - - job: frontend:build + - job: node dependencies artifacts: true - before_script: *haskell artifacts: paths: - - bin/ + - well-known.nar.xz name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}" expire_in: "1 day" - - rules: - - if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/ - when: on_success - - when: manual - allow_failure: true - retry: 2 interruptible: true - resource_group: ram - -yesod:build:profile: - cache: - - &stack-profile-cache - key: default-stack-profile - paths: - - .stack - - .stack-work - stage: yesod:build +frontend: + stage: frontend:build script: - - stack build --profile --copy-bins --local-bin-path $(pwd)/bin --flag uniworx:-library-only --flag uniworx:-dev --flag uniworx:pedantic --no-strip + - xzcat node-dependencies.nar.xz | nix-store --import + - xzcat well-known.nar.xz | nix-store --import + - nix -L build -o result .#uniworxFrontend + - 
nix-store --export $(nix-store -qR result) | xz > frontend.nar.xz + before_script: *nix-before needs: - - job: npm install # transitive - artifacts: false - - job: frontend:build + - job: node dependencies artifacts: true - before_script: *haskell - artifacts: - paths: - - bin/ - name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}" - expire_in: "1 day" - - rules: - - if: $CI_COMMIT_REF_NAME =~ /(^|\/)profile($|\/)/ - when: on_success - - when: manual - allow_failure: true - - retry: 2 - interruptible: true - resource_group: ram - -yesod:test:yesod: - stage: test - - services: &test-services - - name: postgres:10.10 - alias: postgres - - name: minio/minio:RELEASE.2020-08-27T05-16-20Z - alias: minio - command: ["minio", "server", "/data"] - - needs: - - job: npm install # transitive - artifacts: false - - job: frontend:build #transitive - artifacts: false - - job: yesod:build + - job: well known artifacts: true - - rules: - - if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/ - when: on_success - - script: - - bin/test-yesod - + artifacts: + paths: + - frontend.nar.xz + name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}" + expire_in: "1 day" retry: 2 interruptible: true -yesod:test:yesod:dev: - stage: test - - services: *test-services - +uniworx:lib:uniworx: + stage: backend:build + script: + - xzcat frontend.nar.xz | nix-store --import + - nix -L build -o result .#uniworx:lib:uniworx + - nix-store --export $(nix-store -qR result) | xz > uniworx:lib:uniworx.nar.xz + before_script: *nix-before needs: - - job: npm install # transitive - artifacts: false - - job: frontend:build #transitive - artifacts: false - - job: yesod:build:dev + - job: frontend artifacts: true - - rules: - - if: $CI_COMMIT_REF_NAME =~ /(^v[0-9].*)|((^|\/)profile($|\/))/ - when: manual - allow_failure: true - - when: on_success - - script: - - bin/test-yesod - - retry: 2 - interruptible: true - -yesod:test:hlint: - stage: lint - cache: &hlint_cache - key: hlint - paths: - - .stack - - .stack-work - - needs: - - job: npm install # transitive - artifacts: false - - job: frontend:build #transitive - artifacts: false - - job: yesod:build - artifacts: false - - rules: - - if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/ - when: on_success - - before_script: *haskell - script: - - stack install hlint - - stack exec -- hlint --cc src > gl-code-quality-report.json || jq . gl-code-quality-report.json - artifacts: paths: - - gl-code-quality-report.json + - uniworx:lib:uniworx.nar.xz name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}" expire_in: "1 day" - - reports: - codequality: gl-code-quality-report.json - retry: 2 interruptible: true - -yesod:test:hlint:dev: - stage: lint - cache: *hlint_cache - - needs: - - job: npm install # transitive - artifacts: false - - job: frontend:build #transitive - artifacts: false - - job: yesod:build:dev - artifacts: false - rules: - - if: $CI_COMMIT_REF_NAME =~ /(^v[0-9].*)|((^|\/)profile($|\/))/ - when: manual - allow_failure: true - - when: on_success - - before_script: *haskell +uniworx:exe:uniworx: + stage: backend:build script: - - stack install hlint - - stack exec -- hlint --cc src > gl-code-quality-report.json || jq . 
gl-code-quality-report.json - + - xzcat uniworx:lib:uniworx.nar.xz | nix-store --import + - nix -L build -o result .#uniworx:exe:uniworx + - nix-store --export $(nix-store -qR result) | xz > uniworx:exe:uniworx.nar.xz + before_script: *nix-before + needs: + - job: uniworx:lib:uniworx + artifacts: true artifacts: paths: - - gl-code-quality-report.json + - uniworx:exe:uniworx.nar.xz name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}" expire_in: "1 day" - - reports: - codequality: gl-code-quality-report.json - retry: 2 interruptible: true -frontend:test: +uniworx:exe:uniworx-wflint: + stage: backend:build + script: + - xzcat uniworx:lib:uniworx.nar.xz | nix-store --import + - nix -L build -o result .#uniworx:exe:uniworx-wflint + - nix-store --export $(nix-store -qR result) | xz > uniworx:exe:uniworx-wflint.nar.xz + before_script: *nix-before + needs: + - job: uniworx:lib:uniworx + artifacts: true + artifacts: + paths: + - uniworx:exe:uniworx-wflint.nar.xz + name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}" + expire_in: "1 day" + retry: 2 + interruptible: true + +uniworx:exe:uniworxdb: + stage: backend:build + script: + - xzcat uniworx:lib:uniworx.nar.xz | nix-store --import + - nix -L build -o result .#uniworx:exe:uniworxdb + - nix-store --export $(nix-store -qR result) | xz > uniworx:exe:uniworxdb.nar.xz + before_script: *nix-before + needs: + - job: uniworx:lib:uniworx + artifacts: true + artifacts: + paths: + - uniworx:exe:uniworxdb.nar.xz + name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}" + expire_in: "1 day" + retry: 2 + interruptible: true + +uniworx:exe:uniworxload: + stage: backend:build + script: + - xzcat uniworx:lib:uniworx.nar.xz | nix-store --import + - nix -L build -o result .#uniworx:exe:uniworxload + - nix-store --export $(nix-store -qR result) | xz > uniworx:exe:uniworxload.nar.xz + before_script: *nix-before + needs: + - job: uniworx:lib:uniworx + artifacts: true + artifacts: + paths: + - uniworx:exe:uniworxload.nar.xz + name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}" + expire_in: "1 day" + retry: 2 + interruptible: true + +check: stage: test script: - - npm run frontend:test + - xzcat frontend.nar.xz | nix-store --import + - xzcat uniworx:lib:uniworx.nar.xz | nix-store --import + - nix -L flake check + before_script: *nix-before needs: - - job: npm install + - job: frontend artifacts: true - before_script: - - rm -rvf /etc/apt/sources.list /etc/apt/sources.list.d - - install -v -T -m 0644 ${APT_SOURCES_LIST} /etc/apt/sources.list - - apt-get update -y - - npm install -g n - - n 14.8.0 - - export PATH="${N_PREFIX}/bin:$PATH" - - npm install -g npm - - hash -r - - apt-get install -y --no-install-recommends chromium-browser + - job: uniworx:lib:uniworx + artifacts: true + retry: 2 + interruptible: true + +demo docker: + stage: docker:build + script: + - xzcat uniworx:exe:uniworx.nar.xz | nix-store --import + - nix -L build -o result .#uniworxDemoDocker + - nix-store --export $(nix-store -qR result) > uniworx-demo.nar # image is already compressed, no need for xz + - cp -pr --reflink=auto -L result uniworx-demo.tar.gz + before_script: *nix-before + needs: + - job: uniworx:exe:uniworx + artifacts: true + - job: check + artifacts: false + artifacts: + paths: + - uniworx-demo.nar + - uniworx-demo.tar.gz + name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}" + expire_in: "1 day" + retry: 2 + interruptible: true + +docker: + stage: docker:build + script: + - xzcat uniworx:exe:uniworx.nar.xz | nix-store --import + - nix -L build -o result .#uniworxDocker + - nix-store --export $(nix-store -qR result) > 
uniworx.nar # image is already compressed, no need for xz + - cp -pr --reflink=auto -L result uniworx.tar.gz + before_script: *nix-before + needs: + - job: uniworx:exe:uniworx + artifacts: true + - job: check + artifacts: false + artifacts: + paths: + - uniworx.nar + - uniworx.tar.gz + name: "${CI_JOB_NAME}-${CI_COMMIT_SHORT_SHA}" + expire_in: "1 day" retry: 2 interruptible: true parse-changelog: stage: prepare release needs: - - job: npm install + - job: node dependencies artifacts: true rules: - if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/ when: on_success - before_script: - - rm -rvf /etc/apt/sources.list /etc/apt/sources.list.d - - install -v -T -m 0644 ${APT_SOURCES_LIST} /etc/apt/sources.list - - apt-get update -y - - apt-get install -y --no-install-recommends jq + before_script: *nix-before script: - - npm run parse-changelog - - | - jq -r '.versions[0].version' changelog.json > .current-version - - | - jq -r '.versions[0].body' changelog.json > .current-changelog.md + - xzcat node-dependencies.nar.xz | nix-store --import + - nix -L run .#jqChangelogJson -- -r '.versions[0].version' > .current-version + - nix -L run .#jqChangelogJson -- -r '.versions[0].body' > .current-changelog.md artifacts: paths: - .current-version @@ -391,103 +261,41 @@ parse-changelog: retry: 2 interruptible: true -upload: - variables: - GIT_STRATEGY: none - stage: upload packages - image: curlimages/curl:latest +upload docker: + stage: upload + script: + - nix-store --import < uniworx.nar + - nix -L run .#pushUniworxDocker + before_script: *nix-before needs: - - job: npm install # transitive - artifacts: false - - job: frontend:build # transitive - artifacts: false - - job: yesod:build + - job: docker artifacts: true - - job: parse-changelog - artifacts: true - - job: frontend:lint # validation - artifacts: false - - job: frontend:test # validation - artifacts: false - - job: yesod:test:hlint # validation - artifacts: false - - job: yesod:test:yesod # validation - artifacts: false rules: - if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/ when: on_success - before_script: - - export VERSION="$(cat .current-version)" + +upload demo docker: + stage: upload script: - - | - curl --header "JOB-TOKEN: ${CI_JOB_TOKEN}" --upload-file bin/uniworx ${PACKAGE_REGISTRY_URL}/${VERSION}/uniworx - - | - curl --header "JOB-TOKEN: ${CI_JOB_TOKEN}" --upload-file bin/uniworxdb ${PACKAGE_REGISTRY_URL}/${VERSION}/uniworxdb - - | - curl --header "JOB-TOKEN: ${CI_JOB_TOKEN}" --upload-file bin/uniworxload ${PACKAGE_REGISTRY_URL}/${VERSION}/uniworxload - - | - curl --header "JOB-TOKEN: ${CI_JOB_TOKEN}" --upload-file bin/uniworx-wflint ${PACKAGE_REGISTRY_URL}/${VERSION}/uniworx-wflint + - nix-store --import < uniworx-demo.nar + - nix -L run .#pushUniworxDemoDocker + before_script: *nix-before + needs: + - job: demo docker + artifacts: true + rules: + - if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/ + when: on_success release: variables: GIT_STRATEGY: none stage: release image: registry.gitlab.com/gitlab-org/release-cli:latest - needs: - - job: upload - artifacts: false - - job: npm install # transitive - artifacts: false - - job: frontend:build # transitive - artifacts: false - - job: yesod:build # transitive - artifacts: false - - job: parse-changelog - artifacts: true - - job: frontend:lint # validation - artifacts: false - - job: frontend:test # validation - artifacts: false - - job: yesod:test:hlint # validation - artifacts: false - - job: yesod:test:yesod # validation - artifacts: false rules: - if: $CI_COMMIT_REF_NAME =~ /^v[0-9].*/ when: on_success 
before_script: - export VERSION="$(cat .current-version)" script: - | release-cli create --name "${VERSION}" --tag-name $CI_COMMIT_TAG --description .current-changelog.md \ --assets-link "{\"name\":\"uniworx\",\"url\":\"${PACKAGE_REGISTRY_URL}/${VERSION}/uniworx\",\"filepath\":\"/uniworx\"}" \ --assets-link "{\"name\":\"uniworxdb\",\"url\":\"${PACKAGE_REGISTRY_URL}/${VERSION}/uniworxdb\",\"filepath\":\"/uniworxdb\"}" \ --assets-link "{\"name\":\"uniworxload\",\"url\":\"${PACKAGE_REGISTRY_URL}/${VERSION}/uniworxload\",\"filepath\":\"/uniworxload\"}" \ --assets-link "{\"name\":\"uniworx-wflint\",\"url\":\"${PACKAGE_REGISTRY_URL}/${VERSION}/uniworx-wflint\",\"filepath\":\"/uniworx-wflint\"}" - -# deploy:uniworx3: -# cache: {} -# stage: deploy -# variables: -# GIT_STRATEGY: none -# script: -# - zip -qj - bin/uniworx bin/uniworxdb | ssh root@uniworx3.ifi.lmu.de /root/bin/accept_uni2work -# needs: -# - yesod:build -# - frontend:test # For sanity -# before_script: -# - rm -rvf /etc/apt/sources.list /etc/apt/sources.list.d -# - install -v -T -m 0644 ${APT_SOURCES_LIST} /etc/apt/sources.list -# - apt-get update -y -# - apt-get install -y --no-install-recommends openssh-client -# - install -v -m 0700 -d ~/.ssh -# - install -v -T -m 0644 ${SSH_KNOWN_HOSTS} ~/.ssh/known_hosts -# - install -v -T -m 0400 ${SSH_PRIVATE_KEY_UNIWORX3} ~/.ssh/uniworx3; echo "IdentityFile ~/.ssh/uniworx3" >> ~/.ssh/config; -# dependencies: -# - yesod:build

-# only: -# variables: -# - $CI_COMMIT_REF_NAME =~ /^v[0-9].*/ -# resource_group: uniworx3 + - release-cli create --name "${VERSION}" --tag-name $CI_COMMIT_TAG --description .current-changelog.md diff --git a/.gitlab-ci/debug-aws.patch b/.gitlab-ci/debug-aws.patch new file mode 100644 index 000000000..f4a250b96 --- /dev/null +++ b/.gitlab-ci/debug-aws.patch @@ -0,0 +1,25 @@ +diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc +index 514ab3bf9..25dab18bb 100644 +--- a/src/libstore/filetransfer.cc ++++ b/src/libstore/filetransfer.cc +@@ -696,6 +696,8 @@ struct curlFileTransfer : public FileTransfer + std::string scheme = get(params, "scheme").value_or(""); + std::string endpoint = get(params, "endpoint").value_or(""); + ++ debug("enqueueFileTransfer: scheme: %s", scheme); ++ + S3Helper s3Helper(profile, region, scheme, endpoint); + + // FIXME: implement ETag +diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc +index 6bfbee044..ff406e5e4 100644 +--- a/src/libstore/s3-binary-cache-store.cc ++++ b/src/libstore/s3-binary-cache-store.cc +@@ -126,6 +126,7 @@ ref<Aws::Client::ClientConfiguration> S3Helper::makeConfig(const string & region + initAWS(); + auto res = make_ref<Aws::Client::ClientConfiguration>(); + res->region = region; ++ debug("configuring scheme %s", scheme); + if (!scheme.empty()) { + res->scheme = Aws::Http::SchemeMapper::FromString(scheme.c_str()); + } diff --git a/.gitlab-ci/fix-aws-scheme.patch b/.gitlab-ci/fix-aws-scheme.patch new file mode 100644 index 000000000..ac9416262 --- /dev/null +++ b/.gitlab-ci/fix-aws-scheme.patch @@ -0,0 +1,13 @@ +diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc +index 6bfbee044..51d86c4e6 100644 +--- a/src/libstore/s3-binary-cache-store.cc ++++ b/src/libstore/s3-binary-cache-store.cc +@@ -209,7 +209,7 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual + S3Helper s3Helper; + + S3BinaryCacheStoreImpl( +- const std::string & scheme, ++ const std::string & uriScheme, + const std::string & bucketName, + const Params & params) + : StoreConfig(params)
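The one-line rename in fix-aws-scheme.patch is what lets the MinIO substituter configured below work at all: the constructor parameter `scheme` shadowed the store's `scheme` setting, so Nix evidently saw the URI scheme `s3` instead of the configured `http` and tried to reach the cache over HTTPS. A minimal smoke test for the patched Nix, as a sketch only: it assumes a freshly built `./result` symlink and reuses the store URI and `NIX_CACHE_KEYFILE` from upload-to-cache.sh below.

#!/usr/bin/bash
# Copy one store path to the MinIO-backed cache, forcing plain HTTP via the
# scheme query parameter; unpatched Nix silently drops this parameter.
nix copy --to "s3://nix-cache?region=us-east-1&scheme=http&endpoint=minio-gitlab-runner-cache&secret-key=${NIX_CACHE_KEYFILE}" ./result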
diff --git a/.gitlab-ci/make-minio-bucket.sh b/.gitlab-ci/make-minio-bucket.sh new file mode 100755 index 000000000..2bba31234 --- /dev/null +++ b/.gitlab-ci/make-minio-bucket.sh @@ -0,0 +1,5 @@ +#!/usr/bin/bash + +export MC_HOST_minio=http://$(cat /minio-gitlab-runner-cache/accesskey):$(cat /minio-gitlab-runner-cache/secretkey)@minio-gitlab-runner-cache + +mc mb --ignore-existing minio/nix-cache diff --git a/.gitlab-ci/nix-bootstrap.conf b/.gitlab-ci/nix-bootstrap.conf new file mode 100644 index 000000000..036c2fc5e --- /dev/null +++ b/.gitlab-ci/nix-bootstrap.conf @@ -0,0 +1,5 @@ +sandbox = true +experimental-features = nix-command flakes ca-references +substituters = https://hydra.iohk.io https://cache.nixos.org/ +trusted-public-keys = hydra.iohk.io:f/Ea+s+dFdN+3Y/G+FDgSq+a5NEWhJGzdjvKNGv0/EQ= cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= +require-sigs = true \ No newline at end of file diff --git a/.gitlab-ci/nix.conf b/.gitlab-ci/nix.conf new file mode 100644 index 000000000..29c91aba8 --- /dev/null +++ b/.gitlab-ci/nix.conf @@ -0,0 +1,6 @@ +sandbox = true +experimental-features = nix-command flakes ca-references +substituters = https://hydra.iohk.io https://cache.nixos.org/ s3://nix-cache?scheme=http&endpoint=minio-gitlab-runner-cache +trusted-public-keys = hydra.iohk.io:f/Ea+s+dFdN+3Y/G+FDgSq+a5NEWhJGzdjvKNGv0/EQ= cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= minio-gitlab-runner-cache:ZN5neq93MHqpnieHSBlxGgWvCL0WKrZ2S9QS+U5Bnro= +require-sigs = true +post-build-hook = /etc/nix/upload-to-cache.sh \ No newline at end of file diff --git a/.gitlab-ci/patched-nix.nix b/.gitlab-ci/patched-nix.nix new file mode 100644 index 000000000..84f2d601e --- /dev/null +++ b/.gitlab-ci/patched-nix.nix @@ -0,0 +1,4 @@ +{ pkgs ? import <nixpkgs> {} }: +pkgs.nixUnstable.overrideAttrs (oldAttrs: { + patches = oldAttrs.patches or [] ++ [ ./fix-aws-scheme.patch ]; +}) diff --git a/.gitlab-ci/upload-to-cache.sh b/.gitlab-ci/upload-to-cache.sh new file mode 100755 index 000000000..bbadb87b2 --- /dev/null +++ b/.gitlab-ci/upload-to-cache.sh @@ -0,0 +1,8 @@ +#!/usr/bin/bash + +set -eu +set -f # disable globbing +export IFS=' ' + +echo "Signing and uploading paths" $OUT_PATHS +exec nix copy --to "s3://nix-cache?region=us-east-1&scheme=http&endpoint=minio-gitlab-runner-cache&secret-key=${NIX_CACHE_KEYFILE}" $OUT_PATHS diff --git a/.gitlab-ci/write-minio-creds.sh b/.gitlab-ci/write-minio-creds.sh new file mode 100755 index 000000000..356c367f1 --- /dev/null +++ b/.gitlab-ci/write-minio-creds.sh @@ -0,0 +1,8 @@ +#!/usr/bin/bash + +mkdir -p $(dirname ${AWS_SHARED_CREDENTIALS_FILE}) +cat > ${AWS_SHARED_CREDENTIALS_FILE} <''${basePath}/.develop.env + + flockRes= + set +e + ${pkgs.util-linux}/bin/flock -en 4; flockRes=$? + set -e + if [[ ''${flockRes} -ne 0 ]]; then + echo "Could not take exclusive lock; is another develop running?" 
>&2 + exit ''${flockRes} + fi + ''} + + cleanup() { + set +e -x + type cleanup_postgres &>/dev/null && cleanup_postgres + type cleanup_widget_memcached &>/dev/null && cleanup_widget_memcached + type cleanup_session_memcached &>/dev/null && cleanup_session_memcached + type cleanup_cache_memcached &>/dev/null && cleanup_cache_memcached + type cleanup_minio &>/dev/null && cleanup_minio + type cleanup_maildev &>/dev/null && cleanup_maildev + + ${optionalString doDevelopEnv '' + [ -f "''${basePath}/.develop.env" ] && rm -vf "''${basePath}/.develop.env" + ''} + set +x + } + + trap cleanup EXIT + + export PORT_OFFSET=${if doPortOffset then "$(((16#$(echo \"fradrive $(whoami)\" | sha256sum | head -c 16)) % 1000))" else "0"} + + if [[ -z "$PGHOST" ]]; then + set -xe + + pgDir=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} postgresql.XXXXXX) + pgSockDir=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} postgresql.sock.XXXXXX) + pgLogFile=$(mktemp --tmpdir=''${XDG_RUNTIME_DIR} postgresql.XXXXXX.log) + initdb --no-locale -D ''${pgDir} + pg_ctl start -D ''${pgDir} -l ''${pgLogFile} -w -o "-k ''${pgSockDir} -c listen_addresses=''' -c hba_file='${postgresHba}' -c unix_socket_permissions=0700 -c max_connections=9990 -c shared_preload_libraries=pg_stat_statements -c auto_explain.log_min_duration=100ms" + psql -h ''${pgSockDir} -f ${postgresSchema} postgres + printf "Postgres logfile is %s\nPostgres socket directory is %s\n" ''${pgLogFile} ''${pgSockDir} + + export PGHOST=''${pgSockDir} + export PGLOG=''${pgLogFile} + + cleanup_postgres() { + set +e -x + pg_ctl stop -D ''${pgDir} + rm -rvf ''${pgDir} ''${pgSockDir} ''${pgLogFile} + set +x + } + + set +xe + fi + + if [[ -z "$WIDGET_MEMCACHED_HOST" ]]; then + set -xe + + memcached -l localhost -p $(($PORT_OFFSET + 11211)) &>/dev/null & + widget_memcached_pid=$! + + export WIDGET_MEMCACHED_HOST=localhost + export WIDGET_MEMCACHED_PORT=$(($PORT_OFFSET + 11211)) + + cleanup_widget_memcached() { + [[ -n "$widget_memcached_pid" ]] && kill $widget_memcached_pid + } + + set +xe + fi + + if [[ -z "$SESSION_MEMCACHED_HOST" ]]; then + set -xe + + memcached -l localhost -p $(($PORT_OFFSET + 11212)) &>/dev/null & + session_memcached_pid=$! + + export SESSION_MEMCACHED_HOST=localhost + export SESSION_MEMCACHED_PORT=$(($PORT_OFFSET + 11212)) + + cleanup_session_memcached() { + [[ -n "$session_memcached_pid" ]] && kill $session_memcached_pid + } + + set +xe + fi + + if [[ -z "$MEMCACHED_HOST" ]]; then + set -xe + + memcached -l localhost -p $(($PORT_OFFSET + 11213)) &>/dev/null & + memcached_pid=$! + + export MEMCACHED_HOST=localhost + export MEMCACHED_PORT=$(($PORT_OFFSET + 11213)) + + cleanup_cache_memcached() { + [[ -n "$memcached_pid" ]] && kill $memcached_pid + } + + set +xe + fi + + if [[ -z "$UPLOAD_S3_HOST" ]]; then + set -xe + + cleanup_minio() { + [[ -n "$minio_pid" ]] && kill $minio_pid + [[ -n "''${MINIO_DIR}" ]] && rm -rvf ''${MINIO_DIR} + [[ -n "''${MINIO_LOGFILE}" ]] && rm -rvf ''${MINIO_LOGFILE} + } + + export MINIO_DIR=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} minio.XXXXXX) + export MINIO_LOGFILE=$(mktemp --tmpdir=''${XDG_RUNTIME_DIR} minio.XXXXXX.log) + export MINIO_ACCESS_KEY=$(${pkgs.pwgen}/bin/pwgen -s 16 1) + export MINIO_SECRET_KEY=$(${pkgs.pwgen}/bin/pwgen -s 32 1) + + minio server --address localhost:$(($PORT_OFFSET + 9000)) ''${MINIO_DIR} &>''${MINIO_LOGFILE} & + minio_pid=$!
+ + export UPLOAD_S3_HOST=localhost + export UPLOAD_S3_PORT=$(($PORT_OFFSET + 9000)) + export UPLOAD_S3_SSL=false + export UPLOAD_S3_KEY_ID=''${MINIO_ACCESS_KEY} + export UPLOAD_S3_KEY=''${MINIO_SECRET_KEY} + + sleep 1 + + set +xe + fi + + ${optionalString (pkgs.nodePackages ? "maildev") '' + if [[ -z "$SMTPHOST" ]]; then + set -xe + + cleanup_maildev() { + [[ -n "$maildev_pid" ]] && kill $maildev_pid + } + + TMPDIR=''${XDG_RUNTIME_DIR} ${pkgs.nodePackages.maildev}/bin/maildev --smtp $(($PORT_OFFSET + 1025)) --web $(($PORT_OFFSET + 8080)) --ip localhost --web-ip localhost &>/dev/null & + maildev_pid=$! + + export SMTPHOST=localhost + export SMTPPORT=$(($PORT_OFFSET + 1025)) + export SMTPSSL=none + + set +xe + fi + ''} + + ${optionalString doDevelopEnv '' + set -xe + + cat >&4 < $out + ''); + in if self ? lastModified then fromDate else "1970-01-01T00:00:01Z"; + + mkUniworxDocker = { isDemo }: prev.dockerTools.buildImage { + name = "uniworx${optionalString isDemo "-demo"}"; + tag = + let + versionFile = if isDemo then ./demo-version.json else ./version.json; + in (builtins.fromJSON (prev.lib.readFile versionFile)).version; + inherit created; + + contents = with final; [ + uniworx.uniworx.components.exes.uniworx + prev.dockerTools.binSh findutils coreutils + ] ++ optionals isDemo [ postgresql_12 memcached uniworx.uniworx.components.exes.uniworxdb ]; + + runAsRoot = '' + #!${final.stdenv.shell} + + ${prev.dockerTools.shadowSetup} + + mkdir -p /var/lib + + groupadd -r uniworx + useradd -r -g uniworx -d /var/lib/uniworx -M uniworx + install -d -g uniworx -o uniworx -m 0750 /var/lib/uniworx + + mkdir -p /var/log + install -d -g uniworx -o uniworx -m 0755 /var/log/uniworx + + ${optionalString isDemo '' + install -d -g uniworx -o uniworx -m 0750 /var/lib/postgres + + install -d -g uniworx -o uniworx -m 0750 /var/lib/memcached + + install -d -g uniworx -o uniworx -m 0755 /var/log/postgres + install -d -g uniworx -o uniworx -m 0755 /var/log/memcached + + mkdir -p /run + install -d -g uniworx -o uniworx -m 0755 /run/postgres + ''} + ''; + + config = + let + entrypoint = prev.writeScriptBin "uniworx-entrypoint" '' + #!${final.zsh}/bin/zsh -xe + + cTime=$(date -Is) + + ${optionalString isDemo '' + pgDir=/var/lib/postgres + pgSockDir=/run/postgres + pgLogFile=/var/log/postgres/''${cTime}.log + export PGHOST=''${pgSockDir} + export PGLOG=''${pgLogFile} + + pgNew= + if [[ -n "$(find ''${pgDir} -maxdepth 0 -type d -empty 2>/dev/null)" ]]; then + pgNew=1 + fi + + [[ -z "''${pgNew}" ]] || initdb --no-locale --encoding=UTF8 --username postgres --pgdata ''${pgDir} + pg_ctl start -D ''${pgDir} -l ''${pgLogFile} -w -o "-k ''${pgSockDir} -c listen_addresses= -c hba_file=${postgresHba} -c unix_socket_permissions=0777 -c max_connections=9990 -c shared_preload_libraries=pg_stat_statements -c auto_explain.log_min_duration=100ms" + [[ -z "''${pgNew}" ]] || psql -f ${postgresSchema} postgres postgres + + ( cd /var/lib/memcached; memcached -p 11212 ) &>/var/log/memcached/''${cTime}.log & + export SESSION_MEMCACHED_HOST=localhost + export SESSION_MEMCACHED_PORT=11212 + ''} + + export LOGDEST=/var/log/uniworx/''${cTime}.log + typeset -a configs + configs=() + configDir=''${CONFIG_DIR-/cfg} + if [[ -d "''${configDir}" ]]; then + while IFS= read -d $'\0' cfg; do + configs+=("''${(q)cfg}") + done < <(find "''${configDir}" \( -name '*.yml' -o -name '*.yaml' \) -print0 | sort -rz) + fi + configs+=('${uniworxConfig}') + cd /var/lib/uniworx + ${optionalString isDemo '' + [[ -z "''${pgNew}" ]] || uniworxdb -f ''${configs} + ''} + 
exec -- uniworx ''${configs} + ''; + postgresSchema = prev.writeText "schema.sql" '' + CREATE USER uniworx WITH SUPERUSER; + CREATE DATABASE uniworx; + GRANT ALL ON DATABASE uniworx TO uniworx; + ''; + + postgresHba = prev.writeText "hba_file" '' + local all all trust + ''; + uniworxConfig = prev.writeText "uni2work.yml" '' + port: 8080 + approot: "_env:APPROOT:http://localhost:8080" + ''; + in { + Cmd = [ "${entrypoint}/bin/uniworx-entrypoint" ]; + User = "uniworx:uniworx"; + ExposedPorts = { + "8080/tcp" = {}; + }; + Volumes = { + "/var/lib/uniworx" = {}; + "/var/log" = {}; + } // optionalAttrs isDemo { + "/var/lib/postgres" = {}; + }; + }; + }; +in +{ + ciDocker = prev.dockerTools.buildImageWithNixDb rec { + name = "uniworx-ci"; + inherit created; + tag = fromImage.imageTag; + fromImage = prev.docker-nixpkgs.nix-unstable; + + contents = with final; [ + bash coreutils + minio-client + xz + ]; + }; +} // mapAttrs (_name: mkUniworxDocker) { + uniworxDemoDocker = { isDemo = true; }; + uniworxDocker = { isDemo = false; }; +} diff --git a/nix/docker/demo-version.json b/nix/docker/demo-version.json new file mode 100644 index 000000000..dfcc3af19 --- /dev/null +++ b/nix/docker/demo-version.json @@ -0,0 +1,3 @@ +{ + "version": "0.3.0" +} diff --git a/nix/docker/version.json b/nix/docker/version.json new file mode 100644 index 000000000..74e9a96b4 --- /dev/null +++ b/nix/docker/version.json @@ -0,0 +1,3 @@ +{ + "version": "25.21.0" +} diff --git a/nix/frontend/default.nix b/nix/frontend/default.nix new file mode 100644 index 000000000..f929727d5 --- /dev/null +++ b/nix/frontend/default.nix @@ -0,0 +1,17 @@ +# This file has been generated by node2nix 1.9.0. Do not edit! + +{pkgs ? import <nixpkgs> { + inherit system; + }, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-14_x"}: + +let + nodeEnv = import ./node-env.nix { + inherit (pkgs) stdenv lib python2 runCommand writeTextFile; + inherit pkgs nodejs; + libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null; + }; +in +import ./node-packages.nix { + inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit; + inherit nodeEnv; +} diff --git a/nix/frontend/node-env.nix b/nix/frontend/node-env.nix new file mode 100644 index 000000000..c2b723195 --- /dev/null +++ b/nix/frontend/node-env.nix @@ -0,0 +1,567 @@ +# This file originates from node2nix + +{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile}: + +let + # Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master + utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux; + + python = if nodejs ? python then nodejs.python else python2; + + # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise + tarWrapper = runCommand "tarWrapper" {} '' + mkdir -p $out/bin + + cat > $out/bin/tar <> $out/nix-support/hydra-build-products + ''; + }; + + includeDependencies = {dependencies}: + lib.optionalString (dependencies != []) + (lib.concatMapStrings (dependency: + '' + # Bundle the dependencies of the package + mkdir -p node_modules + cd node_modules + + # Only include dependencies if they don't exist. They may also be bundled in the package. + if [ ! -e "${dependency.name}" ] + then + ${composePackage dependency} + fi + + cd .. + '' + ) dependencies); + + # Recursively composes the dependencies of a package + composePackage = { name, packageName, src, dependencies ? [], ... 
}@args: + builtins.addErrorContext "while evaluating node package '${packageName}'" '' + DIR=$(pwd) + cd $TMPDIR + + unpackFile ${src} + + # Make the base dir in which the target dependency resides first + mkdir -p "$(dirname "$DIR/${packageName}")" + + if [ -f "${src}" ] + then + # Figure out what directory has been unpacked + packageDir="$(find . -maxdepth 1 -type d | tail -1)" + + # Restore write permissions to make building work + find "$packageDir" -type d -exec chmod u+x {} \; + chmod -R u+w "$packageDir" + + # Move the extracted tarball into the output folder + mv "$packageDir" "$DIR/${packageName}" + elif [ -d "${src}" ] + then + # Get a stripped name (without hash) of the source directory. + # On old nixpkgs it's already set internally. + if [ -z "$strippedName" ] + then + strippedName="$(stripHash ${src})" + fi + + # Restore write permissions to make building work + chmod -R u+w "$strippedName" + + # Move the extracted directory into the output folder + mv "$strippedName" "$DIR/${packageName}" + fi + + # Unset the stripped name to not confuse the next unpack step + unset strippedName + + # Include the dependencies of the package + cd "$DIR/${packageName}" + ${includeDependencies { inherit dependencies; }} + cd .. + ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} + ''; + + pinpointDependencies = {dependencies, production}: + let + pinpointDependenciesFromPackageJSON = writeTextFile { + name = "pinpointDependencies.js"; + text = '' + var fs = require('fs'); + var path = require('path'); + + function resolveDependencyVersion(location, name) { + if(location == process.env['NIX_STORE']) { + return null; + } else { + var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json"); + + if(fs.existsSync(dependencyPackageJSON)) { + var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON)); + + if(dependencyPackageObj.name == name) { + return dependencyPackageObj.version; + } + } else { + return resolveDependencyVersion(path.resolve(location, ".."), name); + } + } + } + + function replaceDependencies(dependencies) { + if(typeof dependencies == "object" && dependencies !== null) { + for(var dependency in dependencies) { + var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency); + + if(resolvedVersion === null) { + process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n"); + } else { + dependencies[dependency] = resolvedVersion; + } + } + } + } + + /* Read the package.json configuration */ + var packageObj = JSON.parse(fs.readFileSync('./package.json')); + + /* Pinpoint all dependencies */ + replaceDependencies(packageObj.dependencies); + if(process.argv[2] == "development") { + replaceDependencies(packageObj.devDependencies); + } + replaceDependencies(packageObj.optionalDependencies); + + /* Write the fixed package.json file */ + fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2)); + ''; + }; + in + '' + node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"} + + ${lib.optionalString (dependencies != []) + '' + if [ -d node_modules ] + then + cd node_modules + ${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies} + cd .. + fi + ''} + ''; + + # Recursively traverses all dependencies of a package and pinpoints all + # dependencies in the package.json file to the versions that are actually + # being used. 
+ + pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args: + '' + if [ -d "${packageName}" ] + then + cd "${packageName}" + ${pinpointDependencies { inherit dependencies production; }} + cd .. + ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} + fi + ''; + + # Extract the Node.js source code which is used to compile packages with + # native bindings + nodeSources = runCommand "node-sources" {} '' + tar --no-same-owner --no-same-permissions -xf ${nodejs.src} + mv node-* $out + ''; + + # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty) + addIntegrityFieldsScript = writeTextFile { + name = "addintegrityfields.js"; + text = '' + var fs = require('fs'); + var path = require('path'); + + function augmentDependencies(baseDir, dependencies) { + for(var dependencyName in dependencies) { + var dependency = dependencies[dependencyName]; + + // Open package.json and augment metadata fields + var packageJSONDir = path.join(baseDir, "node_modules", dependencyName); + var packageJSONPath = path.join(packageJSONDir, "package.json"); + + if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored + console.log("Adding metadata fields to: "+packageJSONPath); + var packageObj = JSON.parse(fs.readFileSync(packageJSONPath)); + + if(dependency.integrity) { + packageObj["_integrity"] = dependency.integrity; + } else { + packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads. + } + + if(dependency.resolved) { + packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided + } else { + packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories. 
+ } + + if(dependency.from !== undefined) { // Adopt from property if one has been provided + packageObj["_from"] = dependency.from; + } + + fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2)); + } + + // Augment transitive dependencies + if(dependency.dependencies !== undefined) { + augmentDependencies(packageJSONDir, dependency.dependencies); + } + } + } + + if(fs.existsSync("./package-lock.json")) { + var packageLock = JSON.parse(fs.readFileSync("./package-lock.json")); + + if(![1, 2].includes(packageLock.lockfileVersion)) { + process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n"); + process.exit(1); + } + + if(packageLock.dependencies !== undefined) { + augmentDependencies(".", packageLock.dependencies); + } + } + ''; + }; + + # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes + reconstructPackageLock = writeTextFile { + name = "addintegrityfields.js"; + text = '' + var fs = require('fs'); + var path = require('path'); + + var packageObj = JSON.parse(fs.readFileSync("package.json")); + + var lockObj = { + name: packageObj.name, + version: packageObj.version, + lockfileVersion: 1, + requires: true, + dependencies: {} + }; + + function augmentPackageJSON(filePath, dependencies) { + var packageJSON = path.join(filePath, "package.json"); + if(fs.existsSync(packageJSON)) { + var packageObj = JSON.parse(fs.readFileSync(packageJSON)); + dependencies[packageObj.name] = { + version: packageObj.version, + integrity: "sha1-000000000000000000000000000=", + dependencies: {} + }; + processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies); + } + } + + function processDependencies(dir, dependencies) { + if(fs.existsSync(dir)) { + var files = fs.readdirSync(dir); + + files.forEach(function(entry) { + var filePath = path.join(dir, entry); + var stats = fs.statSync(filePath); + + if(stats.isDirectory()) { + if(entry.substr(0, 1) == "@") { + // When we encounter a namespace folder, augment all packages belonging to the scope + var pkgFiles = fs.readdirSync(filePath); + + pkgFiles.forEach(function(entry) { + if(stats.isDirectory()) { + var pkgFilePath = path.join(filePath, entry); + augmentPackageJSON(pkgFilePath, dependencies); + } + }); + } else { + augmentPackageJSON(filePath, dependencies); + } + } + }); + } + } + + processDependencies("node_modules", lockObj.dependencies); + + fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2)); + ''; + }; + + prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}: + let + forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com"; + in + '' + # Pinpoint the versions of all dependencies to the ones that are actually being used + echo "pinpointing versions of dependencies..." + source $pinpointDependenciesScriptPath + + # Patch the shebangs of the bundled modules to prevent them from + # calling executables outside the Nix store as much as possible + patchShebangs . + + # Deploy the Node.js package by running npm install. Since the + # dependencies have been provided already by ourselves, it should not + # attempt to install them again, which is good, because we want to make + # it Nix's responsibility. If it needs to install any dependencies + # anyway (e.g. because the dependency parameters are + # incomplete/incorrect), it fails. 
+ # + # The other responsibilities of NPM are kept -- version checks, build + # steps, postprocessing etc. + + export HOME=$TMPDIR + cd "${packageName}" + runHook preRebuild + + ${lib.optionalString bypassCache '' + ${lib.optionalString reconstructLock '' + if [ -f package-lock.json ] + then + echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!" + echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!" + rm package-lock.json + else + echo "No package-lock.json file found, reconstructing..." + fi + + node ${reconstructPackageLock} + ''} + + node ${addIntegrityFieldsScript} + ''} + + npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild + + if [ "''${dontNpmInstall-}" != "1" ] + then + # NPM tries to download packages even when they already exist if npm-shrinkwrap is used. + rm -f npm-shrinkwrap.json + + npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install + fi + ''; + + # Builds and composes an NPM package including all its dependencies + buildNodePackage = + { name + , packageName + , version + , dependencies ? [] + , buildInputs ? [] + , production ? true + , npmFlags ? "" + , dontNpmInstall ? false + , bypassCache ? false + , reconstructLock ? false + , preRebuild ? "" + , dontStrip ? true + , unpackPhase ? "true" + , buildPhase ? "true" + , ... }@args: + + let + extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" ]; + in + stdenv.mkDerivation ({ + name = "node_${name}-${version}"; + buildInputs = [ tarWrapper python nodejs ] + ++ lib.optional (stdenv.isLinux) utillinux + ++ lib.optional (stdenv.isDarwin) libtool + ++ buildInputs; + + inherit nodejs; + + inherit dontStrip; # Stripping may fail a build for some package deployments + inherit dontNpmInstall preRebuild unpackPhase buildPhase; + + compositionScript = composePackage args; + pinpointDependenciesScript = pinpointDependenciesOfPackage args; + + passAsFile = [ "compositionScript" "pinpointDependenciesScript" ]; + + installPhase = '' + # Create and enter a root node_modules/ folder + mkdir -p $out/lib/node_modules + cd $out/lib/node_modules + + # Compose the package and all its dependencies + source $compositionScriptPath + + ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }} + + # Create symlink to the deployed executable folder, if applicable + if [ -d "$out/lib/node_modules/.bin" ] + then + ln -s $out/lib/node_modules/.bin $out/bin + fi + + # Create symlinks to the deployed manual page folders, if applicable + if [ -d "$out/lib/node_modules/${packageName}/man" ] + then + mkdir -p $out/share + for dir in "$out/lib/node_modules/${packageName}/man/"* + do + mkdir -p $out/share/man/$(basename "$dir") + for page in "$dir"/* + do + ln -s $page $out/share/man/$(basename "$dir") + done + done + fi + + # Run post install hook, if provided + runHook postInstall + ''; + } // extraArgs); + + # Builds a node environment (a node_modules folder and a set of binaries) + buildNodeDependencies = + { name + , packageName + , version + , src + , dependencies ? [] + , buildInputs ? [] + , production ? true + , npmFlags ? "" + , dontNpmInstall ? false + , bypassCache ? false + , reconstructLock ? false + , dontStrip ? true + , unpackPhase ? "true" + , buildPhase ? "true" + , ... 
}@args: + + let + extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ]; + in + stdenv.mkDerivation ({ + name = "node-dependencies-${name}-${version}"; + + buildInputs = [ tarWrapper python nodejs ] + ++ lib.optional (stdenv.isLinux) utillinux + ++ lib.optional (stdenv.isDarwin) libtool + ++ buildInputs; + + inherit dontStrip; # Stripping may fail a build for some package deployments + inherit dontNpmInstall unpackPhase buildPhase; + + includeScript = includeDependencies { inherit dependencies; }; + pinpointDependenciesScript = pinpointDependenciesOfPackage args; + + passAsFile = [ "includeScript" "pinpointDependenciesScript" ]; + + installPhase = '' + mkdir -p $out/${packageName} + cd $out/${packageName} + + source $includeScriptPath + + # Create fake package.json to make the npm commands work properly + cp ${src}/package.json . + chmod 644 package.json + ${lib.optionalString bypassCache '' + if [ -f ${src}/package-lock.json ] + then + cp ${src}/package-lock.json . + fi + ''} + + # Go to the parent folder to make sure that all packages are pinpointed + cd .. + ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} + + ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }} + + # Expose the executables that were installed + cd .. + ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} + + mv ${packageName} lib + ln -s $out/lib/node_modules/.bin $out/bin + ''; + } // extraArgs); + + # Builds a development shell + buildNodeShell = + { name + , packageName + , version + , src + , dependencies ? [] + , buildInputs ? [] + , production ? true + , npmFlags ? "" + , dontNpmInstall ? false + , bypassCache ? false + , reconstructLock ? false + , dontStrip ? true + , unpackPhase ? "true" + , buildPhase ? "true" + , ... }@args: + + let + nodeDependencies = buildNodeDependencies args; + in + stdenv.mkDerivation { + name = "node-shell-${name}-${version}"; + + buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs; + buildCommand = '' + mkdir -p $out/bin + cat > $out/bin/shell < $out + ''; + + jqChangelogJson = prev.writeShellScriptBin "jq-changelog" '' + exec -- ${final.jq}/bin/jq $@ < ${changelogJson} + ''; +} diff --git a/nix/uniworx/backend.nix b/nix/uniworx/backend.nix new file mode 100644 index 000000000..fc1ceb525 --- /dev/null +++ b/nix/uniworx/backend.nix @@ -0,0 +1,89 @@ +{ inputs, backendSource, ... }: final: prev: + +with prev.lib; + +let + haskellInputs = ["encoding" "memcached-binary" "conduit-resumablesink" "HaskellNet-SSL" "ldap-client" "serversession" "xss-sanitize" "colonnade" "minio-hs" "cryptoids" "zip-stream" "yesod" "cryptonite" "esqueleto"]; +in { + uniworx = final.haskell-nix.stackProject { + src = prev.stdenv.mkDerivation { + name = "uniworx-src"; + src = backendSource; + + phases = ["unpackPhase" "patchPhase" "installPhase"]; + + patchPhase = '' + substitute stack-flake.yaml stack.yaml \ + ${concatMapStringsSep " \\\n" (pkgName: "--replace @${pkgName}@ ${inputs."${pkgName}"}") haskellInputs} + ''; + + installPhase = '' + mkdir -p $out + cp -pr --reflink=auto ./. 
$out + ''; + }; + compiler-nix-name = "ghc8104"; + # stack-sha256 = "1n7z294ldv2rjkfj1vs3kqmnbp34m2scrmyrp5kwmga9vp86fd9z"; # produces errors gregor does not understand :( + modules = [ + { + packages = { + encoding.src = inputs.encoding; + memcached-binary.src = inputs.memcached-binary; + conduit-resumablesink.src = inputs.conduit-resumablesink; + HaskellNet-SSL.src = inputs.HaskellNet-SSL; + ldap-client.src = inputs.ldap-client; + serversession.src = "${inputs.serversession}/serversession"; + serversession-backend-acid-state.src = "${inputs.serversession}/serversession-backend-acid-state"; + xss-sanitize.src = inputs.xss-sanitize; + colonnade.src = "${inputs.colonnade}/colonnade"; + minio-hs.src = inputs.minio-hs; + cryptoids-class.src = "${inputs.cryptoids}/cryptoids-class"; + cryptoids-types.src = "${inputs.cryptoids}/cryptoids-types"; + cryptoids.src = "${inputs.cryptoids}/cryptoids"; + filepath-crypto.src = "${inputs.cryptoids}/filepath-crypto"; + uuid-crypto.src = "${inputs.cryptoids}/uuid-crypto"; + zip-stream.src = inputs.zip-stream; + yesod.src = "${inputs.yesod}/yesod"; + yesod-core.src = "${inputs.yesod}/yesod-core"; + yesod-static.src = "${inputs.yesod}/yesod-static"; + yesod-persistent.src = "${inputs.yesod}/yesod-persistent"; + yesod-form.src = "${inputs.yesod}/yesod-form"; + yesod-auth.src = "${inputs.yesod}/yesod-auth"; + yesod-test.src = "${inputs.yesod}/yesod-test"; + cryptonite.src = inputs.cryptonite; + esqueleto.src = inputs.esqueleto; + }; + } + { + packages.uniworx = { + postUnpack = '' + ${final.xorg.lndir}/bin/lndir -silent ${prev.uniworxFrontend} $sourceRoot + chmod a+w -R $sourceRoot + ''; + preBuild = '' + export TZDIR=${final.tzdata}/share/zoneinfo + ''; + components.library.build-tools = with final.pkgs; [ llvm_9 ]; + components.exes.uniworx.build-tools = with final.pkgs; [ llvm_9 ]; + components.exes.uniworxdb.build-tools = with final.pkgs; [ llvm_9 ]; + components.exes.uniworxload.build-tools = with final.pkgs; [ llvm_9 ]; + components.exes.uniworx-wflint.build-tools = with final.pkgs; [ llvm_9 ]; + components.tests.yesod = { + build-tools = with final.pkgs; [ llvm_9 final.uniworx.hsPkgs.hspec-discover ]; + testWrapper = + let + testWrapper = prev.writeScript "test-wrapper" (import ../develop.nix { inherit prev; pkgs = final; doDevelopEnv = false; } "$@"); + testWrapperWrapped = prev.runCommand "test-wrapper" { buildInputs = [final.makeWrapper]; } '' + makeWrapper ${testWrapper} $out \ + --prefix PATH : ${final.postgresql_12}/bin \ + --prefix PATH : ${final.minio}/bin \ + --prefix PATH : ${final.memcached}/bin + ''; + in singleton (toString testWrapperWrapped); + }; + components.tests.hlint.build-tools = with final.pkgs; [ llvm_9 final.uniworx.hsPkgs.hlint-test ]; + }; + } + ]; + }; +} diff --git a/nix/uniworx/default.nix b/nix/uniworx/default.nix new file mode 100644 index 000000000..c353be222 --- /dev/null +++ b/nix/uniworx/default.nix @@ -0,0 +1,6 @@ +{ inputs, frontendSource, backendSource }: final: prev: prev.lib.composeManyExtensions [ + (import ./node-dependencies.nix { inherit inputs; }) + (import ./well-known.nix { inherit frontendSource; }) + (import ./frontend.nix { inherit frontendSource; }) + (import ./backend.nix { inherit backendSource inputs; }) +] final prev diff --git a/nix/uniworx/frontend.nix b/nix/uniworx/frontend.nix new file mode 100644 index 000000000..dff9a92dc --- /dev/null +++ b/nix/uniworx/frontend.nix @@ -0,0 +1,58 @@ +{ frontendSource, ... 
}: final: prev: +let + setupNodeDeps = '' + ln -s ${final.uniworxNodeDependencies}/lib/node_modules ./node_modules + export PATH="${final.uniworxNodeDependencies}/bin:$PATH" + ''; +in { + uniworxFrontend = prev.stdenv.mkDerivation { + name = "uniworx-frontend"; + srcs = [frontendSource prev.uniworxWellKnown]; + sourceRoot = "source"; + + phases = ["unpackPhase" "checkPhase" "buildPhase" "installPhase"]; + + postUnpack = '' + ${final.xorg.lndir}/bin/lndir -silent ../uniworx-well-known $sourceRoot + ''; + + preBuild = setupNodeDeps; + buildPhase = '' + runHook preBuild + + webpack --progress + + runHook postBuild + ''; + + preCheck = '' + ${setupNodeDeps} + export FONTCONFIG_FILE="${final.fontconfig.out}/etc/fonts/fonts.conf" + export FONTCONFIG_PATH="${final.fontconfig.out}/etc/fonts/" + export CHROME_BIN="${final.chromium}/bin/chromium-browser" + ''; + checkPhase = '' + runHook preCheck + + eslint frontend/src + karma start --conf karma.conf.js + + runHook postCheck + ''; + + installPhase = '' + mkdir -p $out $out/config + cp -r --reflink=auto well-known static $out + cp -r --reflink=auto config/webpack.yml $out/config + ''; + + passthru.check = final.uniworxFrontend.overrideAttrs (oldAttrs: { + name = "${oldAttrs.name}-check"; + phases = ["unpackPhase" "buildPhase"]; + buildPhase = '' + mkdir $out + ( ${oldAttrs.checkPhase} ) | tee $out/test-stdout + ''; + }); + }; +} diff --git a/nix/uniworx/node-dependencies.nix b/nix/uniworx/node-dependencies.nix new file mode 100644 index 000000000..fe7f45d65 --- /dev/null +++ b/nix/uniworx/node-dependencies.nix @@ -0,0 +1,15 @@ +{ inputs, ... }: final: prev: { + uniworxNodeDependencies = (prev.callPackage ../frontend {}).nodeDependencies.override (oldArgs: { + dependencies = + let + srcOverrides = { + "tail.datetime" = inputs.tail-DateTime; + "@fortawesome/fontawesome-pro" = prev.fetchurl { + url = "https://npm.fontawesome.com/@fortawesome/fontawesome-pro/-/5.14.0/fontawesome-pro-5.14.0.tgz"; + curlOpts = "-H @${prev.writeText "headers.txt" "Authorization: Bearer ${builtins.readFile inputs.fontawesome-token}"}"; + hash = "sha256-jGvPrTKKL0rCWRZUEnJEmrOdHyQYs3M5709B1hjmFw4="; + }; + }; + in map (dep: dep // { src = srcOverrides."${dep.packageName}" or dep.src; }) oldArgs.dependencies; + }); +} diff --git a/nix/uniworx/well-known.nix b/nix/uniworx/well-known.nix new file mode 100644 index 000000000..9e0ab278f --- /dev/null +++ b/nix/uniworx/well-known.nix @@ -0,0 +1,23 @@ +{ frontendSource, ... 
}: final: prev: { + uniworxWellKnown = prev.stdenv.mkDerivation { + name = "uniworx-well-known"; + src = frontendSource; + + phases = ["unpackPhase" "buildPhase" "installPhase" "fixupPhase"]; + + buildPhase = '' + ln -s ${final.uniworxNodeDependencies}/lib/node_modules ./node_modules + export PATH="${final.uniworxNodeDependencies}/bin:${prev.exiftool}/bin:$PATH" + webpack --progress + ''; + + installPhase = '' + mkdir -p $out + cp -r --reflink=auto well-known $out/.nix-well-known + ''; + + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + outputHash = "tDaffdAT5EGPKdDJ2ovo9XSGdV48W3Efqe+iBmakh6g="; + }; +} diff --git a/package.json b/package.json index 727983d52..725d04e74 100644 --- a/package.json +++ b/package.json @@ -122,7 +122,7 @@ "moment": "^2.27.0", "npm": "^6.14.8", "sodium-javascript": "^0.5.6", - "tail.datetime": "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/tail.DateTime.git#master", + "tail.datetime": "git+ssh://git@gitlab2.rz.ifi.lmu.de/uni2work/tail.DateTime.git#uni2work", "toposort": "^2.0.2", "whatwg-fetch": "^3.4.0" } diff --git a/package.yaml b/package.yaml index 2793c89b4..710b808e1 100644 --- a/package.yaml +++ b/package.yaml @@ -253,6 +253,10 @@ when: else: ghc-options: - -O -fllvm + +data-files: + - testdata/** + library: source-dirs: src executables: @@ -276,6 +280,7 @@ executables: - uniworx other-modules: - Database.Fill + - Paths_uniworx when: - condition: flag(library-only) buildable: false diff --git a/shell.nix b/shell.nix index 3fa3cbdc4..d420eec75 100644 --- a/shell.nix +++ b/shell.nix @@ -5,201 +5,7 @@ let haskellPackages = pkgs.haskellPackages; - postgresSchema = pkgs.writeText "schema.sql" '' - CREATE USER uniworx WITH SUPERUSER; - CREATE DATABASE uniworx_test; - GRANT ALL ON DATABASE uniworx_test TO uniworx; - CREATE DATABASE uniworx; - GRANT ALL ON DATABASE uniworx TO uniworx; - ''; - - postgresHba = pkgs.writeText "hba_file" '' - local all all trust - ''; - - develop = pkgs.writeScriptBin "develop" '' - #!${pkgs.zsh}/bin/zsh -e - - basePath=$(pwd) - exec 4<>''${basePath}/.develop.env - - flockRes= - set +e - ${pkgs.util-linux}/bin/flock -en 4; flockRes=$? - set -e - if [[ ''${flockRes} -ne 0 ]]; then - echo "Could not take exclusive lock; is another develop running?" 
     when:
       - condition: flag(library-only)
         buildable: false
diff --git a/shell.nix b/shell.nix
index 3fa3cbdc4..d420eec75 100644
--- a/shell.nix
+++ b/shell.nix
@@ -5,201 +5,7 @@ let
   haskellPackages = pkgs.haskellPackages;
 
-  postgresSchema = pkgs.writeText "schema.sql" ''
-    CREATE USER uniworx WITH SUPERUSER;
-    CREATE DATABASE uniworx_test;
-    GRANT ALL ON DATABASE uniworx_test TO uniworx;
-    CREATE DATABASE uniworx;
-    GRANT ALL ON DATABASE uniworx TO uniworx;
-  '';
-
-  postgresHba = pkgs.writeText "hba_file" ''
-    local all all trust
-  '';
-
-  develop = pkgs.writeScriptBin "develop" ''
-    #!${pkgs.zsh}/bin/zsh -e
-
-    basePath=$(pwd)
-    exec 4<>''${basePath}/.develop.env
-
-    flockRes=
-    set +e
-    ${pkgs.util-linux}/bin/flock -en 4; flockRes=$?
-    set -e
-    if [[ ''${flockRes} -ne 0 ]]; then
-      echo "Could not take exclusive lock; is another develop running?" >&2
-      exit ''${flockRes}
-    fi
-
-    cleanup() {
-      set +e -x
-      type cleanup_postgres &>/dev/null && cleanup_postgres
-      type cleanup_widget_memcached &>/dev/null && cleanup_widget_memcached
-      type cleanup_session_memcached &>/dev/null && cleanup_session_memcached
-      type cleanup_cache_memcached &>/dev/null && cleanup_cache_memcached
-      type cleanup_minio &>/dev/null && cleanup_minio
-      type cleanup_maildev &>/dev/null && cleanup_maildev
-
-      [ -f "''${basePath}/.develop.env" ] && rm -vf "''${basePath}/.develop.env"
-      set +x
-    }
-
-    trap cleanup EXIT
-
-    export PORT_OFFSET=$(((16#$(echo "fradrive $(whoami)" | sha256sum | head -c 16)) % 1000))
-
-    if [[ -z "$PGHOST" ]]; then
-      set -xe
-
-      pgDir=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} postgresql.XXXXXX)
-      pgSockDir=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} postgresql.sock.XXXXXX)
-      pgLogFile=$(mktemp --tmpdir=''${XDG_RUNTIME_DIR} postgresql.XXXXXX.log)
-      initdb --no-locale -D ''${pgDir}
-      pg_ctl start -D ''${pgDir} -l ''${pgLogFile} -w -o "-k ''${pgSockDir} -c listen_addresses=''' -c hba_file='${postgresHba}' -c unix_socket_permissions=0700 -c max_connections=9990 -c shared_preload_libraries=pg_stat_statements -c auto_explain.log_min_duration=100ms"
-      psql -h ''${pgSockDir} -f ${postgresSchema} postgres
-      printf "Postgres logfile is %s\nPostgres socket directory is %s\n" ''${pgLogFile} ''${pgSockDir}
-
-      export PGHOST=''${pgSockDir}
-      export PGLOG=''${pgLogFile}
-
-      cleanup_postgres() {
-        set +e -x
-        pg_ctl stop -D ''${pgDir}
-        rm -rvf ''${pgDir} ''${pgSockDir} ''${pgLogFile}
-        set +x
-      }
-
-      set +xe
-    fi
-
-    if [[ -z "$WIDGET_MEMCACHED_HOST" ]]; then
-      set -xe
-
-      memcached -l localhost -p $(($PORT_OFFSET + 11211)) &>/dev/null &
-      widget_memcached_pid=$!
-
-      export WIDGET_MEMCACHED_HOST=localhost
-      export WIDGET_MEMCACHED_PORT=$(($PORT_OFFSET + 11211))
-
-      cleanup_widget_memcached() {
-        [[ -n "$widget_memcached_pid" ]] && kill $widget_memcached_pid
-      }
-
-      set +xe
-    fi
-
-    if [[ -z "$SESSION_MEMCACHED_HOST" ]]; then
-      set -xe
-
-      memcached -l localhost -p $(($PORT_OFFSET + 11212)) &>/dev/null &
-      session_memcached_pid=$!
-
-      export SESSION_MEMCACHED_HOST=localhost
-      export SESSION_MEMCACHED_PORT=$(($PORT_OFFSET + 11212))
-
-      cleanup_session_memcached() {
-        [[ -n "$session_memcached_pid" ]] && kill $session_memcached_pid
-      }
-
-      set +xe
-    fi
-
-    if [[ -z "$MEMCACHED_HOST" ]]; then
-      set -xe
-
-      memcached -l localhost -p $(($PORT_OFFSET + 11213)) &>/dev/null &
-      memcached_pid=$!
-
-      export MEMCACHED_HOST=localhost
-      export MEMCACHED_PORT=$(($PORT_OFFSET + 11212))
-
-      cleanup_session_memcached() {
-        [[ -n "$memcached_pid" ]] && kill $memcached_pid
-      }
-
-      set +xe
-    fi
-
-    if [[ -z "$UPLOAD_S3_HOST" ]]; then
-      set -xe
-
-      cleanup_minio() {
-        [[ -n "$minio_pid" ]] && kill $minio_pid
-        [[ -n "''${MINIO_DIR}" ]] && rm -rvf ''${MINIO_DIR}
-        [[ -n "''${MINIO_LOGFILE}" ]] && rm -rvf ''${MINIO_LOGFILE}
-      }
-
-      export MINIO_DIR=$(mktemp -d --tmpdir=''${XDG_RUNTIME_DIR} minio.XXXXXX)
-      export MINIO_LOGFILE=$(mktemp --tmpdir=''${XDG_RUNTIME_DIR} minio.XXXXXX.log)
-      export MINIO_ACCESS_KEY=$(${pkgs.pwgen}/bin/pwgen -s 16 1)
-      export MINIO_SECRET_KEY=$(${pkgs.pwgen}/bin/pwgen -s 32 1)
-
-      minio server --address localhost:$(($PORT_OFFSET + 9000)) ''${MINIO_DIR} &>''${MINIO_LOGFILE} &
-      minio_pid=$!
-
-      export UPLOAD_S3_HOST=localhost
-      export UPLOAD_S3_PORT=$(($PORT_OFFSET + 9000))
-      export UPLOAD_S3_SSL=false
-      export UPLOAD_S3_KEY_ID=''${MINIO_ACCESS_KEY}
-      export UPLOAD_S3_KEY=''${MINIO_SECRET_KEY}
-
-      sleep 1
-
-      set +xe
-    fi
-
-    ${optionalString (pkgs.nodePackages ? "maildev") ''
-      if [[ -z "$SMTPHOST" ]]; then
-        set -xe
-
-        cleanup_maildev() {
-          [[ -n "$maildev_pid" ]] && kill $maildev_pid
-        }
-
-        TMPDIR=''${XDG_RUNTIME_DIR} ${pkgs.nodePackages.maildev}/bin/maildev --smtp $(($PORT_OFFSET + 1025)) --web $(($PORT_OFFSET + 8080)) --ip localhost --web-ip localhost &>/dev/null &
-        maildev_pid=$!
-
-        export SMTPHOST=localhost
-        export SMTPPORT=$(($PORT_OFFSET + 1025))
-        export SMTPSSL=none
-
-        set +xe
-      fi
-    ''}
-
-    set -xe
-
-    cat >&4 <<EOF
[…]
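+-- | Look up a file below the @testdata@ directory via Cabal's
+-- 'getDataFileName', so fixtures are found both in a source checkout and
+-- in an installed package.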
+testdataFile :: MonadIO m => FilePath -> m FilePath
+testdataFile = liftIO . getDataFileName . ("testdata" </>)
 
 insertFile :: ( HasFileReference fRef, PersistRecordBackend fRef SqlBackend )
            => FileReferenceResidual fRef -> FilePath -> DB (Key fRef)
 insertFile residual fileTitle = do
-  let fileContent = Just . C.sourceFile $ testdataDir </> fileTitle
+  filepath <- testdataFile fileTitle
+  let fileContent = Just $ C.sourceFile filepath
   fileModified <- liftIO getCurrentTime
   sinkFile' File{..} residual >>= insert
@@ -1399,8 +1401,6 @@ fillDb = do
           return . pure . AllocationPriorityNumericRecord matr . fromList $ sortOn Down prios
         _other -> return mempty
 
-  liftIO . LBS.writeFile (testdataDir </> "bigAlloc_numeric.csv") $ Csv.encode numericPriorities
-
   ordinalPriorities <- do
     manyUsers' <- shuffleM manyUsers
     flip foldMapM manyUsers' $ \uid -> do
@@ -1410,16 +1410,19 @@ fillDb = do
           return . pure $ Csv.Only matr
         _other -> return mempty
 
-  liftIO . LBS.writeFile (testdataDir </> "bigAlloc_ordinal.csv") $ Csv.encode ordinalPriorities
+  liftIO . handle (\(_ :: IOException) -> return ()) $ do
+    haveTestdata <- doesDirectoryExist "testdata"
+    LBS.writeFile (bool id ("testdata" </>) haveTestdata "bigAlloc_numeric.csv") $ Csv.encode numericPriorities
+    LBS.writeFile (bool id ("testdata" </>) haveTestdata "bigAlloc_ordinal.csv") $ Csv.encode ordinalPriorities
 
-  whenM (liftIO . doesFileExist $ testdataDir </> "workflows" </> "_index.yaml") $ do
+  whenM (liftIO . doesFileExist <=< testdataFile $ "workflows" </> "_index.yaml") $ do
     let displayLinterIssue :: MonadIO m => WorkflowGraphLinterIssue -> m ()
         displayLinterIssue = liftIO . hPutStrLn stderr . displayException
-    wfIndex <- Yaml.decodeFileThrow @_ @(Map WorkflowDefinitionName WorkflowIndexItem) $ testdataDir </> "workflows" </> "_index.yaml"
+    wfIndex <- Yaml.decodeFileThrow @_ @(Map WorkflowDefinitionName WorkflowIndexItem) =<< testdataFile ("workflows" </> "_index.yaml")
     iforM_ wfIndex $ \wiName WorkflowIndexItem{..} -> handleSql displayLinterIssue $ do
-      graph <- Yaml.decodeFileThrow $ testdataDir </> "workflows" </> wiiGraphFile
+      graph <- Yaml.decodeFileThrow =<< testdataFile ("workflows" </> wiiGraphFile)
       for_ (lintWorkflowGraph graph) $ mapM_ throwM
       workflowDefinitionGraph <- insertSharedWorkflowGraph graph
       let workflowDef = WorkflowDefinition{..}
diff --git a/webpack.config.js b/webpack.config.js
index 6e71b96dc..19a9d1d04 100644
--- a/webpack.config.js
+++ b/webpack.config.js
@@ -24,20 +24,23 @@ const webpackVersion = require('webpack/package.json').version.split('.').slice(
 const packageVersion = require('./package.json').version;
 
 async function webpackConfig() {
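+  // .nix-well-known is produced by the uniworxWellKnown derivation
+  // (nix/uniworx/well-known.nix); when it exists, skip the favicon API.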
+  const faviconNixDirectory = path.resolve(__dirname, '.nix-well-known');
   let faviconApiVersion = undefined;
-  try {
-    const faviconApiChangelog = await request({
-      method: 'GET',
-      uri: 'https://realfavicongenerator.net/api/versions',
-      headers: {
-        'Accept': '*/*'
-      },
-      json: true
-    });
-    faviconApiVersion = faviconApiChangelog.filter(vObj => vObj.relevance.automated_update).slice(-1)[0].version;
-  } catch(e) {
-    console.error(e);
+  if (!fs.existsSync(faviconNixDirectory)) {
+    try {
+      const faviconApiChangelog = await request({
+        method: 'GET',
+        uri: 'https://realfavicongenerator.net/api/versions',
+        headers: {
+          'Accept': '*/*'
+        },
+        json: true
+      });
+      faviconApiVersion = faviconApiChangelog.filter(vObj => vObj.relevance.automated_update).slice(-1)[0].version;
+    } catch(e) {
+      console.error(e);
+    }
   }
 
   return {
@@ -219,7 +222,16 @@ async function webpackConfig() {
       const cacheDirectory = path.resolve(__dirname, '.well-known-cache', `${cacheDigest}-${lang}`);
 
-      if (fs.existsSync(cacheDirectory) && (!faviconApiVersion || faviconApiVersion === cachedVersion)) {
+      if (fs.existsSync(faviconNixDirectory)) {
+        console.log("Using favicons generated by nix");
+        return [
+          new CopyPlugin({
+            patterns: [
+              { from: path.resolve(faviconNixDirectory, lang), to: path.resolve(__dirname, 'well-known', lang) }
+            ]
+          })
+        ];
+      } else if (fs.existsSync(cacheDirectory) && (!faviconApiVersion || faviconApiVersion === cachedVersion)) {
         console.log(`Using cached well-known from ${cacheDirectory} for ${lang}`);
         return [
           new CopyPlugin({
            patterns: [