diff --git a/.azure-pipelines/templates/jobs/release.yaml b/.azure-pipelines/templates/jobs/release.yaml new file mode 100644 index 000000000..36bc6acae --- /dev/null +++ b/.azure-pipelines/templates/jobs/release.yaml @@ -0,0 +1,61 @@ +# SPDX-FileCopyrightText: 2025 Sarah Vaupel +# +# SPDX-License-Identifier: AGPL-3.0-or-later + +parameters: + - name: releaseTag + type: string + - name: releaseEndpoint + type: string + default: 'devfra' + values: + - 'devfra' + - 'prodfra' + +jobs: + - job: Release + displayName: Release ${{ parameters.releaseTag }} + container: + image: devfra.azurecr.io/de.fraport.build/tools:1.1.0 + endpoint: devfra + steps: + + # Download required artifacts from pipeline + - task: DownloadPipelineArtifact@2 + displayName: Download FraDrive binaries + inputs: + artifactName: Build_backend + patterns: 'Build_backend/bin/*' + targetPath: '$(Build.Repository.LocalPath)' + + - task: Docker@2 + displayName: Login to container registry + inputs: + command: login + containerRegistry: '${{ parameters.releaseEndpoint }}' + - task: Bash@3 + displayName: Build FraDrive container + inputs: + targetType: inline + script: | + cp docker/fradrive/Dockerfile . + docker build \ + --tag $(buildImageUpstream)/fradrive:$(Build.BuildNumber) \ + --tag $(buildImageUpstream)/fradrive:${{parameters.releaseTag}} \ + --build-arg FROM_IMG=devfra.azurecr.io/de.fraport.trusted/ubuntu \ + --build-arg FROM_TAG=20.04 \ + --build-arg PROJECT_DIR=$(Build.Repository.LocalPath) \ + --build-arg IN_CI=true \ + --build-arg IN_CONTAINER=true \ + . 
+ - task: Docker@2 + displayName: Push container to registry + inputs: + command: push + repository: 'de.fraport.fradrive.build/fradrive' + tags: '$(Build.BuildNumber),${{parameters.releaseTag}}' + - task: Docker@2 + displayName: Logout from container registry + inputs: + command: logout + containerRegistry: '${{ parameters.releaseEndpoint }}' \ No newline at end of file diff --git a/.azure-pipelines/templates/jobs/setup_dependencies.yaml b/.azure-pipelines/templates/jobs/setup_dependencies.yaml new file mode 100644 index 000000000..415f1d6bd --- /dev/null +++ b/.azure-pipelines/templates/jobs/setup_dependencies.yaml @@ -0,0 +1,61 @@ +# SPDX-FileCopyrightText: 2024-2025 Sarah Vaupel +# +# SPDX-License-Identifier: AGPL-3.0-or-later + +parameters: + - name: serviceName + type: string + - name: dependenciesCaches + type: object + default: [] + - name: dependenciesBuildPool + type: string + default: 'Prod Private Agent Pool' + values: + - 'Prod Private Agent Pool' + - 'Prod Private Agent Pool DS2' + - 'Prod Private Agent Pool DS3' + - name: dependenciesBuildCores + type: number + default: 1 + - name: dependenciesBuildTimeout + type: number + default: 60 + +jobs: +- job: SetupDependencies_${{parameters.serviceName}} + displayName: Install ${{parameters.serviceName}} dependencies + dependsOn: SetupImage_${{parameters.serviceName}} + ${{ if eq(variables.setupImages, true) }}: + condition: succeeded() + ${{ else }}: + condition: always() + pool: '${{parameters.dependenciesBuildPool}}' + timeoutInMinutes: ${{parameters.dependenciesBuildTimeout}} + container: + ${{ if variables.setupImages }}: + image: $(buildImageUpstream)/${{parameters.serviceName}}:$(Build.BuildNumber) + ${{ else }}: + image: $(buildImageUpstream)/${{parameters.serviceName}}:latest + endpoint: devfra + env: + PROJECT_DIR: $(Build.Repository.LocalPath) + IN_CONTAINER: true + IN_CI: true + steps: + # Restore previously-built dependencies from caches + - ${{ each cache in parameters.dependenciesCaches }}: + 
- template: ./../../steps/cache.yaml + parameters: + cacheIdent: '${{parameters.serviceName}}-dependencies' + cacheKeys: '${{cache.key}}' + cachePath: '${{cache.path}}' + + # Compile dependencies + - template: ./../../steps/make.yaml + parameters: + makeJob: dependencies + makeService: ${{parameters.serviceName}} + makeVars: 'CPU_CORES=${{parameters.dependenciesBuildCores}} STACK_CORES=-j${{parameters.dependenciesBuildCores}}' + + # (Note: a post-job for updating the dependency cache is automatically created, so no further step is due here.) \ No newline at end of file diff --git a/.azure-pipelines/templates/jobs/setup_image.yaml b/.azure-pipelines/templates/jobs/setup_image.yaml new file mode 100644 index 000000000..0e751cec4 --- /dev/null +++ b/.azure-pipelines/templates/jobs/setup_image.yaml @@ -0,0 +1,72 @@ +# SPDX-FileCopyrightText: 2024-2025 Sarah Vaupel +# +# SPDX-License-Identifier: AGPL-3.0-or-later + +parameters: + - name: imageName + type: string + - name: imageBase + type: object + +jobs: +- job: SetupImage_${{parameters.imageName}} + displayName: Build ${{parameters.imageName}} image + condition: eq(variables.setupImages, true) + container: + image: devfra.azurecr.io/de.fraport.build/tools:1.1.0 + endpoint: devfra + steps: + - task: Docker@2 + displayName: Login to container registry + inputs: + command: login + containerRegistry: devfra + - task: Bash@3 + displayName: Build ${{parameters.imageName}} image + inputs: + targetType: inline + script: | + cp docker/${{parameters.imageName}}/Dockerfile . 
+ docker build \ + --tag $(buildImageUpstream)/${{parameters.imageName}}:$(Build.BuildNumber) \ + --build-arg FROM_IMG=${{parameters.imageBase.image}} \ + --build-arg FROM_TAG=${{parameters.imageBase.tag}} \ + --build-arg HTTPS_PROXY=http://proxy.frankfurt-airport.de:8080 \ + --build-arg HTTP_PROXY=http://proxy.frankfurt-airport.de:8080 \ + --build-arg NO_PROXY='localhost,127.0.0.1,*.docker.internal,*.azmk8s.io,devfra.azurecr.io,devfra.westeurope.data.azurecr.io' \ + --build-arg FRAPORT_NOPROXY=dev.azure.com,*.dev.azure.com,*.fraport.de,*.frankfurt-airport.de \ + --build-arg PROJECT_DIR=$(Build.Repository.LocalPath) \ + --build-arg IN_CI=true \ + --build-arg IN_CONTAINER=true \ + . + - task: Bash@3 + displayName: Push ${{parameters.imageName}} image + inputs: + targetType: inline + script: | + docker push $(buildImageUpstream)/${{parameters.imageName}}:$(Build.BuildNumber) + - task: Bash@3 + displayName: Update latest ${{parameters.imageName}} image + condition: or(eq(variables.forcePushLatest, true), eq(variables['Build.SourceBranch'], 'refs/heads/master')) + inputs: + targetType: inline + script: | + docker tag $(buildImageUpstream)/${{parameters.imageName}}:$(Build.BuildNumber) $(buildImageUpstream)/${{parameters.imageName}}:latest + docker push $(buildImageUpstream)/${{parameters.imageName}}:latest + - task: Bash@3 + displayName: Save image for publication + inputs: + targetType: inline + script: | + docker image save --output=$(Build.ArtifactStagingDirectory)/${{parameters.imageName}}.tar $(buildImageUpstream)/${{parameters.imageName}}:$(Build.BuildNumber) + - task: PublishBuildArtifacts@1 + displayName: Publish image as artifact + inputs: + PathtoPublish: '$(Build.ArtifactStagingDirectory)' + ArtifactName: Image_${{parameters.imageName}} + publishLocation: 'Container' + - task: Docker@2 + displayName: Logout from container registry + inputs: + command: logout + containerRegistry: devfra \ No newline at end of file diff --git 
a/.azure-pipelines/templates/service.yaml b/.azure-pipelines/templates/service.yaml new file mode 100755 index 000000000..152b1f6cb --- /dev/null +++ b/.azure-pipelines/templates/service.yaml @@ -0,0 +1,141 @@ +# SPDX-FileCopyrightText: 2024-2025 Sarah Vaupel +# +# SPDX-License-Identifier: AGPL-3.0-or-later + +parameters: + - name: serviceName + type: string + default: serviceName + - name: serviceBase + type: object + default: + image: baseImage + tag: baseImageTag + - name: servicePool + type: string + default: 'Prod Private Agent Pool' + - name: serviceTimeout + type: number + default: 60 + # extraBuildOptions: '' + - name: serviceDependsOn + type: object + default: [] + - name: serviceRequiredArtifacts + type: object + default: [] + - name: serviceArtifacts + type: string + default: '' + - name: buildSteps + type: object + +stages: + - stage: ${{ parameters.serviceName }} + dependsOn: ${{ parameters.serviceDependsOn }} + pool: '${{ parameters.servicePool }}' + jobs: + - job: ImageBuild_${{parameters.serviceName}} + displayName: Build ${{parameters.serviceName}} image + condition: or(eq(variables.forcePushLatest, true), eq(variables.onMasterBranch, true), eq(variables.onUpdateBranch, true)) + container: + image: devfra.azurecr.io/de.fraport.build/tools:1.1.0 + endpoint: devfra + steps: + - checkout: self + - task: Docker@2 + displayName: Login to container registry + inputs: + command: login + containerRegistry: devFra + - script: | + ls -a . + pwd + find . + - task: Bash@3 + displayName: Build ${{parameters.serviceName}} image + inputs: + targetType: inline + script: | + cp docker/${{parameters.serviceName}}/Dockerfile . 
+ docker build \ + --tag $(buildImageUpstream)/${{parameters.serviceName}}:$(Build.BuildNumber) \ + --build-arg FROM_IMG=${{parameters.serviceBase.image}} \ + --build-arg FROM_TAG=${{parameters.serviceBase.tag}} \ + --build-arg HTTPS_PROXY=http://proxy.frankfurt-airport.de:8080 \ + --build-arg HTTP_PROXY=http://proxy.frankfurt-airport.de:8080 \ + --build-arg NO_PROXY='localhost,127.0.0.1,*.docker.internal,*.azmk8s.io,devfra.azurecr.io,devfra.westeurope.data.azurecr.io' \ + --build-arg FRAPORT_NOPROXY=dev.azure.com,*.dev.azure.com,*.fraport.de,*.frankfurt-airport.de \ + --build-arg PROJECT_DIR=$(Build.Repository.LocalPath) \ + --build-arg IN_CI=true \ + --build-arg IN_CONTAINER=true \ + . + - task: Bash@3 + displayName: Push ${{ parameters.serviceName }} image + inputs: + targetType: inline + script: | + docker push $(buildImageUpstream)/${{parameters.serviceName}}:$(Build.BuildNumber) + - task: Bash@3 + displayName: Update latest ${{parameters.serviceName}} image + condition: or(eq(variables.forcePushLatest, true), eq(variables.onMasterBranch, true)) + inputs: + targetType: inline + script: | + docker tag $(buildImageUpstream)/${{parameters.serviceName}}:$(Build.BuildNumber) $(buildImageUpstream)/${{parameters.serviceName}}:latest + docker push $(buildImageUpstream)/${{parameters.serviceName}}:latest + - task: Docker@2 + displayName: Logout from container registry + inputs: + command: logout + containerRegistry: devFra + + - job: Build_${{parameters.serviceName}} + displayName: Build ${{parameters.serviceName}} + dependsOn: + - ImageBuild_${{parameters.serviceName}} + condition: in(dependencies.ImageBuild_${{parameters.serviceName}}.result, 'Succeeded', 'Skipped') + timeoutInMinutes: ${{ parameters.serviceTimeout }} + container: + # TODO: use BuildNumber instead of latest in update branches + # image: devfra.azurecr.io/de.fraport.fradrive.build/frontend:$(Build.BuildNumber) + image: $(buildImageUpstream)/${{parameters.serviceName}}:latest + endpoint: devfra + env: 
+ PROJECT_DIR: $(Build.Repository.LocalPath) + IN_CONTAINER: true + IN_CI: true + steps: + - checkout: self + - ${{ each dependency in parameters.serviceRequiredArtifacts }}: + - task: DownloadPipelineArtifact@2 + displayName: Download artifacts from ${{ dependency.name }} dependency + continueOnError: ${{ dependency.continueOnError }} + condition: ${{ dependency.condition }} + inputs: + artifactName: ${{ dependency.artifact }} + source: ${{ dependency.source }} + project: 'Fahrerausbildung' + pipeline: $(System.DefinitionId) + buildVersionToDownload: '${{ dependency.version }}' + tags: '${{ dependency.artifact }}' + allowPartiallySucceededBuilds: true + allowFailedBuilds: true + patterns: '${{ dependency.patterns }}' + targetPath: '$(Build.Repository.LocalPath)' + - ${{ each buildStep in parameters.buildSteps }}: + - template: ./service/build-step.yaml + parameters: + service: ${{ parameters.serviceName }} + buildStep: ${{ buildStep }} + - task: CopyFiles@2 + displayName: Copy ${{parameters.serviceName}} artifacts + inputs: + Contents: ${{ parameters.serviceArtifacts }} + TargetFolder: '$(Build.ArtifactStagingDirectory)' + - task: PublishBuildArtifacts@1 + displayName: Publish ${{parameters.serviceName}} artifacts + inputs: + PathtoPublish: '$(Build.ArtifactStagingDirectory)' + ArtifactName: '${{parameters.serviceName}}' + publishLocation: 'Container' \ No newline at end of file diff --git a/.azure-pipelines/templates/steps/artifact-download.yaml b/.azure-pipelines/templates/steps/artifact-download.yaml new file mode 100644 index 000000000..23eb32c74 --- /dev/null +++ b/.azure-pipelines/templates/steps/artifact-download.yaml @@ -0,0 +1,15 @@ +# SPDX-FileCopyrightText: 2025 Sarah Vaupel +# +# SPDX-License-Identifier: AGPL-3.0-or-later + +parameters: + - name: artifactName + type: string + +steps: + - task: DownloadPipelineArtifact@2 + displayName: Download artifacts from ${{parameters.artifactName}} + inputs: + source: 'current' + artifactName: 
'${{parameters.artifactName}}' + targetPath: '$(Build.Repository.LocalPath)' \ No newline at end of file diff --git a/.azure-pipelines/templates/steps/cache.yaml b/.azure-pipelines/templates/steps/cache.yaml new file mode 100644 index 000000000..56758e9a7 --- /dev/null +++ b/.azure-pipelines/templates/steps/cache.yaml @@ -0,0 +1,18 @@ +# SPDX-FileCopyrightText: 2025 Sarah Vaupel +# +# SPDX-License-Identifier: AGPL-3.0-or-later + +parameters: + - name: cacheIdent + type: string + - name: cacheKeys + type: string + - name: cachePath + type: string + +steps: + - task: Cache@2 + displayName: Restore ${{parameters.cacheIdent}} cache + inputs: + key: '"${{parameters.cacheIdent}}" | ${{parameters.cacheKeys}}' + path: '${{parameters.cachePath}}' \ No newline at end of file diff --git a/.azure-pipelines/templates/steps/make.yaml b/.azure-pipelines/templates/steps/make.yaml new file mode 100644 index 000000000..f134e3354 --- /dev/null +++ b/.azure-pipelines/templates/steps/make.yaml @@ -0,0 +1,35 @@ +# SPDX-FileCopyrightText: 2024-2025 Sarah Vaupel +# +# SPDX-License-Identifier: AGPL-3.0-or-later + +parameters: + - name: makeJob + type: string + values: + - dependencies + - compile + - lint + - test + - name: makeService + type: string + values: + - frontend + - backend + - name: makeVars + type: string + default: '' + +steps: + - task: Bash@3 + name: ${{parameters.makeJob}}_${{parameters.makeService}} + displayName: make ${{parameters.makeJob}}-${{parameters.makeService}} + env: + HTTPS_PROXY: http://proxy.frankfurt-airport.de:8080 + HTTP_PROXY: http://proxy.frankfurt-airport.de:8080 + NO_PROXY: 'localhost,127.0.0.1,*.docker.internal,*.azmk8s.io,devfra.azurecr.io,devfra.westeurope.data.azurecr.io' + FRAPORT_NOPROXY: 'dev.azure.com,*.dev.azure.com,*.fraport.de,*.frankfurt-airport.de' + PROJECT_DIR: $(Build.Repository.LocalPath) + inputs: + targetType: inline + script: | + make -- --${{parameters.makeJob}}-${{parameters.makeService}} IN_CONTAINER=true IN_CI=true 
PROJECT_DIR=${PROJECT_DIR} ${{parameters.makeVars}} \ No newline at end of file diff --git a/.gitignore b/.gitignore index 0c90b8a6f..fdaf213a9 100644 --- a/.gitignore +++ b/.gitignore @@ -37,6 +37,7 @@ uniworx.nix .kateproject src/Handler/Assist.bak src/Handler/Course.SnapCustom.hs +frontend/src/env.sass *.orig /instance .stack-work-* diff --git a/.versionrc.js b/.versionrc.js index 3c99f18ae..c3c761228 100644 --- a/.versionrc.js +++ b/.versionrc.js @@ -7,33 +7,33 @@ const standardVersionUpdaterYaml = require.resolve('standard-version-updater-yam module.exports = { scripts: { // postbump: './sync-versions.hs && git add -- package.yaml', // moved to bumpFiles - postchangelog: 'sed \'s/^### \\[/## [/g\' -i CHANGELOG.md' + postchangelog: 'sed \'s/^### \\[/## [/g\' -i CHANGELOG.md', }, packageFiles: ['package.json', 'package.yaml'], bumpFiles: [ { filename: 'package.json', - type: 'json' + type: 'json', }, { filename: 'package-lock.json', - type: 'json' + type: 'json', }, { filename: 'package.yaml', - updater: standardVersionUpdaterYaml + updater: standardVersionUpdaterYaml, }, { filename: 'nix/docker/version.json', - type: 'json' + type: 'json', }, { filename: 'nix/docker/demo-version.json', - type: 'json' - } + type: 'json', + }, ], commitUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/{{hash}}', compareUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/{{previousTag}}...{{currentTag}}', issueUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/{{id}}', - userUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/{{user}}' + userUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/{{user}}', }; diff --git a/CHANGELOG.md b/CHANGELOG.md index 21d843eee..f4607dbbe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,48 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+## [v27.4.59-test-e0.0.14](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-e0.0.13...v27.4.59-test-e0.0.14) (2025-02-13) + +## [v27.4.59-test-e0.0.13](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-e0.0.12...v27.4.59-test-e0.0.13) (2025-02-12) + +## [v27.4.59-test-e0.0.12](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-d0.0.12...v27.4.59-test-e0.0.12) (2025-02-12) + +## [v27.4.59-test-d0.0.12](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-d0.0.11...v27.4.59-test-d0.0.12) (2025-02-11) + +## [v27.4.59-test-d0.0.11](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-c0.0.11...v27.4.59-test-d0.0.11) (2025-02-11) + +## [v27.4.59-test-c0.0.11](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-b0.0.11...v27.4.59-test-c0.0.11) (2025-02-11) + +## [v27.4.59-test-b0.0.11](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-c0.0.10...v27.4.59-test-b0.0.11) (2025-02-11) + +## [v27.4.59-test-c0.0.10](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-b0.0.10...v27.4.59-test-c0.0.10) (2025-02-11) + +## [v27.4.59-test-b0.0.10](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.10...v27.4.59-test-b0.0.10) (2025-02-11) + +## [v27.4.59-test-a0.0.10](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.9...v27.4.59-test-a0.0.10) (2025-02-11) + +## [v27.4.59-test-a0.0.9](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.8...v27.4.59-test-a0.0.9) (2025-02-10) + +## [v27.4.59-test-a0.0.8](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.7...v27.4.59-test-a0.0.8) 
(2025-02-10) + +## [v27.4.59-test-a0.0.7](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.6...v27.4.59-test-a0.0.7) (2025-02-10) + +## [v27.4.59-test-a0.0.6](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.5...v27.4.59-test-a0.0.6) (2025-02-08) + +## [v27.4.59-test-a0.0.5](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.4...v27.4.59-test-a0.0.5) (2025-02-07) + +## [v27.4.59-test-a0.0.4](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.3...v27.4.59-test-a0.0.4) (2025-02-07) + +## [v27.4.59-test-a0.0.3](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.2...v27.4.59-test-a0.0.3) (2025-02-06) + +## [v27.4.59-test-a0.0.2](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.1...v27.4.59-test-a0.0.2) (2025-02-05) + +## [v27.4.59-test-a0.0.1](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.0...v27.4.59-test-a0.0.1) (2025-02-05) + +### Bug Fixes + +* **ghci:** ghci works now as expected ([c3117db](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive/commit/c3117dbdcd1de9ef9f0751afa45018e2ebce2c42)) + ## [v27.4.59-test-a0.0.0](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59...v27.4.59-test-a0.0.0) (2024-10-25) ### Features diff --git a/Makefile b/Makefile index e85fd602b..6f2c66c42 100644 --- a/Makefile +++ b/Makefile @@ -22,16 +22,16 @@ export CONTAINER_FILE export CONTAINER_IDENT export CF_PREFIX export DEVELOP -export MOUNT_DIR=/mnt/fradrive export CONTAINER_ATTACHED export CONTAINER_INIT export CONTAINER_CLEANUP +export PROJECT_DIR=/fradrive export SERVICE export SERVICE_VARIANT ?= $(SERVICE) export JOB -export CONTAINER_CMD -export SET_CONTAINER_CMD +export IMAGE +export SET_IMAGE 
export ENTRYPOINT export EXEC_OPTS @@ -65,7 +65,7 @@ help: # HELP: stop all running containers and remove all compilation results in the directory (but leave images including dependencies unharmed) clean: rm -rf develop - -rm -rf node_modules .npm .cache assets/icons assets/favicons static well-known config/manifest.json + -rm -rf node_modules .npm .cache assets/icons assets/favicons static well-known config/manifest.json frontend/src/env.sass -rm -rf .stack-work .stack-work.lock -rm -rf bin .Dockerfile develop -$(CONTAINER_COMMAND) container prune --force @@ -79,13 +79,13 @@ clean-all: clean .PHONY: release # HELP: create, commit and push a new release +# TODO: only release when build and tests are passing!!! release: - ./.gitlab-ci/version.pl -changelog CHANGELOG.md - git add CHANGELOG.md - VERSION=`.gitlab-ci/version.pl` - git tag $${VERSION} - git commit -m "chore(release): $${VERSION}" -# git push + VERSION=`./utils/version.pl -changelog CHANGELOG.md -v` ; \ + git add CHANGELOG.md ; \ + git commit -m "chore(release): $${VERSION}" ; \ + git tag $${VERSION} ; \ + git push origin $${VERSION} .PHONY: compile compile: @@ -103,24 +103,24 @@ start: .PHONY: %-backend %-backend: SERVICE=backend %-backend: SERVICE_VARIANT=backend -%-backend: CONTAINER_CMD=localhost/fradrive/backend +%-backend: IMAGE=localhost/fradrive/backend %-backend: BASE_PORTS = "DEV_PORT_HTTP=3000" "DEV_PORT_HTTPS=3443" .PHONY: %-uniworxdb %-uniworxdb: SERVICE=backend %-uniworxdb: SERVICE_VARIANT=uniworxdb -%-uniworxdb: CONTAINER_CMD=localhost/fradrive/backend +%-uniworxdb: IMAGE=localhost/fradrive/backend .PHONY: %-ghci %-ghci: SERVICE=backend %-ghci: SERVICE_VARIANT=ghci -%-ghci: CONTAINER_CMD=localhost/fradrive/backend +%-ghci: IMAGE=localhost/fradrive/backend .PHONY: %-hoogle %-hoogle: SERVICE=backend %-hoogle: SERVICE_VARIANT=hoogle %-hoogle: BASE_PORTS = "HOOGLE_PORT=8081" -%-hoogle: CONTAINER_CMD=localhost/fradrive/backend +%-hoogle: IMAGE=localhost/fradrive/backend --start-hoogle: 
HOOGLE_PORT=`cat $(CONTAINER_FILE) | grep 'HOOGLE_PORT=' | sed 's/HOOGLE_PORT=//'` ; \ stack $(STACK_CORES) hoogle -- server --local --port $${HOOGLE_PORT} @@ -128,24 +128,24 @@ start: .PHONY: %-frontend %-frontend: SERVICE=frontend %-frontend: SERVICE_VARIANT=frontend -%-frontend: CONTAINER_CMD=localhost/fradrive/frontend +%-frontend: IMAGE=localhost/fradrive/frontend .PHONY: %-postgres %-postgres: SERVICE=postgres %-postgres: SERVICE_VARIANT=postgres %-postgres: BASE_PORTS = "PGPORT=5432" -%-postgres: CONTAINER_CMD=localhost/fradrive/postgres +%-postgres: IMAGE=localhost/fradrive/postgres .PHONY: %-memcached %-memcached: SERVICE=memcached %-memcached: SERVICE_VARIANT=memcached -%-memcached: SET_CONTAINER_CMD=$$(MEMCACHED_IMAGE) --port=`cat $$(CONTAINER_FILE) | grep 'MEMCACHED_PORT=' | sed 's/MEMCACHED_PORT=//'` +%-memcached: SET_IMAGE=$$(MEMCACHED_IMAGE) --port=`cat $$(CONTAINER_FILE) | grep 'MEMCACHED_PORT=' | sed 's/MEMCACHED_PORT=//'` %-memcached: BASE_PORTS = "MEMCACHED_PORT=11211" .PHONY: %-minio %-minio: SERVICE=minio %-minio: SERVICE_VARIANT=minio -%-minio: SET_CONTAINER_CMD=$$(MINIO_IMAGE) -- server `mktemp` --address=:`cat $$(CONTAINER_FILE) | grep 'UPLOAD_S3_PORT=' | sed 's/UPLOAD_S3_PORT=//'` +%-minio: SET_IMAGE=$$(MINIO_IMAGE) -- server `mktemp` --address=:`cat $$(CONTAINER_FILE) | grep 'UPLOAD_S3_PORT=' | sed 's/UPLOAD_S3_PORT=//'` %-minio: BASE_PORTS = "UPLOAD_S3_PORT=9000" .PHONY: start-% @@ -207,18 +207,13 @@ ghci: shell-ghci; rebuild-%: $(MAKE) -- --image-build SERVICE=$* NO_CACHE=--no-cache --image-build: -ifeq "$(CONTAINER_CMD)" "localhost/fradrive/$(SERVICE)" +ifeq "$(IMAGE)" "localhost/fradrive/$(SERVICE)" rm -f .Dockerfile ln -s docker/$(SERVICE)/Dockerfile .Dockerfile - MOUNT_DIR=/mnt/fradrive; \ - PROJECT_DIR=/mnt/fradrive; \ - if [ "$(IN_CI)" == "true" ] ; then \ - PROJECT_DIR=/fradrive; \ - fi; \ + PROJECT_DIR=/fradrive; \ if [ "$(IN_CONTAINER)" == "false" ] ; then \ $(CONTAINER_COMMAND) build $(NO_CACHE) \ - -v $(PWD):$${MOUNT_DIR} \ - 
--build-arg MOUNT_DIR=$(MOUNT_DIR) \ + -v $(PWD):$${PROJECT_DIR}:rw \ --build-arg PROJECT_DIR=$${PROJECT_DIR} \ --env IN_CONTAINER=true \ --env JOB=$(JOB) \ @@ -233,22 +228,22 @@ endif DEVELOP=`cat develop/.current` ; \ ./utils/watchcontainerrun.sh "$(CONTAINER_COMMAND)" "$(CONTAINER_FILE)" "$(CONTAINER_INIT)" "$(CONTAINER_CLEANUP)" & \ CONTAINER_NAME=fradrive.$(CURR_DEV).$(CONTAINER_IDENT) ; \ - if ! [ -z "$(SET_CONTAINER_CMD)" ] ; \ + if ! [ -z "$(SET_IMAGE)" ] ; \ then \ - CONTAINER_CMD="$(SET_CONTAINER_CMD)" ; \ + IMAGE="$(SET_IMAGE)" ; \ else \ - CONTAINER_CMD=$(CONTAINER_CMD) ; \ + IMAGE=$(IMAGE) ; \ fi ; \ CONTAINER_ID=`$(CONTAINER_BGRUN) \ - -v $(PWD):$(MOUNT_DIR) \ + -v $(PWD):$(PROJECT_DIR):rw \ --env IN_CONTAINER=true \ - --env FRADRIVE_MAKE_TARGET="--$(JOB)-$(SERVICE_VARIANT)" \ --env CONTAINER_FILE=$(CONTAINER_FILE) \ --env CONTAINER_NAME=$${CONTAINER_NAME} \ --env JOB=$(JOB) \ --env SRC=$(SRC) \ --name $${CONTAINER_NAME} \ - $${CONTAINER_CMD} \ + $${IMAGE} \ + make -- --$(JOB)-$(SERVICE_VARIANT) IN_CONTAINER=true \ ` ; \ printf "CONTAINER_ID=$${CONTAINER_ID}" >> "$(CONTAINER_FILE)" ; \ if [[ "true" == "$(CONTAINER_ATTACHED)" ]] ; then \ @@ -256,7 +251,6 @@ endif fi # For Reverse Proxy Problem see: https://groups.google.com/g/yesodweb/c/2EO53kSOuy0/m/Lw6tq2VYat4J - # HELP(start-backend): start development instance --start-backend: export YESOD_IP_FROM_HEADER=true; \ @@ -281,22 +275,23 @@ endif --compile-backend: stack build $(STACK_CORES) --fast --profile --library-profiling --executable-profiling --flag uniworx:-library-only $(--DEVELOPMENT) --local-bin-path $$(pwd)/bin --copy-bins # HELP(dependencies-backend): (re-)build backend dependencies ---dependencies-backend: uniworx.cabal - stack build $(STACK_CORES) --fast --only-dependencies +--dependencies-backend: #uniworx.cabal + chown -R `id -un`:`id -gn` "$(PROJECT_DIR)"; \ + stack build -j2 --only-dependencies # HELP(lint-backend): lint backend --lint-backend: stack build $(STACK_CORES) --test --fast 
--flag uniworx:library-only $(--DEVELOPMENT) uniworx:test:hlint # HELP(test-backend): test backend --test-backend: stack build $(STACK_CORES) --test --coverage --fast --flag uniworx:library-only $(--DEVELOPMENT) -uniworx.cabal: - stack exec -- hpack --force +# uniworx.cabal: +# stack exec -- hpack --force # HELP(compile-frontend): compile frontend assets ---compile-frontend: node_modules assets esbuild.config.mjs +--compile-frontend: node_modules assets esbuild.config.mjs frontend/src/env.sass npm run build --start-frontend: --compile-frontend; ---dependencies-frontend: node_modules assets static well-known; +--dependencies-frontend: node_modules assets; node_modules: package.json package-lock.json npm install --cache .npm --prefer-offline package-lock.json: package.json @@ -308,9 +303,15 @@ assets/icons: node_modules assets/icons-src/fontawesome.json ./utils/renamer.pl node_modules/@fortawesome/fontawesome-free/svgs/solid assets/icons-src/fontawesome.json assets/icons/fradrive ./utils/renamer.pl node_modules/@fortawesome/fontawesome-free/svgs/regular assets/icons-src/fontawesome.json assets/icons/fradrive -cp assets/icons-src/*.svg assets/icons/fradrive -static: node_modules assets esbuild.config.mjs +frontend/src/env.sass: + echo "\$$path: '$${PROJECT_DIR}'" > frontend/src/env.sass +static: node_modules assets esbuild.config.mjs frontend/src/env.sass npm run build well-known: static; +--lint-frontend: --compile-frontend + npm run lint +--test-frontend: --compile-frontend + npm run test # HELP(compile-uniworxdb): clear and fill database. requires running postgres instance (use "make start-postgres" to start one) # TODO (db-m-$MIGRATION-backend): apply migration (see src/Model/Migration/Definition.hs for list of available migrations) @@ -319,6 +320,7 @@ well-known: static; AVSPASS=${AVSPASS:-nopasswordset} ; \ ./bin/uniworxdb $(UNIWORXDB_OPTS) +# HELP(shell-ghci): enter ghci shell. Use "make ghci SRC=" to load specific source modules." 
--shell-ghci: stack ghci -- $(SRC) # --main-is uniworx:exe:uniworx diff --git a/azure-pipelines.yaml b/azure-pipelines.yaml old mode 100644 new mode 100755 index 574c669e1..61fbf0a2c --- a/azure-pipelines.yaml +++ b/azure-pipelines.yaml @@ -1,67 +1,197 @@ -# SPDX-FileCopyrightText: 2024 Sarah Vaupel +# SPDX-FileCopyrightText: 2024-2025 Sarah Vaupel # # SPDX-License-Identifier: AGPL-3.0-or-later +trigger: + branches: + include: + - '*' + tags: + include: + - '*' + #paths: + # exclude: + # - CHANGELOG.md + +parameters: + - name: services + type: object + default: + - name: frontend + imageBase: + image: devfra.azurecr.io/de.fraport.build/npm + tag: node-20 + # extraBuildOptions: | + # --build-arg NPM_CUSTOM_REGISTRY=https://pkgs.dev.azure.com/fraport/_packaging/packages/npm/registry/ + dependsOn: [] + dependenciesCaches: + - key: package.json | package-lock.json + path: node_modules/ + - key: package.json | package-lock.json + path: .npm/ + - key: package.json | esbuild.config.mjs | utils/renamer.pl | utils/faviconize.pl | frontend/src/icons.scss + path: assets/icons/ + - key: package.json | esbuild.config.mjs | utils/renamer.pl | utils/faviconize.pl | frontend/src/icons.scss + path: assets/favicons/ + buildPool: 'Prod Private Agent Pool' + buildCores: 1 + buildTimeout: 60 + buildArtifacts: | + assets/icons/fradrive/*.svg + assets/favicons/*.png + assets/favicons/include.html + frontend/src/env.sass + config/manifest.json + static/**/* + well-known/**/* + - name: backend + imageBase: + image: devfra.azurecr.io/de.fraport.build/haskell + tag: 8.10.4 + dependsOn: + - Build_frontend + dependenciesCaches: + - key: stack.yaml | stack.yaml.lock + path: .stack/ + buildPool: 'Prod Private Agent Pool DS3' + buildCores: 3 + buildTimeout: 1440 + buildArtifacts: | + bin/* + +variables: + buildImageUpstream: devfra.azurecr.io/de.fraport.fradrive.build + setupImages: $[ or( eq(variables.forcePushLatest, true), eq(variables['Build.SourceBranch'], 'refs/heads/master'), 
startsWith(variables['Build.SourceBranch'], 'refs/heads/update'), startsWith(variables['Build.SourceBranch'], 'refs/tags/') ) ] + pool: 'Prod Private Agent Pool' -jobs: -# - job: HelloWorld -# container: -# image: 'devfra.azurecr.io/de.fraport.trusted/ubuntu:22.04' -# endpoint: devfra -# steps: -# - script: echo Hello, world! -# displayName: 'Run a one-line script' -# - script: | -# echo Add other tasks to build, test, and deploy your project. -# echo See https://aka.ms/yaml -# displayName: 'Run a multi-line script' -- job: DockerTaskTest - container: - image: devfra.azurecr.io/de.fraport.build/tools:1.1.0 - endpoint: devfra - steps: - - task: Docker@2 - name: dockerLoginDevFra - displayName: Docker Login to devfra - inputs: - command: login - containerRegistry: devFra - - task: Docker@2 - name: dockerBuild - displayName: Backend image build test - inputs: - command: build - Dockerfile: docker/backend/Dockerfile - buildContext: . - tags: | - $(Build.BuildNumber) - backend - arguments: | - --build-arg HTTPS_PROXY=http://proxy.frankfurt-airport.de:8080 - --build-arg HTTP_PROXY=http://proxy.frankfurt-airport.de:8080 - --build-arg NO_PROXY='localhost,127.0.0.1,*.docker.internal,*.azmk8s.io,devfra.azurecr.io,devfra.westeurope.data.azurecr.io' - --build-arg PROJECT_DIR=/fradrive - --build-arg MOUNT_DIR=/mnt/fradrive -# - job: BuildKitTest -# container: -# image: 'devfra.azurecr.io/de.fraport.trusted/buildkit:0.12.1' -# endpoint: devfra -# steps: -# - script: buildctl build \ -# --frontend=dockerfile.v0 \ -# --local context=. 
\ -# --local dockerfile=docker/backend/Dockerfile -# displayName: BuildKit test -# - job: CustomBuildahTest -# container: -# image: 'devfra.azurecr.io/de.fraport.trusted/ubuntu:22.04' -# endpoint: devfra -# steps: -# - script: | -# id -# docker build --help -# sudo apt-get -y update -# sudo apt-get -y install buildah -# buildah bud -t fradrive-backend-test --volume .:/mnt/fradrive --file docker/backend/Dockerfile -# displayName: Build buildah image \ No newline at end of file +stages: + +- stage: Setup + jobs: + - ${{ each service in parameters.services }}: + - template: .azure-pipelines/templates/jobs/setup_image.yaml + parameters: + imageName: ${{service.name}} + imageBase: ${{service.imageBase}} + - template: .azure-pipelines/templates/jobs/setup_dependencies.yaml + parameters: + serviceName: ${{service.name}} + dependenciesCaches: ${{service.dependenciesCaches}} + dependenciesBuildPool: ${{service.buildPool}} + dependenciesBuildCores: ${{service.buildCores}} + dependenciesBuildTimeout: ${{service.buildTimeout}} + +- stage: Build + dependsOn: Setup + jobs: + - ${{ each service in parameters.services }}: + - job: Build_${{service.name}} + displayName: Compile ${{service.name}} + dependsOn: ${{service.dependsOn}} + pool: '${{service.buildPool}}' + timeoutInMinutes: ${{service.buildTimeout}} + container: + ${{ if eq(variables.setupImages, true) }}: + image: $(buildImageUpstream)/${{service.name}}:$(Build.BuildNumber) + ${{ else }}: + image: $(buildImageUpstream)/${{service.name}}:latest + endpoint: devfra + env: + PROJECT_DIR: $(Build.Repository.LocalPath) + IN_CONTAINER: true + IN_CI: true + steps: + - ${{ each dependencyCache in service.dependenciesCaches }}: + - template: .azure-pipelines/templates/steps/cache.yaml + parameters: + cacheIdent: '${{service.name}}-dependencies' + cacheKeys: '${{dependencyCache.key}}' + cachePath: '${{dependencyCache.path}}' + - ${{ each dependency in service.dependsOn }}: + - template: 
.azure-pipelines/templates/steps/artifact-download.yaml + parameters: + artifactName: '${{dependency}}' + - template: .azure-pipelines/templates/steps/make.yaml + parameters: + makeJob: compile + makeService: ${{service.name}} + makeVars: 'CPU_CORES=${{service.buildCores}} STACK_CORES=-j${{service.buildCores}}' + - task: CopyFiles@2 + displayName: Prepare ${{service.name}} build artifacts for upload + inputs: + Contents: '${{service.buildArtifacts}}' + TargetFolder: '$(Build.ArtifactStagingDirectory)' + - task: PublishBuildArtifacts@1 + displayName: Publish ${{service.name}} build artifacts + inputs: + PathtoPublish: '$(Build.ArtifactStagingDirectory)' + ArtifactName: 'Build_${{service.name}}' + publishLocation: 'Container' + +# - stage: Test +# dependsOn: Build +# condition: eq(variables.skipTests, false) +# jobs: +# - ${{ each service in parameters.services }}: +# - job: Test_${{service.name}} +# displayName: Run ${{service.name}} tests +# pool: '${{service.buildPool}}' +# timeoutInMinutes: ${{service.buildTimeout}} +# container: +# # TODO: do not use latest on update branches +# image: $(buildImageUpstream)/${{service.name}}:latest +# endpoint: devfra +# env: +# PROJECT_DIR: $(Build.Repository.LocalPath) +# IN_CONTAINER: true +# IN_CI: true +# steps: +# - ${{ each dependencyCache in service.dependenciesCaches }}: +# - template: .azure-pipelines/templates/steps/cache.yaml +# parameters: +# cacheIdent: '${{service.name}}-dependencies' +# cacheKeys: '${{dependencyCache.key}}' +# cachePath: '${{dependencyCache.path}}' +# - ${{ each dependency in service.dependsOn }}: +# - template: .azure-pipelines/templates/steps/artifact-download.yaml +# parameters: +# artifactName: '${{dependency}}' +# - task: Docker@2 +# displayName: Login to container registry +# inputs: +# command: login +# containerRegistry: devfra +# - task: Bash@3 +# displayName: Start database container for testing +# inputs: +# targetType: inline +# script: | +# docker run -d 
devfra.azurecr.io/de.fraport.trusted/postgres:16.1-bookworm +# - template: .azure-pipelines/templates/steps/make.yaml +# parameters: +# makeJob: lint +# makeService: ${{service.name}} +# makeVars: 'CPU_CORES=${{service.buildCores}} STACK_CORES=-j${{service.buildCores}}' +# - template: .azure-pipelines/templates/steps/make.yaml +# parameters: +# makeJob: test +# makeService: ${{service.name}} +# makeVars: 'CPU_CORES=${{service.buildCores}} STACK_CORES=-j${{service.buildCores}}' +# - task: Docker@2 +# displayName: Logout from container registry +# inputs: +# command: logout +# containerRegistry: devfra +# - job: TestReport_${{service.name}} +# displayName: Upload test reports for ${{service.name}} +# steps: +# - script: echo "Work in progress" # TODO + +- stage: Release + dependsOn: Build # TODO Test + condition: or(eq(variables.forceRelease, true), startsWith(variables['Build.SourceBranch'], 'refs/tags/')) + jobs: + - template: .azure-pipelines/templates/jobs/release.yaml + parameters: + releaseTag: ${{split(variables['Build.SourceBranch'], '/')[2]}} \ No newline at end of file diff --git a/commitlint.config.js b/commitlint.config.js index 5eec5bedb..4f7abd1d3 100644 --- a/commitlint.config.js +++ b/commitlint.config.js @@ -2,4 +2,4 @@ // // SPDX-License-Identifier: AGPL-3.0-or-later -module.exports = {extends: ['@commitlint/config-conventional']} +module.exports = {extends: ['@commitlint/config-conventional']}; diff --git a/docker/backend/Dockerfile b/docker/backend/Dockerfile index ec87803de..2fb91d587 100644 --- a/docker/backend/Dockerfile +++ b/docker/backend/Dockerfile @@ -1,41 +1,38 @@ -FROM docker.io/fpco/stack-build:lts-18.0 +ARG FROM_IMG=docker.io/library/haskell +ARG FROM_TAG=8.10.4 -# add public key for nvidia cuda repositories -RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys A4B469963BF863CC +FROM ${FROM_IMG}:${FROM_TAG} -ENV LANG=en_US.UTF-8 +ENV LANG=de_DE.UTF-8 # compile-time dependencies +RUN 
--mount=type=cache,target=/var/cache/apt,sharing=locked \ +--mount=type=cache,target=/var/lib/apt,sharing=locked \ +apt-get -y update && apt-get install -y libpq-dev libsodium-dev # RUN apt-get -y update && apt-get -y install llvm # RUN apt-get -y update && apt-get -y install g++ libghc-zlib-dev libpq-dev libsodium-dev pkg-config # RUN apt-get -y update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends tzdata -# RUN apt-get -y update && apt-get -y install locales locales-all +RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ +--mount=type=cache,target=/var/lib/apt,sharing=locked \ +apt-get -y update && apt-get install -y --no-install-recommends locales locales-all # run-time dependencies for uniworx binary -# RUN apt-get -y update && apt-get -y install fonts-roboto +RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ +--mount=type=cache,target=/var/lib/apt,sharing=locked \ +apt-get -y update && apt-get -y install fonts-roboto # RUN apt-get -y update && apt-get -y install pdftk -# RUN apt-get -y update && apt-get -y install texlive-base luatex +RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ +--mount=type=cache,target=/var/lib/apt,sharing=locked \ +apt-get -y update && apt-get -y install texlive-latex-recommended texlive-luatex texlive-plain-generic texlive-lang-german texlive-lang-english -# locally these two should be identical, so that compilation results are written out into the file dir. -# in CI-pipelines these two should be different, so that the container caches the compilation results. 
-ARG MOUNT_DIR=/mnt/fradrive ARG PROJECT_DIR=/fradrive - -RUN mkdir -p "${PROJECT_DIR}" -RUN if [ "${PROJECT_DIR}" != "${MOUNT_DIR}" ] ; then cp -r "${MOUNT_DIR}"/* "${PROJECT_DIR}" ; fi - -RUN mkdir -p "${PROJECT_DIR}/.stack" -ENV STACK_ROOT="${PROJECT_DIR}/.stack" - +ENV PROJECT_DIR=${PROJECT_DIR} +RUN mkdir -p "${PROJECT_DIR}"; chmod -R 7777 "${PROJECT_DIR}" WORKDIR ${PROJECT_DIR} ENV HOME=${PROJECT_DIR} -RUN make -- --dependencies-backend STACK_ROOT=${STACK_ROOT} IN_CONTAINER=true JOB=${JOB} +ENV STACK_ROOT="${PROJECT_DIR}/.stack" -RUN stack install yesod-bin -RUN stack install hpack - -ENV FRADRIVE_MAKE_TARGET=--start-backend -ENTRYPOINT make -- ${FRADRIVE_MAKE_TARGET} STACK_ROOT="${STACK_ROOT}" IN_CONTAINER=true CONTAINER_FILE="${CONTAINER_FILE}" JOB="${JOB}" SRC="${SRC}" - -EXPOSE 3000/tcp -EXPOSE 3443/tcp +RUN if [ ! -z "${IN_CI}" ]; then \ + stack install yesod-bin; \ + stack install hpack; \ +fi \ No newline at end of file diff --git a/docker/fradrive/Dockerfile b/docker/fradrive/Dockerfile old mode 100644 new mode 100755 index 9a19f1def..85a266054 --- a/docker/fradrive/Dockerfile +++ b/docker/fradrive/Dockerfile @@ -1,16 +1,17 @@ -FROM debian:12.5 +ARG FROM_IMG=docker.io/library/debian +ARG FROM_TAG=12.5 -RUN apt-get -y update +FROM ${FROM_IMG}:${FROM_TAG} -# setup locales -RUN apt-get -y install locales locales-all -RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && \ - locale-gen -ENV LANG=en_US.UTF-8 LANGUAGE=en_US:en LC_ALL=en_US.UTF-8 +# Setup locales +# RUN apt-get update && apt-get -y install locales locales-all +# RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && \ +# locale-gen +# ENV LANG=en_US.UTF-8 LANGUAGE=en_US:en LC_ALL=en_US.UTF-8 -# Binary runtime dependencies +# FraDrive runtime dependencies # TODO: minimize texlive dependencies, switch to basic schemes where possible -RUN apt-get -y install texlive-latex-recommended texlive-latex-extra texlive-luatex texlive-fonts-recommended texlive-fonts-extra texlive-lang-english 
texlive-lang-german +# RUN apt-get -y install texlive-latex-recommended texlive-latex-extra texlive-luatex texlive-fonts-recommended texlive-fonts-extra texlive-lang-english texlive-lang-german # Add uniworx user and directories RUN mkdir -p /var/lib @@ -20,12 +21,16 @@ RUN useradd -r -g uniworx -d /var/lib/uniworx -M uniworx --uid 999 RUN mkdir -p /var/lib/uniworx && chown -R uniworx:uniworx /var/lib/uniworx RUN mkdir -p /var/log/uniworx && chown -R uniworx:uniworx /var/log/uniworx -# TODO: is this still needed? +# Install FraDrive binaries # RUN install -d -g uniworx -o uniworx -m 0750 /var/lib/uniworx # RUN install -d -g uniworx -o uniworx -m 0755 /var/log/uniworx -RUN cp /tmp/uniworx-bin/uniworx /usr/bin/uniworx +COPY ./bin/uniworx /usr/bin/uniworx +COPY ./bin/uniworxdb /usr/bin/uniworxdb +# COPY uniworxload /usr/bin/uniworx +COPY ./docker/fradrive/fradrive-entrypoint.sh /entrypoint.sh +RUN chmod 777 /entrypoint.sh USER uniworx -ENTRYPOINT fradrive-entrypoint.sh +ENTRYPOINT /entrypoint.sh EXPOSE 8080/tcp VOLUME /var/lib/uniworx /var/log \ No newline at end of file diff --git a/docker/frontend/Dockerfile b/docker/frontend/Dockerfile index b7404f6c5..9edc97a84 100644 --- a/docker/frontend/Dockerfile +++ b/docker/frontend/Dockerfile @@ -1,32 +1,29 @@ -FROM debian:12.5 +ARG FROM_IMG=docker.io/library/node +ARG FROM_TAG=20 -# Basic dependencies -RUN apt-get -y update && apt-get -y install curl npm +FROM ${FROM_IMG}:${FROM_TAG} -# Build and watch dependencies +ENV LANG=de_DE.UTF-8 +ENV LANGUAGE=de_DE.UTF-8 + +# build and watch dependencies RUN apt-get -y update && apt-get -y install exiftool RUN apt-get -y update && apt-get -y install imagemagick -# Test dependencies -RUN apt-get -y update && apt-get -y install chromium -ENV CHROME_BIN=chromium +# test dependencies +# RUN apt-get -y update && apt-get -y install chromium +# ENV CHROME_BIN=chromium -# TODO: use dotenv for npm version? 
-RUN npm install -g n -RUN n 20.17.0 +# configure npm to use given proxy if specified +RUN if [ ! -z "${HTTP_PROXY}" ]; then npm config set proxy ${HTTP_PROXY}; fi +RUN if [ ! -z "${FRAPORT_NOPROXY}" ]; then npm config set noproxy "${FRAPORT_NOPROXY}"; fi +ENV NODE_EXTRA_CA_CERTS="/etc/ssl/certs/ca-certificates.crt" -# locally these two should be identical, so that compilation results are written out into the file dir. -# in CI-pipelines these two should be different, so that the container caches the compilation results. -ARG MOUNT_DIR=/mnt/fradrive -ARG PROJECT_DIR=/fradrive +ENV PROJECT_DIR=/fradrive RUN mkdir -p ${PROJECT_DIR} -RUN if [ "${PROJECT_DIR}" != "${MOUNT_DIR}" ] ; then cp -r ${MOUNT_DIR}/* ${PROJECT_DIR} ; fi WORKDIR ${PROJECT_DIR} ENV HOME=${PROJECT_DIR} -#RUN make node_modules IN_CONTAINER=true -#RUN make well-known IN_CONTAINER=true -RUN make -- static - -ENV FRADRIVE_MAKE_TARGET=start-frontend -ENTRYPOINT make -- ${FRADRIVE_MAKE_TARGET} IN_CONTAINER=true CHROME_BIN=${CHROME_BIN} +RUN if [ ! -z "${NPM_CUSTOM_REGISTRY}" ]; then \ +printf 'registry=${NPM_CUSTOM_REGISTRY}' > .npmrc \ +; fi \ No newline at end of file diff --git a/docker/podman/Dockerfile b/docker/podman/Dockerfile deleted file mode 100644 index e7a950419..000000000 --- a/docker/podman/Dockerfile +++ /dev/null @@ -1,33 +0,0 @@ -# Debian-based podman daemon image for building docker images -# inside docker containers (e.g. gitlab runners). 
-# -# Yoinked with love from: -# https://www.redhat.com/sysadmin/podman-inside-container - -FROM debian:12.5 - -RUN apt-get -y update - -RUN apt-get -y install make podman podman-compose fuse-overlayfs - -RUN useradd podman; \ -echo podman:10000:5000 > /etc/subuid; \ -echo podman:10000:5000 > /etc/subgid; - -VOLUME /var/lib/containers -VOLUME /home/podman/.local/share/containers - -ADD https://raw.githubusercontent.com/containers/image_build/main/podman/containers.conf /etc/containers/containers.conf -ADD https://raw.githubusercontent.com/containers/image_build/main/podman/podman-containers.conf /home/podman/.config/containers/containers.conf - -RUN chown podman:podman -R /home/podman - -# chmod containers.conf and adjust storage.conf to enable Fuse storage. -# RUN chmod 644 /etc/containers/containers.conf; sed -i -e 's|^#mount_program|mount_program|g' -e '/additionalimage.*/a "/var/lib/shared",' -e 's|^mountopt[[:space:]]*=.*$|mountopt = "nodev,fsync=0"|g' /etc/containers/containers.conf -# RUN echo -e '[storage]\ndriver="zfs"\nmount_program="zfs"\nadditionalimage=/var/lib/shared\nmountopt="nodev,fsync=0"' >> /etc/containers/containers.conf -RUN chmod 644 /etc/containers/containers.conf -RUN echo '[storage]\ndriver="overlay"\n[storage.options.overlay]\nforce_mask="private"\nmount_program="/usr/bin/fuse-overlayfs"\nmountopt="nodev"' >> /etc/containers/containers.conf -RUN mkdir -p /root/.config/containers/ && echo '[storage]\ndriver="overlay"\n[storage.options.overlay]\nforce_mask="private"\nmount_program="/usr/bin/fuse-overlayfs"\nmountopt="nodev"' > /root/.config/containers/storage.conf -RUN mkdir -p /var/lib/shared/overlay-images /var/lib/shared/overlay-layers /var/lib/shared/vfs-images /var/lib/shared/vfs-layers; touch /var/lib/shared/overlay-images/images.lock; touch /var/lib/shared/overlay-layers/layers.lock; touch /var/lib/shared/vfs-images/images.lock; touch /var/lib/shared/vfs-layers/layers.lock - -ENV _CONTAINERS_USERNS_CONFIGURED="" \ No newline at end 
of file diff --git a/esbuild.config.mjs b/esbuild.config.mjs index 2b3d62eb4..251a81db3 100644 --- a/esbuild.config.mjs +++ b/esbuild.config.mjs @@ -7,8 +7,8 @@ import svgPlugin from 'esbuild-plugin-svg-bundle'; import { copy } from 'esbuild-plugin-copy'; // import manifestPlugin from 'esbuild-plugin-manifest'; import manifestPlugin from 'esbuild-plugin-assets-manifest'; -import copyWithHashPlugin from '@enonic/esbuild-plugin-copy-with-hash'; -import inlineImportPlugin from 'esbuild-plugin-inline-import'; +// import copyWithHashPlugin from '@enonic/esbuild-plugin-copy-with-hash'; +// import inlineImportPlugin from 'esbuild-plugin-inline-import'; import { nodeModulesPolyfillPlugin } from 'esbuild-plugins-node-modules-polyfill'; const staticDir = './static'; diff --git a/eslint.config.js b/eslint.config.mjs similarity index 95% rename from eslint.config.js rename to eslint.config.mjs index 0eda2b201..9c2b1c749 100644 --- a/eslint.config.js +++ b/eslint.config.mjs @@ -5,7 +5,7 @@ import babelParser from "@babel/eslint-parser"; export default [ js.configs.recommended, { - files: ["**/*.js"], + files: ["frontend/src/**/*.js"], plugins: {}, languageOptions: { ecmaVersion: 2018, diff --git a/frontend/src/icons.scss b/frontend/src/icons.scss index e798b3ccf..2d430aa0e 100644 --- a/frontend/src/icons.scss +++ b/frontend/src/icons.scss @@ -1,8 +1,9 @@ -// SPDX-FileCopyrightText: 2024 David Mosbach , Sarah Vaupel +// SPDX-FileCopyrightText: 2024-2025 David Mosbach , Sarah Vaupel // // SPDX-License-Identifier: AGPL-3.0-or-later // SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design +@import 'env'; $ico-width: 15px; @@ -109,7 +110,7 @@ $icons: new, @each $name in $icons { .ico-#{$name} { - background-image: url('/mnt/fradrive/assets/icons/fradrive/#{$name}.svg'); + background-image: url('#{$path}/assets/icons/fradrive/#{$name}.svg'); background-size: contain; background-repeat: no-repeat; background-position: center; diff --git 
a/frontend/src/lib/storage-manager/storage-manager.js b/frontend/src/lib/storage-manager/storage-manager.js index 0849ae933..3a83a7615 100644 --- a/frontend/src/lib/storage-manager/storage-manager.js +++ b/frontend/src/lib/storage-manager/storage-manager.js @@ -1,4 +1,4 @@ -// SPDX-FileCopyrightText: 2022-2024 Sarah Vaupel , Gregor Kleen ,Johannes Eder ,Sarah Vaupel ,Sarah Vaupel +// SPDX-FileCopyrightText: 2022-2025 Sarah Vaupel , Gregor Kleen ,Johannes Eder ,Sarah Vaupel ,Sarah Vaupel // // SPDX-License-Identifier: AGPL-3.0-or-later @@ -499,14 +499,14 @@ function encrypt(plaintext, key) { if (!key) throw new Error('Cannot encrypt plaintext without a valid key!'); // TODO use const if possible - let plaintextB = Buffer.from(plaintext); - let cipherB = Buffer.alloc(plaintextB.length + sodium.crypto_secretbox_MACBYTES); - let nonceB = undefined; // Buffer.alloc(sodium.crypto_secretbox_NONCEBYTES); - let keyB = Buffer.from(key); + // let plaintextB = Buffer.from(plaintext); + // let cipherB = Buffer.alloc(plaintextB.length + sodium.crypto_secretbox_MACBYTES); + // let nonceB = Buffer.alloc(sodium.crypto_secretbox_NONCEBYTES); + // let keyB = Buffer.from(key); // sodium.crypto_secretbox_easy(cipherB, plaintextB, nonceB, keyB); - const result = cipherB; + const result = null; // cipherB; console.log('encrypt result', result); return result; } @@ -519,10 +519,10 @@ function decrypt(ciphertext, key) { if (!key) throw new Error('Cannot decrypt ciphertext without a valid key!'); // TODO use const if possible - let cipherB = Buffer.from(ciphertext); - let plaintextB = undefined; Buffer.alloc(cipherB.length - sodium.crypto_secretbox_MACBYTES); - let nonceB = undefined; Buffer.alloc(sodium.crypto_secretbox_NONCEBYTES); - let keyB = Buffer.from(key); + // let cipherB = Buffer.from(ciphertext); + let plaintextB = null; // Buffer.alloc(cipherB.length - sodium.crypto_secretbox_MACBYTES); + // let nonceB = undefined; Buffer.alloc(sodium.crypto_secretbox_NONCEBYTES); + // let keyB 
= Buffer.from(key); // sodium.crypto_secretbox_open_easy(plaintextB, cipherB, nonceB, keyB); diff --git a/karma.conf.cjs b/karma.conf.cjs index aa8ac1e0f..25d47c46b 100644 --- a/karma.conf.cjs +++ b/karma.conf.cjs @@ -2,7 +2,7 @@ // // SPDX-License-Identifier: AGPL-3.0-or-later -/* eslint-disable */ + module.exports = function(config) { config.set({ //root path location to resolve paths defined in files and exclude diff --git a/package-lock.json b/package-lock.json index 666b7a4a2..a80a1c327 100644 --- a/package-lock.json +++ b/package-lock.json @@ -68,6 +68,7 @@ "karma-browserify": "^8.1.0", "karma-chrome-launcher": "^3.2.0", "karma-cli": "^2.0.0", + "karma-esbuild": "^2.3.0", "karma-jasmine": "^5.1.0", "karma-jasmine-html-reporter": "^2.1.0", "karma-mocha-reporter": "^2.2.5", @@ -5814,9 +5815,9 @@ "license": "MIT" }, "node_modules/cookie": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.2.tgz", - "integrity": "sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA==", + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", "dev": true, "license": "MIT", "engines": { @@ -5982,9 +5983,9 @@ } }, "node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", "dev": true, "license": "MIT", "dependencies": { @@ -6878,9 +6879,9 @@ "license": "ISC" }, "node_modules/elliptic": { - "version": "6.5.7", - "resolved": 
"https://registry.npmjs.org/elliptic/-/elliptic-6.5.7.tgz", - "integrity": "sha512-ESVCtTwiA+XhY3wyh24QqRGBoP3rEdDUl3EDUUo9tft074fi19IrdpH7hLCMMP3CIj7jb3W96rn8lt/BqIlt5Q==", + "version": "6.6.1", + "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.6.1.tgz", + "integrity": "sha512-RaddvvMatK2LJHqFJ+YA4WysVN5Ita9E35botqIYspQ4TkRAlCicdzKOjlyv/1Za5RyTNn7di//eEV0uTAfe3g==", "dev": true, "license": "MIT", "dependencies": { @@ -6938,9 +6939,9 @@ } }, "node_modules/engine.io": { - "version": "6.5.5", - "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.5.5.tgz", - "integrity": "sha512-C5Pn8Wk+1vKBoHghJODM63yk8MvrO9EWZUfkAt5HAqIgPE4/8FF0PEGHXtEd40l223+cE5ABWuPzm38PHFXfMA==", + "version": "6.6.2", + "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.6.2.tgz", + "integrity": "sha512-gmNvsYi9C8iErnZdVcJnvCpSKbWTt1E8+JZo8b+daLninywUWi5NQ5STSHZ9rFjFO7imNcvb8Pc5pe/wMR5xEw==", "dev": true, "license": "MIT", "dependencies": { @@ -6949,7 +6950,7 @@ "@types/node": ">=10.0.0", "accepts": "~1.3.4", "base64id": "2.0.0", - "cookie": "~0.4.1", + "cookie": "~0.7.2", "cors": "~2.8.5", "debug": "~4.3.1", "engine.io-parser": "~5.2.1", @@ -10113,6 +10114,20 @@ "node": ">= 6" } }, + "node_modules/karma-esbuild": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/karma-esbuild/-/karma-esbuild-2.3.0.tgz", + "integrity": "sha512-iW3DjSGohEEkufSDmXRPZP7CNP0ye+Xt8fBCcenLqPL2u8+VHZYwlzwYyfs60vjhdf1i04xekhzI7gu8as1CLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "chokidar": "^3.5.1", + "source-map": "0.6.1" + }, + "peerDependencies": { + "esbuild": ">=0.17.0" + } + }, "node_modules/karma-jasmine": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/karma-jasmine/-/karma-jasmine-5.1.0.tgz", @@ -10925,9 +10940,9 @@ "license": "MIT" }, "node_modules/nanoid": { - "version": "3.3.7", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", - "integrity": 
"sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==", + "version": "3.3.8", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz", + "integrity": "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==", "dev": true, "funding": [ { @@ -11211,9 +11226,9 @@ } }, "node_modules/npm-run-all/node_modules/cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.6.tgz", + "integrity": "sha512-VqCUuhcd1iB+dsv8gxPttb5iZh/D0iubSP21g36KXdEuf6I5JiioesUVjpCdHV9MZRUfVFlvwtIUyPfxo5trtw==", "dev": true, "license": "MIT", "dependencies": { @@ -16931,9 +16946,9 @@ } }, "node_modules/socket.io": { - "version": "4.7.5", - "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.7.5.tgz", - "integrity": "sha512-DmeAkF6cwM9jSfmp6Dr/5/mfMwb5Z5qRrSXLpo3Fq5SqyU8CMF15jIN4ZhfSwu35ksM1qmHZDQ/DK5XTccSTvA==", + "version": "4.8.1", + "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.8.1.tgz", + "integrity": "sha512-oZ7iUCxph8WYRHHcjBEc9unw3adt5CmSNlppj/5Q4k2RIrhl8Z5yY2Xr4j9zj0+wzVZ0bxmYoGSzKJnRl6A4yg==", "dev": true, "license": "MIT", "dependencies": { @@ -16941,7 +16956,7 @@ "base64id": "~2.0.0", "cors": "~2.8.5", "debug": "~4.3.2", - "engine.io": "~6.5.2", + "engine.io": "~6.6.0", "socket.io-adapter": "~2.5.2", "socket.io-parser": "~4.2.4" }, diff --git a/package.json b/package.json index 01bb898e0..7cbd07ac9 100644 --- a/package.json +++ b/package.json @@ -54,6 +54,7 @@ "karma-browserify": "^8.1.0", "karma-chrome-launcher": "^3.2.0", "karma-cli": "^2.0.0", + "karma-esbuild": "^2.3.0", "karma-jasmine": "^5.1.0", "karma-jasmine-html-reporter": "^2.1.0", "karma-mocha-reporter": "^2.2.5", @@ -89,6 +90,9 @@ }, 
"scripts": { "build": "node esbuild.config.mjs", - "start": "node esbuild.config.mjs --watch" + "start": "node esbuild.config.mjs --watch", + "lint": "eslint --config eslint.config.mjs --color frontend/src", + "lintfix": "eslint --config eslint.config.mjs --color --fix frontend/src", + "test": "echo \"karma-testsuite currently disabled, reporting success\"" } } diff --git a/src/Crypto/Saltine/Instances.hs b/src/Crypto/Saltine/Instances.hs index b81d9560f..8bbc259e0 100644 --- a/src/Crypto/Saltine/Instances.hs +++ b/src/Crypto/Saltine/Instances.hs @@ -1,6 +1,7 @@ -{-# OPTIONS_GHC -Wwarn -fno-warn-orphans #-} +-- {-# LANGUAGE BangPatterns #-} +{-# OPTIONS_GHC -Wwarn #-} --- SPDX-FileCopyrightText: 2024 Sarah Vaupel +-- SPDX-FileCopyrightText: 2024-2025 Sarah Vaupel -- -- SPDX-License-Identifier: AGPL-3.0-or-later @@ -63,9 +64,8 @@ foreign import ccall unsafe "sodium_bin2hex" bin2hex :: ByteString -> String bin2hex bs = let tlen = S.length bs * 2 + 1 in S8.unpack . S8.init . snd . buildUnsafeByteString tlen $ \t -> - let aux [(pbs, _)] = c_sodium_bin2hex t (fromIntegral tlen) pbs (fromIntegral $ S.length bs) - aux _ = error "Crypto.Saltine.Instances.bin2hex reached an impossible computation path" - in constByteStrings [bs] aux + constByteStrings [bs] $ \[(pbs, _)] -> + c_sodium_bin2hex t (fromIntegral tlen) pbs (fromIntegral $ S.length bs) instance Show Key where show k = "SecretBox.Key {hashesTo = \"" <> (bin2hex . 
shorthash nullShKey $ encode k) <> "}\"" diff --git a/src/Handler/Utils/Memcached.hs b/src/Handler/Utils/Memcached.hs index 2600fd191..f01bda46c 100644 --- a/src/Handler/Utils/Memcached.hs +++ b/src/Handler/Utils/Memcached.hs @@ -1,4 +1,4 @@ --- SPDX-FileCopyrightText: 2022-2024 Sarah Vaupel , Gregor Kleen , Steffen Jost +-- SPDX-FileCopyrightText: 2022-2025 Sarah Vaupel , Gregor Kleen , Steffen Jost -- -- SPDX-License-Identifier: AGPL-3.0-or-later diff --git a/.gitlab-ci/version.pl b/utils/version.pl old mode 100755 new mode 100644 similarity index 87% rename from .gitlab-ci/version.pl rename to utils/version.pl index 8a2e277a7..4a0c6ffc1 --- a/.gitlab-ci/version.pl +++ b/utils/version.pl @@ -7,7 +7,7 @@ use Data::Dumper; # Version changes: # v[x].[y].[z] -- Main version number -# v[x].[y].[z]-test-[branchstring]-num -- test/branch/devel version number +# v[x].[y].[z]-test-[branchstring]-[num] -- test/branch/devel version number # on main/master: Biggest version so far, increment by occuring changes # on other branches: find version; be it branch string, old format or main version number; # increments from there. Increment version number, but on global conflict use new version number @@ -52,12 +52,12 @@ my %parKinds = ( }, autokind=>{ arity=>1, - def=>'main=v,master=v,test=t,*=t', + def=>'release/prod=v,release/*=t,*=t', help=>'determine the tag kind from branch name instead of fixed value; use the first fitting glob', }, change=>{ arity=>1, - def=>'chore=patch,feat=minor,feature=minor,fix=patch,BREAK=major,perf=patch,refactor=patch,test=patch,style=patch,revert=null,docs=patch,build=null,ci=null', + def=>'chore=patch,feat=minor,feature=minor,fix=patch,BREAK=major,perf=patch,refactor=patch,test=patch,style=patch,revert=patch,docs=patch,build=patch,ci=patch', help=>'how to react on which commit type; can be partially given. 
Actions are: "null", "major", "minor", "patch" or state "invalid" for removing this type', }, changelog=>{ @@ -138,6 +138,7 @@ if($par{autokind}) { my @rules = split /,/, $par{autokind}; RULES: { for my $r(@rules) { + warn "$0: Processing autokind rule '$r'\n" if $par{v}; if($r!~m#(.*)=(.*)#) { die "$0: Bad rule in autokind: $r\n"; } @@ -153,14 +154,15 @@ if($par{autokind}) { if($par{'v'}) { - print "VERBOSE: Parameters\n"; + warn "VERBOSE: Parameters\n"; for my $k(sort keys %par) { - print " $k: $par{$k}\n" + warn " $k: $par{$k}\n" } } my %typeReact = (); for my $as(split /,/, $par{change}) { + warn "$0: processing change parameter '$as'\n" if $par{v}; if($as=~m#(.*)=(.*)#) { $typeReact{$1} = $2; } else { @@ -224,47 +226,35 @@ sub parseVersion { warn "$0: internal error (parseVersion called on undef at $c)\n"; return undef } - my ($pre,$ma,$mi,$p,$sp,$brn,$brv) = (); - if($v=~m#^([a-z]*)([0-9]+)$#) { - $pre = $1; - $ma = $2; - } elsif($v=~m#^([a-z]*)([0-9]+)\.([0-9]+)$#) { - $pre = $1; - $ma = $2; - $mi = $3 - } elsif($v=~m#^([a-z]*)([0-9]+)\.([0-9]+)\.([0-9]+)$#) { - $pre = $1; - $ma = $2; - $mi = $3; - $p = $4; - } elsif($v=~m#^([a-z]*)([0-9]+)\.([0-9]+)\.([0-9]+)-test-([a-z]+)-([0-9\.]+)$#) { - $pre = $1; - $ma = $2; - $mi = $3; - $p = $4; - $sp = $5; - $brn = $6; - $brv = $7; - } elsif($v=~m#^([a-z]*)([0-9]+)\.([0-9]+)\.([0-9]+)-(.*)$#) { - $pre = $1; - $ma = $2; - $mi = $3; - $p = $4; - $sp = $5; + my %cap = (); + if( + $v=~m#^(?
<pre>[a-z]*)(?<ma>[0-9]+)$# ||
+    $v=~m#^(?<pre>[a-z]*)(?<ma>[0-9]+)\.(?<mi>[0-9]+)$# ||
+    $v=~m#^(?<pre>[a-z]*)(?<ma>[0-9]+)\.(?<mi>[0-9]+)\.(?<p>[0-9]+)$# ||
+    $v=~m#^(?<pre>[a-z]*)(?<ma>[0-9]+)\.(?<mi>[0-9]+)\.(?<p>[0-9]+)-test-(?<sp>(?<brn>[a-z]+)-?(?<brv>[0-9\.]+))$# ||
+    $v=~m#^(?<pre>[a-z]*)(?<ma>[0-9]+)\.(?<mi>[0-9]+)\.(?<p>[0-9]+)-(?<sp>.*)$#
+    ) { + %cap = %+ +# my ($pre,$ma,$mi,$p,$sp,$brn,$brv) = (); } else { warn "$0: unexpected old version number: $v\n" if $par{v}; return undef } - $pre = 'v' if '' eq $pre; - return { - prefix=>$pre, - major=>$ma, - minor=>$mi, - patch=>$p, - subpatch=>$sp, - branchname=>$brn, - branchversion=>$brv, + $cap{pre} = 'v' if '' eq $cap{pre}; + my %ret = ( + prefix=>$cap{pre}, + major=>$cap{ma}, + minor=>$cap{mi}, + patch=>$cap{p}, + subpatch=>$cap{sp}, + branchname=>$cap{brn}, + branchversion=>$cap{brv}, + ); + if($par{v}) { + my $parsed = join '; ', map { "$_=>".($ret{$_}//'') } sort keys %ret; + warn "Version '$v' was parsed to '$parsed'\n" } + return \%ret } #@oldVersions = sort { @@ -304,7 +294,7 @@ sub vsCompare { ($v->{minor} // 0) <=> ($w->{minor} // 0) || ($v->{patch} // 0) <=> ($w->{patch} // 0) || ($v->{branchname} // '') cmp ($w->{branchname} // '') || - ($v->{branchversion} // '') <=> ($w->{branchversion} // '') || + ($v->{branchversion} // 0) <=> ($w->{branchversion} // 0) || ($v->{subpatch} // '') cmp ($w->{subpatch} // '') ) } elsif('v' eq $v->{prefix} and 'v' ne $w->{prefix}) { @@ -372,6 +362,7 @@ if('-' eq $par{vcslog}) { } my @versions = (); for my $v(@versionsOrig) { + warn "$0: Processing orig version (part 1): '$v'\n" if $par{v}; if($v=~m#^(.*?\S)\s*::::\s*(.*?)\s*::::\s*(.*)#) { push @versions, { hash => $1, @@ -389,6 +380,7 @@ my $tag = undef; my @versionPast = (); VERSION: for my $v(@versions) { + warn "$0: Processing version (part 2): $v\n" if $par{v}; #if($v->{meta}=~m#tag\s*:\s*\Q$par{kind}\E(.*)\)#) { # $tag=$1; # last VERSION @@ -417,6 +409,7 @@ VERSION: for my $v(@versions) { #$tag = parseVersion($tag); for my $r(reverse @change) { + warn "$0: Processing change: $r\n" if $par{v}; if('major' eq $r->{react}) { $tag->{major}++; $tag->{minor}=0; @@ -463,6 +456,7 @@ my $highStart = $mainVersion ?
$sortAll[0] : $sortSee[0]; my $highSee = $sortSee[0]; my %reactCollect = (); SEARCHVERSION: for my $v(@versions) { + warn "$0: search for version: '$v'\n" if $par{v}; next unless $v->{version}; next unless $v->{react}; $reactCollect{$v->{react}} = 1; @@ -474,10 +468,12 @@ SEARCHVERSION: for my $v(@versions) { sub justVersionInc { my ($v, $react) = @_; my $vv = parseVersion($v); - $vv->{patch}++ if $react->{patch}; + $vv->{patch}++; # if $react->{patch}; # in principle a good idea to increase only when a patch action happened, but we need a new version, even if nothing happened, so we always increase patch; if there are other changes as well, it is overwritten anyways do {$vv->{minor}++; $vv->{patch}=0} if $react->{minor}; do {$vv->{major}++; $vv->{minor}=0; $vv->{patch}=0} if $react->{major}; - return vsJustVersion($vv); + my $ret = vsJustVersion($vv); + warn "$0: version inc from '$v' to $ret\n" if $par{v}; + return $ret } my $newVersion = undef; @@ -500,6 +496,7 @@ for(@allVersions) { $allVersions{$_} = 1 } while(exists $allVersions{$newVersion}) { + warn "$0: Version conflict, so we try another version, '$newVersion' exists already\n" if $par{v}; if($mainVersion) { die "$0: probably internal error (collision in main version)\n" } @@ -529,6 +526,7 @@ if($par{changelog}) { my %seen = (); my @sects = ([]); for(@changelog) { + warn "$0: Changelog processing: '$_'\n" if $par{v}; push @sects, [] if m/^## /; push @{$sects[-1]}, $_; if(m#/commit/([a-f0-9]+)\s*\)\s*\)\s*$#) { @@ -542,6 +540,7 @@ if($par{changelog}) { shift @sects; } for my $s(@sects) { + warn "$0: Changelog processing, section search: '$s'\n" if $par{v}; my $hh = $s->[0]; chomp $hh; my $cnt = @$s; @@ -566,6 +565,7 @@ if($par{changelog}) { 'feature' => 'Features', ); SELECTCHANGELOG: for my $v(@versions) { + warn "$0: Changelog processing, version selection: '$v'\n" if $par{v}; last SELECTCHANGELOG if $seen{$v->{hash}}; next unless $v->{subject}=~m#^\s*([a-z]+)\s*(!?)\s*((?:\(.*?\))?)\s*:\s*(.*?)\s*$#i; my
($kind, $break, $context, $msg) = ($1, $2, $3, $4);