Compare commits
257 Commits
fradrive/m
...
master
| SHA1 | Author | Date | |
|---|---|---|---|
| 64df38f2a9 | |||
| 3c6a580808 | |||
| a3762ce938 | |||
| 573abcf43f | |||
| 89d2462974 | |||
| 599d2c1c7a | |||
| d2105c8894 | |||
| 7b486702f4 | |||
| bf88b19f1c | |||
| 5fd52768bc | |||
| 123e9eb057 | |||
| 5ab47c6c4f | |||
| fd6ba5b0c5 | |||
| f784f645a6 | |||
| 92ff99a36e | |||
| 5a5e4886b7 | |||
| 506b26ddd5 | |||
| aa664a5822 | |||
| 487069b29e | |||
| f5d701a871 | |||
| 579ea86503 | |||
| 9c1b074adf | |||
| a01398f1db | |||
| 020486819f | |||
| cd8a7a8322 | |||
| 79e0d5a642 | |||
| 27c5f61299 | |||
| 69b5818427 | |||
| e5e7612e9b | |||
| ebf71b7135 | |||
| e7c7ec7a82 | |||
| d55efd77f7 | |||
| 72f5a9fb37 | |||
| f1ec60a5b6 | |||
| 5c01ea36c1 | |||
| 26b34eee15 | |||
| efa55c8f72 | |||
| babee7afa2 | |||
| 0cee6f01e8 | |||
| fdd4283b0d | |||
| 0ac972bf22 | |||
| f40818c1cc | |||
| d33a792045 | |||
| 9ce3b5d146 | |||
| 570cfc238b | |||
| 4fb7a71cfc | |||
| b8f6581064 | |||
| c9613e2982 | |||
| 19c64616f0 | |||
| e403b6bfb2 | |||
| 87da02048c | |||
| 3dc74de68e | |||
| b4f85f155a | |||
| a97dc071a3 | |||
| ee5a79398f | |||
| e069d6be46 | |||
| 6b32cddeac | |||
| 553e699b56 | |||
| 479e807d6d | |||
| 9d1a97172e | |||
| 24196cc2cd | |||
| 9ba7a82449 | |||
| 695bd18763 | |||
| 7e61e56ae1 | |||
| 207a304192 | |||
| 1b8c6c33a7 | |||
| bee015c1f2 | |||
| 1521d08355 | |||
| bb047da360 | |||
| 6f90c04b1b | |||
| 1633c41b1d | |||
| 20faec8973 | |||
| 65cb75921d | |||
| 8104ce96cc | |||
| 396b365a31 | |||
| ff3d33bbf4 | |||
| 82ea44c63f | |||
| 926dbae09a | |||
| eb67136c0c | |||
| b37954d452 | |||
| 5752b4b8da | |||
| 3929bccd6e | |||
| aae5a5f997 | |||
| 1fc948711a | |||
| 088587549d | |||
| bc63324ddf | |||
| 9a12e00f7f | |||
| 06cd87f360 | |||
| 095002637f | |||
| ffae8553d5 | |||
| 2ad61c73f1 | |||
| 7dcd0b7297 | |||
| 5881973906 | |||
| d7dcf0acf5 | |||
| 1f484f7781 | |||
| 9fbab25ecc | |||
| be18af08c6 | |||
| b12de8be15 | |||
| 4156b3b553 | |||
| 7b7c0d4053 | |||
| 6441bc5562 | |||
| 2a1cff4cd0 | |||
| aefafa32d1 | |||
| 8e0eb401b5 | |||
| 8adcdf69fe | |||
| d6b4afe975 | |||
| cce4b2b27d | |||
| 12e01238c4 | |||
| 130f592491 | |||
| d1dcdcfe91 | |||
| 5e0df28444 | |||
| 0a4ad611c7 | |||
| 2109996387 | |||
| 4f5c7d56b0 | |||
| 05bc06df47 | |||
| 38606949b0 | |||
| f3f2f397fc | |||
| d06bc10408 | |||
| 490b89e174 | |||
| d5bbec9fa3 | |||
| fbd99f2394 | |||
| 593ee2cf76 | |||
| 2360375385 | |||
| 56aa06097b | |||
| e9fefa75bd | |||
| 0ffd594a04 | |||
| ab340aa715 | |||
| 5128f9b74c | |||
| 558402ac47 | |||
| 1b35c57660 | |||
| 56b8a8de86 | |||
| 48096c6b81 | |||
| e8a21610a4 | |||
| c1ed89a30b | |||
| cf8fc90db7 | |||
| b26dd285df | |||
| e5cf120af2 | |||
| ad12b8f927 | |||
| 9fe78541d7 | |||
| a0604637bd | |||
| 5e41c2073f | |||
| 50c7d18b53 | |||
| f996976f65 | |||
| f04a40c0a3 | |||
| 6cc929e377 | |||
| 11bcef67f8 | |||
| 7b7ffab109 | |||
| afbeb86762 | |||
| 9af4a3a22e | |||
| 4241c75afe | |||
| afa2f9bf0e | |||
| f44d66cb91 | |||
| 1d68ed9c5e | |||
| cd84d0a932 | |||
| f467f6086e | |||
| 4f524bd8d2 | |||
| aaf72f7255 | |||
| 36a3b04ad8 | |||
| ae6d3b0fc3 | |||
| 6f3dd408bb | |||
| c0c1665ccb | |||
| c2e0f6b2b8 | |||
| a1d7f16427 | |||
| 5f3d8a88e2 | |||
| b42e93e891 | |||
| e53be8ddf9 | |||
| 6d583fe8c4 | |||
| 954e86c95a | |||
| f47528c741 | |||
| ad1d235bea | |||
| d4d915bd60 | |||
| 07cfc0adcb | |||
| f6c82009ee | |||
| c08d6ae0d0 | |||
| 6a0876ae55 | |||
| 75a4f52a80 | |||
| 1f7e9b6a2f | |||
| 564488d5fa | |||
| 500c9a749a | |||
| ede00deb86 | |||
| 9eb075836f | |||
| d546e5da0f | |||
| 162c44a44f | |||
| 0a29540089 | |||
| 6f1ad811f7 | |||
| d2f69dc023 | |||
| cd76bdd4e7 | |||
| e1dca7d6b0 | |||
| 5d46479a33 | |||
| fcf1b6d9d8 | |||
| 7e09636a2b | |||
| 8317f682d8 | |||
| 85511091cc | |||
| 02d10006fc | |||
| 2fdb132140 | |||
| fba0b71d50 | |||
| 4934f5f89d | |||
| 133a8d3739 | |||
| e8af2b8da9 | |||
| ac766ea217 | |||
| a113d43089 | |||
| 14140c982b | |||
| ce125b6495 | |||
| 0c78996260 | |||
| b78c898ebf | |||
| 4bca7580d0 | |||
| 46f777740f | |||
| a7b08b1ae5 | |||
| 452cdf4442 | |||
| f6b87a09b0 | |||
| 0a5b0fceff | |||
| 3e6717904a | |||
| 2059d678ee | |||
| 225af31943 | |||
| 9d26c1c171 | |||
| 22d6cf737e | |||
| 35cadda2e8 | |||
| 692350677f | |||
| 36b481a548 | |||
| cb58c20ca1 | |||
| e757209b80 | |||
| 74c330bd24 | |||
| cac0a47d01 | |||
| 5c70b1099c | |||
| 26ea39dc67 | |||
| 2385d989a8 | |||
| 0105aa8c3f | |||
| 3bae365b37 | |||
| 1d01897757 | |||
| ce62b99d2b | |||
| 4dbe005709 | |||
| 11ef856b2b | |||
| 5a03d1cabe | |||
| 0264c87510 | |||
| e9a4c838a8 | |||
| 733324a732 | |||
| 757d383d33 | |||
| ee02a50bdd | |||
| c859974495 | |||
| ce35c8efc9 | |||
| 3151be6f41 | |||
| e90d11682b | |||
| 9a281f040e | |||
| 0b70036a02 | |||
| 3b0029ba04 | |||
| e554048f5a | |||
| e59fff352f | |||
| e9d4174b83 | |||
| 90613faf72 | |||
| d92d23bc99 | |||
| 4959736c90 | |||
| 547f34d2ec | |||
| 08788427a8 | |||
| 6d1b177ce9 | |||
| e1a02879d6 | |||
| 97446aa9ef | |||
| 776e6b6736 |

.azure-pipelines/templates/jobs/release.yaml (new file, 65 lines)
@@ -0,0 +1,65 @@
# SPDX-FileCopyrightText: 2025 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

parameters:
- name: releaseTag
  type: string
- name: releaseEndpoint
  type: string
  default: 'devfra'
  values:
  - 'devfra'
  - 'prodfra'

jobs:
- job: Release
  displayName: Release ${{ parameters.releaseTag }}
  container:
    image: devfra.azurecr.io/de.fraport.build/tools:1.1.0
    endpoint: devfra
  steps:

  # Download required artifacts from pipeline
  - task: DownloadPipelineArtifact@2
    displayName: Download FraDrive binaries
    inputs:
      artifactName: Build_backend
      patterns: 'Build_backend/bin/*'
      targetPath: '$(Build.Repository.LocalPath)'

  - task: Docker@2
    displayName: Login to container registry
    inputs:
      command: login
      containerRegistry: '${{ parameters.releaseEndpoint }}'
  - task: Bash@3
    displayName: Build FraDrive container
    inputs:
      targetType: inline
      script: |
        cp docker/fradrive/Dockerfile .
        docker build \
          --tag $(buildImageUpstream)/fradrive:$(Build.BuildNumber) \
          --tag $(buildImageUpstream)/fradrive:${{parameters.releaseTag}} \
          --build-arg FROM_IMG=devfra.azurecr.io/de.fraport.trusted/ubuntu \
          --build-arg FROM_TAG=20.04 \
          --build-arg PROJECT_DIR=$(Build.Repository.LocalPath) \
          --build-arg IN_CI=true \
          --build-arg IN_CONTAINER=true \
          --build-arg HTTPS_PROXY=http://proxy.frankfurt-airport.de:8080 \
          --build-arg HTTP_PROXY=http://proxy.frankfurt-airport.de:8080 \
          --build-arg NO_PROXY='localhost,127.0.0.1,*.docker.internal,*.azmk8s.io,devfra.azurecr.io,devfra.westeurope.data.azurecr.io' \
          --build-arg FRAPORT_NOPROXY=dev.azure.com,*.dev.azure.com,*.fraport.de,*.frankfurt-airport.de \
          .
  - task: Docker@2
    displayName: Push container to registry
    inputs:
      command: push
      repository: 'de.fraport.fradrive.build/fradrive'
      tags: '$(Build.BuildNumber),${{parameters.releaseTag}}'
  - task: Docker@2
    displayName: Logout from container registry
    inputs:
      command: logout
      containerRegistry: '${{ parameters.releaseEndpoint }}'
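
For orientation, a minimal sketch of how a pipeline could consume this job template; the stage name and parameter values are hypothetical examples, not taken from this diff, and the template path assumes the including pipeline lives in `.azure-pipelines/`:

```yaml
# Hypothetical usage of the release job template (example values only).
stages:
- stage: Release
  jobs:
  - template: templates/jobs/release.yaml   # path relative to .azure-pipelines/ (assumption)
    parameters:
      releaseTag: 27.4.59-0.0.20            # example tag
      releaseEndpoint: prodfra              # must be one of the declared values: devfra, prodfra
```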

.azure-pipelines/templates/jobs/setup_dependencies.yaml (new file, 61 lines)
@@ -0,0 +1,61 @@
# SPDX-FileCopyrightText: 2024-2025 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

parameters:
- name: serviceName
  type: string
- name: dependenciesCaches
  type: object
  default: []
- name: dependenciesBuildPool
  type: string
  default: 'Prod Private Agent Pool'
  values:
  - 'Prod Private Agent Pool'
  - 'Prod Private Agent Pool DS2'
  - 'Prod Private Agent Pool DS3'
- name: dependenciesBuildCores
  type: number
  default: 1
- name: dependenciesBuildTimeout
  type: number
  default: 60

jobs:
- job: SetupDependencies_${{parameters.serviceName}}
  displayName: Install ${{parameters.serviceName}} dependencies
  dependsOn: SetupImage_${{parameters.serviceName}}
  ${{ if eq(variables.setupImages, true) }}:
    condition: succeeded()
  ${{ else }}:
    condition: always()
  pool: '${{parameters.dependenciesBuildPool}}'
  timeoutInMinutes: ${{parameters.dependenciesBuildTimeout}}
  container:
    ${{ if variables.setupImages }}:
      image: $(buildImageUpstream)/${{parameters.serviceName}}:$(Build.BuildNumber)
    ${{ else }}:
      image: $(buildImageUpstream)/${{parameters.serviceName}}:latest
    endpoint: devfra
    env:
      PROJECT_DIR: $(Build.Repository.LocalPath)
      IN_CONTAINER: true
      IN_CI: true
  steps:
  # Restore previously-built dependencies from caches
  - ${{ each cache in parameters.dependenciesCaches }}:
    - template: ./../../steps/cache.yaml
      parameters:
        cacheIdent: '${{parameters.serviceName}}-dependencies'
        cacheKeys: '${{cache.key}}'
        cachePath: '${{cache.path}}'

  # Compile dependencies
  - template: ./../../steps/make.yaml
    parameters:
      makeJob: dependencies
      makeService: ${{parameters.serviceName}}
      makeVars: 'CPU_CORES=${{parameters.dependenciesBuildCores}} STACK_CORES=-j${{parameters.dependenciesBuildCores}}'

  # (Note: a post-job for updating the dependency cache is automatically created, so no further step is due here.)
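
A minimal sketch of invoking this template; each `dependenciesCaches` entry must carry the `key` and `path` fields consumed above, and the concrete key files, paths, and core count below are placeholders:

```yaml
# Hypothetical usage of the dependency-setup job template (cache keys/paths are placeholders).
jobs:
- template: templates/jobs/setup_dependencies.yaml   # path relative to .azure-pipelines/ (assumption)
  parameters:
    serviceName: backend
    dependenciesBuildCores: 4
    dependenciesCaches:
    - key: backend/stack.yaml        # example cache key file
      path: backend/.stack-work      # example cached directory
```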

.azure-pipelines/templates/jobs/setup_image.yaml (new file, 72 lines)
@@ -0,0 +1,72 @@
# SPDX-FileCopyrightText: 2024-2025 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

parameters:
- name: imageName
  type: string
- name: imageBase
  type: object

jobs:
- job: SetupImage_${{parameters.imageName}}
  displayName: Build ${{parameters.imageName}} image
  condition: eq(variables.setupImages, true)
  container:
    image: devfra.azurecr.io/de.fraport.build/tools:1.1.0
    endpoint: devfra
  steps:
  - task: Docker@2
    displayName: Login to container registry
    inputs:
      command: login
      containerRegistry: devfra
  - task: Bash@3
    displayName: Build ${{parameters.imageName}} image
    inputs:
      targetType: inline
      script: |
        cp docker/${{parameters.imageName}}/Dockerfile .
        docker build \
          --tag $(buildImageUpstream)/${{parameters.imageName}}:$(Build.BuildNumber) \
          --build-arg FROM_IMG=${{parameters.imageBase.image}} \
          --build-arg FROM_TAG=${{parameters.imageBase.tag}} \
          --build-arg HTTPS_PROXY=http://proxy.frankfurt-airport.de:8080 \
          --build-arg HTTP_PROXY=http://proxy.frankfurt-airport.de:8080 \
          --build-arg NO_PROXY='localhost,127.0.0.1,*.docker.internal,*.azmk8s.io,devfra.azurecr.io,devfra.westeurope.data.azurecr.io' \
          --build-arg FRAPORT_NOPROXY=dev.azure.com,*.dev.azure.com,*.fraport.de,*.frankfurt-airport.de \
          --build-arg PROJECT_DIR=$(Build.Repository.LocalPath) \
          --build-arg IN_CI=true \
          --build-arg IN_CONTAINER=true \
          .
  - task: Bash@3
    displayName: Push ${{parameters.imageName}} image
    inputs:
      targetType: inline
      script: |
        docker push $(buildImageUpstream)/${{parameters.imageName}}:$(Build.BuildNumber)
  - task: Bash@3
    displayName: Update latest ${{parameters.imageName}} image
    condition: or(eq(variables.forcePushLatest, true), eq(variables['Build.SourceBranch'], 'refs/heads/master'))
    inputs:
      targetType: inline
      script: |
        docker tag $(buildImageUpstream)/${{parameters.imageName}}:$(Build.BuildNumber) $(buildImageUpstream)/${{parameters.imageName}}:latest
        docker push $(buildImageUpstream)/${{parameters.imageName}}:latest
  - task: Bash@3
    displayName: Save image for publication
    inputs:
      targetType: inline
      script: |
        docker image save --output=$(Build.ArtifactStagingDirectory)/${{parameters.imageName}}.tar $(buildImageUpstream)/${{parameters.imageName}}:$(Build.BuildNumber)
  - task: PublishBuildArtifacts@1
    displayName: Publish image as artifact
    inputs:
      PathtoPublish: '$(Build.ArtifactStagingDirectory)'
      ArtifactName: Image_${{parameters.imageName}}
      publishLocation: 'Container'
  - task: Docker@2
    displayName: Logout from container registry
    inputs:
      command: logout
      containerRegistry: devfra
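
A minimal sketch of invoking the image-setup template; `imageBase` must provide the `image` and `tag` keys read above, and the concrete base image values here are placeholders:

```yaml
# Hypothetical usage of the image-setup job template (base image values are placeholders).
jobs:
- template: templates/jobs/setup_image.yaml   # path relative to .azure-pipelines/ (assumption)
  parameters:
    imageName: backend
    imageBase:
      image: devfra.azurecr.io/de.fraport.trusted/ubuntu
      tag: '20.04'
```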

.azure-pipelines/templates/service.yaml (new executable file, 141 lines)
@@ -0,0 +1,141 @@
# SPDX-FileCopyrightText: 2024-2025 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

parameters:
- name: serviceName
  type: string
  default: serviceName
- name: serviceBase
  type: object
  default:
    image: baseImage
    tag: baseImageTag
- name: servicePool
  type: string
  default: 'Prod Private Agent Pool'
- name: serviceTimeout
  type: number
  default: 60
# extraBuildOptions: ''
- name: serviceDependsOn
  type: object
  default: []
- name: serviceRequiredArtifacts
  type: object
  default: []
- name: serviceArtifacts
  type: string
  default: ''
- name: buildSteps
  type: object

stages:
- stage: ${{ parameters.serviceName }}
  dependsOn: ${{ parameters.serviceDependsOn }}
  pool: '${{ parameters.servicePool }}'
  jobs:
  - job: ImageBuild_${{parameters.serviceName}}
    displayName: Build ${{parameters.serviceName}} image
    condition: or(eq(variables.forcePushLatest, true), eq(variables.onMasterBranch, true), eq(variables.onUpdateBranch, true))
    container:
      image: devfra.azurecr.io/de.fraport.build/tools:1.1.0
      endpoint: devfra
    steps:
    - checkout: self
    - task: Docker@2
      displayName: Login to container registry
      inputs:
        command: login
        containerRegistry: devFra
    - script: |
        ls -a .
        pwd
        find .
    - task: Bash@3
      displayName: Build ${{parameters.serviceName}} image
      inputs:
        targetType: inline
        script: |
          cp docker/${{parameters.serviceName}}/Dockerfile .
          docker build \
            --tag $(buildImageUpstream)/${{parameters.serviceName}}:$(Build.BuildNumber) \
            --build-arg FROM_IMG=${{parameters.serviceBase.image}} \
            --build-arg FROM_TAG=${{parameters.serviceBase.tag}} \
            --build-arg HTTPS_PROXY=http://proxy.frankfurt-airport.de:8080 \
            --build-arg HTTP_PROXY=http://proxy.frankfurt-airport.de:8080 \
            --build-arg NO_PROXY='localhost,127.0.0.1,*.docker.internal,*.azmk8s.io,devfra.azurecr.io,devfra.westeurope.data.azurecr.io' \
            --build-arg FRAPORT_NOPROXY=dev.azure.com,*.dev.azure.com,*.fraport.de,*.frankfurt-airport.de \
            --build-arg PROJECT_DIR=$(Build.Repository.LocalPath) \
            --build-arg IN_CI=true \
            --build-arg IN_CONTAINER=true \
            .
    - task: Bash@3
      displayName: Push ${{ parameters.serviceName }} image
      inputs:
        targetType: inline
        script: |
          docker push $(buildImageUpstream)/${{parameters.serviceName}}:$(Build.BuildNumber)
    - task: Bash@3
      displayName: Update latest ${{parameters.serviceName}} image
      condition: or(eq(variables.forcePushLatest, true), eq(variables.onMasterBranch, true))
      inputs:
        targetType: inline
        script: |
          docker tag $(buildImageUpstream)/${{parameters.serviceName}}:$(Build.BuildNumber) $(buildImageUpstream)/${{parameters.serviceName}}:latest
          docker push $(buildImageUpstream)/${{parameters.serviceName}}:latest
    - task: Docker@2
      displayName: Logout from container registry
      inputs:
        command: logout
        containerRegistry: devFra

  - job: Build_${{parameters.serviceName}}
    displayName: Build ${{parameters.serviceName}}
    dependsOn:
    - ImageBuild_${{parameters.serviceName}}
    condition: in(dependencies.ImageBuild_${{parameters.serviceName}}.result, 'Succeeded', 'Skipped')
    timeoutInMinutes: ${{ parameters.serviceTimeout }}
    container:
      # TODO: use BuildNumber instead of latest in update branches
      # image: devfra.azurecr.io/de.fraport.fradrive.build/frontend:$(Build.BuildNumber)
      image: $(buildImageUpstream)/${{parameters.serviceName}}:latest
      endpoint: devfra
      env:
        PROJECT_DIR: $(Build.Repository.LocalPath)
        IN_CONTAINER: true
        IN_CI: true
    steps:
    - checkout: self
    - ${{ each dependency in parameters.serviceRequiredArtifacts }}:
      - task: DownloadPipelineArtifact@2
        displayName: Download artifacts from ${{ dependency.name }} dependency
        continueOnError: ${{ dependency.continueOnError }}
        condition: ${{ dependency.condition }}
        inputs:
          artifactName: ${{ dependency.artifact }}
          source: ${{ dependency.source }}
          project: 'Fahrerausbildung'
          pipeline: $(System.DefinitionId)
          buildVersionToDownload: '${{ dependency.version }}'
          tags: '${{ dependency.artifact }}'
          allowPartiallySucceededBuilds: true
          allowFailedBuilds: true
          patterns: '${{ dependency.patterns }}'
          targetPath: '$(Build.Repository.LocalPath)'
    - ${{ each buildStep in parameters.buildSteps }}:
      - template: ./service/build-step.yaml
        parameters:
          service: ${{ parameters.serviceName }}
          buildStep: ${{ buildStep }}
    - task: CopyFiles@2
      displayName: Copy ${{parameters.serviceName}} artifacts
      inputs:
        Contents: ${{ parameters.serviceArtifacts }}
        TargetFolder: '$(Build.ArtifactStagingDirectory)'
    - task: PublishBuildArtifacts@1
      displayName: Publish ${{parameters.serviceName}} artifacts
      inputs:
        PathtoPublish: '$(Build.ArtifactStagingDirectory)'
        ArtifactName: '${{parameters.serviceName}}'
        publishLocation: 'Container'
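
A minimal sketch of including this stage template from a top-level pipeline; all values are illustrative, and the shape of `buildSteps` entries depends on `service/build-step.yaml`, which is not part of this diff:

```yaml
# Hypothetical usage of the service stage template (all values illustrative).
stages:
- template: templates/service.yaml   # path relative to .azure-pipelines/ (assumption)
  parameters:
    serviceName: backend
    serviceBase:
      image: devfra.azurecr.io/de.fraport.trusted/ubuntu
      tag: '20.04'
    serviceTimeout: 120
    serviceArtifacts: 'bin/**'        # example artifact glob
    buildSteps:
    - {}   # entry shape is defined by service/build-step.yaml (not shown in this diff)
```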

.azure-pipelines/templates/steps/artifact-download.yaml (new file, 15 lines)
@@ -0,0 +1,15 @@
# SPDX-FileCopyrightText: 2025 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

parameters:
- name: artifactName
  type: string

steps:
- task: DownloadPipelineArtifact@2
  displayName: Download artifacts from ${{parameters.artifactName}}
  inputs:
    source: 'current'
    artifactName: '${{parameters.artifactName}}'
    targetPath: '$(Build.Repository.LocalPath)'
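
A minimal sketch of this step template in use; the artifact name reuses `Build_backend` from the release template above and is otherwise an example:

```yaml
# Hypothetical usage of the artifact-download step template.
steps:
- template: templates/steps/artifact-download.yaml   # path relative to .azure-pipelines/ (assumption)
  parameters:
    artifactName: Build_backend
```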

.azure-pipelines/templates/steps/cache.yaml (new file, 18 lines)
@@ -0,0 +1,18 @@
# SPDX-FileCopyrightText: 2025 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

parameters:
- name: cacheIdent
  type: string
- name: cacheKeys
  type: string
- name: cachePath
  type: string

steps:
- task: Cache@2
  displayName: Restore ${{parameters.cacheIdent}} cache
  inputs:
    key: '"${{parameters.cacheIdent}}" | ${{parameters.cacheKeys}}'
    path: '${{parameters.cachePath}}'
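
A minimal sketch of the cache step in use, mirroring how setup_dependencies.yaml calls it; the key file and path are placeholders:

```yaml
# Hypothetical usage of the cache step template (key/path are placeholders).
steps:
- template: templates/steps/cache.yaml   # path relative to .azure-pipelines/ (assumption)
  parameters:
    cacheIdent: backend-dependencies
    cacheKeys: backend/stack.yaml
    cachePath: backend/.stack-work
```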

.azure-pipelines/templates/steps/make.yaml (new file, 35 lines)
@@ -0,0 +1,35 @@
# SPDX-FileCopyrightText: 2024-2025 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

parameters:
- name: makeJob
  type: string
  values:
  - dependencies
  - compile
  - lint
  - test
- name: makeService
  type: string
  values:
  - frontend
  - backend
- name: makeVars
  type: string
  default: ''

steps:
- task: Bash@3
  name: ${{parameters.makeJob}}_${{parameters.makeService}}
  displayName: make ${{parameters.makeJob}}-${{parameters.makeService}}
  env:
    HTTPS_PROXY: http://proxy.frankfurt-airport.de:8080
    HTTP_PROXY: http://proxy.frankfurt-airport.de:8080
    NO_PROXY: 'localhost,127.0.0.1,*.docker.internal,*.azmk8s.io,devfra.azurecr.io,devfra.westeurope.data.azurecr.io'
    FRAPORT_NOPROXY: 'dev.azure.com,*.dev.azure.com,*.fraport.de,*.frankfurt-airport.de'
    PROJECT_DIR: $(Build.Repository.LocalPath)
  inputs:
    targetType: inline
    script: |
      make -- --${{parameters.makeJob}}-${{parameters.makeService}} IN_CONTAINER=true IN_CI=true PROJECT_DIR=${PROJECT_DIR} ${{parameters.makeVars}}
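
A minimal sketch of the make step in use; with these example parameters it would run `make -- --compile-backend IN_CONTAINER=true IN_CI=true ...` inside the job container, and `makeVars` is an illustrative value:

```yaml
# Hypothetical usage of the make step template (makeVars chosen as an example).
steps:
- template: templates/steps/make.yaml   # path relative to .azure-pipelines/ (assumption)
  parameters:
    makeJob: compile
    makeService: backend
    makeVars: 'CPU_CORES=4 STACK_CORES=-j4'
```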

@@ -1,3 +0,0 @@
SPDX-FileCopyrightText: 2022 Felix Hamann <felix.hamann@campus.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>

SPDX-License-Identifier: AGPL-3.0-or-later

@@ -1,3 +0,0 @@
SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>

SPDX-License-Identifier: AGPL-3.0-or-later

.gitignore (vendored, 21 lines changed)
@@ -2,9 +2,12 @@
dist*
develop
node_modules/
assets/icons
assets/favicons
.npm/
.node_repl_history
**/assets/icons
**/assets/favicons
bin/
assets/fonts/
*.hi
*.o
*.sqlite3
@@ -39,19 +42,19 @@ src/Handler/Assist.bak
src/Handler/Course.SnapCustom.hs
*.orig
/instance
backend/instance
.stack-work-*
.stack-work.lock
.directory
tags
test.log
*.dump-splices
/.stack-work.lock
/.npmrc
/.npm/
/config/manifest.json
tunnel.log
/static
/well-known
/.well-known-cache
static
well-known
.well-known-cache
manifest.json
/.nix-well-known
/**/tmp-*
/testdata/bigAlloc_*.csv
@@ -65,4 +68,4 @@ tunnel.log
**/result-*
.develop.cmd
/.vscode
.ghc/ghci_history
backend/.ghc/ghci_history

@@ -1,3 +0,0 @@
SPDX-FileCopyrightText: 2022 Gregor Kleen <gregor.kleen@ifi.lmu.de>

SPDX-License-Identifier: AGPL-3.0-or-later

@@ -7,33 +7,33 @@ const standardVersionUpdaterYaml = require.resolve('standard-version-updater-yaml')
module.exports = {
  scripts: {
    // postbump: './sync-versions.hs && git add -- package.yaml', // moved to bumpFiles
    postchangelog: 'sed \'s/^### \\[/## [/g\' -i CHANGELOG.md'
    postchangelog: 'sed \'s/^### \\[/## [/g\' -i CHANGELOG.md',
  },
  packageFiles: ['package.json', 'package.yaml'],
  bumpFiles: [
    {
      filename: 'package.json',
      type: 'json'
      type: 'json',
    },
    {
      filename: 'package-lock.json',
      type: 'json'
      type: 'json',
    },
    {
      filename: 'package.yaml',
      updater: standardVersionUpdaterYaml
      updater: standardVersionUpdaterYaml,
    },
    {
      filename: 'nix/docker/version.json',
      type: 'json'
      type: 'json',
    },
    {
      filename: 'nix/docker/demo-version.json',
      type: 'json'
    }
      type: 'json',
    },
  ],
  commitUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/{{hash}}',
  compareUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/{{previousTag}}...{{currentTag}}',
  issueUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/{{id}}',
  userUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/{{user}}'
  userUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/{{user}}',
};

CHANGELOG.md (143 lines changed)
@@ -2,6 +2,16 @@

All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.

## [27.4.59-0.0.20+145-build-system-rewrite](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/27.4.59-0.0.19+145-build-system-rewrite...27.4.59-0.0.20+145-build-system-rewrite) (2025-03-19)

## [27.4.59-0.0.19+145-build-system-rewrite](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/27.4.59-0.0.18+145-build-system-rewrite...27.4.59-0.0.19+145-build-system-rewrite) (2025-03-17)

## [27.4.59-0.0.18+145-build-system-rewrite](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-g0.0.17...27.4.59-0.0.18+145-build-system-rewrite) (2025-03-17)

### Bug Fixes

* **static:** fix addStaticContent by using memcached again to supply static files ([570cfc2](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive/commit/570cfc238bdccd3438124f96290b9272c8e82f0f))

## [v27.4.59-test-g0.0.17](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-f0.0.17...v27.4.59-test-g0.0.17) (2025-02-18)

## [v27.4.59-test-f0.0.17](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-g0.0.16...v27.4.59-test-f0.0.17) (2025-02-17)
@@ -16,6 +26,48 @@ All notable changes to this project will be documented in this file. See [standa

## [v27.4.59-test-f0.0.14](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-e0.0.14...v27.4.59-test-f0.0.14) (2025-02-14)

## [v27.4.59-test-e0.0.14](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-e0.0.13...v27.4.59-test-e0.0.14) (2025-02-13)

## [v27.4.59-test-e0.0.13](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-e0.0.12...v27.4.59-test-e0.0.13) (2025-02-12)

## [v27.4.59-test-e0.0.12](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-d0.0.12...v27.4.59-test-e0.0.12) (2025-02-12)

## [v27.4.59-test-d0.0.12](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-d0.0.11...v27.4.59-test-d0.0.12) (2025-02-11)

## [v27.4.59-test-d0.0.11](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-c0.0.11...v27.4.59-test-d0.0.11) (2025-02-11)

## [v27.4.59-test-c0.0.11](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-b0.0.11...v27.4.59-test-c0.0.11) (2025-02-11)

## [v27.4.59-test-b0.0.11](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-c0.0.10...v27.4.59-test-b0.0.11) (2025-02-11)

## [v27.4.59-test-c0.0.10](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-b0.0.10...v27.4.59-test-c0.0.10) (2025-02-11)

## [v27.4.59-test-b0.0.10](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.10...v27.4.59-test-b0.0.10) (2025-02-11)

## [v27.4.59-test-a0.0.10](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.9...v27.4.59-test-a0.0.10) (2025-02-11)

## [v27.4.59-test-a0.0.9](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.8...v27.4.59-test-a0.0.9) (2025-02-10)

## [v27.4.59-test-a0.0.8](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.7...v27.4.59-test-a0.0.8) (2025-02-10)

## [v27.4.59-test-a0.0.7](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.6...v27.4.59-test-a0.0.7) (2025-02-10)

## [v27.4.59-test-a0.0.6](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.5...v27.4.59-test-a0.0.6) (2025-02-08)

## [v27.4.59-test-a0.0.5](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.4...v27.4.59-test-a0.0.5) (2025-02-07)

## [v27.4.59-test-a0.0.4](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.3...v27.4.59-test-a0.0.4) (2025-02-07)

## [v27.4.59-test-a0.0.3](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.2...v27.4.59-test-a0.0.3) (2025-02-06)

## [v27.4.59-test-a0.0.2](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.1...v27.4.59-test-a0.0.2) (2025-02-05)

## [v27.4.59-test-a0.0.1](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.0...v27.4.59-test-a0.0.1) (2025-02-05)

### Bug Fixes

* **ghci:** ghci works now as expected ([c3117db](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive/commit/c3117dbdcd1de9ef9f0751afa45018e2ebce2c42))

## [v27.4.59-test-a0.0.0](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59...v27.4.59-test-a0.0.0) (2024-10-25)

### Features
@@ -23,97 +75,6 @@ All notable changes to this project will be documented in this file. See [standa
* **util script:** Util script for renaming of files added. ([caf8fec](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive/commit/caf8fec5acb94df16293bf9aa0cdab766f8829e8))
* **frontend:** load icons from svg files ([22781e1](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive/commit/22781e1565e890cf6c5b40973146b0334cb667aa))

### Bug Fixes

* **stack.yaml:** move to uniworx.de gitlab ([55484e6](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive/commit/55484e631b786ea3710d322282019baf5292c243))
* **utils/renamer:** More output only in verbose mode. ([ac30cb9](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive/commit/ac30cb9e6712d0ee3f204da4863d1e2509af8a76))
* **utils:** Added verbose parameter -v; recursive makedir; more precise messages. ([1806d9f](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive/commit/1806d9f01fc4a0746d2f9df42ef1ee6827c7fa09))
* **Dockerfile:** change rights of source dir to env user ([e7a8183](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive/commit/e7a8183656ae419cfee2942543045c6fa6a9caa3))
* **Makefile:** add missing dependency on well-known for backend-builds ([a09dc59](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive/commit/a09dc59f260843f8815c382576bb5254d21104bf))
* **frontend:** fixed icon colour in table headers ([4c4571d](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive/commit/4c4571d2d0879e89f2572eba6015d34a7f4794c8))
* **doc:** minor haddock problems ([d4f8a6c](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive/commit/d4f8a6c77b2a4a4540935f7f0beca0d0605508c8))
## [27.4.76](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v27.4.75...v27.4.76) (2024-08-08)


### Bug Fixes

* **ap:** disambiguate action message ([8b0466e](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/8b0466e74e36e1d0d07518fd317d46b00ab53eff))
* **avs:** fix [#173](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/173) by not using firm superior email as display email ([43f5c5f](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/43f5c5f4854d1ab2af27b479e72a58e2818a5696))
* **avs:** towards [#117](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/117) update if current value is Nothing even if oldval == newval ([d1fa01f](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/d1fa01fcc5125c4adee8849f9c944884926f78ad))
* **avs:** using firm superior as UserEmail is a no-go due to uniqueness constraints ([507a7e0](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/507a7e02fc68476d01031dc9f9ee1a669a453ed1))
* **build:** linter likes it ([f929e03](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/f929e03129378e08c8a08ed4bd6f8e8716401813))
* **course:** fix [#150](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/150) course edit for associated qualifications requires school admin or lecturer rights ([5b6e4e6](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/5b6e4e60e7d2957fbce93ee2e2d6d3464b4e3db7))
* **course:** fix [#148](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/148) course qualification ordering ([cfd2534](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/cfd25348ad3b63ac6bc5031467a3c4ead2e07eed)), closes [#150](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/150)
* **course:** fix [#149](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/149) course cloning proposes associated qualifications ([e141976](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/e1419766f3a06f702abad0ea42f6552305504ba0))
* **course:** fix [#150](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/150) no longer allow duplicated associated qualifications and orders due to editing existing ([ec02767](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/ec027675525b30198378745ed281f60a42471807))
* **course:** WIP course cloning should propose same associated qualifications, towards [#149](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/149) ([bc47387](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/bc47387c91dda60a2f12e52dba28ea7b079316f0))
* **lms:** max e-learning tries default removed and info added to lms overview ([11fdcf0](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/11fdcf0d445b8cfe97c3a3c26513a9229937c536))
* **user:** format userDisplayNames having umlaut substitutes with respect to userSurname correctly ([e35a5e9](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/e35a5e99a6cea0976fd1c28f919e7d0ac0338503))

## [27.4.75](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v27.4.74...v27.4.75) (2024-07-12)


### Bug Fixes

* **build:** make linter happy again ([c17c18f](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/c17c18f9247ef322bc051602a3cb4a52cd50affa))
* **build:** minor ([ab28c8c](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/ab28c8c2437680023d80e6ab43113d4328b3a151))
* **firm:** fix [#157](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/157) by removing redundant duplicated code in firm user and supervision handling ([28e2739](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/28e2739e515700d15c75647c0efe2fe9a9cf15b1))
* **job:** change some queueJob' to queueJob instead ([fa0541a](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/fa0541aa4eaf10f98535a0959593b148b8346109))
* **lms:** allow 2nd reminders to be independent of renewal period ([d853e85](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/d853e8559b753865ee818bf24764f5c8d2e2303f))
* **lms:** move lms reuse info from QualificationR to LmsR ([468af9d](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/468af9de9da44a8ad685ca4bb6890a3e630b58be))
* **lms:** send second reminder indepentently from renewal period ([a97c3a5](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/a97c3a5c9d3cb9dddf90f561712f0845400893bd))
* **nix:** workaround parsing port numbers failed in nix-shell ([b5215cc](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/b5215cc7e8df3a7ad636271c8e6950979b2b8e42))
* **users:** nameHtml no longer complains about differing case for surname and displayname ([a1668f8](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/a1668f891a36b887439afb098f016ef22535af42))
* **users:** remove users with company post address from list of unreachable users ([c813c66](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/c813c665ed306135b7813d91d23310341c689f41))

## [27.4.74](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v27.4.73...v27.4.74) (2024-07-04)


### Bug Fixes

* **lms:** fix [#161](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/161) lms for multiple joint qualifications ([f869a82](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/f869a829d2c1a726930864b3af62d1f0fbebe955))

## [27.4.73](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v27.4.72...v27.4.73) (2024-07-03)


### Bug Fixes

* **letter:** rephrase some minor letter parts ([0ac75e0](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/0ac75e0d5948cb90855d0e36ca8e99c22a0f6fcb))

## [27.4.72](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v27.4.71...v27.4.72) (2024-07-02)


### Bug Fixes

* **avs:** do not associate users by AvsInfoPersonEmail ([9e2f221](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/9e2f2214ce5c7ee1e8d80e6fa75298b7a70d9043))
* **avs:** fix superfluous quotes for matriculation numbers on newly created users ([ff9014c](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/ff9014ce05d197c1dc0fce0774a640789cb38b26))
* **avs:** towards [#169](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/169) - superiors are elevated to max priority for that company ([5bf8539](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/5bf85394d4db6de8f10b4e318d667130d37601ac))
* **firm:** supervisor secondary did not work as intended ([d4f3ce7](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/d4f3ce7bf3d208b16f95ab81971b47dfa752939a))

## [27.4.71](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v27.4.70...v27.4.71) (2024-06-27)


### Bug Fixes

* **avs:** company superior emails become company wide supervisors ([37efc89](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/37efc89e0723452e6d271ba5b43d6bd026642190))
* **avs:** match mobile number better between LDAP and AVS ([f108c6c](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/f108c6cfec2d94d866e7c1605b0abe5471fd0f2b))
* **avs:** new AVS from existing LDAP user no longer misses fields ([2559346](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/2559346d963ede802321dfc8cbd2088d9a5de685))
* **avs:** priority for picking primary email demote superior ([e4fa1dd](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/e4fa1ddd6873910bef82d569fe16aca936efc567))
* **build:** add missing license file ([8721bdb](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/8721bdb3f349658baab144d64c19942bfd7fa49a))
* **build:** hlint wants a newtype instead ([18cdc52](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/18cdc52df094b9dbccd4f015561367cea59e33fe))
* **doc:** fix erroneous unintentional haddock annotations ([3dfc7f8](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/3dfc7f8c8b12dd6ef87848a75f1669d700fffe4c))
* **i18n:** add missing translation for new primary company ([c212f2e](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/c212f2e8d735616e59c9b8111a34118e3a48fd47))
* **i18n:** add missing translation for new primary company ([2cc529b](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/2cc529be39655c317ca028f8f09fa80826ec668d))
* **ldap:** match mobile number better between LDAP and AVS ([47e5628](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/47e56280fce4ad37e6bc3b9f1c61cb7867069cc5))
* **letter:** adjust spacing, pin location and interpolation ([d4a0e1f](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/d4a0e1f201151f76e8e9afd67b456cc878d2afde))
* **letter:** convenience links working again ([5f1af13](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/5f1af130edae7ada2f0c7f7829890bbe0d4f395a))
* **letter:** expiry and valid dates were wrong ([f8c3663](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/f8c36636ff1f2591507e993af32ed01af94cf1fc))
* **letter:** switch markdown for renewal letter too ([c38e87e](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/c38e87e1e0e9285a10c00521b7440cd8246af88a))
* **print:** fix [#167](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/167) by sotring affected user in PrintJob ([73aecc2](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/73aecc2df833bdeed93a113b6c756e36b50491b7))

## [27.4.70](https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/v27.4.69...v27.4.70) (2024-06-21)


### Bug Fixes

* **stack.yaml:** move to uniworx.de gitlab ([55484e6](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive/commit/55484e631b786ea3710d322282019baf5292c243))
467
Makefile
467
Makefile
@ -1,87 +1,57 @@
|
||||
|
||||
export SHELL=bash
|
||||
|
||||
# MAKE=make -f Makefile-loggingsymbols
|
||||
# MAKE=make -d
|
||||
export CLEAN_DEPENDENCIES ?= false
|
||||
export CLEAN_IMAGES ?= false
|
||||
|
||||
# System information
|
||||
export CPU_CORES = $(shell cat /proc/cpuinfo | grep '^processor' | wc -l)
|
||||
|
||||
export CONTAINER_COMMAND ?= podman
|
||||
export CONTAINER_BGRUN ?= $(CONTAINER_COMMAND) run -dit --network=host --replace
|
||||
export CONTAINER_FGRUN ?= $(CONTAINER_COMMAND) run -it --network=host --replace
|
||||
|
||||
export IMAGE_REGISTRY = docker.io
|
||||
export MEMCACHED_IMAGE = $(IMAGE_REGISTRY)/memcached:latest
|
||||
export MINIO_IMAGE = $(IMAGE_REGISTRY)/minio/minio:latest
|
||||
export MAILDEV_IMAGE = $(IMAGE_REGISTRY)/maildev/maildev:latest # TODO: needs different port than 1025 to avoid conflicts
|
||||
|
||||
export IN_CONTAINER ?= false
|
||||
export IN_CI ?= false
|
||||
export CONTAINER_FILE
|
||||
export CONTAINER_IDENT
|
||||
export CF_PREFIX
|
||||
export DEVELOP
|
||||
export MOUNT_DIR=/mnt/fradrive
|
||||
export CONTAINER_ATTACHED
|
||||
export CONTAINER_INIT
|
||||
export CONTAINER_CLEANUP
|
||||
|
||||
export SERVICE
|
||||
export SERVICE_VARIANT ?= $(SERVICE)
|
||||
export JOB
|
||||
export CONTAINER_CMD
|
||||
export SET_CONTAINER_CMD
|
||||
export ENTRYPOINT
|
||||
export EXEC_OPTS
|
||||
|
||||
export STACK_CORES = $(shell echo $(($(CPU_CORES)/2)))
|
||||
export BASE_PORTS
|
||||
export UNIWORXDB_OPTS ?= -cf
|
||||
export PROD ?= false
|
||||
export ENTRYPOINT ?= bash
|
||||
export SRC
|
||||
|
||||
ifneq ($(PROD),true)
|
||||
export --DEVELOPMENT=--flag uniworx:dev
|
||||
endif
|
||||
|
||||
export DATE := $(shell date +'%Y-%m-%dT%H-%M-%S')
|
||||
|
||||
export CURR_DEV = $(shell cat develop/.current 2>/dev/null)
|
||||
export SET_DEVELOP = $(eval DEVELOP=develop/$$(CURR_DEV))
|
||||
export NEW_DEVELOP = $(eval DEVELOP=develop/$$(DATE))
|
||||
|
||||
|
||||
.PHONY: help
|
||||
# HELP: print out this help message
|
||||
help:
|
||||
@if [ -z "$$(which perl 2>/dev/null)" ] ; then \
|
||||
$(CONTAINER_FGRUN) .:/mnt 'debian:12.5' '/mnt/utils/makehelp.pl' '/mnt/Makefile' ; \
|
||||
else \
|
||||
utils/makehelp.pl Makefile ; \
|
||||
fi
|
||||
docker compose run help
|
||||
|
||||
.PHONY: clean
|
||||
# HELP: stop all running containers and remove all compilation results in the directory (but leave images including dependencies unharmed)
|
||||
# HELP: clean compilation caches
|
||||
clean:
|
||||
rm -rf develop
|
||||
-rm -rf node_modules .npm .cache assets/icons assets/favicons static well-known config/manifest.json
|
||||
-rm -rf .stack-work .stack-work.lock
|
||||
-rm -rf bin .Dockerfile develop
|
||||
-$(CONTAINER_COMMAND) container prune --force
|
||||
.PHONY: clean-images
|
||||
# HELP: stop all running containers and clean all images from local repositories
|
||||
clean-images:
|
||||
rm -rf develop
|
||||
sleep 5
|
||||
-$(CONTAINER_COMMAND) system prune --all --force --volumes
|
||||
-$(CONTAINER_COMMAND) image prune --all --force
|
||||
-$(CONTAINER_COMMAND) volume prune --force
|
||||
$(MAKE) clean-frontend CLEAN_DEPENDENCIES=$(CLEAN_DEPENDENCIES) CLEAN_IMAGES=$(CLEAN_IMAGES)
|
||||
$(MAKE) clean-backend CLEAN_DEPENDENCIES=$(CLEAN_DEPENDENCIES) CLEAN_IMAGES=$(CLEAN_IMAGES)
|
||||
.PHONY: clean-all
|
||||
# HELP: like clean but with full container, image, and volume prune
|
||||
clean-all: clean-images
|
||||
-rm -rf .stack
|
||||
# HELP: clean everything, including dependency and image caches
|
||||
clean-all: CLEAN_DEPENDENCIES = true
|
||||
clean-all: CLEAN_IMAGES = true
|
||||
clean-all: clean ;
|
||||
|
||||
.PHONY: clean-%
|
||||
# HELP(clean-$SERVICE): invalidate caches for a given service. Supported services: frontend, backend.
|
||||
clean-%:
|
||||
$(MAKE) stop-$*
|
||||
@$(MAKE) -- --clean-$*
|
||||
@echo "Cleaned $* build files and binaries."
|
||||
ifeq ("$(CLEAN_DEPENDENCIES)", "true")
|
||||
@$(MAKE) -- --clean-$*-deps
|
||||
@echo "Cleaned $* dependencies."
|
||||
endif
|
||||
ifeq ("$(CLEAN_IMAGES)", "true")
|
||||
$(MAKE) kill-$*
|
||||
docker compose rm --force --volumes
|
||||
docker compose down --rmi 'all' --volumes
|
||||
@echo "Cleaned $* image."
|
||||
endif
|
||||
--clean-frontend:
|
||||
-rm -rf assets/icons assets/favicons
|
||||
-rm -rf static well-known
|
||||
--clean-frontend-deps:
|
||||
-rm -rf frontend/node_modules
|
||||
-rm -rf frontend/.npm
|
||||
--clean-backend:
|
||||
-rm -rf backend/.stack-work
|
||||
-rm -rf bin/
|
||||
--clean-backend-deps:
|
||||
-rf -rf backend/.stack
|
||||
|
||||
|
||||
# TODO: only release when build and tests are passing!!!
|
||||
.PHONY: release
|
||||
# HELP: create, commit and push a new release
|
||||
release:
|
||||
@ -93,328 +63,61 @@ release:
|
||||
git push origin $${VERSION}
|
||||
|
||||
.PHONY: compile
|
||||
compile:
|
||||
$(MAKE) compile-frontend
|
||||
$(MAKE) compile-backend
|
||||
# HELP: perform full compilation (frontend and backend)
|
||||
compile: compile-frontend compile-backend ;
|
||||
.PHONY: compile-%
|
||||
# HELP(compile-$SERVICE): compile a given service once
|
||||
compile-%:
|
||||
docker compose run --remove-orphans --build --no-deps $* make compile
|
||||
|
||||
.PHONY: start
|
||||
start:
|
||||
$(MAKE) start-postgres
|
||||
$(MAKE) start-memcached
|
||||
$(MAKE) start-minio
|
||||
$(MAKE) compile-frontend
|
||||
$(MAKE) compile-uniworxdb
|
||||
$(MAKE) start-backend
|
||||
|
||||
.PHONY: %-backend
|
||||
%-backend: SERVICE=backend
|
||||
%-backend: SERVICE_VARIANT=backend
|
||||
%-backend: CONTAINER_CMD=localhost/fradrive/backend
|
||||
%-backend: BASE_PORTS = "DEV_PORT_HTTP=3000" "DEV_PORT_HTTPS=3443"
|
||||
|
||||
.PHONY: %-uniworxdb
|
||||
%-uniworxdb: SERVICE=backend
|
||||
%-uniworxdb: SERVICE_VARIANT=uniworxdb
|
||||
%-uniworxdb: CONTAINER_CMD=localhost/fradrive/backend
|
||||
|
||||
.PHONY: %-ghci
|
||||
%-ghci: SERVICE=backend
|
||||
%-ghci: SERVICE_VARIANT=ghci
|
||||
%-ghci: CONTAINER_CMD=localhost/fradrive/backend
|
||||
|
||||
.PHONY: %-hoogle
|
||||
%-hoogle: SERVICE=backend
|
||||
%-hoogle: SERVICE_VARIANT=hoogle
|
||||
%-hoogle: BASE_PORTS = "HOOGLE_PORT=8081"
|
||||
%-hoogle: CONTAINER_CMD=localhost/fradrive/backend
|
||||
--start-hoogle:
|
||||
HOOGLE_PORT=`cat $(CONTAINER_FILE) | grep 'HOOGLE_PORT=' | sed 's/HOOGLE_PORT=//'` ; \
|
||||
stack $(STACK_CORES) hoogle -- server --local --port $${HOOGLE_PORT}
|
||||
|
||||
.PHONY: %-frontend
|
||||
%-frontend: SERVICE=frontend
|
||||
%-frontend: SERVICE_VARIANT=frontend
|
||||
%-frontend: CONTAINER_CMD=localhost/fradrive/frontend
|
||||
|
||||
.PHONY: %-postgres
|
||||
%-postgres: SERVICE=postgres
|
||||
%-postgres: SERVICE_VARIANT=postgres
|
||||
%-postgres: BASE_PORTS = "PGPORT=5432"
|
||||
%-postgres: CONTAINER_CMD=localhost/fradrive/postgres
|
||||
|
||||
.PHONY: %-memcached
|
||||
%-memcached: SERVICE=memcached
|
||||
%-memcached: SERVICE_VARIANT=memcached
|
||||
%-memcached: SET_CONTAINER_CMD=$$(MEMCACHED_IMAGE) --port=`cat $$(CONTAINER_FILE) | grep 'MEMCACHED_PORT=' | sed 's/MEMCACHED_PORT=//'`
|
||||
%-memcached: BASE_PORTS = "MEMCACHED_PORT=11211"
|
||||
|
||||
.PHONY: %-minio
|
||||
%-minio: SERVICE=minio
|
||||
%-minio: SERVICE_VARIANT=minio
|
||||
%-minio: SET_CONTAINER_CMD=$$(MINIO_IMAGE) -- server `mktemp` --address=:`cat $$(CONTAINER_FILE) | grep 'UPLOAD_S3_PORT=' | sed 's/UPLOAD_S3_PORT=//'`
|
||||
%-minio: BASE_PORTS = "UPLOAD_S3_PORT=9000"
|
||||
|
||||
# HELP: start complete development environment with a fresh test database
|
||||
start: start-postgres start-maildev start-memcached start-minio start-backend
|
||||
docker compose exec backend make start
|
||||
.PHONY: start-%
|
||||
start-%: JOB=start
|
||||
start-%: CF_PREFIX = start-
|
||||
start-%: CONTAINER_ATTACHED = false
|
||||
start-%: --act ;
|
||||
|
||||
.PHONY: compile-%
|
||||
compile-%: JOB=compile
|
||||
compile-%: CF_PREFIX = compile-
|
||||
compile-%: CONTAINER_ATTACHED = true
|
||||
compile-%: --act ;
|
||||
|
||||
.PHONY: dependencies-%
|
||||
dependencies-%: JOB=dependencies
|
||||
dependencies-%: CF_PREFIX = dependencies-
|
||||
dependencies-%: CONTAINER_ATTACHED = true
|
||||
dependencies-%: --act ;
|
||||
|
||||
.PHONY: test-%
|
||||
test-%: JOB=test
|
||||
test-%: CF_PREFIX = test-
|
||||
test-%: CONTAINER_ATTACHED = true
|
||||
test-%: --act ;
|
||||
|
||||
.PHONY: lint-%
|
||||
lint-%: JOB=lint
|
||||
lint-%: CF_PREFIX = lint-
|
||||
lint-%: CONTAINER_ATTACHED = true
|
||||
lint-%: --act ;
|
||||
# HELP(start-$SERVICE): start a given service
|
||||
start-%:
|
||||
docker compose up -d --build $*
|
||||
|
||||
.PHONY: shell-%
|
||||
# HELP(shell-$SERVICE): launch (bash) shell inside a new $SERVICE container
|
||||
shell-%: JOB=shell
|
||||
shell-%: CF_PREFIX=shell-
|
||||
shell-%: CONTAINER_ATTACHED=true
|
||||
shell-%: --act ;
|
||||
# HELP(shell-$SERVICE): launch a (bash) shell inside a given service
|
||||
shell-%:
|
||||
docker compose run --build --no-deps --entrypoint="$(ENTRYPOINT)" $*
|
||||
.PHONY: ghci
|
||||
# HELP(ghci): launch new backend instance and enter interactive ghci shell
|
||||
ghci: shell-ghci;
|
||||
|
||||
--act: --develop_containerized;
|
||||
|
||||
--develop_%: PORTS = $(foreach PORT,$(BASE_PORTS),$(shell utils/next_free_port.pl $(PORT)))
|
||||
--develop_%: --ensure-develop
|
||||
DEVELOP=develop/`cat develop/.current` ; \
|
||||
CONTAINER_IDENT=$(CF_PREFIX)$(SERVICE_VARIANT) ; \
|
||||
CONTAINER_FILE=$${DEVELOP}/$${CONTAINER_IDENT} ; \
|
||||
if [[ -e $${CONTAINER_FILE} ]]; then \
|
||||
>&2 echo "Another $* service is already running! Use \"make new-develop\" to start a new develop instance despite currently running services." ; \
|
||||
exit 1 ; \
|
||||
fi ; \
|
||||
echo "$(PORTS)" | sed 's/ /\n/g' > $${CONTAINER_FILE} ; \
|
||||
$(MAKE) -- --$* CONTAINER_FILE=$${CONTAINER_FILE} CONTAINER_IDENT=$${CONTAINER_IDENT} JOB=$(JOB)
|
||||
|
||||
.PHONY: rebuild-%
|
||||
# HELP(rebuild-{backend,frontend,database,memcached,minio}): force-rebuild a given container image
|
||||
rebuild-%:
|
||||
$(MAKE) -- --image-build SERVICE=$* NO_CACHE=--no-cache
|
||||
--image-build:
|
||||
ifeq "$(CONTAINER_CMD)" "localhost/fradrive/$(SERVICE)"
|
||||
rm -f .Dockerfile
|
||||
ln -s docker/$(SERVICE)/Dockerfile .Dockerfile
|
||||
MOUNT_DIR=/mnt/fradrive; \
|
||||
PROJECT_DIR=/mnt/fradrive; \
|
||||
if [ "$(IN_CI)" == "true" ] ; then \
|
||||
PROJECT_DIR=/fradrive; \
|
||||
fi; \
|
||||
if [ "$(IN_CONTAINER)" == "false" ] ; then \
|
||||
$(CONTAINER_COMMAND) build $(NO_CACHE) \
|
||||
-v $(PWD):$${MOUNT_DIR} \
|
||||
--build-arg MOUNT_DIR=$(MOUNT_DIR) \
|
||||
--build-arg PROJECT_DIR=$${PROJECT_DIR} \
|
||||
--env IN_CONTAINER=true \
|
||||
--env JOB=$(JOB) \
|
||||
--tag fradrive/$(SERVICE) \
|
||||
--file $(PWD)/.Dockerfile ; \
|
||||
fi
|
||||
else
|
||||
:
|
||||
endif
|
||||
|
||||
--containerized: --image-build
|
||||
DEVELOP=`cat develop/.current` ; \
|
||||
./utils/watchcontainerrun.sh "$(CONTAINER_COMMAND)" "$(CONTAINER_FILE)" "$(CONTAINER_INIT)" "$(CONTAINER_CLEANUP)" & \
|
||||
CONTAINER_NAME=fradrive.$(CURR_DEV).$(CONTAINER_IDENT) ; \
|
||||
if ! [ -z "$(SET_CONTAINER_CMD)" ] ; \
|
||||
then \
|
||||
CONTAINER_CMD="$(SET_CONTAINER_CMD)" ; \
|
||||
else \
|
||||
CONTAINER_CMD=$(CONTAINER_CMD) ; \
|
||||
fi ; \
|
||||
CONTAINER_ID=`$(CONTAINER_BGRUN) \
|
||||
-v $(PWD):$(MOUNT_DIR) \
|
||||
--env IN_CONTAINER=true \
|
||||
--env FRADRIVE_MAKE_TARGET="--$(JOB)-$(SERVICE_VARIANT)" \
|
||||
--env CONTAINER_FILE=$(CONTAINER_FILE) \
|
||||
--env CONTAINER_NAME=$${CONTAINER_NAME} \
|
||||
--env JOB=$(JOB) \
|
||||
--env SRC=$(SRC) \
|
||||
--name $${CONTAINER_NAME} \
|
||||
$${CONTAINER_CMD} \
|
||||
` ; \
|
||||
printf "CONTAINER_ID=$${CONTAINER_ID}" >> "$(CONTAINER_FILE)" ; \
|
||||
if [[ "true" == "$(CONTAINER_ATTACHED)" ]] ; then \
|
||||
$(CONTAINER_COMMAND) attach $${CONTAINER_ID} || : ; \
|
||||
fi
|
||||
|
||||
# For Reverse Proxy Problem see: https://groups.google.com/g/yesodweb/c/2EO53kSOuy0/m/Lw6tq2VYat4J
|
||||
|
||||
# HELP(start-backend): start development instance
|
||||
--start-backend:
|
||||
export YESOD_IP_FROM_HEADER=true; \
|
||||
export DEV_PORT_HTTP=`cat $(CONTAINER_FILE) | grep 'DEV_PORT_HTTP=' | sed 's/DEV_PORT_HTTP=//'`; \
|
||||
export DEV_PORT_HTTPS=`cat $(CONTAINER_FILE) | grep 'DEV_PORT_HTTPS=' | sed 's/DEV_PORT_HTTPS=//'`; \
|
||||
export HOST=127.0.0.1 ; \
|
||||
export PORT=$${PORT:-$${DEV_PORT_HTTP}} ; \
|
||||
export DETAILED_LOGGING=$${DETAILED_LOGGING:-true} ; \
|
||||
export LOG_ALL=$${LOG_ALL:-false} ; \
|
||||
export LOGLEVEL=$${LOGLEVEL:-info} ; \
|
||||
export DUMMY_LOGIN=$${DUMMY_LOGIN:-true} ; \
|
||||
export SERVER_SESSION_ACID_FALLBACK=$${SERVER_SESSION_ACID_FALLBACK:-true} ; \
|
||||
export SERVER_SESSION_COOKIES_SECURE=$${SERVER_SESSION_COOKIES_SECURE:-false} ; \
|
||||
export COOKIES_SECURE=$${COOKIES_SECURE:-false} ; \
|
||||
export ALLOW_DEPRECATED=$${ALLOW_DEPRECATED:-true} ; \
|
||||
export ENCRYPT_ERRORS=$${ENCRYPT_ERRORS:-false} ; \
|
||||
export RIBBON=$${RIBBON:-$${HOST:-localhost}} ; \
|
||||
export APPROOT=$${APPROOT:-http://localhost:$${DEV_PORT_HTTP}} ; \
|
||||
export AVSPASS=$${AVSPASS:-nopasswordset} ; \
|
||||
stack $(STACK_CORES) exec --local-bin-path $$(pwd)/bin --copy-bins -- yesod devel -p "$${DEV_PORT_HTTP}" -q "$${DEV_PORT_HTTPS}"
|
||||
# HELP(compile-backend): compile backend binaries
|
||||
--compile-backend:
|
||||
stack build $(STACK_CORES) --fast --profile --library-profiling --executable-profiling --flag uniworx:-library-only $(--DEVELOPMENT) --local-bin-path $$(pwd)/bin --copy-bins
|
||||
# HELP(dependencies-backend): (re-)build backend dependencies
|
||||
--dependencies-backend: uniworx.cabal
|
||||
stack build $(STACK_CORES) --fast --only-dependencies
|
||||
# HELP(lint-backend): lint backend
|
||||
--lint-backend:
|
||||
stack build $(STACK_CORES) --test --fast --flag uniworx:library-only $(--DEVELOPMENT) uniworx:test:hlint
|
||||
# HELP(test-backend): test backend
|
||||
--test-backend:
|
||||
stack build $(STACK_CORES) --test --coverage --fast --flag uniworx:library-only $(--DEVELOPMENT)
|
||||
uniworx.cabal:
|
||||
stack exec -- hpack --force

# HELP(compile-frontend): compile frontend assets
--compile-frontend: node_modules assets esbuild.config.mjs
	npm run build
--start-frontend: --compile-frontend;
--dependencies-frontend: node_modules assets static well-known;
node_modules: package.json package-lock.json
	npm install --cache .npm --prefer-offline
package-lock.json: package.json
	npm install --cache .npm --prefer-offline
assets: assets/favicons assets/icons;
assets/favicons:
	./utils/faviconize.pl assets/favicon.svg long assets/favicons
assets/icons: node_modules assets/icons-src/fontawesome.json
	./utils/renamer.pl node_modules/@fortawesome/fontawesome-free/svgs/solid assets/icons-src/fontawesome.json assets/icons/fradrive
	./utils/renamer.pl node_modules/@fortawesome/fontawesome-free/svgs/regular assets/icons-src/fontawesome.json assets/icons/fradrive
	-cp assets/icons-src/*.svg assets/icons/fradrive
static: node_modules assets esbuild.config.mjs
	npm run build
well-known: static;

# HELP(compile-uniworxdb): clear and fill database. requires running postgres instance (use "make start-postgres" to start one)
# TODO (db-m-$MIGRATION-backend): apply migration (see src/Model/Migration/Definition.hs for list of available migrations)
--compile-uniworxdb: --compile-backend
	SERVER_SESSION_ACID_FALLBACK=${SERVER_SESSION_ACID_FALLBACK:-true} ; \
	AVSPASS=${AVSPASS:-nopasswordset} ; \
	./bin/uniworxdb $(UNIWORXDB_OPTS)
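
As the HELP line notes, this target expects a running postgres instance; a typical sequence would be:

	make start-postgres
	make compile-uniworxdb
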

--shell-ghci:
	stack ghci -- $(SRC)
# --main-is uniworx:exe:uniworx

# HELP(shell-{backend,frontend,memcached,minio,postgres}): enter (bash) shell inside a new container of a given service
--shell-%:
	/bin/bash

# HELP(start-minio): start minio service

.PHONY: status
# HELP: print develop status: running containers, used ports
status:
	@./utils/develop-status.pl -a

.PHONY: log-%
# HELP(log-$(JOB)-$(SERVICE)): inspect output of a given service. The service must be currently running. When a service supports multiple running instances in one develop (e.g. backend), you need to specify the exact instance by its associated file (e.g. backend-1, backend-2, etc.); please check the contents of the develop/ directory for a list of running instances.
log-%:
	DEVELOP=develop/`cat develop/.current` ; \
	SEARCH_FILE="$${DEVELOP}/$*" ; \
	if [[ ! -e "$${SEARCH_FILE}" ]] ; then \
		SEARCH_FILE="$${DEVELOP}/.exited.$*" ; \
	fi ; \
	if [[ -e "$${SEARCH_FILE}" ]] ; then \
		$(CONTAINER_COMMAND) logs --follow `cat "$${SEARCH_FILE}" | grep CONTAINER_ID= | sed 's/^CONTAINER_ID=//'` ; \
	else \
		>&2 echo "Cannot show log: No develop file found for '$*'" ; \
		exit 1 ; \
	fi
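
A usage sketch for the rule above, using the start-postgres develop file that the psql target further down also references (instance-suffixed files such as backend-1 work the same way):

	make log-start-postgres
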

.PHONY: enter
# HELP: launch (bash) shell inside a currently running container. Use the ./enter shell wrapper for more convenient usage, possibly with tab-completion in the future
enter: --ensure-develop
	$(MAKE) -- --enter

.PHONY: psql
# HELP: enter psql (postgresql) cli inside a currently running database container
psql: ENTRYPOINT=/usr/bin/psql -d uniworx
psql: EXEC_OPTS=--user postgres
psql: --ensure-develop
	$(MAKE) -- --enter CONTAINER_FILE=develop/`cat develop/.current`/start-postgres

--enter:
	CONTAINER_ID=`cat $(CONTAINER_FILE) | grep 'CONTAINER_ID=' | sed 's/CONTAINER_ID=//'` ; \
	$(CONTAINER_COMMAND) exec -it $(EXEC_OPTS) $${CONTAINER_ID} $(if $(ENTRYPOINT),$(ENTRYPOINT),/bin/bash)
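
The psql rule above is simply --enter with a custom CONTAINER_FILE and ENTRYPOINT; the same pattern should work for other services, for example (the start-memcached file name is only assumed here):

	make -- --enter CONTAINER_FILE=develop/$(cat develop/.current)/start-memcached ENTRYPOINT=/bin/sh
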
# HELP: launch ghci instance. Use in combination with SRC to specify the modules to be loaded by ghci: make ghci SRC=src/SomeModule.hs
ghci: ENTRYPOINT=stack ghci $(SRC)
ghci: shell-backend ;

.PHONY: stop
# HELP: stop all currently running develop instances
# HELP: stop all services
stop:
	rm -rf develop
	docker compose down
.PHONY: stop-%
# HELP(stop-SERVICE): stop all currently running develop instances of a given service (e.g. backend, frontend, uniworxdb, hoogle, postgres, ...)
# HELP(stop-JOB): stop all currently running develop instances of a given job (e.g. compile, start, test, lint)
stop-compile: CF_PREFIX=compile-
stop-start: CF_PREFIX=start-
stop-test: CF_PREFIX=test-
stop-lint: CF_PREFIX=lint-
stop-%: --stop;
--stop:
	$(SET_DEVELOP)
ifdef CF_PREFIX
	rm -rf $(DEVELOP)/$(CF_PREFIX)*
endif
ifdef SERVICE_VARIANT
	rm -rf $(DEVELOP)/*-$(SERVICE_VARIANT)
endif
# HELP(stop-$SERVICE): stop a given service
stop-%:
	docker compose down $*
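
A usage sketch for the stop rules, with service and job names as listed in the HELP lines:

	make stop-backend    # stop all develop instances of the backend service
	make stop-start      # stop all currently running instances of the start job
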
.PHONY: kill-%
# HELP(kill-$SERVICE): kill a given service the hard way. Use this if the service does not respond to stop.
kill-%:
	docker compose kill $*

.PHONY: new-develop
# HELP: instantiate new development bundle, i.e. create new directory under develop/
new-develop:
	$(NEW_DEVELOP)
	mkdir -p $(DEVELOP)
	$(MAKE) develop/.current
.PHONY: switch-develop
# HELP: switch current develop instance to DEVELOP=...
switch-develop:
	if ! [ -e develop/$(DEVELOP) ]; then \
		echo "Specified develop $(DEVELOP) does not exist! Not switching." ; \
		exit 1 ; \
	fi ; \
	echo "$(DEVELOP)" > develop/.current
--ensure-develop:
	if ! [[ -e develop ]]; then \
		$(MAKE) new-develop; \
	fi
	$(MAKE) develop/.current
	$(SET_DEVELOP)
.PHONY: develop/.current
develop/.current:
	ls -1 develop | tail -n1 > develop/.current
.PHONY: status
# HELP: print an overview of currently running services and their health
status:
	docker compose ps
.PHONY: top
# HELP: print an overview of the resource usage of the currently running services
top:
	docker compose stats
.PHONY: list-projects
# HELP: list all currently running projects on this machine
list-projects:
	docker compose ls

.PHONY: log-%
# HELP(log-$SERVICE): follow the output of a given service. Service must be running.
log-%:
	docker compose logs --follow --timestamps $*

.PHONY: --%
.SUFFIXES: # Delete all default suffixes

azure-pipelines.yaml: 254 changes (Normal file → Executable file)
@@ -1,67 +1,197 @@
# SPDX-FileCopyrightText: 2024 Sarah Vaupel <sarah.vaupel@uniworx.de>
# SPDX-FileCopyrightText: 2024-2025 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later

trigger:
  branches:
    include:
      - '*'
  tags:
    include:
      - '*'
  #paths:
  #  exclude:
  #    - CHANGELOG.md

parameters:
  - name: services
    type: object
    default:
      - name: frontend
        imageBase:
          image: devfra.azurecr.io/de.fraport.build/npm
          tag: node-20
        # extraBuildOptions: |
        #   --build-arg NPM_CUSTOM_REGISTRY=https://pkgs.dev.azure.com/fraport/_packaging/packages/npm/registry/
        dependsOn: []
        dependenciesCaches:
          - key: package.json | package-lock.json
            path: node_modules/
          - key: package.json | package-lock.json
            path: .npm/
          - key: package.json | esbuild.config.mjs | utils/renamer.pl | utils/faviconize.pl | frontend/src/icons.scss
            path: assets/icons/
          - key: package.json | esbuild.config.mjs | utils/renamer.pl | utils/faviconize.pl | frontend/src/icons.scss
            path: assets/favicons/
        buildPool: 'Prod Private Agent Pool'
        buildCores: 1
        buildTimeout: 60
        buildArtifacts: |
          assets/icons/fradrive/*.svg
          assets/favicons/*.png
          assets/favicons/include.html
          frontend/src/env.sass
          config/manifest.json
          static/**/*
          well-known/**/*
      - name: backend
        imageBase:
          image: devfra.azurecr.io/de.fraport.build/haskell
          tag: 8.10.4
        dependsOn:
          - Build_frontend
        dependenciesCaches:
          - key: stack.yaml | stack.yaml.lock
            path: .stack/
        buildPool: 'Prod Private Agent Pool DS3'
        buildCores: 3
        buildTimeout: 1440
        buildArtifacts: |
          bin/*

variables:
  buildImageUpstream: devfra.azurecr.io/de.fraport.fradrive.build
  setupImages: $[ or( eq(variables.forcePushLatest, true), eq(variables['Build.SourceBranch'], 'refs/heads/master'), startsWith(variables['Build.SourceBranch'], 'refs/heads/update'), startsWith(variables['Build.SourceBranch'], 'refs/tags/') ) ]

pool: 'Prod Private Agent Pool'

jobs:
  # - job: HelloWorld
  #   container:
  #     image: 'devfra.azurecr.io/de.fraport.trusted/ubuntu:22.04'
  #     endpoint: devfra
  #   steps:
  #     - script: echo Hello, world!
  #       displayName: 'Run a one-line script'
  #     - script: |
  #         echo Add other tasks to build, test, and deploy your project.
  #         echo See https://aka.ms/yaml
  #       displayName: 'Run a multi-line script'
  - job: DockerTaskTest
    container:
      image: devfra.azurecr.io/de.fraport.build/tools:1.1.0
      endpoint: devfra
    steps:
      - task: Docker@2
        name: dockerLoginDevFra
        displayName: Docker Login to devfra
        inputs:
          command: login
          containerRegistry: devFra
      - task: Docker@2
        name: dockerBuild
        displayName: Backend image build test
        inputs:
          command: build
          Dockerfile: docker/backend/Dockerfile
          buildContext: .
          tags: |
            $(Build.BuildNumber)
            backend
          arguments: |
            --build-arg HTTPS_PROXY=http://proxy.frankfurt-airport.de:8080
            --build-arg HTTP_PROXY=http://proxy.frankfurt-airport.de:8080
            --build-arg NO_PROXY='localhost,127.0.0.1,*.docker.internal,*.azmk8s.io,devfra.azurecr.io,devfra.westeurope.data.azurecr.io'
            --build-arg PROJECT_DIR=/fradrive
            --build-arg MOUNT_DIR=/mnt/fradrive
  # - job: BuildKitTest
  #   container:
  #     image: 'devfra.azurecr.io/de.fraport.trusted/buildkit:0.12.1'
  #     endpoint: devfra
  #   steps:
  #     - script: buildctl build \
  #         --frontend=dockerfile.v0 \
  #         --local context=. \
  #         --local dockerfile=docker/backend/Dockerfile
  #       displayName: BuildKit test
  # - job: CustomBuildahTest
  #   container:
  #     image: 'devfra.azurecr.io/de.fraport.trusted/ubuntu:22.04'
  #     endpoint: devfra
  #   steps:
  #     - script: |
  #         id
  #         docker build --help
  #         sudo apt-get -y update
  #         sudo apt-get -y install buildah
  #         buildah bud -t fradrive-backend-test --volume .:/mnt/fradrive --file docker/backend/Dockerfile
  #       displayName: Build buildah image
stages:

  - stage: Setup
    jobs:
      - ${{ each service in parameters.services }}:
          - template: .azure-pipelines/templates/jobs/setup_image.yaml
            parameters:
              imageName: ${{service.name}}
              imageBase: ${{service.imageBase}}
          - template: .azure-pipelines/templates/jobs/setup_dependencies.yaml
            parameters:
              serviceName: ${{service.name}}
              dependenciesCaches: ${{service.dependenciesCaches}}
              dependenciesBuildPool: ${{service.buildPool}}
              dependenciesBuildCores: ${{service.buildCores}}
              dependenciesBuildTimeout: ${{service.buildTimeout}}

  - stage: Build
    dependsOn: Setup
    jobs:
      - ${{ each service in parameters.services }}:
          - job: Build_${{service.name}}
            displayName: Compile ${{service.name}}
            dependsOn: ${{service.dependsOn}}
            pool: '${{service.buildPool}}'
            timeoutInMinutes: ${{service.buildTimeout}}
            container:
              ${{ if eq(variables.setupImages, true) }}:
                image: $(buildImageUpstream)/${{service.name}}:$(Build.BuildNumber)
              ${{ else }}:
                image: $(buildImageUpstream)/${{service.name}}:latest
              endpoint: devfra
              env:
                PROJECT_DIR: $(Build.Repository.LocalPath)
                IN_CONTAINER: true
                IN_CI: true
            steps:
              - ${{ each dependencyCache in service.dependenciesCaches }}:
                  - template: .azure-pipelines/templates/steps/cache.yaml
                    parameters:
                      cacheIdent: '${{service.name}}-dependencies'
                      cacheKeys: '${{dependencyCache.key}}'
                      cachePath: '${{dependencyCache.path}}'
              - ${{ each dependency in service.dependsOn }}:
                  - template: .azure-pipelines/templates/steps/artifact-download.yaml
                    parameters:
                      artifactName: '${{dependency}}'
              - template: .azure-pipelines/templates/steps/make.yaml
                parameters:
                  makeJob: compile
                  makeService: ${{service.name}}
                  makeVars: 'CPU_CORES=${{service.buildCores}} STACK_CORES=-j${{service.buildCores}}'
              - task: CopyFiles@2
                displayName: Prepare ${{service.name}} build artifacts for upload
                inputs:
                  Contents: '${{service.buildArtifacts}}'
                  TargetFolder: '$(Build.ArtifactStagingDirectory)'
              - task: PublishBuildArtifacts@1
                displayName: Publish ${{service.name}} build artifacts
                inputs:
                  PathtoPublish: '$(Build.ArtifactStagingDirectory)'
                  ArtifactName: 'Build_${{service.name}}'
                  publishLocation: 'Container'

  # - stage: Test
  #   dependsOn: Build
  #   condition: eq(variables.skipTests, false)
  #   jobs:
  #     - ${{ each service in parameters.services }}:
  #         - job: Test_${{service.name}}
  #           displayName: Run ${{service.name}} tests
  #           pool: '${{service.buildPool}}'
  #           timeoutInMinutes: ${{service.buildTimeout}}
  #           container:
  #             # TODO: do not use latest on update branches
  #             image: $(buildImageUpstream)/${{service.name}}:latest
  #             endpoint: devfra
  #             env:
  #               PROJECT_DIR: $(Build.Repository.LocalPath)
  #               IN_CONTAINER: true
  #               IN_CI: true
  #           steps:
  #             - ${{ each dependencyCache in service.dependenciesCaches }}:
  #                 - template: .azure-pipelines/templates/steps/cache.yaml
  #                   parameters:
  #                     cacheIdent: '${{service.name}}-dependencies'
  #                     cacheKeys: '${{dependencyCache.key}}'
  #                     cachePath: '${{dependencyCache.path}}'
  #             - ${{ each dependency in service.dependsOn }}:
  #                 - template: .azure-pipelines/templates/steps/artifact-download.yaml
  #                   parameters:
  #                     artifactName: '${{dependency}}'
  #             - task: Docker@2
  #               displayName: Login to container registry
  #               inputs:
  #                 command: login
  #                 containerRegistry: devfra
  #             - task: Bash@3
  #               displayName: Start database container for testing
  #               inputs:
  #                 targetType: inline
  #                 script: |
  #                   docker run -d devfra.azurecr.io/de.fraport.trusted/postgres:16.1-bookworm
  #             - template: .azure-pipelines/templates/steps/make.yaml
  #               parameters:
  #                 makeJob: lint
  #                 makeService: ${{service.name}}
  #                 makeVars: 'CPU_CORES=${{service.buildCores}} STACK_CORES=-j${{service.buildCores}}'
  #             - template: .azure-pipelines/templates/steps/make.yaml
  #               parameters:
  #                 makeJob: test
  #                 makeService: ${{service.name}}
  #                 makeVars: 'CPU_CORES=${{service.buildCores}} STACK_CORES=-j${{service.buildCores}}'
  #             - task: Docker@2
  #               displayName: Logout from container registry
  #               inputs:
  #                 command: logout
  #                 containerRegistry: devfra
  #         - job: TestReport_${{service.name}}
  #           displayName: Upload test reports for ${{service.name}}
  #           steps:
  #             - script: echo "Work in progress" # TODO

  - stage: Release
    dependsOn: Build # TODO Test
    condition: or(eq(variables.forceRelease, true), startsWith(variables['Build.SourceBranch'], 'refs/tags/'))
    jobs:
      - template: .azure-pipelines/templates/jobs/release.yaml
        parameters:
          releaseTag: ${{split(variables['Build.SourceBranch'], '/')[2]}}
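
The releaseTag parameter relies on Azure Pipelines' split() template expression: for a tag build, Build.SourceBranch has the form refs/tags/<tag>, so splitting on '/' and taking index 2 yields the bare tag name. A worked example (the tag name is hypothetical):

	refs/tags/v1.2.3  ->  ['refs', 'tags', 'v1.2.3']  ->  releaseTag = v1.2.3
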
backend/.ghc/ghci_history: 0 changes (new normal file)

backend/Dockerfile: 40 changes (new normal file)
@@ -0,0 +1,40 @@
ARG FROM_IMG=docker.io/library/debian
ARG FROM_TAG=12.5

FROM ${FROM_IMG}:${FROM_TAG}

ENV LANG=de_DE.UTF-8

# basic dependencies
RUN apt-get -y update && apt-get -y install git
RUN apt-get -y update && apt-get -y install haskell-stack
RUN apt-get -y update && apt-get -y install llvm
RUN apt-get -y update && apt-get install -y --no-install-recommends locales locales-all

# compile-time dependencies
RUN apt-get -y update && apt-get install -y libpq-dev libsodium-dev
RUN apt-get -y update && apt-get -y install g++ libghc-zlib-dev libpq-dev libsodium-dev pkg-config
RUN apt-get -y update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends tzdata

# run-time dependencies for uniworx binary
RUN apt-get -y update && apt-get -y install fonts-roboto
# RUN apt-get -y update && apt-get -y install pdftk
# RUN apt-get -y update && apt-get -y install \
#     texlive texlive-latex-recommended texlive-luatex texlive-plain-generic texlive-lang-german texlive-lang-english
RUN apt-get -y update && apt-get -y install texlive
# RUN ls /usr/local/texlive
# RUN chown -hR root /usr/local/texlive/2018
# RUN tlmgr init-usertree
# RUN tlmgr option repository ftp://tug.org/historic/systems/texlive/2018/tlnet-final
# RUN tlmgr update --self --all

ARG PROJECT_DIR=/fradrive
ENV PROJECT_DIR=${PROJECT_DIR}
# RUN mkdir -p "${PROJECT_DIR}"; chmod -R 777 "${PROJECT_DIR}"
WORKDIR ${PROJECT_DIR}
ENV HOME=${PROJECT_DIR}
ENV STACK_ROOT="${PROJECT_DIR}/.stack"

ENV STACK_SRC=""
ENV STACK_ENTRY="ghci ${STACK_SRC}"
ENTRYPOINT stack ${STACK_ENTRY}
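
A build-and-run sketch for this image (the image tag and the repository-root build context are assumptions; with the defaults above the container drops into stack ghci):

	docker build -f backend/Dockerfile -t fradrive-backend-dev .
	docker run --rm -it -v "$(pwd):/fradrive" fradrive-backend-dev
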
backend/Makefile: 51 changes (new normal file)
@@ -0,0 +1,51 @@
export CPU_CORES = $(shell cat /proc/cpuinfo | grep '^processor' | wc -l)
export STACK_CORES = $(shell echo $(($(CPU_CORES)/2)))

ifeq ($(PROD),true)
export --DEVELOPMENT=--flag uniworx:-dev
else
export --DEVELOPMENT=--flag uniworx:dev
endif

.PHONY: dependencies
dependencies:
	stack install hpack; stack install yesod-bin; \
	stack build -j2 --only-dependencies

.PHONY: compile
compile: dependencies
	stack build $(STACK_CORES) --fast --profile --library-profiling --executable-profiling --flag uniworx:-library-only $(--DEVELOPMENT) --local-bin-path $$(pwd)/bin --copy-bins

.PHONY: lint
lint:
	stack build $(STACK_CORES) --test --fast --flag uniworx:library-only $(--DEVELOPMENT) uniworx:test:hlint

.PHONY: test
test:
	stack build $(STACK_CORES) --test --coverage --fast --flag uniworx:library-only $(--DEVELOPMENT)

# For Reverse Proxy Problem see: https://groups.google.com/g/yesodweb/c/2EO53kSOuy0/m/Lw6tq2VYat4J
.PHONY: start
start: dependencies
	export YESOD_IP_FROM_HEADER=true; \
	export DEV_PORT_HTTP=3000; \
	export DEV_PORT_HTTPS=3443; \
	export HOST=127.0.0.1 ; \
	export PORT=$${PORT:-$${DEV_PORT_HTTP}} ; \
	export DETAILED_LOGGING=$${DETAILED_LOGGING:-true} ; \
	export LOG_ALL=$${LOG_ALL:-false} ; \
	export LOGLEVEL=$${LOGLEVEL:-info} ; \
	export DUMMY_LOGIN=$${DUMMY_LOGIN:-true} ; \
	export SERVER_SESSION_ACID_FALLBACK=$${SERVER_SESSION_ACID_FALLBACK:-true} ; \
	export SERVER_SESSION_COOKIES_SECURE=$${SERVER_SESSION_COOKIES_SECURE:-false} ; \
	export COOKIES_SECURE=$${COOKIES_SECURE:-false} ; \
	export ALLOW_DEPRECATED=$${ALLOW_DEPRECATED:-true} ; \
	export ENCRYPT_ERRORS=$${ENCRYPT_ERRORS:-false} ; \
	export RIBBON=$${RIBBON:-$${HOST:-localhost}} ; \
	export APPROOT=$${APPROOT:-http://localhost:$${DEV_PORT_HTTP}} ; \
	export AVSPASS=$${AVSPASS:-nopasswordset} ; \
	stack $(STACK_CORES) exec --local-bin-path $$(pwd)/bin --copy-bins -- yesod devel -p "$${DEV_PORT_HTTP}" -q "$${DEV_PORT_HTTPS}"

.PHONY: clean
clean:
	rm -rf .stack-work .stack uniworx.cabal .ghc
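
This standalone backend Makefile mirrors the top-level backend targets; a usage sketch, assuming it is invoked from the repository root:

	make -C backend dependencies
	make -C backend compile
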
@@ -8,7 +8,7 @@
# See https://github.com/yesodweb/yesod/wiki/Configuration#parsing-numeric-values-as-strings


#DEVELOPMENT ONLY, NOT TO BE USED IN PRODUCTION
# DEVELOPMENT ONLY, NOT TO BE USED IN PRODUCTION

avs-licence-synch:
  times: [12]
@@ -17,14 +17,16 @@ avs-licence-synch:
  max-changes: 999

mail-reroute-to:
  name: FRADrive-QA-Umleitungen
  email: FRADrive-TEST-Umleitungen@fraport.de
  name: "FRADrive-QA-Umleitungen"
  email: "FRADrive-TEST-Umleitungen@fraport.de"

# Enqueue at specified hour, a few minutes later
job-lms-qualifications-enqueue-hour: 16
job-lms-qualifications-dequeue-hour: 4

job-mode:
  tag: offload
  job-workers: 1
  job-flush-interval: 600
  # Using these settings kills the job-workers somehow
  # job-workers: 5
  # job-flush-interval: 600
  # job-stale-threshold: 3600
  # job-move-threshold: 60

@@ -16,5 +16,4 @@ log-settings:
auth-dummy-login: true
server-session-acid-fallback: true

job-cron-interval: null
job-workers: 1
job-workers: 20
@@ -47,9 +47,11 @@ TutorialUserDeregister: Vom Kurs abmelden
TutorialUserSendMail: Mitteilung verschicken
TutorialUserPrintQualification: Zertifikat drucken
TutorialUserGrantQualification: Qualifikation vergeben
TutorialUserGrantQualificationDateTooltip: Leer lassen, um das Ablaufdatum auf das heutige Datum plus Standardgültigkeitsdauer zu setzen.
TutorialUserGrantQualificationDateError qsh@QualificationShorthand: Qualifikation #{qsh} hat keine Standardgültigkeitsdauer, daher ist ein explizites Ablaufdatum erforderlich!
TutorialUserRenewQualification: Qualifikation regulär verlängern
TutorialUserRenewedQualification n@Int: Qualifikation für #{tshow n} Kurs-#{pluralDE n "Teilnehmer:in" "Teilnehmer:innen"} regulär verlängert
TutorialUserGrantedQualification n@Int: Qualifikation erfolgreich an #{tshow n} Kurs-#{pluralDE n "Teilnehmer:in" "Teilnehmer:innen"} vergeben
TutorialUserRenewedQualification qsh@QualificationShorthand n@Int: Qualifikation #{qsh} für #{tshow n} Kurs-#{pluralDE n "Teilnehmer:in" "Teilnehmer:innen"} regulär verlängert.
TutorialUserGrantedQualification qsh@QualificationShorthand day@Text n@Int: Qualifikation #{qsh} bis #{day} erfolgreich an #{tshow n} Kurs-#{pluralDE n "Teilnehmer:in" "Teilnehmer:innen"} vergeben.
TutorialUserAssignExam: Zur Prüfung einteilen
TutorialUserExamAssignedFor n@Int m@Int p@Text: #{n}/#{m} zur Prüfung #{p} eingeteilt
CommTutorial: Kursmitteilung
@@ -43,14 +43,15 @@ TutorInviteHeading tutn: Invitation to be instructor for #{tutn}
TutorInviteExplanation: You were invited to be an instructor.
TutorCorrectorInvitationAccepted shn: You are now a corrector for #{shn}
TutorialUsersDeregistered count: Successfully deregistered #{show count} participants from course

TutorialUserDeregister: Deregister from course
TutorialUserSendMail: Send mail
TutorialUserPrintQualification: Print certificate
TutorialUserGrantQualification: Grant qualification
TutorialUserGrantQualificationDateTooltip: Leave blank for expiry on today's date plus standard qualification valid duration.
TutorialUserGrantQualificationDateError qsh@QualificationShorthand: Qualification #{qsh} has no standard valid duration. Please provide an explicit expiry date!
TutorialUserRenewQualification: Renew qualification
TutorialUserRenewedQualification n@Int: Successfully renewed qualification #{tshow n} course #{pluralEN n "user" "users"}
TutorialUserGrantedQualification n: Successfully granted qualification #{tshow n} course #{pluralEN n "user" "users"}
TutorialUserRenewedQualification qsh n: Successfully renewed #{qsh} qualification for #{pluralENsN n "course participant"}
TutorialUserGrantedQualification qsh day n: Successfully granted #{qsh} qualification until #{day} to #{pluralENsN n "course participant"}
TutorialUserAssignExam: Register for examination
TutorialUserExamAssignedFor n@Int m@Int p@Text: #{n}/#{m} enrolled for exam #{p}
CommTutorial: Course message
@@ -28,5 +28,8 @@ PrintLmsUser: E‑Learning Id
PrintJobs: Druckaufträge
PrintLetterType: Brieftypkürzel

MCActDummy: Platzhalter
MCActResendEmail: E‑Mail Kopie versenden
MCActResendEmailTooltip: Eine unveränderte Kopie der E‑Mail erneut versenden. Nur die vorherigen Empfänger werden offiziell aufgeführt, sie erhalten jedoch keine neue Kopie.
MCActResendEmailInfo n@Int recv@Text: #{pluralDEnN n "E‑Mail Kopie"} wurden an #{recv} versandt.

CCActDummy: Platzhalter
@@ -28,5 +28,8 @@ PrintLmsUser: E‑learning id
PrintJobs: Print jobs
PrintLetterType: Letter type shorthand

MCActDummy: Placeholder
MCActResendEmail: Resend email copy
MCActResendEmailTooltip: Resend an unchanged copy of the email. Only previous recipients will officially be listed, but they will not receive another copy.
MCActResendEmailInfo n recv: #{n} #{noneOneMoreEN n "email copy" "email copy" "email copies"} were sent to #{recv} only.

CCActDummy: Placeholder