Revert "Merge branch '145-build-system-rewrite' into fradrive/merge-jost-new-build"

This reverts commit 0a6340844f, reversing
changes made to b4f3171257.
Steffen Jost 2025-02-14 11:51:20 +01:00
parent 0a6340844f
commit 98c2189b54
27 changed files with 294 additions and 858 deletions

View File

@@ -1,61 +0,0 @@
# SPDX-FileCopyrightText: 2025 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later
parameters:
- name: releaseTag
type: string
- name: releaseEndpoint
type: string
default: 'devfra'
values:
- 'devfra'
- 'prodfra'
jobs:
- job: Release
displayName: Release ${{ parameters.releaseTag }}
container:
image: devfra.azurecr.io/de.fraport.build/tools:1.1.0
endpoint: devfra
steps:
# Download required artifacts from pipeline
- task: DownloadPipelineArtifact@2
displayName: Download FraDrive binaries
inputs:
artifactName: Build_backend
patterns: 'Build_backend/bin/*'
targetPath: '$(Build.Repository.LocalPath)'
- task: Docker@2
displayName: Login to container registry
inputs:
command: login
containerRegistry: '${{ parameters.releaseEndpoint }}'
- task: Bash@3
displayName: Build FraDrive container
inputs:
targetType: inline
script: |
cp docker/fradrive/Dockerfile .
docker build \
--tag $(buildImageUpstream)/fradrive:$(Build.BuildNumber) \
--tag $(buildImageUpstream)/fradrive:${{parameters.releaseTag}} \
--build-arg FROM_IMG=devfra.azurecr.io/de.fraport.trusted/ubuntu \
--build-arg FROM_TAG=20.04 \
--build-arg PROJECT_DIR=$(Build.Repository.LocalPath) \
--build-arg IN_CI=true \
--build-arg IN_CONTAINER=true \
.
- task: Docker@2
displayName: Push container to registry
inputs:
command: push
repository: 'de.fraport.fradrive.build/fradrive'
tags: '$(Build.BuildNumber),${{parameters.releaseTag}}'
- task: Docker@2
displayName: Logout from container registry
inputs:
command: logout
containerRegistry: '${{ parameters.releaseEndpoint }}'
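# Illustrative usage of this job template (a sketch; the real invocation in
# azure-pipelines.yaml below derives the tag from the pushed ref):
# - template: .azure-pipelines/templates/jobs/release.yaml
#   parameters:
#     releaseTag: ${{split(variables['Build.SourceBranch'], '/')[2]}}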

View File

@@ -1,61 +0,0 @@
# SPDX-FileCopyrightText: 2024-2025 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later
parameters:
- name: serviceName
type: string
- name: dependenciesCaches
type: object
default: []
- name: dependenciesBuildPool
type: string
default: 'Prod Private Agent Pool'
values:
- 'Prod Private Agent Pool'
- 'Prod Private Agent Pool DS2'
- 'Prod Private Agent Pool DS3'
- name: dependenciesBuildCores
type: number
default: 1
- name: dependenciesBuildTimeout
type: number
default: 60
jobs:
- job: SetupDependencies_${{parameters.serviceName}}
displayName: Install ${{parameters.serviceName}} dependencies
dependsOn: SetupImage_${{parameters.serviceName}}
${{ if eq(variables.setupImages, true) }}:
condition: succeeded()
${{ else }}:
condition: always()
pool: '${{parameters.dependenciesBuildPool}}'
timeoutInMinutes: ${{parameters.dependenciesBuildTimeout}}
container:
${{ if variables.setupImages }}:
image: $(buildImageUpstream)/${{parameters.serviceName}}:$(Build.BuildNumber)
${{ else }}:
image: $(buildImageUpstream)/${{parameters.serviceName}}:latest
endpoint: devfra
env:
PROJECT_DIR: $(Build.Repository.LocalPath)
IN_CONTAINER: true
IN_CI: true
steps:
# Restore previously-built dependencies from caches
- ${{ each cache in parameters.dependenciesCaches }}:
- template: ./../../steps/cache.yaml
parameters:
cacheIdent: '${{parameters.serviceName}}-dependencies'
cacheKeys: '${{cache.key}}'
cachePath: '${{cache.path}}'
# Compile dependencies
- template: ./../../steps/make.yaml
parameters:
makeJob: dependencies
makeService: ${{parameters.serviceName}}
makeVars: 'CPU_CORES=${{parameters.dependenciesBuildCores}} STACK_CORES=-j${{parameters.dependenciesBuildCores}}'
# (Note: a post-job for updating the dependency cache is created automatically, so no further step is needed here.)
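# Roughly what the cache template expands to (a sketch, not the literal step;
# key/path values here are the frontend ones from azure-pipelines.yaml):
# - task: Cache@2
#   inputs:
#     key: '"frontend-dependencies" | package.json | package-lock.json'
#     path: node_modules/
# On a cache miss, Cache@2 itself appends the implicit "Post-job: Cache" step
# that uploads the path once the job succeeds.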

View File

@@ -1,72 +0,0 @@
# SPDX-FileCopyrightText: 2024-2025 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later
parameters:
- name: imageName
type: string
- name: imageBase
type: object
jobs:
- job: SetupImage_${{parameters.imageName}}
displayName: Build ${{parameters.imageName}} image
condition: eq(variables.setupImages, true)
container:
image: devfra.azurecr.io/de.fraport.build/tools:1.1.0
endpoint: devfra
steps:
- task: Docker@2
displayName: Login to container registry
inputs:
command: login
containerRegistry: devfra
- task: Bash@3
displayName: Build ${{parameters.imageName}} image
inputs:
targetType: inline
script: |
cp docker/${{parameters.imageName}}/Dockerfile .
docker build \
--tag $(buildImageUpstream)/${{parameters.imageName}}:$(Build.BuildNumber) \
--build-arg FROM_IMG=${{parameters.imageBase.image}} \
--build-arg FROM_TAG=${{parameters.imageBase.tag}} \
--build-arg HTTPS_PROXY=http://proxy.frankfurt-airport.de:8080 \
--build-arg HTTP_PROXY=http://proxy.frankfurt-airport.de:8080 \
--build-arg NO_PROXY='localhost,127.0.0.1,*.docker.internal,*.azmk8s.io,devfra.azurecr.io,devfra.westeurope.data.azurecr.io' \
--build-arg FRAPORT_NOPROXY=dev.azure.com,*.dev.azure.com,*.fraport.de,*.frankfurt-airport.de \
--build-arg PROJECT_DIR=$(Build.Repository.LocalPath) \
--build-arg IN_CI=true \
--build-arg IN_CONTAINER=true \
.
- task: Bash@3
displayName: Push ${{parameters.imageName}} image
inputs:
targetType: inline
script: |
docker push $(buildImageUpstream)/${{parameters.imageName}}:$(Build.BuildNumber)
- task: Bash@3
displayName: Update latest ${{parameters.imageName}} image
condition: or(eq(variables.forcePushLatest, true), eq(variables['Build.SourceBranch'], 'refs/heads/master'))
inputs:
targetType: inline
script: |
docker tag $(buildImageUpstream)/${{parameters.imageName}}:$(Build.BuildNumber) $(buildImageUpstream)/${{parameters.imageName}}:latest
docker push $(buildImageUpstream)/${{parameters.imageName}}:latest
- task: Bash@3
displayName: Save image for publication
inputs:
targetType: inline
script: |
docker image save --output=$(Build.ArtifactStagingDirectory)/${{parameters.imageName}}.tar $(buildImageUpstream)/${{parameters.imageName}}:$(Build.BuildNumber)
- task: PublishBuildArtifacts@1
displayName: Publish image as artifact
inputs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)'
ArtifactName: Image_${{parameters.imageName}}
publishLocation: 'Container'
- task: Docker@2
displayName: Logout from container registry
inputs:
command: logout
containerRegistry: devfra

View File

@@ -1,141 +0,0 @@
# SPDX-FileCopyrightText: 2024-2025 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later
parameters:
- name: serviceName
type: string
default: serviceName
- name: serviceBase
type: object
default:
image: baseImage
tag: baseImageTag
- name: servicePool
type: string
default: 'Prod Private Agent Pool'
- name: serviceTimeout
type: number
default: 60
# extraBuildOptions: ''
- name: serviceDependsOn
type: object
default: []
- name: serviceRequiredArtifacts
type: object
default: []
- name: serviceArtifacts
type: string
default: ''
- name: buildSteps
type: object
stages:
- stage: ${{ parameters.serviceName }}
dependsOn: ${{ parameters.serviceDependsOn }}
pool: '${{ parameters.servicePool }}'
jobs:
- job: ImageBuild_${{parameters.serviceName}}
displayName: Build ${{parameters.serviceName}} image
condition: or(eq(variables.forcePushLatest, true), eq(variables.onMasterBranch, true), eq(variables.onUpdateBranch, true))
container:
image: devfra.azurecr.io/de.fraport.build/tools:1.1.0
endpoint: devfra
steps:
- checkout: self
- task: Docker@2
displayName: Login to container registry
inputs:
command: login
containerRegistry: devFra
- script: |
ls -a .
pwd
find .
- task: Bash@3
displayName: Build ${{parameters.serviceName}} image
inputs:
targetType: inline
script: |
cp docker/${{parameters.serviceName}}/Dockerfile .
docker build \
--tag $(buildImageUpstream)/${{parameters.serviceName}}:$(Build.BuildNumber) \
--build-arg FROM_IMG=${{parameters.serviceBase.image}} \
--build-arg FROM_TAG=${{parameters.serviceBase.tag}} \
--build-arg HTTPS_PROXY=http://proxy.frankfurt-airport.de:8080 \
--build-arg HTTP_PROXY=http://proxy.frankfurt-airport.de:8080 \
--build-arg NO_PROXY='localhost,127.0.0.1,*.docker.internal,*.azmk8s.io,devfra.azurecr.io,devfra.westeurope.data.azurecr.io' \
--build-arg FRAPORT_NOPROXY=dev.azure.com,*.dev.azure.com,*.fraport.de,*.frankfurt-airport.de \
--build-arg PROJECT_DIR=$(Build.Repository.LocalPath) \
--build-arg IN_CI=true \
--build-arg IN_CONTAINER=true \
.
- task: Bash@3
displayName: Push ${{ parameters.serviceName }} image
inputs:
targetType: inline
script: |
docker push $(buildImageUpstream)/${{parameters.serviceName}}:$(Build.BuildNumber)
- task: Bash@3
displayName: Update latest ${{parameters.serviceName}} image
condition: or(eq(variables.forcePushLatest, true), eq(variables.onMasterBranch, true))
inputs:
targetType: inline
script: |
docker tag $(buildImageUpstream)/${{parameters.serviceName}}:$(Build.BuildNumber) $(buildImageUpstream)/${{parameters.serviceName}}:latest
docker push $(buildImageUpstream)/${{parameters.serviceName}}:latest
- task: Docker@2
displayName: Logout from container registry
inputs:
command: logout
containerRegistry: devFra
- job: Build_${{parameters.serviceName}}
displayName: Build ${{parameters.serviceName}}
dependsOn:
- ImageBuild_${{parameters.serviceName}}
condition: in(dependencies.ImageBuild_${{parameters.serviceName}}.result, 'Succeeded', 'Skipped')
timeoutInMinutes: ${{ parameters.serviceTimeout }}
container:
# TODO: use BuildNumber instead of latest in update branches
# image: devfra.azurecr.io/de.fraport.fradrive.build/frontend:$(Build.BuildNumber)
image: $(buildImageUpstream)/${{parameters.serviceName}}:latest
endpoint: devfra
env:
PROJECT_DIR: $(Build.Repository.LocalPath)
IN_CONTAINER: true
IN_CI: true
steps:
- checkout: self
- ${{ each dependency in parameters.serviceRequiredArtifacts }}:
- task: DownloadPipelineArtifact@2
displayName: Download artifacts from ${{ dependency.name }} dependency
continueOnError: ${{ dependency.continueOnError }}
condition: ${{ dependency.condition }}
inputs:
artifactName: ${{ dependency.artifact }}
source: ${{ dependency.source }}
project: 'Fahrerausbildung'
pipeline: $(System.DefinitionId)
buildVersionToDownload: '${{ dependency.version }}'
tags: '${{ dependency.artifact }}'
allowPartiallySucceededBuilds: true
allowFailedBuilds: true
patterns: '${{ dependency.patterns }}'
targetPath: '$(Build.Repository.LocalPath)'
- ${{ each buildStep in parameters.buildSteps }}:
- template: ./service/build-step.yaml
parameters:
service: ${{ parameters.serviceName }}
buildStep: ${{ buildStep }}
- task: CopyFiles@2
displayName: Copy ${{parameters.serviceName}} artifacts
inputs:
Contents: ${{ parameters.serviceArtifacts }}
TargetFolder: '$(Build.ArtifactStagingDirectory)'
- task: PublishBuildArtifacts@1
displayName: Publish ${{parameters.serviceName}} artifacts
inputs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)'
ArtifactName: '${{parameters.serviceName}}'
publishLocation: 'Container'

View File

@@ -1,15 +0,0 @@
# SPDX-FileCopyrightText: 2025 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later
parameters:
- name: artifactName
type: string
steps:
- task: DownloadPipelineArtifact@2
displayName: Download artifacts from ${{parameters.artifactName}}
inputs:
source: 'current'
artifactName: '${{parameters.artifactName}}'
targetPath: '$(Build.Repository.LocalPath)'

View File

@@ -1,18 +0,0 @@
# SPDX-FileCopyrightText: 2025 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later
parameters:
- name: cacheIdent
type: string
- name: cacheKeys
type: string
- name: cachePath
type: string
steps:
- task: Cache@2
displayName: Restore ${{parameters.cacheIdent}} cache
inputs:
key: '"${{parameters.cacheIdent}}" | ${{parameters.cacheKeys}}'
path: '${{parameters.cachePath}}'

View File

@@ -1,35 +0,0 @@
# SPDX-FileCopyrightText: 2024-2025 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later
parameters:
- name: makeJob
type: string
values:
- dependencies
- compile
- lint
- test
- name: makeService
type: string
values:
- frontend
- backend
- name: makeVars
type: string
default: ''
steps:
- task: Bash@3
name: ${{parameters.makeJob}}_${{parameters.makeService}}
displayName: make ${{parameters.makeJob}}-${{parameters.makeService}}
env:
HTTPS_PROXY: http://proxy.frankfurt-airport.de:8080
HTTP_PROXY: http://proxy.frankfurt-airport.de:8080
NO_PROXY: 'localhost,127.0.0.1,*.docker.internal,*.azmk8s.io,devfra.azurecr.io,devfra.westeurope.data.azurecr.io'
FRAPORT_NOPROXY: 'dev.azure.com,*.dev.azure.com,*.fraport.de,*.frankfurt-airport.de'
PROJECT_DIR: $(Build.Repository.LocalPath)
inputs:
targetType: inline
script: |
make -- --${{parameters.makeJob}}-${{parameters.makeService}} IN_CONTAINER=true IN_CI=true PROJECT_DIR=${PROJECT_DIR} ${{parameters.makeVars}}
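# Illustrative usage of this step template (values as azure-pipelines.yaml
# passes them for the backend service):
# - template: .azure-pipelines/templates/steps/make.yaml
#   parameters:
#     makeJob: compile
#     makeService: backend
#     makeVars: 'CPU_CORES=3 STACK_CORES=-j3'
# which runs: make -- --compile-backend IN_CONTAINER=true IN_CI=true PROJECT_DIR=... CPU_CORES=3 STACK_CORES=-j3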

.gitignore vendored (1 change)
View File

@@ -37,7 +37,6 @@ uniworx.nix
.kateproject
src/Handler/Assist.bak
src/Handler/Course.SnapCustom.hs
frontend/src/env.sass
*.orig
/instance
.stack-work-*

utils/version.pl → .gitlab-ci/version.pl Normal file → Executable file (88 changes)
View File

@@ -7,7 +7,7 @@ use Data::Dumper;
# Version changes:
# v[x].[y].[z] -- Main version number
# v[x].[y].[z]-test-[branchstring]-[num] -- test/branch/devel version number
# v[x].[y].[z]-test-[branchstring]-num -- test/branch/devel version number
# on main/master: biggest version so far, incremented according to the occurring changes
# on other branches: find the base version (be it branch string, old format, or main version number)
# and increment from there. Increment the version number, but on a global conflict use a new version number
@@ -52,12 +52,12 @@ my %parKinds = (
},
autokind=>{
arity=>1,
def=>'release/prod=v,release/*=t,*=t',
def=>'main=v,master=v,test=t,*=t',
help=>'determine the tag kind from branch name instead of fixed value; use the first fitting glob',
},
change=>{
arity=>1,
def=>'chore=patch,feat=minor,feature=minor,fix=patch,BREAK=major,perf=patch,refactor=patch,test=patch,style=patch,revert=patch,docs=patch,build=patch,ci=patch',
def=>'chore=patch,feat=minor,feature=minor,fix=patch,BREAK=major,perf=patch,refactor=patch,test=patch,style=patch,revert=null,docs=patch,build=null,ci=null',
help=>'how to react to each commit type; can be given partially. Actions are: "null", "major", "minor", "patch", or "invalid" to remove this type',
},
changelog=>{
@@ -138,7 +138,6 @@ if($par{autokind}) {
my @rules = split /,/, $par{autokind};
RULES: {
for my $r(@rules) {
warn "$0: Processing autokind rule '$r'\n" if $par{v};
if($r!~m#(.*)=(.*)#) {
die "$0: Bad rule in autokind: $r\n";
}
@@ -154,15 +153,14 @@
if($par{'v'}) {
warn "VERBOSE: Parameters\n";
print "VERBOSE: Parameters\n";
for my $k(sort keys %par) {
warn " $k: $par{$k}\n"
print " $k: $par{$k}\n"
}
}
my %typeReact = ();
for my $as(split /,/, $par{change}) {
warn "$0: processing change parameter '$as'\n" if $par{v};
if($as=~m#(.*)=(.*)#) {
$typeReact{$1} = $2;
} else {
@@ -226,35 +224,47 @@ sub parseVersion {
warn "$0: internal error (parseVersion called on undef at $c)\n";
return undef
}
my %cap = ();
if(
$v=~m#^(?<pre>[a-z]*)(?<ma>[0-9]+)$# ||
$v=~m#^(?<pre>[a-z]*)(?<ma>[0-9]+)\.(?<mi>[0-9]+)$# ||
$v=~m#^(?<pre>[a-z]*)(?<ma>[0-9]+)\.(?<mi>[0-9]+)\.(?<p>[0-9]+)$# ||
$v=~m#^(?<pre>[a-z]*)(?<ma>[0-9]+)\.(?<mi>[0-9]+)\.(?<p>[0-9]+)-test-(?<sp>(?<brn>[a-z]+)-?(?<brv>[0-9\.]+))$# ||
$v=~m#^(?<pre>[a-z]*)(?<ma>[0-9]+)\.(?<mi>[0-9]+)\.(?<p>[0-9]+)-(?<sp>.*)$#
) {
%cap = %+
# my ($pre,$ma,$mi,$p,$sp,$brn,$brv) = ();
my ($pre,$ma,$mi,$p,$sp,$brn,$brv) = ();
if($v=~m#^([a-z]*)([0-9]+)$#) {
$pre = $1;
$ma = $2;
} elsif($v=~m#^([a-z]*)([0-9]+)\.([0-9]+)$#) {
$pre = $1;
$ma = $2;
$mi = $3
} elsif($v=~m#^([a-z]*)([0-9]+)\.([0-9]+)\.([0-9]+)$#) {
$pre = $1;
$ma = $2;
$mi = $3;
$p = $4;
} elsif($v=~m#^([a-z]*)([0-9]+)\.([0-9]+)\.([0-9]+)-test-([a-z]+)-([0-9\.]+)$#) {
$pre = $1;
$ma = $2;
$mi = $3;
$p = $4;
$sp = $5;
$brn = $6;
$brv = $7;
} elsif($v=~m#^([a-z]*)([0-9]+)\.([0-9]+)\.([0-9]+)-(.*)$#) {
$pre = $1;
$ma = $2;
$mi = $3;
$p = $4;
$sp = $5;
} else {
warn "$0: unexpected old version number: $v\n" if $par{v};
return undef
}
$cap{pre} = 'v' if '' eq $cap{pre};
my %ret = (
prefix=>$cap{pre},
major=>$cap{ma},
minor=>$cap{mi},
patch=>$cap{p},
subpatch=>$cap{sp},
branchname=>$cap{brn},
branchversion=>$cap{brv},
);
if($par{v}) {
my $parsed = join '; ', map { "$_=>".($ret{$_}//'') } sort keys %ret;
warn "Version '$v' was parsed to '$parsed'\n"
$pre = 'v' if '' eq $pre;
return {
prefix=>$pre,
major=>$ma,
minor=>$mi,
patch=>$p,
subpatch=>$sp,
branchname=>$brn,
branchversion=>$brv,
}
return \%ret
}
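# Worked example, assuming the named-capture variant above:
# "v27.4.59-test-a0.0.3" (cf. CHANGELOG.md) parses to prefix=>'v', major=>27,
# minor=>4, patch=>59, subpatch=>'a0.0.3', branchname=>'a', branchversion=>'0.0.3'.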
#@oldVersions = sort {
@@ -294,7 +304,7 @@ sub vsCompare {
($v->{minor} // 0) <=> ($w->{minor} // 0) ||
($v->{patch} // 0) <=> ($w->{patch} // 0) ||
($v->{branchname} // '') cmp ($w->{branchname} // '') ||
($v->{branchversion} // 0) <=> ($w->{branchversion} // 0) ||
($v->{branchversion} // '') <=> ($w->{branchversion} // '') ||
($v->{subpatch} // '') cmp ($w->{subpatch} // '')
)
} elsif('v' eq $v->{prefix} and 'v' ne $w->{prefix}) {
@@ -362,7 +372,6 @@ if('-' eq $par{vcslog}) {
}
my @versions = ();
for my $v(@versionsOrig) {
warn "$0: Processing orig version (part 1): '$v'\n" if $par{v};
if($v=~m#^(.*?\S)\s*::::\s*(.*?)\s*::::\s*(.*)#) {
push @versions, {
hash => $1,
@@ -380,7 +389,6 @@ my $tag = undef;
my @versionPast = ();
VERSION: for my $v(@versions) {
warn "$0: Processing version (part 2): $v\n" if $par{v};
#if($v->{meta}=~m#tag\s*:\s*\Q$par{kind}\E(.*)\)#) {
# $tag=$1;
# last VERSION
@@ -409,7 +417,6 @@ VERSION: for my $v(@versions) {
#$tag = parseVersion($tag);
for my $r(reverse @change) {
warn "$0: Processing change: $r\n" if $par{v};
if('major' eq $r->{react}) {
$tag->{major}++;
$tag->{minor}=0;
@@ -456,7 +463,6 @@ my $highStart = $mainVersion ? $sortAll[0] : $sortSee[0];
my $highSee = $sortSee[0];
my %reactCollect = ();
SEARCHVERSION: for my $v(@versions) {
warn "$0: search for version: '$v'\n" if $par{v};
next unless $v->{version};
next unless $v->{react};
$reactCollect{$v->{react}} = 1;
@@ -468,12 +474,10 @@ SEARCHVERSION: for my $v(@versions) {
sub justVersionInc {
my ($v, $react) = @_;
my $vv = parseVersion($v);
$vv->{patch}++; # if $react->{patch}; # in principle a good idea to increase only when a patch action happened, but we need a new version even if nothing happened, so we always increase patch; if there are other changes as well, it is overwritten anyway
$vv->{patch}++ if $react->{patch};
do {$vv->{minor}++; $vv->{patch}=0} if $react->{minor};
do {$vv->{major}++; $vv->{minor}=0; $vv->{patch}=0} if $react->{major};
my $ret = vsJustVersion($vv);
warn "$0: version inc from '$v' to $ret\n" if $par{v};
return $ret
return vsJustVersion($vv);
}
my $newVersion = undef;
@@ -496,7 +500,6 @@ for(@allVersions) {
$allVersions{$_} = 1
}
while(exists $allVersions{$newVersion}) {
warn "$0: Version conflict, so we try another version, '$newVersion' exists already\n" if $par{v};
if($mainVersion) {
die "$0: probably internal error (collision in main version)\n"
}
@@ -526,7 +529,6 @@ if($par{changelog}) {
my %seen = ();
my @sects = ([]);
for(@changelog) {
warn "$0: Changelog processing: '$_'\n" if $par{v};
push @sects, [] if m/^## /;
push @{$sects[-1]}, $_;
if(m#/commit/([a-f0-9]+)\s*\)\s*\)\s*$#) {
@@ -540,7 +542,6 @@ if($par{changelog}) {
shift @sects;
}
for my $s(@sects) {
warn "$0: Changelog processing, section search: '$s'\n" if $par{v};
my $hh = $s->[0];
chomp $hh;
my $cnt = @$s;
@@ -565,7 +566,6 @@ if($par{changelog}) {
'feature' => 'Features',
);
SELECTCHANGELOG: for my $v(@versions) {
warn "$0: Changelog processing, version selection: '$v'\n" if $par{v};
last SELECTCHANGELOG if $seen{$v->{hash}};
next unless $v->{subject}=~m#^\s*([a-z]+)\s*(!?)\s*((?:\(.*?\))?)\s*:\s*(.*?)\s*$#i;
my ($kind, $break, $context, $msg) = ($1, $2, $3, $4);

View File

@@ -7,33 +7,33 @@ const standardVersionUpdaterYaml = require.resolve('standard-version-updater-yam
module.exports = {
scripts: {
// postbump: './sync-versions.hs && git add -- package.yaml', // moved to bumpFiles
postchangelog: 'sed \'s/^### \\[/## [/g\' -i CHANGELOG.md',
postchangelog: 'sed \'s/^### \\[/## [/g\' -i CHANGELOG.md'
},
packageFiles: ['package.json', 'package.yaml'],
bumpFiles: [
{
filename: 'package.json',
type: 'json',
type: 'json'
},
{
filename: 'package-lock.json',
type: 'json',
type: 'json'
},
{
filename: 'package.yaml',
updater: standardVersionUpdaterYaml,
updater: standardVersionUpdaterYaml
},
{
filename: 'nix/docker/version.json',
type: 'json',
type: 'json'
},
{
filename: 'nix/docker/demo-version.json',
type: 'json',
},
type: 'json'
}
],
commitUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/commit/{{hash}}',
compareUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/compare/{{previousTag}}...{{currentTag}}',
issueUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/uni2work/uni2work/issues/{{id}}',
userUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/{{user}}',
userUrlFormat: 'https://gitlab2.rz.ifi.lmu.de/{{user}}'
};

View File

@@ -2,48 +2,6 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
## [v27.4.59-test-e0.0.14](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-e0.0.13...v27.4.59-test-e0.0.14) (2025-02-13)
## [v27.4.59-test-e0.0.13](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-e0.0.12...v27.4.59-test-e0.0.13) (2025-02-12)
## [v27.4.59-test-e0.0.12](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-d0.0.12...v27.4.59-test-e0.0.12) (2025-02-12)
## [v27.4.59-test-d0.0.12](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-d0.0.11...v27.4.59-test-d0.0.12) (2025-02-11)
## [v27.4.59-test-d0.0.11](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-c0.0.11...v27.4.59-test-d0.0.11) (2025-02-11)
## [v27.4.59-test-c0.0.11](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-b0.0.11...v27.4.59-test-c0.0.11) (2025-02-11)
## [v27.4.59-test-b0.0.11](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-c0.0.10...v27.4.59-test-b0.0.11) (2025-02-11)
## [v27.4.59-test-c0.0.10](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-b0.0.10...v27.4.59-test-c0.0.10) (2025-02-11)
## [v27.4.59-test-b0.0.10](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.10...v27.4.59-test-b0.0.10) (2025-02-11)
## [v27.4.59-test-a0.0.10](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.9...v27.4.59-test-a0.0.10) (2025-02-11)
## [v27.4.59-test-a0.0.9](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.8...v27.4.59-test-a0.0.9) (2025-02-10)
## [v27.4.59-test-a0.0.8](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.7...v27.4.59-test-a0.0.8) (2025-02-10)
## [v27.4.59-test-a0.0.7](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.6...v27.4.59-test-a0.0.7) (2025-02-10)
## [v27.4.59-test-a0.0.6](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.5...v27.4.59-test-a0.0.6) (2025-02-08)
## [v27.4.59-test-a0.0.5](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.4...v27.4.59-test-a0.0.5) (2025-02-07)
## [v27.4.59-test-a0.0.4](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.3...v27.4.59-test-a0.0.4) (2025-02-07)
## [v27.4.59-test-a0.0.3](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.2...v27.4.59-test-a0.0.3) (2025-02-06)
## [v27.4.59-test-a0.0.2](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.1...v27.4.59-test-a0.0.2) (2025-02-05)
## [v27.4.59-test-a0.0.1](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59-test-a0.0.0...v27.4.59-test-a0.0.1) (2025-02-05)
### Bug Fixes
* **ghci:** ghci works now as expected ([c3117db](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive/commit/c3117dbdcd1de9ef9f0751afa45018e2ebce2c42))
## [v27.4.59-test-a0.0.0](https://fraport@dev.azure.com/fraport/Fahrerausbildung/_git/FRADrive//compare/v27.4.59...v27.4.59-test-a0.0.0) (2024-10-25)
### Features

View File

@@ -22,16 +22,16 @@ export CONTAINER_FILE
export CONTAINER_IDENT
export CF_PREFIX
export DEVELOP
export MOUNT_DIR=/mnt/fradrive
export CONTAINER_ATTACHED
export CONTAINER_INIT
export CONTAINER_CLEANUP
export PROJECT_DIR=/fradrive
export SERVICE
export SERVICE_VARIANT ?= $(SERVICE)
export JOB
export IMAGE
export SET_IMAGE
export CONTAINER_CMD
export SET_CONTAINER_CMD
export ENTRYPOINT
export EXEC_OPTS
@@ -65,7 +65,7 @@ help:
# HELP: stop all running containers and remove all compilation results in the directory (but leave images including dependencies unharmed)
clean:
rm -rf develop
-rm -rf node_modules .npm .cache assets/icons assets/favicons static well-known config/manifest.json frontend/src/env.sass
-rm -rf node_modules .npm .cache assets/icons assets/favicons static well-known config/manifest.json
-rm -rf .stack-work .stack-work.lock
-rm -rf bin .Dockerfile develop
-$(CONTAINER_COMMAND) container prune --force
@@ -79,13 +79,13 @@ clean-all: clean
.PHONY: release
# HELP: create, commit and push a new release
# TODO: only release when build and tests are passing!!!
release:
VERSION=`./utils/version.pl -changelog CHANGELOG.md -v` ; \
git add CHANGELOG.md ; \
git commit -m "chore(release): $${VERSION}" ; \
git tag $${VERSION} ; \
git push origin $${VERSION}
./.gitlab-ci/version.pl -changelog CHANGELOG.md
git add CHANGELOG.md
VERSION=`.gitlab-ci/version.pl`
git tag $${VERSION}
git commit -m "chore(release): $${VERSION}"
# git push
.PHONY: compile
compile:
@@ -103,24 +103,24 @@ start:
.PHONY: %-backend
%-backend: SERVICE=backend
%-backend: SERVICE_VARIANT=backend
%-backend: IMAGE=localhost/fradrive/backend
%-backend: CONTAINER_CMD=localhost/fradrive/backend
%-backend: BASE_PORTS = "DEV_PORT_HTTP=3000" "DEV_PORT_HTTPS=3443"
.PHONY: %-uniworxdb
%-uniworxdb: SERVICE=backend
%-uniworxdb: SERVICE_VARIANT=uniworxdb
%-uniworxdb: IMAGE=localhost/fradrive/backend
%-uniworxdb: CONTAINER_CMD=localhost/fradrive/backend
.PHONY: %-ghci
%-ghci: SERVICE=backend
%-ghci: SERVICE_VARIANT=ghci
%-ghci: IMAGE=localhost/fradrive/backend
%-ghci: CONTAINER_CMD=localhost/fradrive/backend
.PHONY: %-hoogle
%-hoogle: SERVICE=backend
%-hoogle: SERVICE_VARIANT=hoogle
%-hoogle: BASE_PORTS = "HOOGLE_PORT=8081"
%-hoogle: IMAGE=localhost/fradrive/backend
%-hoogle: CONTAINER_CMD=localhost/fradrive/backend
--start-hoogle:
HOOGLE_PORT=`cat $(CONTAINER_FILE) | grep 'HOOGLE_PORT=' | sed 's/HOOGLE_PORT=//'` ; \
stack $(STACK_CORES) hoogle -- server --local --port $${HOOGLE_PORT}
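# (Mechanism note: the %-variants above are GNU Make target-specific variables
# on pattern rules, so e.g. "make start-backend" matches %-backend and runs
# with SERVICE=backend and the backend image variable in effect.)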
@@ -128,24 +128,24 @@ start:
.PHONY: %-frontend
%-frontend: SERVICE=frontend
%-frontend: SERVICE_VARIANT=frontend
%-frontend: IMAGE=localhost/fradrive/frontend
%-frontend: CONTAINER_CMD=localhost/fradrive/frontend
.PHONY: %-postgres
%-postgres: SERVICE=postgres
%-postgres: SERVICE_VARIANT=postgres
%-postgres: BASE_PORTS = "PGPORT=5432"
%-postgres: IMAGE=localhost/fradrive/postgres
%-postgres: CONTAINER_CMD=localhost/fradrive/postgres
.PHONY: %-memcached
%-memcached: SERVICE=memcached
%-memcached: SERVICE_VARIANT=memcached
%-memcached: SET_IMAGE=$$(MEMCACHED_IMAGE) --port=`cat $$(CONTAINER_FILE) | grep 'MEMCACHED_PORT=' | sed 's/MEMCACHED_PORT=//'`
%-memcached: SET_CONTAINER_CMD=$$(MEMCACHED_IMAGE) --port=`cat $$(CONTAINER_FILE) | grep 'MEMCACHED_PORT=' | sed 's/MEMCACHED_PORT=//'`
%-memcached: BASE_PORTS = "MEMCACHED_PORT=11211"
.PHONY: %-minio
%-minio: SERVICE=minio
%-minio: SERVICE_VARIANT=minio
%-minio: SET_IMAGE=$$(MINIO_IMAGE) -- server `mktemp` --address=:`cat $$(CONTAINER_FILE) | grep 'UPLOAD_S3_PORT=' | sed 's/UPLOAD_S3_PORT=//'`
%-minio: SET_CONTAINER_CMD=$$(MINIO_IMAGE) -- server `mktemp` --address=:`cat $$(CONTAINER_FILE) | grep 'UPLOAD_S3_PORT=' | sed 's/UPLOAD_S3_PORT=//'`
%-minio: BASE_PORTS = "UPLOAD_S3_PORT=9000"
.PHONY: start-%
@@ -207,13 +207,18 @@ ghci: shell-ghci;
rebuild-%:
$(MAKE) -- --image-build SERVICE=$* NO_CACHE=--no-cache
--image-build:
ifeq "$(IMAGE)" "localhost/fradrive/$(SERVICE)"
ifeq "$(CONTAINER_CMD)" "localhost/fradrive/$(SERVICE)"
rm -f .Dockerfile
ln -s docker/$(SERVICE)/Dockerfile .Dockerfile
PROJECT_DIR=/fradrive; \
MOUNT_DIR=/mnt/fradrive; \
PROJECT_DIR=/mnt/fradrive; \
if [ "$(IN_CI)" == "true" ] ; then \
PROJECT_DIR=/fradrive; \
fi; \
if [ "$(IN_CONTAINER)" == "false" ] ; then \
$(CONTAINER_COMMAND) build $(NO_CACHE) \
-v $(PWD):$${PROJECT_DIR}:rw \
-v $(PWD):$${MOUNT_DIR} \
--build-arg MOUNT_DIR=$(MOUNT_DIR) \
--build-arg PROJECT_DIR=$${PROJECT_DIR} \
--env IN_CONTAINER=true \
--env JOB=$(JOB) \
@@ -228,22 +233,22 @@ endif
DEVELOP=`cat develop/.current` ; \
./utils/watchcontainerrun.sh "$(CONTAINER_COMMAND)" "$(CONTAINER_FILE)" "$(CONTAINER_INIT)" "$(CONTAINER_CLEANUP)" & \
CONTAINER_NAME=fradrive.$(CURR_DEV).$(CONTAINER_IDENT) ; \
if ! [ -z "$(SET_IMAGE)" ] ; \
if ! [ -z "$(SET_CONTAINER_CMD)" ] ; \
then \
IMAGE="$(SET_IMAGE)" ; \
CONTAINER_CMD="$(SET_CONTAINER_CMD)" ; \
else \
IMAGE=$(IMAGE) ; \
CONTAINER_CMD=$(CONTAINER_CMD) ; \
fi ; \
CONTAINER_ID=`$(CONTAINER_BGRUN) \
-v $(PWD):$(PROJECT_DIR):rw \
-v $(PWD):$(MOUNT_DIR) \
--env IN_CONTAINER=true \
--env FRADRIVE_MAKE_TARGET="--$(JOB)-$(SERVICE_VARIANT)" \
--env CONTAINER_FILE=$(CONTAINER_FILE) \
--env CONTAINER_NAME=$${CONTAINER_NAME} \
--env JOB=$(JOB) \
--env SRC=$(SRC) \
--name $${CONTAINER_NAME} \
$${IMAGE} \
make -- --$(JOB)-$(SERVICE_VARIANT) IN_CONTAINER=true \
$${CONTAINER_CMD} \
` ; \
printf "CONTAINER_ID=$${CONTAINER_ID}" >> "$(CONTAINER_FILE)" ; \
if [[ "true" == "$(CONTAINER_ATTACHED)" ]] ; then \
@@ -251,6 +256,7 @@ endif
fi
# For Reverse Proxy Problem see: https://groups.google.com/g/yesodweb/c/2EO53kSOuy0/m/Lw6tq2VYat4J
# HELP(start-backend): start development instance
--start-backend:
export YESOD_IP_FROM_HEADER=true; \
@@ -275,23 +281,22 @@ endif
--compile-backend:
stack build $(STACK_CORES) --fast --profile --library-profiling --executable-profiling --flag uniworx:-library-only $(--DEVELOPMENT) --local-bin-path $$(pwd)/bin --copy-bins
# HELP(dependencies-backend): (re-)build backend dependencies
--dependencies-backend: #uniworx.cabal
chown -R `id -un`:`id -gn` "$(PROJECT_DIR)"; \
stack build -j2 --only-dependencies
--dependencies-backend: uniworx.cabal
stack build $(STACK_CORES) --fast --only-dependencies
# HELP(lint-backend): lint backend
--lint-backend:
stack build $(STACK_CORES) --test --fast --flag uniworx:library-only $(--DEVELOPMENT) uniworx:test:hlint
# HELP(test-backend): test backend
--test-backend:
stack build $(STACK_CORES) --test --coverage --fast --flag uniworx:library-only $(--DEVELOPMENT)
# uniworx.cabal:
# stack exec -- hpack --force
uniworx.cabal:
stack exec -- hpack --force
# HELP(compile-frontend): compile frontend assets
--compile-frontend: node_modules assets esbuild.config.mjs frontend/src/env.sass
--compile-frontend: node_modules assets esbuild.config.mjs
npm run build
--start-frontend: --compile-frontend;
--dependencies-frontend: node_modules assets;
--dependencies-frontend: node_modules assets static well-known;
node_modules: package.json package-lock.json
npm install --cache .npm --prefer-offline
package-lock.json: package.json
@@ -303,15 +308,9 @@ assets/icons: node_modules assets/icons-src/fontawesome.json
./utils/renamer.pl node_modules/@fortawesome/fontawesome-free/svgs/solid assets/icons-src/fontawesome.json assets/icons/fradrive
./utils/renamer.pl node_modules/@fortawesome/fontawesome-free/svgs/regular assets/icons-src/fontawesome.json assets/icons/fradrive
-cp assets/icons-src/*.svg assets/icons/fradrive
frontend/src/env.sass:
echo "\$$path: '$${PROJECT_DIR}'" > frontend/src/env.sass
static: node_modules assets esbuild.config.mjs frontend/src/env.sass
static: node_modules assets esbuild.config.mjs
npm run build
well-known: static;
--lint-frontend: --compile-frontend
npm run lint
--test-frontend: --compile-frontend
npm run test
# HELP(compile-uniworxdb): clear and fill database. Requires a running postgres instance (use "make start-postgres" to start one)
# TODO (db-m-$MIGRATION-backend): apply migration (see src/Model/Migration/Definition.hs for list of available migrations)
@@ -320,7 +319,6 @@ well-known: static;
AVSPASS=${AVSPASS:-nopasswordset} ; \
./bin/uniworxdb $(UNIWORXDB_OPTS)
# HELP(shell-ghci): enter ghci shell. Use "make ghci SRC=<MODULE_FILE.hs>" to load specific source modules."
--shell-ghci:
stack ghci -- $(SRC)
# --main-is uniworx:exe:uniworx

azure-pipelines.yaml Executable file → Normal file (254 changes)
View File

@@ -1,197 +1,67 @@
# SPDX-FileCopyrightText: 2024-2025 Sarah Vaupel <sarah.vaupel@uniworx.de>
# SPDX-FileCopyrightText: 2024 Sarah Vaupel <sarah.vaupel@uniworx.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later
trigger:
branches:
include:
- '*'
tags:
include:
- '*'
#paths:
# exclude:
# - CHANGELOG.md
parameters:
- name: services
type: object
default:
- name: frontend
imageBase:
image: devfra.azurecr.io/de.fraport.build/npm
tag: node-20
# extraBuildOptions: |
# --build-arg NPM_CUSTOM_REGISTRY=https://pkgs.dev.azure.com/fraport/_packaging/packages/npm/registry/
dependsOn: []
dependenciesCaches:
- key: package.json | package-lock.json
path: node_modules/
- key: package.json | package-lock.json
path: .npm/
- key: package.json | esbuild.config.mjs | utils/renamer.pl | utils/faviconize.pl | frontend/src/icons.scss
path: assets/icons/
- key: package.json | esbuild.config.mjs | utils/renamer.pl | utils/faviconize.pl | frontend/src/icons.scss
path: assets/favicons/
buildPool: 'Prod Private Agent Pool'
buildCores: 1
buildTimeout: 60
buildArtifacts: |
assets/icons/fradrive/*.svg
assets/favicons/*.png
assets/favicons/include.html
frontend/src/env.sass
config/manifest.json
static/**/*
well-known/**/*
- name: backend
imageBase:
image: devfra.azurecr.io/de.fraport.build/haskell
tag: 8.10.4
dependsOn:
- Build_frontend
dependenciesCaches:
- key: stack.yaml | stack.yaml.lock
path: .stack/
buildPool: 'Prod Private Agent Pool DS3'
buildCores: 3
buildTimeout: 1440
buildArtifacts: |
bin/*
variables:
buildImageUpstream: devfra.azurecr.io/de.fraport.fradrive.build
setupImages: $[ or( eq(variables.forcePushLatest, true), eq(variables['Build.SourceBranch'], 'refs/heads/master'), startsWith(variables['Build.SourceBranch'], 'refs/heads/update'), startsWith(variables['Build.SourceBranch'], 'refs/tags/') ) ]
pool: 'Prod Private Agent Pool'
stages:
- stage: Setup
jobs:
- ${{ each service in parameters.services }}:
- template: .azure-pipelines/templates/jobs/setup_image.yaml
parameters:
imageName: ${{service.name}}
imageBase: ${{service.imageBase}}
- template: .azure-pipelines/templates/jobs/setup_dependencies.yaml
parameters:
serviceName: ${{service.name}}
dependenciesCaches: ${{service.dependenciesCaches}}
dependenciesBuildPool: ${{service.buildPool}}
dependenciesBuildCores: ${{service.buildCores}}
dependenciesBuildTimeout: ${{service.buildTimeout}}
- stage: Build
dependsOn: Setup
jobs:
- ${{ each service in parameters.services }}:
- job: Build_${{service.name}}
displayName: Compile ${{service.name}}
dependsOn: ${{service.dependsOn}}
pool: '${{service.buildPool}}'
timeoutInMinutes: ${{service.buildTimeout}}
container:
${{ if eq(variables.setupImages, true) }}:
image: $(buildImageUpstream)/${{service.name}}:$(Build.BuildNumber)
${{ else }}:
image: $(buildImageUpstream)/${{service.name}}:latest
endpoint: devfra
env:
PROJECT_DIR: $(Build.Repository.LocalPath)
IN_CONTAINER: true
IN_CI: true
steps:
- ${{ each dependencyCache in service.dependenciesCaches }}:
- template: .azure-pipelines/templates/steps/cache.yaml
parameters:
cacheIdent: '${{service.name}}-dependencies'
cacheKeys: '${{dependencyCache.key}}'
cachePath: '${{dependencyCache.path}}'
- ${{ each dependency in service.dependsOn }}:
- template: .azure-pipelines/templates/steps/artifact-download.yaml
parameters:
artifactName: '${{dependency}}'
- template: .azure-pipelines/templates/steps/make.yaml
parameters:
makeJob: compile
makeService: ${{service.name}}
makeVars: 'CPU_CORES=${{service.buildCores}} STACK_CORES=-j${{service.buildCores}}'
- task: CopyFiles@2
displayName: Prepare ${{service.name}} build artifacts for upload
inputs:
Contents: '${{service.buildArtifacts}}'
TargetFolder: '$(Build.ArtifactStagingDirectory)'
- task: PublishBuildArtifacts@1
displayName: Publish ${{service.name}} build artifacts
inputs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)'
ArtifactName: 'Build_${{service.name}}'
publishLocation: 'Container'
# - stage: Test
# dependsOn: Build
# condition: eq(variables.skipTests, false)
# jobs:
# - ${{ each service in parameters.services }}:
# - job: Test_${{service.name}}
# displayName: Run ${{service.name}} tests
# pool: '${{service.buildPool}}'
# timeoutInMinutes: ${{service.buildTimeout}}
# container:
# # TODO: do not use latest on update branches
# image: $(buildImageUpstream)/${{service.name}}:latest
# endpoint: devfra
# env:
# PROJECT_DIR: $(Build.Repository.LocalPath)
# IN_CONTAINER: true
# IN_CI: true
# steps:
# - ${{ each dependencyCache in service.dependenciesCaches }}:
# - template: .azure-pipelines/templates/steps/cache.yaml
# parameters:
# cacheIdent: '${{service.name}}-dependencies'
# cacheKeys: '${{dependencyCache.key}}'
# cachePath: '${{dependencyCache.path}}'
# - ${{ each dependency in service.dependsOn }}:
# - template: .azure-pipelines/templates/steps/artifact-download.yaml
# parameters:
# artifactName: '${{dependency}}'
# - task: Docker@2
# displayName: Login to container registry
# inputs:
# command: login
# containerRegistry: devfra
# - task: Bash@3
# displayName: Start database container for testing
# inputs:
# targetType: inline
# script: |
# docker run -d devfra.azurecr.io/de.fraport.trusted/postgres:16.1-bookworm
# - template: .azure-pipelines/templates/steps/make.yaml
# parameters:
# makeJob: lint
# makeService: ${{service.name}}
# makeVars: 'CPU_CORES=${{service.buildCores}} STACK_CORES=-j${{service.buildCores}}'
# - template: .azure-pipelines/templates/steps/make.yaml
# parameters:
# makeJob: test
# makeService: ${{service.name}}
# makeVars: 'CPU_CORES=${{service.buildCores}} STACK_CORES=-j${{service.buildCores}}'
# - task: Docker@2
# displayName: Logout from container registry
# inputs:
# command: logout
# containerRegistry: devfra
# - job: TestReport_${{service.name}}
# displayName: Upload test reports for ${{service.name}}
# steps:
# - script: echo "Work in progress" # TODO
- stage: Release
dependsOn: Build # TODO Test
condition: or(eq(variables.forceRelease, true), startsWith(variables['Build.SourceBranch'], 'refs/tags/'))
jobs:
- template: .azure-pipelines/templates/jobs/release.yaml
parameters:
releaseTag: ${{split(variables['Build.SourceBranch'], '/')[2]}}
jobs:
# - job: HelloWorld
# container:
# image: 'devfra.azurecr.io/de.fraport.trusted/ubuntu:22.04'
# endpoint: devfra
# steps:
# - script: echo Hello, world!
# displayName: 'Run a one-line script'
# - script: |
# echo Add other tasks to build, test, and deploy your project.
# echo See https://aka.ms/yaml
# displayName: 'Run a multi-line script'
- job: DockerTaskTest
container:
image: devfra.azurecr.io/de.fraport.build/tools:1.1.0
endpoint: devfra
steps:
- task: Docker@2
name: dockerLoginDevFra
displayName: Docker Login to devfra
inputs:
command: login
containerRegistry: devFra
- task: Docker@2
name: dockerBuild
displayName: Backend image build test
inputs:
command: build
Dockerfile: docker/backend/Dockerfile
buildContext: .
tags: |
$(Build.BuildNumber)
backend
arguments: |
--build-arg HTTPS_PROXY=http://proxy.frankfurt-airport.de:8080
--build-arg HTTP_PROXY=http://proxy.frankfurt-airport.de:8080
--build-arg NO_PROXY='localhost,127.0.0.1,*.docker.internal,*.azmk8s.io,devfra.azurecr.io,devfra.westeurope.data.azurecr.io'
--build-arg PROJECT_DIR=/fradrive
--build-arg MOUNT_DIR=/mnt/fradrive
# - job: BuildKitTest
# container:
# image: 'devfra.azurecr.io/de.fraport.trusted/buildkit:0.12.1'
# endpoint: devfra
# steps:
# - script: buildctl build \
# --frontend=dockerfile.v0 \
# --local context=. \
# --local dockerfile=docker/backend/Dockerfile
# displayName: BuildKit test
# - job: CustomBuildahTest
# container:
# image: 'devfra.azurecr.io/de.fraport.trusted/ubuntu:22.04'
# endpoint: devfra
# steps:
# - script: |
# id
# docker build --help
# sudo apt-get -y update
# sudo apt-get -y install buildah
# buildah bud -t fradrive-backend-test --volume .:/mnt/fradrive --file docker/backend/Dockerfile
# displayName: Build buildah image

View File

@@ -2,4 +2,4 @@
//
// SPDX-License-Identifier: AGPL-3.0-or-later
module.exports = {extends: ['@commitlint/config-conventional']};
module.exports = {extends: ['@commitlint/config-conventional']}

View File

@@ -1,38 +1,41 @@
ARG FROM_IMG=docker.io/library/haskell
ARG FROM_TAG=8.10.4
FROM docker.io/fpco/stack-build:lts-18.0
FROM ${FROM_IMG}:${FROM_TAG}
# add public key for nvidia cuda repositories
RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys A4B469963BF863CC
ENV LANG=de_DE.UTF-8
ENV LANG=en_US.UTF-8
# compile-time dependencies
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get -y update && apt-get install -y libpq-dev libsodium-dev
# RUN apt-get -y update && apt-get -y install llvm
# RUN apt-get -y update && apt-get -y install g++ libghc-zlib-dev libpq-dev libsodium-dev pkg-config
# RUN apt-get -y update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends tzdata
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get -y update && apt-get install -y --no-install-recommends locales locales-all
# RUN apt-get -y update && apt-get -y install locales locales-all
# run-time dependencies for uniworx binary
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get -y update && apt-get -y install fonts-roboto
# RUN apt-get -y update && apt-get -y install fonts-roboto
# RUN apt-get -y update && apt-get -y install pdftk
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get -y update && apt-get -y install texlive-latex-recommended texlive-luatex texlive-plain-generic texlive-lang-german texlive-lang-english
# RUN apt-get -y update && apt-get -y install texlive-base luatex
# locally these two should be identical, so that compilation results are written out into the file dir.
# in CI pipelines these two should be different, so that the container caches the compilation results.
ARG MOUNT_DIR=/mnt/fradrive
ARG PROJECT_DIR=/fradrive
ENV PROJECT_DIR=${PROJECT_DIR}
RUN mkdir -p "${PROJECT_DIR}"; chmod -R 7777 "${PROJECT_DIR}"
WORKDIR ${PROJECT_DIR}
ENV HOME=${PROJECT_DIR}
RUN mkdir -p "${PROJECT_DIR}"
RUN if [ "${PROJECT_DIR}" != "${MOUNT_DIR}" ] ; then cp -r "${MOUNT_DIR}"/* "${PROJECT_DIR}" ; fi
RUN mkdir -p "${PROJECT_DIR}/.stack"
ENV STACK_ROOT="${PROJECT_DIR}/.stack"
RUN if [ ! -z "${IN_CI}" ]; then \
stack install yesod-bin; \
stack install hpack; \
fi
WORKDIR ${PROJECT_DIR}
ENV HOME=${PROJECT_DIR}
RUN make -- --dependencies-backend STACK_ROOT=${STACK_ROOT} IN_CONTAINER=true JOB=${JOB}
RUN stack install yesod-bin
RUN stack install hpack
ENV FRADRIVE_MAKE_TARGET=--start-backend
ENTRYPOINT make -- ${FRADRIVE_MAKE_TARGET} STACK_ROOT="${STACK_ROOT}" IN_CONTAINER=true CONTAINER_FILE="${CONTAINER_FILE}" JOB="${JOB}" SRC="${SRC}"
EXPOSE 3000/tcp
EXPOSE 3443/tcp

docker/fradrive/Dockerfile Executable file → Normal file (29 changes)
View File

@@ -1,17 +1,16 @@
ARG FROM_IMG=docker.io/library/debian
ARG FROM_TAG=12.5
FROM debian:12.5
FROM ${FROM_IMG}:${FROM_TAG}
RUN apt-get -y update
# Setup locales
# RUN apt-get update && apt-get -y install locales locales-all
# RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && \
# locale-gen
# ENV LANG=en_US.UTF-8 LANGUAGE=en_US:en LC_ALL=en_US.UTF-8
# setup locales
RUN apt-get -y install locales locales-all
RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && \
locale-gen
ENV LANG=en_US.UTF-8 LANGUAGE=en_US:en LC_ALL=en_US.UTF-8
# FraDrive runtime dependencies
# Binary runtime dependencies
# TODO: minimize texlive dependencies, switch to basic schemes where possible
# RUN apt-get -y install texlive-latex-recommended texlive-latex-extra texlive-luatex texlive-fonts-recommended texlive-fonts-extra texlive-lang-english texlive-lang-german
RUN apt-get -y install texlive-latex-recommended texlive-latex-extra texlive-luatex texlive-fonts-recommended texlive-fonts-extra texlive-lang-english texlive-lang-german
# Add uniworx user and directories
RUN mkdir -p /var/lib
@@ -21,16 +20,12 @@ RUN useradd -r -g uniworx -d /var/lib/uniworx -M uniworx --uid 999
RUN mkdir -p /var/lib/uniworx && chown -R uniworx:uniworx /var/lib/uniworx
RUN mkdir -p /var/log/uniworx && chown -R uniworx:uniworx /var/log/uniworx
# Install FraDrive binaries
# TODO: is this still needed?
# RUN install -d -g uniworx -o uniworx -m 0750 /var/lib/uniworx
# RUN install -d -g uniworx -o uniworx -m 0755 /var/log/uniworx
COPY ./bin/uniworx /usr/bin/uniworx
COPY ./bin/uniworxdb /usr/bin/uniworxdb
# COPY uniworxload /usr/bin/uniworx
COPY ./docker/fradrive/fradrive-entrypoint.sh /entrypoint.sh
RUN chmod 777 /entrypoint.sh
RUN cp /tmp/uniworx-bin/uniworx /usr/bin/uniworx
USER uniworx
ENTRYPOINT /entrypoint.sh
ENTRYPOINT fradrive-entrypoint.sh
EXPOSE 8080/tcp
VOLUME /var/lib/uniworx /var/log

View File

@@ -1,29 +1,32 @@
ARG FROM_IMG=docker.io/library/node
ARG FROM_TAG=20
FROM debian:12.5
FROM ${FROM_IMG}:${FROM_TAG}
# Basic dependencies
RUN apt-get -y update && apt-get -y install curl npm
ENV LANG=de_DE.UTF-8
ENV LANGUAGE=de_DE.UTF-8
# build and watch dependencies
# Build and watch dependencies
RUN apt-get -y update && apt-get -y install exiftool
RUN apt-get -y update && apt-get -y install imagemagick
# test dependencies
# RUN apt-get -y update && apt-get -y install chromium
# ENV CHROME_BIN=chromium
# Test dependencies
RUN apt-get -y update && apt-get -y install chromium
ENV CHROME_BIN=chromium
# configure npm to use given proxy if specified
RUN if [ ! -z "${HTTP_PROXY}" ]; then npm config set proxy ${HTTP_PROXY}; fi
RUN if [ ! -z "${FRAPORT_NOPROXY}" ]; then npm config set noproxy "${FRAPORT_NOPROXY}"; fi
ENV NODE_EXTRA_CA_CERTS="/etc/ssl/certs/ca-certificates.crt"
# TODO: use dotenv for npm version?
RUN npm install -g n
RUN n 20.17.0
ENV PROJECT_DIR=/fradrive
# locally these two should be identical, so that compilation results are written out into the file dir.
# in CI pipelines these two should be different, so that the container caches the compilation results.
ARG MOUNT_DIR=/mnt/fradrive
ARG PROJECT_DIR=/fradrive
RUN mkdir -p ${PROJECT_DIR}
RUN if [ "${PROJECT_DIR}" != "${MOUNT_DIR}" ] ; then cp -r ${MOUNT_DIR}/* ${PROJECT_DIR} ; fi
WORKDIR ${PROJECT_DIR}
ENV HOME=${PROJECT_DIR}
RUN if [ ! -z "${NPM_CUSTOM_REGISTRY}" ]; then \
printf 'registry=${NPM_CUSTOM_REGISTRY}' > .npmrc \
; fi
#RUN make node_modules IN_CONTAINER=true
#RUN make well-known IN_CONTAINER=true
RUN make -- static
ENV FRADRIVE_MAKE_TARGET=start-frontend
ENTRYPOINT make -- ${FRADRIVE_MAKE_TARGET} IN_CONTAINER=true CHROME_BIN=${CHROME_BIN}
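# Sketch of the MOUNT_DIR/PROJECT_DIR split described above (values as set by
# the Makefile's --image-build target):
#   local build: PROJECT_DIR=/mnt/fradrive and MOUNT_DIR=/mnt/fradrive,
#                so compilation results land directly in the mounted checkout;
#   CI build:    PROJECT_DIR=/fradrive but MOUNT_DIR=/mnt/fradrive,
#                so sources are copied in and results stay in the image cache.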

docker/podman/Dockerfile Normal file (33 changes)
View File

@@ -0,0 +1,33 @@
# Debian-based podman daemon image for building docker images
# inside docker containers (e.g. gitlab runners).
#
# Yoinked with love from:
# https://www.redhat.com/sysadmin/podman-inside-container
FROM debian:12.5
RUN apt-get -y update
RUN apt-get -y install make podman podman-compose fuse-overlayfs
RUN useradd podman; \
echo podman:10000:5000 > /etc/subuid; \
echo podman:10000:5000 > /etc/subgid;
VOLUME /var/lib/containers
VOLUME /home/podman/.local/share/containers
ADD https://raw.githubusercontent.com/containers/image_build/main/podman/containers.conf /etc/containers/containers.conf
ADD https://raw.githubusercontent.com/containers/image_build/main/podman/podman-containers.conf /home/podman/.config/containers/containers.conf
RUN chown podman:podman -R /home/podman
# chmod containers.conf and adjust storage.conf to enable Fuse storage.
# RUN chmod 644 /etc/containers/containers.conf; sed -i -e 's|^#mount_program|mount_program|g' -e '/additionalimage.*/a "/var/lib/shared",' -e 's|^mountopt[[:space:]]*=.*$|mountopt = "nodev,fsync=0"|g' /etc/containers/containers.conf
# RUN echo -e '[storage]\ndriver="zfs"\nmount_program="zfs"\nadditionalimage=/var/lib/shared\nmountopt="nodev,fsync=0"' >> /etc/containers/containers.conf
RUN chmod 644 /etc/containers/containers.conf
RUN echo '[storage]\ndriver="overlay"\n[storage.options.overlay]\nforce_mask="private"\nmount_program="/usr/bin/fuse-overlayfs"\nmountopt="nodev"' >> /etc/containers/containers.conf
RUN mkdir -p /root/.config/containers/ && echo '[storage]\ndriver="overlay"\n[storage.options.overlay]\nforce_mask="private"\nmount_program="/usr/bin/fuse-overlayfs"\nmountopt="nodev"' > /root/.config/containers/storage.conf
RUN mkdir -p /var/lib/shared/overlay-images /var/lib/shared/overlay-layers /var/lib/shared/vfs-images /var/lib/shared/vfs-layers; touch /var/lib/shared/overlay-images/images.lock; touch /var/lib/shared/overlay-layers/layers.lock; touch /var/lib/shared/vfs-images/images.lock; touch /var/lib/shared/vfs-layers/layers.lock
ENV _CONTAINERS_USERNS_CONFIGURED=""
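# Typical use of an image like this (a sketch; the image name and build
# command are assumed, not taken from this repo):
#   docker run --rm --privileged -v "$PWD":/src -w /src localhost/fradrive/podman \
#     podman build -t fradrive-backend -f docker/backend/Dockerfile .
# --privileged is required so the nested podman can set up fuse-overlayfs.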

View File

@@ -7,8 +7,8 @@ import svgPlugin from 'esbuild-plugin-svg-bundle';
import { copy } from 'esbuild-plugin-copy';
// import manifestPlugin from 'esbuild-plugin-manifest';
import manifestPlugin from 'esbuild-plugin-assets-manifest';
// import copyWithHashPlugin from '@enonic/esbuild-plugin-copy-with-hash';
// import inlineImportPlugin from 'esbuild-plugin-inline-import';
import copyWithHashPlugin from '@enonic/esbuild-plugin-copy-with-hash';
import inlineImportPlugin from 'esbuild-plugin-inline-import';
import { nodeModulesPolyfillPlugin } from 'esbuild-plugins-node-modules-polyfill';
const staticDir = './static';

View File

@@ -5,7 +5,7 @@ import babelParser from "@babel/eslint-parser";
export default [
js.configs.recommended,
{
files: ["frontend/src/**/*.js"],
files: ["**/*.js"],
plugins: {},
languageOptions: {
ecmaVersion: 2018,

View File

@@ -1,9 +1,8 @@
// SPDX-FileCopyrightText: 2024-2025 David Mosbach <david.mosbach@uniworx.de>, Sarah Vaupel <sarah.vaupel@uniworx.de>
// SPDX-FileCopyrightText: 2024 David Mosbach <david.mosbach@uniworx.de>, Sarah Vaupel <sarah.vaupel@uniworx.de>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
// SPDX-License-Identifier: LicenseRef-Fraport-Corporate-Design
@import 'env';
$ico-width: 15px;
@@ -110,7 +109,7 @@ $icons: new,
@each $name in $icons {
.ico-#{$name} {
background-image: url('#{$path}/assets/icons/fradrive/#{$name}.svg');
background-image: url('/mnt/fradrive/assets/icons/fradrive/#{$name}.svg');
background-size: contain;
background-repeat: no-repeat;
background-position: center;

View File

@@ -1,4 +1,4 @@
// SPDX-FileCopyrightText: 2022-2025 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor.kleen@ifi.lmu.de>,Johannes Eder <ederj@cip.ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>
// SPDX-FileCopyrightText: 2022-2024 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor.kleen@ifi.lmu.de>,Johannes Eder <ederj@cip.ifi.lmu.de>,Sarah Vaupel <sarah.vaupel@ifi.lmu.de>,Sarah Vaupel <vaupel.sarah@campus.lmu.de>
//
// SPDX-License-Identifier: AGPL-3.0-or-later
@@ -499,14 +499,14 @@ function encrypt(plaintext, key) {
if (!key) throw new Error('Cannot encrypt plaintext without a valid key!');
// TODO use const if possible
// let plaintextB = Buffer.from(plaintext);
// let cipherB = Buffer.alloc(plaintextB.length + sodium.crypto_secretbox_MACBYTES);
// let nonceB = Buffer.alloc(sodium.crypto_secretbox_NONCEBYTES);
// let keyB = Buffer.from(key);
let plaintextB = Buffer.from(plaintext);
let cipherB = Buffer.alloc(plaintextB.length + sodium.crypto_secretbox_MACBYTES);
let nonceB = undefined; // Buffer.alloc(sodium.crypto_secretbox_NONCEBYTES);
let keyB = Buffer.from(key);
// sodium.crypto_secretbox_easy(cipherB, plaintextB, nonceB, keyB);
const result = null; // cipherB;
const result = cipherB;
console.log('encrypt result', result);
return result;
}
@@ -519,10 +519,10 @@ function decrypt(ciphertext, key) {
if (!key) throw new Error('Cannot decrypt ciphertext without a valid key!');
// TODO use const if possible
// let cipherB = Buffer.from(ciphertext);
let plaintextB = null; // Buffer.alloc(cipherB.length - sodium.crypto_secretbox_MACBYTES);
// let nonceB = undefined; Buffer.alloc(sodium.crypto_secretbox_NONCEBYTES);
// let keyB = Buffer.from(key);
let cipherB = Buffer.from(ciphertext);
let plaintextB = undefined; Buffer.alloc(cipherB.length - sodium.crypto_secretbox_MACBYTES);
let nonceB = undefined; Buffer.alloc(sodium.crypto_secretbox_NONCEBYTES);
let keyB = Buffer.from(key);
// sodium.crypto_secretbox_open_easy(plaintextB, cipherB, nonceB, keyB);

View File

@@ -2,7 +2,7 @@
//
// SPDX-License-Identifier: AGPL-3.0-or-later
/* eslint-disable */
module.exports = function(config) {
config.set({
//root path location to resolve paths defined in files and exclude

package-lock.json generated (61 changes)
View File

@@ -68,7 +68,6 @@
"karma-browserify": "^8.1.0",
"karma-chrome-launcher": "^3.2.0",
"karma-cli": "^2.0.0",
"karma-esbuild": "^2.3.0",
"karma-jasmine": "^5.1.0",
"karma-jasmine-html-reporter": "^2.1.0",
"karma-mocha-reporter": "^2.2.5",
@@ -5815,9 +5814,9 @@
"license": "MIT"
},
"node_modules/cookie": {
"version": "0.7.2",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz",
"integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==",
"version": "0.4.2",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.2.tgz",
"integrity": "sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA==",
"dev": true,
"license": "MIT",
"engines": {
@@ -5983,9 +5982,9 @@
}
},
"node_modules/cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
"integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
"version": "7.0.3",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
"integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -6879,9 +6878,9 @@
"license": "ISC"
},
"node_modules/elliptic": {
"version": "6.6.1",
"resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.6.1.tgz",
"integrity": "sha512-RaddvvMatK2LJHqFJ+YA4WysVN5Ita9E35botqIYspQ4TkRAlCicdzKOjlyv/1Za5RyTNn7di//eEV0uTAfe3g==",
"version": "6.5.7",
"resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.7.tgz",
"integrity": "sha512-ESVCtTwiA+XhY3wyh24QqRGBoP3rEdDUl3EDUUo9tft074fi19IrdpH7hLCMMP3CIj7jb3W96rn8lt/BqIlt5Q==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -6939,9 +6938,9 @@
}
},
"node_modules/engine.io": {
"version": "6.6.2",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.6.2.tgz",
"integrity": "sha512-gmNvsYi9C8iErnZdVcJnvCpSKbWTt1E8+JZo8b+daLninywUWi5NQ5STSHZ9rFjFO7imNcvb8Pc5pe/wMR5xEw==",
"version": "6.5.5",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.5.5.tgz",
"integrity": "sha512-C5Pn8Wk+1vKBoHghJODM63yk8MvrO9EWZUfkAt5HAqIgPE4/8FF0PEGHXtEd40l223+cE5ABWuPzm38PHFXfMA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -6950,7 +6949,7 @@
"@types/node": ">=10.0.0",
"accepts": "~1.3.4",
"base64id": "2.0.0",
"cookie": "~0.7.2",
"cookie": "~0.4.1",
"cors": "~2.8.5",
"debug": "~4.3.1",
"engine.io-parser": "~5.2.1",
@@ -10114,20 +10113,6 @@
"node": ">= 6"
}
},
"node_modules/karma-esbuild": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/karma-esbuild/-/karma-esbuild-2.3.0.tgz",
"integrity": "sha512-iW3DjSGohEEkufSDmXRPZP7CNP0ye+Xt8fBCcenLqPL2u8+VHZYwlzwYyfs60vjhdf1i04xekhzI7gu8as1CLg==",
"dev": true,
"license": "MIT",
"dependencies": {
"chokidar": "^3.5.1",
"source-map": "0.6.1"
},
"peerDependencies": {
"esbuild": ">=0.17.0"
}
},
"node_modules/karma-jasmine": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/karma-jasmine/-/karma-jasmine-5.1.0.tgz",
@@ -10940,9 +10925,9 @@
"license": "MIT"
},
"node_modules/nanoid": {
"version": "3.3.8",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz",
"integrity": "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==",
"version": "3.3.7",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz",
"integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==",
"dev": true,
"funding": [
{
@@ -11226,9 +11211,9 @@
}
},
"node_modules/npm-run-all/node_modules/cross-spawn": {
"version": "6.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.6.tgz",
"integrity": "sha512-VqCUuhcd1iB+dsv8gxPttb5iZh/D0iubSP21g36KXdEuf6I5JiioesUVjpCdHV9MZRUfVFlvwtIUyPfxo5trtw==",
"version": "6.0.5",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz",
"integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -16946,9 +16931,9 @@
}
},
"node_modules/socket.io": {
"version": "4.8.1",
"resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.8.1.tgz",
"integrity": "sha512-oZ7iUCxph8WYRHHcjBEc9unw3adt5CmSNlppj/5Q4k2RIrhl8Z5yY2Xr4j9zj0+wzVZ0bxmYoGSzKJnRl6A4yg==",
"version": "4.7.5",
"resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.7.5.tgz",
"integrity": "sha512-DmeAkF6cwM9jSfmp6Dr/5/mfMwb5Z5qRrSXLpo3Fq5SqyU8CMF15jIN4ZhfSwu35ksM1qmHZDQ/DK5XTccSTvA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -16956,7 +16941,7 @@
"base64id": "~2.0.0",
"cors": "~2.8.5",
"debug": "~4.3.2",
"engine.io": "~6.6.0",
"engine.io": "~6.5.2",
"socket.io-adapter": "~2.5.2",
"socket.io-parser": "~4.2.4"
},

View File

@@ -54,7 +54,6 @@
"karma-browserify": "^8.1.0",
"karma-chrome-launcher": "^3.2.0",
"karma-cli": "^2.0.0",
"karma-esbuild": "^2.3.0",
"karma-jasmine": "^5.1.0",
"karma-jasmine-html-reporter": "^2.1.0",
"karma-mocha-reporter": "^2.2.5",
@@ -90,9 +89,6 @@
},
"scripts": {
"build": "node esbuild.config.mjs",
"start": "node esbuild.config.mjs --watch",
"lint": "eslint --config eslint.config.mjs --color frontend/src",
"lintfix": "eslint --config eslint.config.mjs --color --fix frontend/src",
"test": "echo \"karma-testsuite currently disabled, reporting success\""
"start": "node esbuild.config.mjs --watch"
}
}

View File

@@ -1,7 +1,6 @@
-- {-# LANGUAGE BangPatterns #-}
{-# OPTIONS_GHC -Wwarn #-}
{-# OPTIONS_GHC -Wwarn -fno-warn-orphans #-}
-- SPDX-FileCopyrightText: 2024-2025 Sarah Vaupel <sarah.vaupel@uniworx.de>
-- SPDX-FileCopyrightText: 2024 Sarah Vaupel <sarah.vaupel@uniworx.de>
--
-- SPDX-License-Identifier: AGPL-3.0-or-later
@@ -64,8 +63,9 @@ foreign import ccall unsafe "sodium_bin2hex"
bin2hex :: ByteString -> String
bin2hex bs = let tlen = S.length bs * 2 + 1 in
S8.unpack . S8.init . snd . buildUnsafeByteString tlen $ \t ->
constByteStrings [bs] $ \[(pbs, _)] ->
c_sodium_bin2hex t (fromIntegral tlen) pbs (fromIntegral $ S.length bs)
let aux [(pbs, _)] = c_sodium_bin2hex t (fromIntegral tlen) pbs (fromIntegral $ S.length bs)
aux _ = error "Crypto.Saltine.Instances.bin2hex reached an impossible computation path"
in constByteStrings [bs] aux
instance Show Key where
show k = "SecretBox.Key {hashesTo = \"" <> (bin2hex . shorthash nullShKey $ encode k) <> "}\""

View File

@@ -1,4 +1,4 @@
-- SPDX-FileCopyrightText: 2022-2025 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor.kleen@ifi.lmu.de>, Steffen Jost <s.jost@fraport.de>
-- SPDX-FileCopyrightText: 2022-2024 Sarah Vaupel <sarah.vaupel@uniworx.de>, Gregor Kleen <gregor.kleen@ifi.lmu.de>, Steffen Jost <s.jost@fraport.de>
--
-- SPDX-License-Identifier: AGPL-3.0-or-later