Compare commits

...

10 commits

Author SHA1 Message Date
2acb480283
ilot/*: bump pkgrel
Some checks failed
/ lint (pull_request) Successful in 30s
/ deploy-x86_64 (pull_request) Has been skipped
/ build-x86_64 (pull_request) Failing after 39s
/ deploy-aarch64 (pull_request) Has been skipped
/ build-aarch64 (pull_request) Failing after 1m5s
2024-08-24 21:15:16 -04:00
113cb11e23
forgejo-ci: initial 2024-08-24 21:14:02 -04:00
761467d6f7
gitlab-ci: drop 2024-08-24 21:13:35 -04:00
57592c6060
README: update 2024-08-09 22:40:23 -04:00
c4417ce32c
ilot/wikijs: new aport 2024-08-09 22:36:23 -04:00
3951396a15
ilot/uptime-kuma: new aport 2024-08-09 22:36:21 -04:00
75f60c7d23
ilot/py3-tenant-schemas-celery: new aport 2024-08-09 22:36:19 -04:00
29fe09187e
ilot/py3-scim2-filter-parser: new aport 2024-08-09 22:36:16 -04:00
0977a13904
ilot/py3-django-tenants: new aport 2024-08-09 22:36:14 -04:00
989f7c21ed
ilot/py3-django-rest-framework: new aport 2024-08-09 22:36:11 -04:00
37 changed files with 4560 additions and 1356 deletions

34
.forgejo/bin/deploy.sh Executable file
View file

@ -0,0 +1,34 @@
#!/bin/sh
# shellcheck disable=SC3040
set -eu -o pipefail
readonly REPOS="backports ilot"
readonly BASEBRANCH=$GITHUB_BASE_REF
readonly TARGET_REPO=$CI_ALPINE_REPO
apkgs=$(find package -type f -name "*.apk")
for apk in $apkgs; do
branch=$(echo $apk | awk -F '/' '{print $2}')
arch=$(echo $apk | awk -F '/' '{print $3}')
name=$(echo $apk | awk -F '/' '{print $4}')
if [ "$(curl -s $GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/pulls/$GITHUB_EVENT_NUMBER | jq .draft)" == "true" ]; then
# if draft, send to -testing branch
branch="$branch-testing"
else
# if not draft, assume that this was sent to $branch-testing and nuke it
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN -X DELETE $TARGET_REPO/$BASEBRANCH/$branch-testing/$arch/$name
fi
echo "Sending $name of arch $arch to $TARGET_REPO/$BASEBRANCH/$branch"
return=$(curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN --upload-file $apk $TARGET_REPO/$BASEBRANCH/$branch 2>&1)
echo $return
if [ "$return" == "package file already exists" ]; then
echo "Package already exists, refreshing..."
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN -X DELETE $TARGET_REPO/$BASEBRANCH/$branch/$arch/$name
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN --upload-file $apk $TARGET_REPO/$BASEBRANCH/$branch
fi
done
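For reference (editor's sketch, not part of the commit): deploy.sh expects the environment the Forgejo workflows export and a package/<repo>/<arch>/<file>.apk tree produced by the download-artifact step. Run outside CI it would look roughly like this, with every value below a placeholder:
#!/bin/sh
# Hypothetical local invocation of the deploy script; all values are placeholders.
export CI_ALPINE_REPO="https://forge.ilot.io/api/packages/ilot/alpine"
export FORGE_REPO_USER="ci-bot" FORGE_REPO_TOKEN="********"
export GITHUB_SERVER_URL="https://forge.ilot.io" GITHUB_REPOSITORY="ilot/iports"
export GITHUB_EVENT_NUMBER="1" GITHUB_BASE_REF="edge"
mkdir -p package/ilot/x86_64   # layout the find/awk field parsing relies on
# cp /path/to/built/*.apk package/ilot/x86_64/
sh .forgejo/bin/deploy.sh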

View file

@ -0,0 +1,66 @@
diff --git a/usr/local/bin/build.sh.orig b/usr/local/bin/build.sh
old mode 100644
new mode 100755
index c3b8f7a..f609018
--- a/usr/local/bin/build.sh.orig
+++ b/usr/local/bin/build.sh
@@ -7,13 +7,15 @@
set -eu -o pipefail
readonly APORTSDIR=$CI_PROJECT_DIR
-readonly REPOS="main community testing non-free"
+readonly REPOS="ilot backports"
+readonly ALPINE_REPOS="main community testing"
readonly ARCH=$(apk --print-arch)
# gitlab variables
readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
: "${REPODEST:=$HOME/packages}"
-: "${MIRROR:=https://dl-cdn.alpinelinux.org/alpine}"
+: "${MIRROR:=https://forge.ilot.io/api/packages/ilot/alpine}"
+: "${ALPINE_MIRROR:=http://dl-cdn.alpinelinux.org/alpine}"
: "${MAX_ARTIFACT_SIZE:=300000000}" #300M
: "${CI_DEBUG_BUILD:=}"
@@ -68,8 +70,8 @@ report() {
get_release() {
case $BASEBRANCH in
- *-stable) echo v"${BASEBRANCH%-*}";;
- master) echo edge;;
+ v*) echo "$BASEBRANCH";;
+ edge) echo edge;;
*) die "Branch \"$BASEBRANCH\" not supported!"
esac
}
@@ -101,11 +103,11 @@ set_repositories_for() {
release=$(get_release)
for repo in $REPOS; do
[ "$repo" = "non-free" ] && continue
- [ "$release" != "edge" ] && [ "$repo" == "testing" ] && continue
+ [ "$release" == "edge" ] && [ "$repo" == "backports" ] && continue
repos="$repos $MIRROR/$release/$repo $REPODEST/$repo"
[ "$repo" = "$target_repo" ] && break
done
- doas sh -c "printf '%s\n' $repos > /etc/apk/repositories"
+ doas sh -c "printf '%s\n' $repos >> /etc/apk/repositories"
doas apk update
}
@@ -118,7 +120,15 @@ apply_offset_limit() {
}
setup_system() {
- doas sh -c "echo $MIRROR/$(get_release)/main > /etc/apk/repositories"
+ local repos='' repo=''
+ local release
+
+ release=$(get_release)
+ for repo in $ALPINE_REPOS; do
+ [ "$release" != "edge" ] && [ "$repo" == "testing" ] && continue
+ repos="$repos $ALPINE_MIRROR/$release/$repo"
+ done
+ doas sh -c "printf '%s\n' $repos > /etc/apk/repositories"
doas apk -U upgrade -a || apk fix || die "Failed to up/downgrade system"
abuild-keygen -ain
doas sed -i -E 's/export JOBS=[0-9]+$/export JOBS=$(nproc)/' /etc/abuild.conf
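As an illustration only (not part of the patch): with these hunks applied, setup_system() writes the Alpine mirrors to /etc/apk/repositories and set_repositories_for() then appends the ilot mirror plus the local REPODEST, so an edge build of the ilot repo would end up with roughly this file:
# /etc/apk/repositories after setup, illustrative values only
http://dl-cdn.alpinelinux.org/alpine/edge/main
http://dl-cdn.alpinelinux.org/alpine/edge/community
http://dl-cdn.alpinelinux.org/alpine/edge/testing
https://forge.ilot.io/api/packages/ilot/alpine/edge/ilot
$HOME/packages/ilot   # REPODEST default from build.sh; actual path depends on the CI user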

View file

@ -0,0 +1,57 @@
on:
pull_request:
types: [ assigned, opened, synchronize, reopened ]
concurrency:
group: ${{ github.head_ref || github.ref_name }}
cancel-in-progress: true
jobs:
build-aarch64:
runs-on: aarch64
container:
image: alpinelinux/alpine-gitlab-ci:latest
env:
CI_PROJECT_DIR: ${{ github.workspace }}
CI_DEBUG_BUILD: ${{ runner.debug }}
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
steps:
- name: Environment setup
run: |
doas apk add nodejs git patch curl
cd /etc/apk/keys
doas curl -JO https://forge.ilot.io/api/packages/ilot/alpine/key
- name: Repo pull
uses: actions/checkout@v4
with:
fetch-depth: 500
- name: Package build
run: |
doas patch -d / -p1 -i ${{ github.workspace }}/.forgejo/patches/build.patch
build.sh
- name: Package upload
uses: forgejo/upload-artifact@v3
with:
name: package
path: packages
deploy-aarch64:
needs: [build-aarch64]
runs-on: aarch64
container:
image: alpine:latest
env:
CI_ALPINE_REPO: 'https://forge.ilot.io/api/packages/ilot/alpine'
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
GITHUB_EVENT_NUMBER: ${{ github.event.number }}
steps:
- name: Setting up environment
run: apk add nodejs curl findutils git gawk jq
- name: Repo pull
uses: actions/checkout@v4
- name: Package download
uses: forgejo/download-artifact@v3
- name: Package deployment
run: ${{ github.workspace }}/.forgejo/bin/deploy.sh

View file

@ -0,0 +1,57 @@
on:
pull_request:
types: [ assigned, opened, synchronize, reopened ]
concurrency:
group: ${{ github.head_ref || github.ref_name }}
cancel-in-progress: true
jobs:
build-x86_64:
runs-on: x86_64
container:
image: alpinelinux/alpine-gitlab-ci:latest
env:
CI_PROJECT_DIR: ${{ github.workspace }}
CI_DEBUG_BUILD: ${{ runner.debug }}
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
steps:
- name: Environment setup
run: |
doas apk add nodejs git patch curl
cd /etc/apk/keys
doas curl -JO https://forge.ilot.io/api/packages/ilot/alpine/key
- name: Repo pull
uses: actions/checkout@v4
with:
fetch-depth: 500
- name: Package build
run: |
doas patch -d / -p1 -i ${{ github.workspace }}/.forgejo/patches/build.patch
build.sh
- name: Package upload
uses: forgejo/upload-artifact@v3
with:
name: package
path: packages
deploy-x86_64:
needs: [build-x86_64]
runs-on: x86_64
container:
image: alpine:latest
env:
CI_ALPINE_REPO: 'https://forge.ilot.io/api/packages/ilot/alpine'
FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
GITHUB_EVENT_NUMBER: ${{ github.event.number }}
steps:
- name: Setting up environment
run: apk add nodejs curl findutils git gawk jq
- name: Repo pull
uses: actions/checkout@v4
- name: Package download
uses: forgejo/download-artifact@v3
- name: Package deployment
run: ${{ github.workspace }}/.forgejo/bin/deploy.sh

View file

@ -0,0 +1,21 @@
on:
pull_request:
types: [ assigned, opened, synchronize, reopened ]
jobs:
lint:
run-name: lint
runs-on: x86_64
container:
image: alpinelinux/apkbuild-lint-tools:latest
env:
CI_PROJECT_DIR: ${{ github.workspace }}
CI_DEBUG_BUILD: ${{ runner.debug }}
CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
steps:
- run: doas apk add nodejs git
- uses: actions/checkout@v4
with:
fetch-depth: 500
- run: lint

View file

@ -1,109 +0,0 @@
stages:
- verify
- build
- deploy
variables:
GIT_STRATEGY: clone
GIT_DEPTH: "500"
lint:
stage: verify
interruptible: true
script:
- |
sudo apk add shellcheck atools sudo abuild
export PATH="$PATH:$CI_PROJECT_DIR/.gitlab/bin"
lint
allow_failure: true
only:
- merge_requests
tags:
- apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-x86_64
.build:
stage: build
interruptible: true
script:
- |
sudo apk add alpine-sdk lua-aports sudo
sudo addgroup $USER abuild
export PATH="$PATH:$CI_PROJECT_DIR/.gitlab/bin"
sudo -Eu $USER build.sh
artifacts:
paths:
- packages/
- keys/
- logs/
expire_in: 7 days
when: always
only:
- merge_requests
.cross:
stage: build
interruptible: true
script:
- |
sudo apk add alpine-sdk lua-aports sudo gzip xz qemu-$CI_QEMU_TARGET_ARCH
sudo addgroup $USER abuild
export PATH="$PATH:$CI_PROJECT_DIR/.gitlab/bin"
build-rootfs.sh alpine${CI_MERGE_REQUEST_TARGET_BRANCH_NAME/v} $CI_ALPINE_TARGET_ARCH --rootfsdir $HOME/sysroot-$CI_ALPINE_TARGET_ARCH
cp /etc/apk/repositories $HOME/sysroot-$CI_ALPINE_TARGET_ARCH/etc/apk/.
sudo -Eu $USER CHOST=$CI_TARGET_ALPINE_ARCH build.sh
artifacts:
paths:
- packages/
- keys/
- logs/
expire_in: 7 days
when: always
only:
- merge_requests
build-x86_64:
extends: .build
when: always
tags:
- apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-x86_64
build-aarch64:
extends: .build
when: always
tags:
- apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-aarch64
build-ppc64le:
extends: .build
when: manual
tags:
- apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-ppc64le
build-s390x:
extends: .build
when: manual
tags:
- apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-s390x
build-armv7:
extends: .cross
when: manual
tags:
- apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-x86_64
variables:
CI_ALPINE_TARGET_ARCH: armv7
CI_QEMU_TARGET_ARCH: arm
push:
interruptible: true
stage: deploy
script:
- |
sudo apk add abuild git-lfs findutils
export PATH="$PATH:$CI_PROJECT_DIR/.gitlab/bin"
push.sh
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
when: manual
tags:
- repo

View file

@ -1,111 +0,0 @@
#!/bin/sh
set -e
arch=
builddir=
checkdepends=
depends=
depends_dev=
depends_doc=
depends_libs=
depends_openrc=
depends_static=
install=
install_if=
langdir=
ldpath=
license=
makedepends=
makedepends_build=
makedepends_host=
md5sums=
options=
patch_args=
pkgbasedir=
pkgdesc=
pkgdir=
pkgname=
pkgrel=
pkgver=
pkggroups=
pkgusers=
provides=
provider_priority=
replaces=
sha256sums=
sha512sums=
sonameprefix=
source=
srcdir=
startdir=
subpackages=
subpkgdir=
subpkgname=
triggers=
url=
# abuild.conf
CFLAGS=
CXXFLAGS=
CPPFLAGS=
LDFLAGS=
JOBS=
MAKEFLAGS=
CMAKE_CROSSOPTS=
. ./APKBUILD
: "$arch"
: "$builddir"
: "$checkdepends"
: "$depends"
: "$depends_dev"
: "$depends_doc"
: "$depends_libs"
: "$depends_openrc"
: "$depends_static"
: "$install"
: "$install_if"
: "$langdir"
: "$ldpath"
: "$license"
: "$makedepends"
: "$makedepends_build"
: "$makedepends_host"
: "$md5sums"
: "$options"
: "$patch_args"
: "$pkgbasedir"
: "$pkgdesc"
: "$pkgdir"
: "$pkgname"
: "$pkgrel"
: "$pkgver"
: "$pkggroups"
: "$pkgusers"
: "$provides"
: "$provider_priority"
: "$replaces"
: "$sha256sums"
: "$sha512sums"
: "$sonameprefix"
: "$source"
: "$srcdir"
: "$startdir"
: "$subpackages"
: "$subpkgdir"
: "$subpkgname"
: "$triggers"
: "$url"
# abuild.conf
: "$CFLAGS"
: "$CXXFLAGS"
: "$CPPFLAGS"
: "$LDFLAGS"
: "$JOBS"
: "$MAKEFLAGS"
: "$CMAKE_CROSSOPTS"

View file

@ -1,16 +0,0 @@
#!/bin/sh
shellcheck -s ash \
-e SC3043 \
-e SC3057 \
-e SC3060 \
-e SC2016 \
-e SC2086 \
-e SC2169 \
-e SC2155 \
-e SC2100 \
-e SC2209 \
-e SC2030 \
-e SC2031 \
-e SC1090 \
-xa $CI_PROJECT_DIR/.gitlab/bin/APKBUILD_SHIM

View file

@ -1,556 +0,0 @@
#!/usr/bin/env bash
# Available here: https://lab.ilot.io/dotnet/arcade/-/blob/7f6d9796cc7f594772f798358dbdd8c69b6a97af/eng/common/cross/build-rootfs.sh
# Only modification: qemu-$arch-static becomes qemu-$arch
set -e
usage()
{
echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir <directory>]"
echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86"
echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine"
echo " for alpine can be specified with version: alpineX.YY or alpineedge"
echo " for FreeBSD can be: freebsd12, freebsd13"
echo " for illumos can be: illumos"
echo " for Haiku can be: haiku."
echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD"
echo "llvmx[.y] - optional, LLVM version for LLVM related packages."
echo "--skipunmount - optional, will skip the unmount of rootfs folder."
echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
echo "--jobs N - optional, restrict to N jobs."
exit 1
}
__CodeName=xenial
__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
__BuildArch=arm
__AlpineArch=armv7
__FreeBSDArch=arm
__FreeBSDMachineArch=armv7
__IllumosArch=arm7
__QEMUArch=arm
__UbuntuArch=armhf
__UbuntuRepo="http://ports.ubuntu.com/"
__LLDB_Package="liblldb-3.9-dev"
__SkipUnmount=0
# base development support
__UbuntuPackages="build-essential"
__AlpinePackages="alpine-base"
__AlpinePackages+=" build-base"
# symlinks fixer
__UbuntuPackages+=" symlinks"
# runtime dependencies
__UbuntuPackages+=" libicu-dev"
__UbuntuPackages+=" liblttng-ust-dev"
__UbuntuPackages+=" libunwind8-dev"
__UbuntuPackages+=" libnuma-dev"
# runtime libraries' dependencies
__UbuntuPackages+=" libcurl4-openssl-dev"
__UbuntuPackages+=" libkrb5-dev"
__UbuntuPackages+=" libssl-dev"
__UbuntuPackages+=" zlib1g-dev"
__FreeBSDBase="12.3-RELEASE"
__FreeBSDPkg="1.17.0"
__FreeBSDABI="12"
__FreeBSDPackages="libunwind"
__FreeBSDPackages+=" icu"
__FreeBSDPackages+=" libinotify"
__FreeBSDPackages+=" openssl"
__FreeBSDPackages+=" krb5"
__FreeBSDPackages+=" terminfo-db"
__IllumosPackages="icu"
__IllumosPackages+=" mit-krb5"
__IllumosPackages+=" openssl"
__IllumosPackages+=" zlib"
__HaikuPackages="gmp"
__HaikuPackages+=" gmp_devel"
__HaikuPackages+=" krb5"
__HaikuPackages+=" krb5_devel"
__HaikuPackages+=" libiconv"
__HaikuPackages+=" libiconv_devel"
__HaikuPackages+=" llvm12_libunwind"
__HaikuPackages+=" llvm12_libunwind_devel"
__HaikuPackages+=" mpfr"
__HaikuPackages+=" mpfr_devel"
# ML.NET dependencies
__UbuntuPackages+=" libomp5"
__UbuntuPackages+=" libomp-dev"
__Keyring=
__UseMirror=0
__UnprocessedBuildArgs=
while :; do
if [[ "$#" -le 0 ]]; then
break
fi
lowerI="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
case $lowerI in
-\?|-h|--help)
usage
exit 1
;;
arm)
__BuildArch=arm
__UbuntuArch=armhf
__AlpineArch=armv7
__QEMUArch=arm
;;
arm64)
__BuildArch=arm64
__UbuntuArch=arm64
__AlpineArch=aarch64
__QEMUArch=aarch64
__FreeBSDArch=arm64
__FreeBSDMachineArch=aarch64
;;
armel)
__BuildArch=armel
__UbuntuArch=armel
__UbuntuRepo="http://ftp.debian.org/debian/"
__CodeName=jessie
;;
armv6)
__BuildArch=armv6
__UbuntuArch=armhf
__QEMUArch=arm
__UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/"
__CodeName=buster
__LLDB_Package="liblldb-6.0-dev"
if [[ -e "/usr/share/keyrings/raspbian-archive-keyring.gpg" ]]; then
__Keyring="--keyring /usr/share/keyrings/raspbian-archive-keyring.gpg"
fi
;;
riscv64)
__BuildArch=riscv64
__AlpineArch=riscv64
__QEMUArch=riscv64
__UbuntuArch=riscv64
__UbuntuRepo="http://deb.debian.org/debian-ports"
__UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
unset __LLDB_Package
if [[ -e "/usr/share/keyrings/debian-ports-archive-keyring.gpg" ]]; then
__Keyring="--keyring /usr/share/keyrings/debian-ports-archive-keyring.gpg --include=debian-ports-archive-keyring"
fi
;;
ppc64le)
__BuildArch=ppc64le
__AlpineArch=ppc64le
__QEMUArch=ppc64le
__UbuntuArch=ppc64el
__UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/"
__UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp5/}"
unset __LLDB_Package
;;
s390x)
__BuildArch=s390x
__AlpineArch=s390x
__QEMUArch=s390x
__UbuntuArch=s390x
__UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/"
__UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp-dev/}"
__UbuntuPackages="${__UbuntuPackages// libomp5/}"
unset __LLDB_Package
;;
x64)
__BuildArch=x64
__AlpineArch=x86_64
__QEMUArch=x86_64
__UbuntuArch=amd64
__FreeBSDArch=amd64
__FreeBSDMachineArch=amd64
__illumosArch=x86_64
__UbuntuRepo=
;;
x86)
__BuildArch=x86
__AlpineArch=i386
__QEMUArch=i386
__UbuntuArch=i386
__AlpineArch=x86
__UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
;;
lldb*)
version="${lowerI/lldb/}"
parts=(${version//./ })
# for versions > 6.0, lldb has dropped the minor version
if [[ "${parts[0]}" -gt 6 ]]; then
version="${parts[0]}"
fi
__LLDB_Package="liblldb-${version}-dev"
;;
no-lldb)
unset __LLDB_Package
;;
llvm*)
version="${lowerI/llvm/}"
parts=(${version//./ })
__LLVM_MajorVersion="${parts[0]}"
__LLVM_MinorVersion="${parts[1]}"
# for versions > 6.0, llvm has dropped the minor version
if [[ -z "$__LLVM_MinorVersion" && "$__LLVM_MajorVersion" -le 6 ]]; then
__LLVM_MinorVersion=0;
fi
;;
xenial) # Ubuntu 16.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=xenial
fi
;;
zesty) # Ubuntu 17.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=zesty
fi
;;
bionic) # Ubuntu 18.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=bionic
fi
;;
focal) # Ubuntu 20.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=focal
fi
;;
jammy) # Ubuntu 22.04
if [[ "$__CodeName" != "jessie" ]]; then
__CodeName=jammy
fi
;;
jessie) # Debian 8
__CodeName=jessie
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
stretch) # Debian 9
__CodeName=stretch
__LLDB_Package="liblldb-6.0-dev"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
buster) # Debian 10
__CodeName=buster
__LLDB_Package="liblldb-6.0-dev"
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
bullseye) # Debian 11
__CodeName=bullseye
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
sid) # Debian sid
__CodeName=sid
if [[ -z "$__UbuntuRepo" ]]; then
__UbuntuRepo="http://ftp.debian.org/debian/"
fi
;;
tizen)
__CodeName=
__UbuntuRepo=
__Tizen=tizen
;;
alpine*)
__CodeName=alpine
__UbuntuRepo=
version="${lowerI/alpine/}"
if [[ "$version" == "edge" ]]; then
__AlpineVersion=edge
else
parts=(${version//./ })
__AlpineMajorVersion="${parts[0]}"
__AlpineMinoVersion="${parts[1]}"
__AlpineVersion="$__AlpineMajorVersion.$__AlpineMinoVersion"
fi
;;
freebsd12)
__CodeName=freebsd
__SkipUnmount=1
;;
freebsd13)
__CodeName=freebsd
__FreeBSDBase="13.0-RELEASE"
__FreeBSDABI="13"
__SkipUnmount=1
;;
illumos)
__CodeName=illumos
__SkipUnmount=1
;;
haiku)
__CodeName=haiku
__BuildArch=x64
__SkipUnmount=1
;;
--skipunmount)
__SkipUnmount=1
;;
--rootfsdir|-rootfsdir)
shift
__RootfsDir="$1"
;;
--use-mirror)
__UseMirror=1
;;
--use-jobs)
shift
MAXJOBS=$1
;;
*)
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
;;
esac
shift
done
if [[ "$__BuildArch" == "armel" ]]; then
__LLDB_Package="lldb-3.5-dev"
fi
__UbuntuPackages+=" ${__LLDB_Package:-}"
if [[ -n "$__LLVM_MajorVersion" ]]; then
__UbuntuPackages+=" libclang-common-${__LLVM_MajorVersion}${__LLVM_MinorVersion:+.$__LLVM_MinorVersion}-dev"
fi
if [[ -z "$__RootfsDir" && -n "$ROOTFS_DIR" ]]; then
__RootfsDir="$ROOTFS_DIR"
fi
if [[ -z "$__RootfsDir" ]]; then
__RootfsDir="$__CrossDir/../../../.tools/rootfs/$__BuildArch"
fi
if [[ -d "$__RootfsDir" ]]; then
if [[ "$__SkipUnmount" == "0" ]]; then
umount "$__RootfsDir"/* || true
fi
rm -rf "$__RootfsDir"
fi
mkdir -p "$__RootfsDir"
__RootfsDir="$( cd "$__RootfsDir" && pwd )"
if [[ "$__CodeName" == "alpine" ]]; then
__ApkToolsVersion=2.12.11
__ApkToolsDir="$(mktemp -d)"
wget "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic//v$__ApkToolsVersion/x86_64/apk.static" -P "$__ApkToolsDir"
chmod +x "$__ApkToolsDir/apk.static"
mkdir -p "$__RootfsDir"/usr/bin
cp -v "/usr/bin/qemu-$__QEMUArch" "$__RootfsDir/usr/bin"
if [[ "$__AlpineVersion" == "edge" ]]; then
version=edge
else
version="v$__AlpineVersion"
fi
# initialize DB
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U --allow-untrusted --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add
if [[ "$__AlpineLlvmLibsLookup" == 1 ]]; then
__AlpinePackages+=" $("$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U --allow-untrusted --root "$__RootfsDir" --arch "$__AlpineArch" \
search 'llvm*-libs' | sort | tail -1 | sed 's/-[^-]*//2g')"
fi
# install all packages in one go
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-U --allow-untrusted --no-scripts --root "$__RootfsDir" --arch "$__AlpineArch" \
add $__AlpinePackages
rm -r "$__ApkToolsDir"
elif [[ "$__CodeName" == "freebsd" ]]; then
mkdir -p "$__RootfsDir"/usr/local/etc
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
wget -O - "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf
echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf
mkdir -p "$__RootfsDir"/tmp
# get and build package manager
wget -O - "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# needed for install to succeed
mkdir -p "$__RootfsDir"/host/etc
./autogen.sh && ./configure --prefix="$__RootfsDir"/host && make -j "$JOBS" && make install
rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
# install packages we need.
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
elif [[ "$__CodeName" == "illumos" ]]; then
mkdir "$__RootfsDir/tmp"
pushd "$__RootfsDir/tmp"
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
echo "Downloading sysroot."
wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
echo "Building binutils. Please wait.."
wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
mkdir build-binutils && cd build-binutils
../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir"
make -j "$JOBS" && make install && cd ..
echo "Building gcc. Please wait.."
wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
CFLAGS="-fPIC"
CXXFLAGS="-fPIC"
CXXFLAGS_FOR_TARGET="-fPIC"
CFLAGS_FOR_TARGET="-fPIC"
export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET
mkdir build-gcc && cd build-gcc
../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \
--with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \
--disable-libquadmath-support --disable-shared --enable-tls
make -j "$JOBS" && make install && cd ..
BaseUrl=https://pkgsrc.smartos.org
if [[ "$__UseMirror" == 1 ]]; then
BaseUrl=https://pkgsrc.smartos.skylime.net
fi
BaseUrl="$BaseUrl/packages/SmartOS/trunk/${__illumosArch}/All"
echo "Downloading manifest"
wget "$BaseUrl"
echo "Downloading dependencies."
read -ra array <<<"$__IllumosPackages"
for package in "${array[@]}"; do
echo "Installing '$package'"
# find last occurrence of package in listing and extract its name
package="$(sed -En '/.*href="('"$package"'-[0-9].*).tgz".*/h;$!d;g;s//\1/p' All)"
echo "Resolved name '$package'"
wget "$BaseUrl"/"$package".tgz
ar -x "$package".tgz
tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null
done
echo "Cleaning up temporary files."
popd
rm -rf "$__RootfsDir"/{tmp,+*}
mkdir -p "$__RootfsDir"/usr/include/net
mkdir -p "$__RootfsDir"/usr/include/netpacket
wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
elif [[ "$__CodeName" == "haiku" ]]; then
JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
echo "Building Haiku sysroot for x86_64"
mkdir -p "$__RootfsDir/tmp"
cd "$__RootfsDir/tmp"
git clone -b hrev56235 https://review.haiku-os.org/haiku
git clone -b btrev43195 https://review.haiku-os.org/buildtools
cd "$__RootfsDir/tmp/buildtools" && git checkout 7487388f5110021d400b9f3b88e1a7f310dc066d
# Fetch some unmerged patches
cd "$__RootfsDir/tmp/haiku"
## Add development build profile (slimmer than nightly)
git fetch origin refs/changes/64/4164/1 && git -c commit.gpgsign=false cherry-pick FETCH_HEAD
# Build jam
cd "$__RootfsDir/tmp/buildtools/jam"
make
# Configure cross tools
echo "Building cross-compiler"
mkdir -p "$__RootfsDir/generated"
cd "$__RootfsDir/generated"
"$__RootfsDir/tmp/haiku/configure" -j"$JOBS" --sysroot "$__RootfsDir" --cross-tools-source "$__RootfsDir/tmp/buildtools" --build-cross-tools x86_64
# Build Haiku packages
echo "Building Haiku"
echo 'HAIKU_BUILD_PROFILE = "development-raw" ;' > UserProfileConfig
"$__RootfsDir/tmp/buildtools/jam/jam0" -j"$JOBS" -q '<build>package' '<repository>Haiku'
BaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg"
# Download additional packages
echo "Downloading additional required packages"
read -ra array <<<"$__HaikuPackages"
for package in "${array[@]}"; do
echo "Downloading $package..."
# API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60
# The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598
hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_x86_64","versionType":"LATEST","naturalLanguageCode":"en"}' \
--header='Content-Type:application/json' "$BaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
wget -P "$__RootfsDir/generated/download" "$hpkgDownloadUrl"
done
# Setup the sysroot
echo "Setting up sysroot and extracting needed packages"
mkdir -p "$__RootfsDir/boot/system"
for file in "$__RootfsDir/generated/objects/haiku/x86_64/packaging/packages/"*.hpkg; do
"$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file"
done
for file in "$__RootfsDir/generated/download/"*.hpkg; do
"$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file"
done
# Cleaning up temporary files
echo "Cleaning up temporary files"
rm -rf "$__RootfsDir/tmp"
for name in "$__RootfsDir/generated/"*; do
if [[ "$name" =~ "cross-tools-" ]]; then
: # Keep the cross-compiler
else
rm -rf "$name"
fi
done
elif [[ -n "$__CodeName" ]]; then
qemu-debootstrap $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
cp "$__CrossDir/$__BuildArch/sources.list.$__CodeName" "$__RootfsDir/etc/apt/sources.list"
chroot "$__RootfsDir" apt-get update
chroot "$__RootfsDir" apt-get -f -y install
chroot "$__RootfsDir" apt-get -y install $__UbuntuPackages
chroot "$__RootfsDir" symlinks -cr /usr
chroot "$__RootfsDir" apt-get clean
if [[ "$__SkipUnmount" == "0" ]]; then
umount "$__RootfsDir"/* || true
fi
if [[ "$__BuildArch" == "armel" && "$__CodeName" == "jessie" ]]; then
pushd "$__RootfsDir"
patch -p1 < "$__CrossDir/$__BuildArch/armel.jessie.patch"
popd
fi
elif [[ "$__Tizen" == "tizen" ]]; then
ROOTFS_DIR="$__RootfsDir" "$__CrossDir/tizen-build-rootfs.sh" "$__BuildArch"
else
echo "Unsupported target platform."
usage;
exit 1
fi

View file

@ -1,283 +0,0 @@
#!/bin/sh
# shellcheck disable=SC3043
. $CI_PROJECT_DIR/.gitlab/bin/functions.sh
# shellcheck disable=SC3040
set -eu -o pipefail
readonly APORTSDIR=$CI_PROJECT_DIR
readonly REPOS="cross backports user testing community"
readonly ALPINE_REPOS="main community"
readonly ARCH=$(apk --print-arch)
# gitlab variables
readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
: "${REPODEST:=$HOME/packages}"
: "${MIRROR:=https://lab.ilot.io/ayakael/repo-apk/-/raw}"
: "${ALPINE_MIRROR:=http://dl-cdn.alpinelinux.org/alpine}"
: "${MAX_ARTIFACT_SIZE:=300000000}" #300M
: "${CI_DEBUG_BUILD:=}"
: "${CI_ALPINE_BUILD_OFFSET:=0}"
: "${CI_ALPINE_BUILD_LIMIT:=9999}"
: "${CI_ALPINE_TARGET_ARCH:=$(uname -m)}"
msg() {
local color=${2:-green}
case "$color" in
red) color="31";;
green) color="32";;
yellow) color="33";;
blue) color="34";;
*) color="32";;
esac
printf "\033[1;%sm>>>\033[1;0m %s\n" "$color" "$1" | xargs >&2
}
verbose() {
echo "> " "$@"
# shellcheck disable=SC2068
$@
}
debugging() {
[ -n "$CI_DEBUG_BUILD" ]
}
debug() {
if debugging; then
verbose "$@"
fi
}
die() {
msg "$1" red
exit 1
}
capture_stderr() {
"$@" 2>&1
}
report() {
report=$1
reportsdir=$APORTSDIR/logs/
mkdir -p "$reportsdir"
tee -a "$reportsdir/$report.log"
}
get_release() {
case $BASEBRANCH in
v*) echo "${BASEBRANCH%-*}";;
edge) echo edge;;
*) die "Branch \"$BASEBRANCH\" not supported!"
esac
}
build_aport() {
local repo="$1" aport="$2"
cd "$APORTSDIR/$repo/$aport"
export CHOST=$CI_ALPINE_TARGET_ARCH
if abuild -r 2>&1 | report "build-$aport"; then
checkapk | report "checkapk-$aport" || true
aport_ok="$aport_ok $repo/$aport"
else
aport_ng="$aport_ng $repo/$aport"
fi
}
check_aport() {
local repo="$1" aport="$2"
cd "$APORTSDIR/$repo/$aport"
export CHOST=$CI_ALPINE_TARGET_ARCH
# TODO: this enables crossbuild only on user, this should be cleaner
if [ "$repo" != "user" ] && [ "$repo" != "backports" ] && [ "$CI_ALPINE_TARGET_ARCH" != "$ARCH" ]; then
aport_na="$aport_na $repo/$aport"
return 1
fi
if ! abuild check_arch 2>/dev/null; then
aport_na="$aport_na $repo/$aport"
return 1
fi
}
set_repositories_for() {
local target_repo="$1" repos='' repo=''
local release
release=$(get_release)
for repo in $REPOS; do
repos="$repos $MIRROR/$release/$repo $REPODEST/$repo"
[ "$repo" = "$target_repo" ] && break
done
sudo sh -c "printf '%s\n' $repos >> /etc/apk/repositories"
sudo apk update || true
if [ "$CI_ALPINE_TARGET_ARCH" != "$ARCH" ]; then
sudo sh -c "printf '%s\n' $repos >> $HOME/sysroot-$CI_ALPINE_TARGET_ARCH/etc/apk/repositories"
sudo cp -R /etc/apk/keys/* $HOME/sysroot-$CI_ALPINE_TARGET_ARCH/etc/apk/keys/.
sudo apk --root=$HOME/sysroot-$CI_ALPINE_TARGET_ARCH update || true
fi
}
apply_offset_limit() {
start=$1
limit=$2
end=$((start+limit))
sed -n "$((start+1)),${end}p"
}
setup_system() {
local repos='' repo=''
local release
release=$(get_release)
for repo in $ALPINE_REPOS; do
[ "$release" != "edge" ] && [ "$repo" == "testing" ] && continue
repos="$repos $ALPINE_MIRROR/$release/$repo"
done
repos="$repos $MIRROR/$release/cross"
sudo sh -c "printf '%s\n' $repos > /etc/apk/repositories"
sudo apk -U upgrade -a || sudo apk fix || die "Failed to up/downgrade system"
if [ "$CI_ALPINE_TARGET_ARCH" != "$ARCH" ]; then
sudo apk add gcc-$CI_ALPINE_TARGET_ARCH
fi
gitlab_key_to_rsa $ABUILD_KEY rsa-private $HOME/.abuild/$ABUILD_KEY_NAME.rsa
gitlab_key_to_rsa $ABUILD_KEY_PUB rsa-public $HOME/.abuild/$ABUILD_KEY_NAME.rsa.pub
chmod 700 $HOME/.abuild/$ABUILD_KEY_NAME.rsa
echo "PACKAGER_PRIVKEY=$HOME/.abuild/$ABUILD_KEY_NAME.rsa" >> $HOME/.abuild/abuild.conf
sudo cp $HOME/.abuild/$ABUILD_KEY_NAME.rsa.pub /etc/apk/keys/$ABUILD_KEY_NAME.rsa.pub
# patch abuild for crosscompiling
sudo patch -p1 -d / -i $CI_PROJECT_DIR/.gitlab/patches/abuild-cross.patch
sudo sed -i -E 's/export JOBS=[0-9]+$/export JOBS=$(nproc)/' /etc/abuild.conf
( . /etc/abuild.conf && echo "Building with $JOBS jobs" )
mkdir -p "$REPODEST"
git config --global init.defaultBranch master
}
sysinfo() {
printf ">>> Host system information (arch: %s, release: %s) <<<\n" "$ARCH" "$(get_release)"
printf "- Number of Cores: %s\n" "$(nproc)"
printf "- Memory: %s Gb\n" "$(awk '/^MemTotal/ {print ($2/1024/1024)}' /proc/meminfo)"
printf "- Free space: %s\n" "$(df -hP / | awk '/\/$/ {print $4}')"
}
copy_artifacts() {
cd "$APORTSDIR"
packages_size="$(du -sk "$REPODEST" | awk '{print $1 * 1024}')"
if [ -z "$packages_size" ]; then
return
fi
echo "Artifact size: $packages_size bytes"
mkdir -p keys/ packages/
if [ "$packages_size" -lt $MAX_ARTIFACT_SIZE ]; then
msg "Copying packages for artifact upload"
cp -ar "$REPODEST"/* packages/ 2>/dev/null
cp ~/.abuild/*.rsa.pub keys/
else
msg "Artifact size $packages_size larger than max ($MAX_ARTIFACT_SIZE), skipping uploading them" yellow
fi
}
section_start setup "Setting up the system" collapse
if debugging; then
set -x
fi
aport_ok=
aport_na=
aport_ng=
failed=
sysinfo || true
setup_system || die "Failed to setup system"
# git no longer allows to execute in repositories owned by different users
sudo chown -R $USER: .
fetch_flags="-qn"
debugging && fetch_flags="-v"
git fetch $fetch_flags "$CI_MERGE_REQUEST_PROJECT_URL" \
"+refs/heads/$BASEBRANCH:refs/heads/$BASEBRANCH"
if debugging; then
merge_base=$(git merge-base "$BASEBRANCH" HEAD) || echo "Could not determine merge-base"
echo "Merge base: $merge_base"
git --version
git config -l
[ -n "$merge_base" ] && git tag -f merge-base "$merge_base"
git --no-pager log -200 --oneline --graph --decorate --all
fi
section_end setup
build_start=$CI_ALPINE_BUILD_OFFSET
build_limit=$CI_ALPINE_BUILD_LIMIT
for repo in $(changed_repos); do
mkdir -p "$APORTSDIR"/logs "$APORTSDIR"/packages "$APORTSDIR"/keys
set_repositories_for "$repo"
built_aports=0
changed_aports_in_repo=$(changed_aports "$repo")
changed_aports_in_repo_count=$(echo "$changed_aports_in_repo" | wc -l)
changed_aports_to_build=$(echo "$changed_aports_in_repo" | apply_offset_limit "$build_start" "$build_limit")
msg "Changed aports in $repo:"
# shellcheck disable=SC2086 # Splitting is expected here
printf " - %s\n" $changed_aports_to_build
for pkgname in $changed_aports_to_build; do
section_start "build_$pkgname" "Building package $pkgname"
built_aports=$((built_aports+1))
if check_aport "$repo" "$pkgname"; then
build_aport "$repo" "$pkgname"
fi
section_end "build_$pkgname"
done
build_start=$((build_start-(changed_aports_in_repo_count-built_aports)))
build_limit=$((build_limit-built_aports))
if [ $build_limit -le 0 ]; then
msg "Limit reached, breaking"
break
fi
done
section_start artifacts "Handling artifacts" collapse
copy_artifacts || true
section_end artifacts
section_start summary "Build summary"
echo "### Build summary ###"
for ok in $aport_ok; do
msg "$ok: build succesfully"
done
for na in $aport_na; do
msg "$na: disabled for $CI_ALPINE_TARGET_ARCH" yellow
done
for ng in $aport_ng; do
msg "$ng: build failed" red
failed=true
done
section_end summary
if [ "$failed" = true ]; then
exit 1
elif [ -z "$aport_ok" ]; then
msg "No packages found to be built." yellow
fi

View file

@ -1,20 +0,0 @@
#!/bin/sh
if [ $# -lt 1 ]; then
echo "Usage: $0 <basebranch>"
exit 1
fi
if ! git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
echo "Fatal: not inside a git repository"
exit 2
fi
basebranch=$1
if ! git rev-parse --verify --quiet $basebranch >/dev/null; then
# The base branch does not exist, probably due to a shallow clone
git fetch -v $CI_MERGE_REQUEST_PROJECT_URL.git +refs/heads/$basebranch:refs/heads/$basebranch
fi
git --no-pager diff --diff-filter=ACMR --name-only $basebranch...HEAD -- "*/APKBUILD" | xargs -r -n1 dirname

View file

@ -1,74 +0,0 @@
# shellcheck disable=SC3043
:
# shellcheck disable=SC3040
set -eu -o pipefail
changed_repos() {
: "${APORTSDIR?APORTSDIR missing}"
: "${BASEBRANCH?BASEBRANCH missing}"
cd "$APORTSDIR"
for repo in $REPOS; do
git diff --diff-filter=ACMR --exit-code "$BASEBRANCH"...HEAD -- "$repo" >/dev/null \
|| echo "$repo"
done
}
changed_aports() {
: "${APORTSDIR?APORTSDIR missing}"
: "${BASEBRANCH?BASEBRANCH missing}"
cd "$APORTSDIR"
local repo="$1"
local aports
aports=$(git diff --name-only --diff-filter=ACMR --relative="$repo" \
"$BASEBRANCH"...HEAD -- "*/APKBUILD" | xargs -rn1 dirname)
# shellcheck disable=2086
ap builddirs -d "$APORTSDIR/$repo" $aports 2>/dev/null | xargs -rn1 basename
}
section_start() {
name=${1?arg 1 name missing}
header=${2?arg 2 header missing}
collapsed=$2
timestamp=$(date +%s)
options=""
case $collapsed in
yes|on|collapsed|true) options="[collapsed=true]";;
esac
printf "\e[0Ksection_start:%d:%s%s\r\e[0K%s\n" "$timestamp" "$name" "$options" "$header"
}
section_end() {
name=$1
timestamp=$(date +%s)
printf "\e[0Ksection_end:%d:%s\r\e[0K" "$timestamp" "$name"
}
gitlab_key_to_rsa() {
KEY=$1
TYPE=$2
TGT=$3
TGT_DIR=${TGT%/*}
if [ "$TGT" == "$TGT_DIR" ]; then
TGT_DIR="./"
fi
if [ ! -d "$TGT_DIR" ]; then
mkdir -p "$TGT_DIR"
fi
case $TYPE in
rsa-public) local type="PUBLIC";;
rsa-private) local type="RSA PRIVATE";;
esac
echo "-----BEGIN $type KEY-----" > "$TGT"
echo $1 | sed 's/.\{64\}/&\
/g' >> "$TGT"
echo "-----END $type KEY-----" >> "$TGT"
}

View file

@ -1,96 +0,0 @@
#!/bin/sh
BLUE="\e[34m"
MAGENTA="\e[35m"
RESET="\e[0m"
readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
verbose() {
echo "> " "$@"
# shellcheck disable=SC2068
$@
}
debugging() {
[ -n "$CI_DEBUG_BUILD" ]
}
debug() {
if debugging; then
verbose "$@"
fi
}
# git no longer allows to execute in repositories owned by different users
sudo chown -R gitlab-runner: .
fetch_flags="-qn"
debugging && fetch_flags="-v"
git fetch $fetch_flags "$CI_MERGE_REQUEST_PROJECT_URL" \
"+refs/heads/$BASEBRANCH:refs/heads/$BASEBRANCH"
if debugging; then
merge_base=$(git merge-base "$BASEBRANCH" HEAD)
echo "$merge_base"
git --version
git config -l
git tag merge-base "$merge_base" || { echo "Could not determine merge-base"; exit 50; }
git log --oneline --graph --decorate --all
fi
has_problems=0
for PKG in $(changed-aports "$BASEBRANCH"); do
printf "$BLUE==>$RESET Linting $PKG\n"
(
cd "$PKG"
repo=$(basename $(dirname $PKG));
if [ "$repo" == "backports" ]; then
echo "Skipping $PKG as backports (we don't care)"
continue
fi
printf "\n\n"
printf "$BLUE"
printf '======================================================\n'
printf " parse APKBUILD:\n"
printf '======================================================'
printf "$RESET\n\n"
( . ./APKBUILD ) || has_problems=1
printf "\n\n"
printf "$BLUE"
printf '======================================================\n'
printf " abuild sanitycheck:\n"
printf '======================================================'
printf "$RESET\n\n"
abuild sanitycheck || has_problems=1
printf "\n\n"
printf "$BLUE"
printf '======================================================\n'
printf " apkbuild-shellcheck:\n"
printf '======================================================'
printf "$RESET\n"
apkbuild-shellcheck || has_problems=1
printf "\n\n"
printf "$BLUE"
printf '======================================================\n'
printf " apkbuild-lint:\n"
printf '======================================================'
printf "$RESET\n\n"
apkbuild-lint APKBUILD || has_problems=1
return $has_problems
) || has_problems=1
echo
done
exit $has_problems

View file

@ -1,56 +0,0 @@
#!/bin/sh
# shellcheck disable=SC3043
. $CI_PROJECT_DIR/.gitlab/bin/functions.sh
# shellcheck disable=SC3040
set -eu -o pipefail
readonly APORTSDIR=$CI_PROJECT_DIR
readonly REPOS="backports user"
readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
export GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
gitlab_key_to_rsa $ABUILD_KEY rsa-private $HOME/.abuild/$ABUILD_KEY_NAME.rsa
gitlab_key_to_rsa $ABUILD_KEY_PUB rsa-public $HOME/.abuild/$ABUILD_KEY_NAME.rsa.pub
gitlab_key_to_rsa $SSH_KEY rsa-private $HOME/.ssh/id_rsa
chmod 700 "$HOME"/.ssh/id_rsa
chmod 700 "$HOME"/.abuild/$ABUILD_KEY_NAME.rsa
echo "PACKAGER_PRIVKEY=$HOME/.abuild/$ABUILD_KEY_NAME.rsa" > $HOME/.abuild/abuild.conf
echo "REPODEST=$HOME/repo-apk" >> $HOME/.abuild/abuild.conf
sudo cp $HOME/.abuild/$ABUILD_KEY_NAME.rsa.pub /etc/apk/keys/.
if [ -d $HOME/repo-apk ]; then
git -C $HOME/repo-apk fetch
git -C $HOME/repo-apk checkout $BASEBRANCH
git -C $HOME/repo-apk pull --rebase
else
git clone git@lab.ilot.io:ayakael/repo-apk -b $BASEBRANCH $HOME/repo-apk
fi
for i in $(find packages -type f -name "*.apk"); do
install -vDm644 $i ${i/packages/$HOME\/repo-apk}
done
fetch_flags="-qn"
git fetch $fetch_flags "$CI_MERGE_REQUEST_PROJECT_URL" \
"+refs/heads/$BASEBRANCH:refs/heads/$BASEBRANCH"
for repo in $(changed_repos); do
rm $HOME/repo-apk/$repo/*/APKINDEX.tar.gz | true
mkdir -p $repo/DUMMY
echo "pkgname=DUMMY" > $repo/DUMMY/APKBUILD
cd $repo/DUMMY
for i in $(find $HOME/repo-apk/$repo -maxdepth 1 -mindepth 1 -printf '%P '); do
CHOST=$i abuild index
done
cd "$CI_PROJECT_DIR"
rm -R $repo/DUMMY
done
git -C $HOME/repo-apk add .
git -C $HOME/repo-apk commit -m "Update from $CI_MERGE_REQUEST_IID - $CI_MERGE_REQUEST_TITLE"
git -C $HOME/repo-apk push

View file

@ -1,17 +0,0 @@
diff --git a/usr/bin/abuild.orig b/usr/bin/abuild
index 71e0681..d4ae3dd 100755
--- a/usr/bin/abuild.orig
+++ b/usr/bin/abuild
@@ -2231,7 +2231,11 @@ calcdeps() {
list_has $i $builddeps && continue
subpackages_has ${i%%[<>=]*} || builddeps="$builddeps $i"
done
- hostdeps="$EXTRADEPENDS_TARGET"
+ for i in $EXTRADEPENDS_HOST $EXTRADEPENDS_TARGET $depends $makedepends; do
+ [ "$pkgname" = "${i%%[<>=]*}" ] && continue
+ list_has $i $hostdeps && continue
+ subpackages_has ${i%%[<>=]*} || hostdeps="$hostdeps $i"
+ done
fi
}

View file

@ -1,19 +1,18 @@
-# user-aports
+# iports
-Upstream: https://lab.ilot.io/ayakael/user-aports
+Upstream: https://codeberg.org/ilot/iports
## Description
This repository contains aports that are not yet merged in the official Alpine
Linux repository or don't adhere to Alpine policies. Packages are automatically
-built using GitLab CI on my own GitLab instance. Once built, they are deployed
-to a git-lfs repository, making them available to apk.
+built using CI. Once built, they are deployed to a git-lfs repository, making
+them available to apk.
Branches are matched to Alpine releases.
## Repositories
-You can browse all the repositories at https://lab.ilot.io/ayakael/repo-apk.
+You can browse all the repositories at https://codeberg.org/ilot/iports
Affixed to each repository description is the appropriate link for use in
`/etc/apk/repositories`.
@ -24,13 +23,13 @@ https://lab.ilot.io/ayakael/repo-apk/-/raw/edge/backports
Aports from the official Alpine repositories backported from edge.
-#### User
+#### Ilot
```
https://lab.ilot.io/ayakael/repo-apk/-/raw/edge/user
```
Aports that have yet to be (or may never be) upstreamed to the official
-aports.
+aports and that are used by ilot coop.
## How to use
@ -52,12 +51,13 @@ they will work for you.
## Contribution & bug reports
If you wish to contribute to this aports collection, or wish to report a bug,
-you can do so on Alpine's GitLab instance here:
-https://gitlab.alpinelinux.org/ayakael/user-aports
+you can do so on Codeberg here:
+https://codeberg.org/ilot/iports/issues
-For packages that are in testing/community, bug reports and merge requests
+For packages that are in backports, bug reports and merge requests
should be done on Alpine's aports repo instance:
https://gitlab.alpinelinux.org/alpine/aports
## License
This readme, abuilds and support scripts are licensed under MIT License.
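Usage illustration (editor's note, not README text): combining the registry URL and key endpoint used by the CI workflows in this change, enabling the ilot repository on an Alpine host would look roughly like the following; the edge segment is an assumption and should match the branch you track:
cd /etc/apk/keys
doas curl -JO https://forge.ilot.io/api/packages/ilot/alpine/key
echo "https://forge.ilot.io/api/packages/ilot/alpine/edge/ilot" | doas tee -a /etc/apk/repositories
doas apk update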

View file

@ -2,7 +2,7 @@
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=authentik
pkgver=2024.4.3
-pkgrel=1
+pkgrel=2
pkgdesc="An open-source Identity Provider focused on flexibility and versatility"
url="https://github.com/goauthentik/authentik"
# s390x: missing py3-celery py3-flower and py3-kombu

View file

@ -2,7 +2,7 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=freescout
pkgver=1.8.139
-pkgrel=0
+pkgrel=1
pkgdesc="Free self-hosted help desk & shared mailbox"
arch="noarch"
url="freescout.net"

View file

@ -2,7 +2,7 @@
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=listmonk
pkgver=3.0.0
-pkgrel=0
+pkgrel=1
pkgdesc='Self-hosted newsletter and mailing list manager with a modern dashboard'
arch="all"
url=https://listmonk.app

View file

@ -4,7 +4,7 @@
pkgname=loomio
pkgver=2.21.4
_gittag=v$pkgver
-pkgrel=0
+pkgrel=1
pkgdesc="A collaborative decision making tool"
url="https://github.com/loomio/loomio"
arch="x86_64"

View file

@ -2,7 +2,7 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=peertube
pkgver=6.0.2
-pkgrel=0
+pkgrel=1
pkgdesc="ActivityPub-federated video streaming platform using P2P directly in your web browser"
arch="x86_64"
url="https://joinpeertube.org/"

View file

@ -3,7 +3,7 @@
pkgname=php82-pecl-inotify
_extname=inotify
pkgver=3.0.0
-pkgrel=0
+pkgrel=1
pkgdesc="Inotify bindings for PHP 8.3"
url="https://pecl.php.net/package/inotify"
arch="all"

View file

@ -3,7 +3,7 @@
pkgname=php83-pecl-inotify
_extname=inotify
pkgver=3.0.0
-pkgrel=0
+pkgrel=1
pkgdesc="Inotify bindings for PHP 8.3"
url="https://pecl.php.net/package/inotify"
arch="all"

View file

@ -0,0 +1,59 @@
# Contributor: Leonardo Arena <rnalrd@alpinelinux.org>
# Contributor: Justin Berthault <justin.berthault@zaclys.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-django-rest-framework
_pkgname=django-rest-framework
pkgver=3.14.0
pkgrel=1
pkgdesc="Web APIs for Django"
url="https://github.com/encode/django-rest-framework"
arch="noarch"
license="Custom"
depends="
py3-django
py3-tz
"
makedepends="
py3-setuptools
py3-gpep517
py3-wheel
"
checkdepends="
py3-pytest-django
py3-pytest-cov
py3-core-api
py3-jinja2
py3-uritemplate
py3-django-guardian
py3-psycopg2
py3-markdown
py3-yaml
py3-inflection
"
subpackages="$pkgname-pyc"
source="$pkgname-$pkgver.tar.gz::https://github.com/encode/$_pkgname/archive/$pkgver.tar.gz"
options="!check" # Failing tests
builddir="$srcdir"/$_pkgname-$pkgver
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer "$builddir"/.dist/*.whl
# test_urlpatterns: AssertionError: assert [<URLPattern ''>] is not [<URLPattern ''>]
# test_markdown: rather hard to decipher assertion error
.testenv/bin/python3 -m pytest -v -k 'not test_urlpatterns and not test_markdown'
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
c1012c656b427e0318b2056e2f984ddc75a5b4e85f375c76fba165ad06e285848eee1bc6dc76c097daec57d780efb2551110199d62ce636a03951aec13ab4013 py3-django-rest-framework-3.14.0.tar.gz
"

File diff suppressed because it is too large

View file

@ -0,0 +1,43 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-django-tenants
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=django-tenants
pkgver=3.6.1
pkgrel=1
pkgdesc="Tenant support for Django using PostgreSQL schemas."
url="https://pypi.python.org/project/django-tenants"
arch="noarch"
license="KIT"
depends="py3-django py3-psycopg py3-gunicorn py3-coverage"
checkdepends="python3-dev py3-pytest"
makedepends="py3-setuptools py3-gpep517 py3-wheel"
source="
$pkgname-$pkgver.tar.gz::https://codeload.github.com/django-tenants/django-tenants/tar.gz/refs/tags/v$pkgver
997_update-from-pgclone-schema.patch
"
builddir="$srcdir/$_pkgreal-$pkgver"
options="!check" # Requires setting up test database
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
DJANGO_SETTINGS_MODULE=tests.settings .testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
b18afce81ccc89e49fcc4ebe85d90be602415ca898c1660a4e71e2bef6a3ed2e8c724e94b61d8c6f48f3fb19eb2a87d6a6f5bbf449b3e2f661f87e4b5638eafb py3-django-tenants-3.6.1.tar.gz
f2424bb188db2e3c7d13c15e5bdf0959c6f794e68dbc677c8b876d4faa321f78aded5565539f1bfd97583c6df0fcc19ec05abe203b08407e4446dd7194756825 997_update-from-pgclone-schema.patch
"

View file

@ -0,0 +1,38 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-scim2-filter-parser
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=scim2-filter-parser
pkgver=0.5.0
pkgrel=1
pkgdesc="A customizable parser/transpiler for SCIM2.0 filters"
url="https://pypi.python.org/project/scim2-filter-parser"
arch="noarch"
license="MIT"
depends="py3-django py3-sly"
checkdepends="py3-pytest"
makedepends="py3-setuptools py3-gpep517 py3-wheel poetry"
source="$pkgname-$pkgver.tar.gz::https://github.com/15five/scim2-filter-parser/archive/refs/tags/$pkgver.tar.gz"
builddir="$srcdir/$_pkgreal-$pkgver"
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
5347852af6b82a764a32bc491a7e0f05f06b4f4d93dfa375668b5ca1a15ee58f488702536e350100fe5c96a5c94c492ea8cbd0e1952c5920d5a10e1453357f8c py3-scim2-filter-parser-0.5.0.tar.gz
"

View file

@ -0,0 +1,41 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-tenant-schemas-celery
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=tenant-schemas-celery
pkgver=2.2.0
pkgrel=1
pkgdesc="Celery integration for django-tenant-schemas and django-tenants"
url="https://pypi.python.org/project/tenant-schemas-celery"
arch="noarch"
license="MIT"
depends="py3-django-tenants py3-celery"
checkdepends="python3-dev py3-pytest"
makedepends="py3-setuptools py3-gpep517 py3-wheel"
source="
$pkgname-$pkgver.tar.gz::https://codeload.github.com/maciej-gol/tenant-schemas-celery/tar.gz/refs/tags/$pkgver
"
options="!check" # Test suite wants docker
builddir="$srcdir/$_pkgreal-$pkgver"
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
DJANGO_SETTINGS_MODULE=tests.settings .testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
dad71011306936dc84d966797b113008780750e9e973513092bec892be0d1468e0a0e7e8e2fcca9765309a27767e1c72bdaad7c8aca16353ae1eef783c239148 py3-tenant-schemas-celery-2.2.0.tar.gz
"

49
ilot/uptime-kuma/APKBUILD Normal file
View file

@ -0,0 +1,49 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=uptime-kuma
pkgver=1.23.13
pkgrel=1
pkgdesc='A fancy self-hosted monitoring tool'
arch="all"
url="https://github.com/louislam/uptime-kuma"
license="MIT"
depends="nodejs"
makedepends="npm"
source="
uptime-kuma-$pkgver.tar.gz::https://github.com/louislam/uptime-kuma/archive/refs/tags/$pkgver.tar.gz
uptime-kuma.openrc
uptime-kuma.conf
"
subpackages="$pkgname-doc $pkgname-openrc"
install="$pkgname.pre-install"
build() {
npm ci
npm run build
rm -Rf "$builddir"/node_modules
npm ci --omit=dev
}
package() {
install -dm 755 \
"$pkgdir"/usr/share/webapps \
"$pkgdir"/usr/share/doc \
"$pkgdir"/usr/share/licenses/uptime-kuma \
"$pkgdir"/etc/init.d \
"$pkgdir"/etc/conf.d
# install
cp -a "$builddir" "$pkgdir/usr/share/webapps/uptime-kuma"
# openrc
install -Dm755 "$srcdir"/uptime-kuma.openrc "$pkgdir"/etc/init.d/uptime-kuma
install -Dm755 "$srcdir"/uptime-kuma.conf "$pkgdir"/etc/conf.d/uptime-kuma
# docs and licenses
mv "$pkgdir"/usr/share/webapps/uptime-kuma/LICENSE "$pkgdir"/usr/share/licenses/uptime-kuma/.
}
sha512sums="
9045cdc69d46ce34011f7866844a8d1866eee21850be6eede3226e77b9c0d3ecc0190481671f04f25da40345b29cc2d13de07bcc27e7baeff7901b4bd9c8b93f uptime-kuma-1.23.13.tar.gz
0ceddb98a6f318029b8bd8b5a49b55c883e77a5f8fffe2b9b271c9abf0ac52dc7a6ea4dbb4a881124a7857f1e43040f18755c1c2a034479e6a94d2b65a73d847 uptime-kuma.openrc
1dbae536b23e3624e139155abbff383bba3209ff2219983da2616b4376b1a5041df812d1e5164716fc6e967a8446d94baae3b96ee575d400813cc6fdc2cc274e uptime-kuma.conf
"

View file

@ -0,0 +1,47 @@
# uptime-kuma config
# for more info
# see https://github.com/louislam/uptime-kuma/wiki/Environment-Variables
# Set the directory where the data should be stored (could be relative)
# DATA_DIR=/var/lib/uptime-kuma
# Host to bind to, could be an ip.
# UPTIME_KUMA_HOST=::
# Port to listen to
# UPTIME_KUMA_PORT=3001
# Path to SSL key
# UPTIME_KUMA_SSL_KEY=
# Path to SSL certificate
# UPTIME_KUMA_SSL_CERT=
# SSL Key Passphrase
# UPTIME_KUMA_SSL_KEY_PASSPHRASE=
# Cloudflare Tunnel Token
# UPTIME_KUMA_CLOUDFLARED_TOKEN=
# By default, Uptime Kuma is not allowed in iframe if the domain name is not
# the same as the parent. It protects your Uptime Kuma to be a phishing
# website. If you don't need this protection, you can set it to true
# UPTIME_KUMA_DISABLE_FRAME_SAMEORIGIN=false
# By default, Uptime Kuma is verifying that the websockets ORIGIN-Header
# matches your servers hostname. If you don't need this protection, you can
# set it to bypass. See GHSA-mj22-23ff-2hrr for further context.
# UPTIME_KUMA_WS_ORIGIN_CHECK=cors-like
# Allow to specify any executables as Chromium
# UPTIME_KUMA_ALLOW_ALL_CHROME_EXEC=0
# Add your self-signed ca certs.
# NODE_EXTRA_CA_CERTS=
# Ignore all TLS errors
# NODE_TLS_REJECT_UNAUTHORIZED=0
# Set it to --insecure-http-parser, if you encountered error Invalid header
# value char when your website using WAF
# NODE_OPTIONS=
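A minimal worked example (values are illustrative, taken from the commented defaults above, and not shipped enabled by the aport): a host keeping data in the default directory and listening on all interfaces would only need to uncomment
DATA_DIR=/var/lib/uptime-kuma
UPTIME_KUMA_HOST=::
UPTIME_KUMA_PORT=3001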

View file

@ -0,0 +1,48 @@
#!/sbin/openrc-run
description="Uptime Kuma self-hosted monitoring tool"
# Change $directory to path to uptime-kuma
directory=${directory:-/usr/share/webapps/uptime-kuma}
pidfile=${pidfile:-/run/$RC_SVCNAME.pid}
DATA_DIR=${DATA_DIR:-/var/lib/uptime-kuma}
log_dir="/var/log/$RC_SVCNAME"
logfile=${logfile:-$log_dir/$RC_SVCNAME.log}
output_log="${output_log:-$logfile}"
error_log="${error_log:-$logfile}"
command=${command:-/usr/bin/node}
command_args="$directory/server/server.js"
command_user=${command_user:-uptime-kuma:uptime-kuma}
command_background=true
depend() {
need net
}
start_pre() {
checkpath --owner=$command_user --directory $log_dir \
$DATA_DIR \
$DATA_DIR/upload
checkpath --owner=$command_user --file $logfile \
$DATA_DIR/error.log
[ ! -e $DATA_DIR/kuma.db ] &&
cp $directory/db/kuma.db $DATA_DIR
checkpath --owner=$command_user --mode 600 --file $DATA_DIR/kuma.db*
cd $directory
export DATA_DIR UPTIME_KUMA_HOST UPTIME_KUMA_PORT UPTIME_KUMA_SSL_KEY \
UPTIME_KUMA_SSL_CERT UPTIME_KUMA_SSL_KEY_PASSPHRASE \
UPTIME_KUMA_CLOUDFLARED_TOKEN UPTIME_KUMA_DISABLE_FRAME_SAMEORIGIN \
UPTIME_KUMA_WS_ORIGIN_CHECK UPTIME_KUMA_ALLOW_ALL_CHROME_EXEC \
NODE_EXTRA_CA_CERTS NODE_TLS_REJECT_UNAUTHORIZED NODE_OPTIONS
}
start_post() {
# Wait for the server to be started
sleep 10
}

View file

@ -0,0 +1,25 @@
#!/bin/sh
DATADIR='/var/lib/uptime-kuma'
if ! getent group uptime-kuma 1>/dev/null; then
echo '* Creating group uptime-kuma' 1>&2
addgroup -S uptime-kuma
fi
if ! id uptime-kuma 2>/dev/null 1>&2; then
echo '* Creating user uptime-kuma' 1>&2
adduser -DHS -G uptime-kuma -h "$DATADIR" -s /bin/sh \
-g "added by apk for uptime-kuma" uptime-kuma
passwd -u uptime-kuma 1>/dev/null # unlock
fi
if ! id -Gn uptime-kuma | grep -Fq www-data; then
echo '* Adding user uptime-kuma to group www-data' 1>&2
addgroup uptime-kuma www-data
fi
exit 0

59
ilot/wikijs/APKBUILD Normal file
View file

@ -0,0 +1,59 @@
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=wikijs
pkgver=2.5.303
pkgrel=1
pkgdesc="Wiki.js | A modern, lightweight and powerful wiki app built on Node.js"
license="AGPL-3.0"
arch="!armv7 x86_64"
options="!check" # No test suite
depends="
libcap-setcap
nodejs>=10.12.0
postgresql
python3
"
makedepends="
yarn
npm
"
url="https://github.com/Requarks/wiki"
subpackages="$pkgname-openrc"
install="$pkgname.post-install $pkgname.pre-install"
builddir="$srcdir"/wiki-$pkgver
pkgusers="wikijs"
pkggroups="wikijs"
source="
$pkgname-$pkgver.tar.gz::https://github.com/requarks/wiki/archive/refs/tags/v$pkgver.tar.gz
wikijs.initd
config.sample.yml.patch
"
prepare() {
default_prepare
sed -i "s|\"version.*|\"version\": \"$pkgver\",|" "$builddir"/package.json
sed -i 's|"dev": true.*|"dev": "false",|' "$builddir"/package.json
}
build() {
yarn --frozen-lockfile --non-interactive
yarn build
rm -rf node_modules
yarn --production --frozen-lockfile --non-interactive
}
package() {
install -Dm755 "$srcdir"/wikijs.initd "$pkgdir"/etc/init.d/wikijs
install -Dm644 -o 5494 -g 5494 "$builddir"/config.sample.yml "$pkgdir"/etc/wikijs/config.yml
install -Dm644 "$builddir"/package.json -t "$pkgdir"/usr/lib/bundles/wikijs
cp -aR "$builddir"/assets "$builddir"/server "$builddir"/node_modules "$pkgdir"/usr/lib/bundles/wikijs
mkdir -p "$pkgdir"/var/lib/wikijs
chown 5494:5494 "$pkgdir"/var/lib/wikijs
}
sha512sums="
a463d79ad0d8ff15dbe568b839094d697c6de0b2e991b77a4944e2a82f9789de6840e504a4673e4e0900d61596e880ca276008de86dac4f05f5823dc0427d2fc wikijs-2.5.303.tar.gz
355131ee5617348b82681cb8543c784eea59689990a268ecd3b77d44fe9abcca9c86fb8b047f0a8faeba079c650faa7790c5dd65418d313cd7561f38bb590c03 wikijs.initd
07b536c20e370d2a926038165f0e953283259c213a80a8648419565f5359ab05f528ac310e81606914013da212270df6feddb22e514cbcb2464c8274c956e4af config.sample.yml.patch
"

View file

@ -0,0 +1,13 @@
diff --git a/config.sample.yml.orig b/config.sample.yml
index 47edd8d..458472a 100644
--- a/config.sample.yml.orig
+++ b/config.sample.yml
@@ -136,7 +136,7 @@ ha: false
# Data Path
# ---------------------------------------------------------------------
# Writeable data path used for cache and temporary user uploads.
-dataPath: ./data
+dataPath: /var/lib/wikijs/data
# ---------------------------------------------------------------------
# Body Parser Limit

24
ilot/wikijs/wikijs.initd Normal file
View file

@ -0,0 +1,24 @@
#!/sbin/openrc-run
name="$RC_SVCNAME"
cfgfile="/etc/conf.d/$RC_SVCNAME"
pidfile="/var/run/$RC_SVCNAME.pid"
command="/usr/bin/node server"
command_args=""
command_user="wikijs"
command_group="wikijs"
supervisor="supervise-daemon"
start_stop_daemon_args=""
command_background="yes"
output_log="/var/log/$RC_SVCNAME/$RC_SVCNAME.log"
error_log="/var/log/$RC_SVCNAME/$RC_SVCNAME.err"
working_directory="/usr/lib/bundles/wikijs"
start_pre() {
checkpath --directory --owner $command_user:$command_user --mode 0775 \
/var/log/$RC_SVCNAME \
/var/lib/$RC_SVCNAME
export NODE_ENV=production
export CONFIG_FILE=/etc/wikijs/config.yml
cd "$working_directory"
}

18
ilot/wikijs/wikijs.post-install Executable file
View file

@ -0,0 +1,18 @@
#!/bin/sh
set -eu
group=wikijs
config_file='/etc/wikijs/config.yml'
setcap 'cap_net_bind_service=+ep' /usr/bin/node
cat >&2 <<-EOF
*
* 1. Adjust settings in /etc/wikijs/config.yml.
*
* 2. Create database for wikijs:
*
* psql -c "CREATE ROLE wikijs PASSWORD 'top-secret' INHERIT LOGIN;"
* psql -c "CREATE DATABASE wkijs OWNER wikijs ENCODING 'UTF-8';"
*
EOF

View file

@ -0,0 +1,20 @@
#!/bin/sh
# It's very important to set user/group correctly.
wikijs_dir='/var/lib/wikijs'
if ! getent group wikijs 1>/dev/null; then
echo '* Creating group wikijs' 1>&2
addgroup -S wikijs -g 5494
fi
if ! id wikijs 2>/dev/null 1>&2; then
echo '* Creating user wikijs' 1>&2
adduser -DHS -G wikijs -h "$wikijs_dir" -u 5494 -s /bin/sh \
-g "added by apk for wikijs" wikijs
passwd -u wikijs 1>/dev/null # unlock
fi
exit 0