diff --git a/.gitlab/bin/build.sh b/.forgejo/bin/build.sh
similarity index 72%
rename from .gitlab/bin/build.sh
rename to .forgejo/bin/build.sh
index 286f965..b6dcbe0 100755
--- a/.gitlab/bin/build.sh
+++ b/.forgejo/bin/build.sh
@@ -1,27 +1,26 @@
 #!/bin/sh
 # shellcheck disable=SC3043
 
-. $CI_PROJECT_DIR/.gitlab/bin/functions.sh
+. /usr/local/lib/functions.sh
 
 # shellcheck disable=SC3040
 set -eu -o pipefail
 
 readonly APORTSDIR=$CI_PROJECT_DIR
-readonly REPOS="cross backports user testing community"
-readonly ALPINE_REPOS="main community"
+readonly REPOS="ilot backports"
+readonly ALPINE_REPOS="main community testing"
 readonly ARCH=$(apk --print-arch)
 # gitlab variables
 readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
 
 : "${REPODEST:=$HOME/packages}"
-: "${MIRROR:=https://lab.ilot.io/ayakael/repo-apk/-/raw}"
+: "${MIRROR:=https://forge.ilot.io/api/packages/ilot/alpine}"
 : "${ALPINE_MIRROR:=http://dl-cdn.alpinelinux.org/alpine}"
 : "${MAX_ARTIFACT_SIZE:=300000000}" #300M
 : "${CI_DEBUG_BUILD:=}"
 
 : "${CI_ALPINE_BUILD_OFFSET:=0}"
 : "${CI_ALPINE_BUILD_LIMIT:=9999}"
-: "${CI_ALPINE_TARGET_ARCH:=$(uname -m)}"
 
 msg() {
 	local color=${2:-green}
@@ -71,7 +70,7 @@ report() {
 
 get_release() {
 	case $BASEBRANCH in
-		v*) echo "${BASEBRANCH%-*}";;
+		v*) echo "$BASEBRANCH";;
 		edge) echo edge;;
 		*) die "Branch \"$BASEBRANCH\" not supported!"
 	esac
@@ -80,9 +79,8 @@ get_release() {
 build_aport() {
 	local repo="$1" aport="$2"
 	cd "$APORTSDIR/$repo/$aport"
-	export CHOST=$CI_ALPINE_TARGET_ARCH
 	if abuild -r 2>&1 | report "build-$aport"; then
-		checkapk | report "checkapk-$aport" || true
+		checkapk 2>&1 | report "checkapk-$aport" || true
 		aport_ok="$aport_ok $repo/$aport"
 	else
 		aport_ng="$aport_ng $repo/$aport"
@@ -92,12 +90,6 @@ build_aport() {
 check_aport() {
 	local repo="$1" aport="$2"
 	cd "$APORTSDIR/$repo/$aport"
-	export CHOST=$CI_ALPINE_TARGET_ARCH
-	# TODO: this enables crossbuild only on user, this should be cleaner
-	if [ "$repo" != "user" ] && [ "$repo" != "backports" ] && [ "$CI_ALPINE_TARGET_ARCH" != "$ARCH" ]; then
-		aport_na="$aport_na $repo/$aport"
-		return 1
-	fi
 	if ! abuild check_arch 2>/dev/null; then
 		aport_na="$aport_na $repo/$aport"
 		return 1
@@ -110,16 +102,13 @@ set_repositories_for() {
 	release=$(get_release)
 
 	for repo in $REPOS; do
+		[ "$repo" = "non-free" ] && continue
+		[ "$release" == "edge" ] && [ "$repo" == "backports" ] && continue
 		repos="$repos $MIRROR/$release/$repo $REPODEST/$repo"
 		[ "$repo" = "$target_repo" ] && break
 	done
-	sudo sh -c "printf '%s\n' $repos >> /etc/apk/repositories"
-	sudo apk update || true
-	if [ "$CI_ALPINE_TARGET_ARCH" != "$ARCH" ]; then
-		sudo sh -c "printf '%s\n' $repos >> $HOME/sysroot-$CI_ALPINE_TARGET_ARCH/etc/apk/repositories"
-		sudo cp -R /etc/apk/keys/* $HOME/sysroot-$CI_ALPINE_TARGET_ARCH/etc/apk/keys/.
-		sudo apk --root=$HOME/sysroot-$CI_ALPINE_TARGET_ARCH update || true
-	fi
+	doas sh -c "printf '%s\n' $repos >> /etc/apk/repositories"
+	doas apk update || true
 }
 
 apply_offset_limit() {
@@ -139,22 +128,10 @@ setup_system() {
 		[ "$release" != "edge" ] && [ "$repo" == "testing" ] && continue
 		repos="$repos $ALPINE_MIRROR/$release/$repo"
 	done
-	repos="$repos $MIRROR/$release/cross"
-	sudo sh -c "printf '%s\n' $repos > /etc/apk/repositories"
-	sudo apk -U upgrade -a || sudo apk fix || die "Failed to up/downgrade system"
-	if [ "$CI_ALPINE_TARGET_ARCH" != "$ARCH" ]; then
-		sudo apk add gcc-$CI_ALPINE_TARGET_ARCH
-	fi
-	gitlab_key_to_rsa $ABUILD_KEY rsa-private $HOME/.abuild/$ABUILD_KEY_NAME.rsa
-	gitlab_key_to_rsa $ABUILD_KEY_PUB rsa-public $HOME/.abuild/$ABUILD_KEY_NAME.rsa.pub
-	chmod 700 $HOME/.abuild/$ABUILD_KEY_NAME.rsa
-	echo "PACKAGER_PRIVKEY=$HOME/.abuild/$ABUILD_KEY_NAME.rsa" >> $HOME/.abuild/abuild.conf
-	sudo cp $HOME/.abuild/$ABUILD_KEY_NAME.rsa.pub /etc/apk/keys/$ABUILD_KEY_NAME.rsa.pub
-
-	# patch abuild for crosscompiling
-	sudo patch -p1 -d / -i $CI_PROJECT_DIR/.gitlab/patches/abuild-cross.patch
-
-	sudo sed -i -E 's/export JOBS=[0-9]+$/export JOBS=$(nproc)/' /etc/abuild.conf
+	doas sh -c "printf '%s\n' $repos > /etc/apk/repositories"
+	doas apk -U upgrade -a || apk fix || die "Failed to up/downgrade system"
+	abuild-keygen -ain
+	doas sed -i -E 's/export JOBS=[0-9]+$/export JOBS=$(nproc)/' /etc/abuild.conf
 	( . /etc/abuild.conf && echo "Building with $JOBS jobs" )
 	mkdir -p "$REPODEST"
 	git config --global init.defaultBranch master
@@ -203,7 +180,7 @@ sysinfo || true
 setup_system || die "Failed to setup system"
 
 # git no longer allows to execute in repositories owned by different users
-sudo chown -R $USER: .
+doas chown -R buildozer: .
 
 fetch_flags="-qn"
 debugging && fetch_flags="-v"
@@ -226,7 +203,6 @@ build_start=$CI_ALPINE_BUILD_OFFSET
 build_limit=$CI_ALPINE_BUILD_LIMIT
 
 for repo in $(changed_repos); do
-	mkdir -p "$APORTSDIR"/logs "$APORTSDIR"/packages "$APORTSDIR"/keys
 	set_repositories_for "$repo"
 	built_aports=0
 	changed_aports_in_repo=$(changed_aports "$repo")
@@ -267,7 +243,7 @@ for ok in $aport_ok; do
 done
 
 for na in $aport_na; do
-	msg "$na: disabled for $CI_ALPINE_TARGET_ARCH" yellow
+	msg "$na: disabled for $ARCH" yellow
 done
 
 for ng in $aport_ng; do
@@ -281,3 +257,4 @@ if [ "$failed" = true ]; then
 elif [ -z "$aport_ok" ]; then
 	msg "No packages found to be built." yellow
 fi
+
diff --git a/.forgejo/bin/check_ver.sh b/.forgejo/bin/check_ver.sh
new file mode 100755
index 0000000..66c7fd0
--- /dev/null
+++ b/.forgejo/bin/check_ver.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+# expects the following env variables:
+# downstream: downstream repo
+
+repo=${downstream/*\/}
+
+curl --silent $downstream/x86_64/APKINDEX.tar.gz | tar -O -zx APKINDEX > APKINDEX
+
+owned_by_you=$(awk -v RS= -v ORS="\n\n" '/m:Antoine Martin \(ayakael\) <antoine.martin@protonmail.com>/' APKINDEX | awk -F ':' '{if($1=="o"){print $2}}' | sort | uniq)
+
+echo "Found $(printf '%s\n' $owned_by_you | wc -l ) packages owned by you"
+
+rm -f out_of_date not_in_anitya
+
+for pkg in $owned_by_you; do
+	upstream_version=$(curl --fail -X GET -sS -H 'Content-Type: application/json' "https://release-monitoring.org/api/v2/packages/?name=$pkg&distribution=Alpine" | jq -r '.items.[].stable_version')
+	downstream_version=$(sed -n "/^P:$pkg$/,/^$/p" APKINDEX | awk -F ':' '{if($1=="V"){print $2}}' | sort -V | tail -n 1)
+	downstream_version=${downstream_version/-*}
+
+	# special case for forgejo-aneksajo:
+	upstream_version=${upstream_version/-git-annex/_git}
+
+	if [ -z "$upstream_version" ]; then
+		echo "$pkg not in anitya"
+		echo "$pkg" >> not_in_anitya
+	elif [ "$downstream_version" != "$(printf '%s\n' $upstream_version $downstream_version | sort -V | head -n 1)" ]; then
+		echo "$pkg higher downstream"
+		continue
+	elif [ "$upstream_version" != "$downstream_version" ]; then
+		echo "$pkg upstream version $upstream_version does not match downstream version $downstream_version"
+		echo "$pkg $downstream_version $upstream_version $repo" >> out_of_date
+	fi
+done
diff --git a/.forgejo/bin/create_issue.sh b/.forgejo/bin/create_issue.sh
new file mode 100755
index 0000000..d162758
--- /dev/null
+++ b/.forgejo/bin/create_issue.sh
@@ -0,0 +1,165 @@
+#!/bin/bash
+
+# expects:
+# env variable FORGEJO_TOKEN
+# file out_of_date
+
+IFS='
+'
+repo=${downstream/*\/}
+
+does_it_exist() {
+	name=$1
+	downstream_version=$2
+	upstream_version=$3
+	repo=$4
+
+	query="$repo/$name: upgrade to $upstream_version"
+	query="$(echo $query | sed 's| |%20|g' | sed 's|:|%3A|g' | sed 's|/|%2F|g' )"
+
+	result="$(curl --silent -X 'GET' \
+		"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues" \
+		-H 'accept: application/json' \
+		-H "authorization: Basic $FORGEJO_TOKEN"
+	)"
+
+	if [ "$result" == "[]" ]; then
+		return 1
+	fi
+}
+
+is_it_old() {
+	name=$1
+	downstream_version=$2
+	upstream_version=$3
+	repo=$4
+
+	query="$repo/$name: upgrade to"
+	query="$(echo $query | sed 's| |%20|g' | sed 's|:|%3A|g' | sed 's|/|%2F|g' )"
+
+	result="$(curl --silent -X 'GET' \
+		"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues" \
+		-H 'accept: application/json' \
+		-H "authorization: Basic $FORGEJO_TOKEN"
+	)"
+
+	result_title="$(echo $result | jq -r '.[].title' )"
+	result_id="$(echo $result | jq -r '.[].number' )"
+	result_upstream_version="$(echo $result_title | awk '{print $4}')"
+
+	if [ "$upstream_version" != "$result_upstream_version" ]; then
+		echo $result_id
+	else
+		echo 0
+	fi
+}
+
+update_title() {
+	name=$1
+	downstream_version=$2
+	upstream_version=$3
+	repo=$4
+	id=$5
+
+	result=$(curl --silent -X 'PATCH' \
+		"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues/$id" \
+		-H 'accept: application/json' \
+		-H "authorization: Basic $FORGEJO_TOKEN" \
+		-H 'Content-Type: application/json' \
+		-d "{
+			\"title\": \"$repo/$name: upgrade to $upstream_version\"
+		}"
+	)
+
+	return 0
+}
+
+create_issue() {
+	name=$1
+	downstream_version=$2
+	upstream_version=$3
+	repo=$4
+
+	result=$(curl --silent -X 'POST' \
+		"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues" \
+		-H 'accept: application/json' \
+		-H "authorization: Basic $FORGEJO_TOKEN" \
+		-H 'Content-Type: application/json' \
+		-d "{
+			\"title\": \"$repo/$name: upgrade to $upstream_version\",
+			\"labels\": [
+				$LABEL_NUMBER
+			]
+		}")
+
+	return 0
+}
+
+if [ -f out_of_date ]; then
+	out_of_date="$(cat out_of_date)"
+
+	echo "Detected $(wc -l out_of_date) out-of-date packages, creating issues"
+
+	for pkg in $out_of_date; do
+		name="$(echo $pkg | awk '{print $1}')"
+		downstream_version="$(echo $pkg | awk '{print $2}')"
+		upstream_version="$(echo $pkg | awk '{print $3}')"
+		repo="$(echo $pkg | awk '{print $4}')"
+
+		if does_it_exist $name $downstream_version $upstream_version $repo; then
+			echo "Issue for $repo/$name already exists"
+			continue
+		fi
+
+		id=$(is_it_old $name $downstream_version $upstream_version $repo)
+
+		if [ "$id" != "0" ] && [ -n "$id" ]; then
+			echo "Issue for $repo/$name needs updating"
+			update_title $name $downstream_version $upstream_version $repo $id
+			continue
+		fi
+
+		echo "Creating issue for $repo/$name"
+		create_issue $name $downstream_version $upstream_version $repo
+	done
+fi
+
+if [ -f not_in_anitya ]; then
+	query="Add missing $repo packages to anitya"
+	query="$(echo $query | sed 's| |%20|g')"
+
+	result="$(curl --silent -X 'GET' \
+		"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues" \
+		-H 'accept: application/json' \
+		-H "authorization: Basic $FORGEJO_TOKEN"
+	)"
+
+	if [ "$result" == "[]" ]; then
+		echo "Creating anitya issue"
+		result=$(curl --silent -X 'POST' \
+			"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues" \
+			-H 'accept: application/json' \
+			-H "authorization: Basic $FORGEJO_TOKEN" \
+			-H 'Content-Type: application/json' \
+			-d "{
+				\"title\": \"Add missing $repo packages to anitya\",
+				\"body\": \"- [ ] $(sed '{:q;N;s/\n/\\n- [ ] /g;t q}' not_in_anitya)\",
+				\"labels\": [
+					$LABEL_NUMBER
+				]
+			}")
+
+	else
+		echo "Updating anitya issue"
+		result_id="$(echo $result | jq -r '.[].number' )"
+		result=$(curl --silent -X 'PATCH' \
+			"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues/$result_id" \
+			-H 'accept: application/json' \
+			-H "authorization: Basic $FORGEJO_TOKEN" \
+			-H 'Content-Type: application/json' \
+			-d "{
+				\"body\": \"- [ ] $(sed '{:q;N;s/\n/\\n- [ ] /g;t q}' not_in_anitya)\"
+			}"
+		)
+	fi
+fi
diff --git a/.forgejo/bin/deploy.sh b/.forgejo/bin/deploy.sh
new file mode 100755
index 0000000..daf2496
--- /dev/null
+++ b/.forgejo/bin/deploy.sh
@@ -0,0 +1,25 @@
+#!/bin/sh
+
+# shellcheck disable=SC3040
+set -eu -o pipefail
+
+readonly REPOS="backports user"
+readonly BASEBRANCH=$GITHUB_BASE_REF
+readonly TARGET_REPO=$CI_ALPINE_REPO
+
+apkgs=$(find package -type f -name "*.apk")
+
+for apk in $apkgs; do
+	branch=$(echo $apk | awk -F '/' '{print $2}')
+	arch=$(echo $apk | awk -F '/' '{print $3}')
+	name=$(echo $apk | awk -F '/' '{print $4}')
+
+	echo "Sending $name of arch $arch to $TARGET_REPO/$BASEBRANCH/$branch"
+	return=$(curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN --upload-file $apk $TARGET_REPO/$BASEBRANCH/$branch 2>&1)
+	echo $return
+	if [ "$return" == "package file already exists" ]; then
+		echo "Package already exists, refreshing..."
+		curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN -X DELETE $TARGET_REPO/$BASEBRANCH/$branch/$arch/$name
+		curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN --upload-file $apk $TARGET_REPO/$BASEBRANCH/$branch
+	fi
+done
diff --git a/.forgejo/workflows/build-aarch64.yaml b/.forgejo/workflows/build-aarch64.yaml
new file mode 100644
index 0000000..0364014
--- /dev/null
+++ b/.forgejo/workflows/build-aarch64.yaml
@@ -0,0 +1,56 @@
+on:
+  pull_request:
+    types: [ assigned, opened, synchronize, reopened ]
+
+concurrency:
+  group: ${{ github.head_ref || github.ref_name }}
+  cancel-in-progress: true
+
+jobs:
+  build-aarch64:
+    runs-on: aarch64
+    container:
+      image: alpinelinux/alpine-gitlab-ci:latest
+      env:
+        CI_PROJECT_DIR: ${{ github.workspace }}
+        CI_DEBUG_BUILD: ${{ runner.debug }}
+        CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
+        CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
+    steps:
+      - name: Environment setup
+        run: |
+          doas apk add nodejs git patch curl net-tools
+          doas hostname host.docker.internal
+          cd /etc/apk/keys
+          doas curl -JO https://forge.ilot.io/api/packages/ilot/alpine/key
+      - name: Repo pull
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 500
+      - name: Package build
+        run: ${{ github.workspace }}/.forgejo/bin/build.sh
+      - name: Package upload
+        uses: forgejo/upload-artifact@v3
+        with:
+          name: package
+          path: packages
+
+  deploy-aarch64:
+    needs: [build-aarch64]
+    runs-on: aarch64
+    container:
+      image: alpine:latest
+      env:
+        CI_ALPINE_REPO: 'https://forge.ilot.io/api/packages/ilot/alpine'
+        FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
+        FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
+        GITHUB_EVENT_NUMBER: ${{ github.event.number }}
+    steps:
+      - name: Setting up environment
+        run: apk add nodejs curl findutils git gawk jq
+      - name: Repo pull
+        uses: actions/checkout@v4
+      - name: Package download
+        uses: forgejo/download-artifact@v3
+      - name: Package deployment
+        run: ${{ github.workspace }}/.forgejo/bin/deploy.sh
diff --git a/.forgejo/workflows/build-x86_64.yaml b/.forgejo/workflows/build-x86_64.yaml
new file mode 100644
index 0000000..c805199
--- /dev/null
+++ b/.forgejo/workflows/build-x86_64.yaml
@@ -0,0 +1,56 @@
+on:
+  pull_request:
+    types: [ assigned, opened, synchronize, reopened ]
+
+concurrency:
+  group: ${{ github.head_ref || github.ref_name }}
+  cancel-in-progress: true
+
+jobs:
+  build-x86_64:
+    runs-on: x86_64
+    container:
+      image: alpinelinux/alpine-gitlab-ci:latest
+      env:
+        CI_PROJECT_DIR: ${{ github.workspace }}
+        CI_DEBUG_BUILD: ${{ runner.debug }}
+        CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
+        CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
+    steps:
+      - name: Environment setup
+        run: |
+          doas apk add nodejs git patch curl net-tools
+          doas hostname host.docker.internal
+          cd /etc/apk/keys
+          doas curl -JO https://forge.ilot.io/api/packages/ilot/alpine/key
+      - name: Repo pull
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 500
+      - name: Package build
+        run: ${{ github.workspace }}/.forgejo/bin/build.sh
+      - name: Package upload
+        uses: forgejo/upload-artifact@v3
+        with:
+          name: package
+          path: packages
+
+  deploy-x86_64:
+    needs: [build-x86_64]
+    runs-on: x86_64
+    container:
+      image: alpine:latest
+      env:
+        CI_ALPINE_REPO: 'https://forge.ilot.io/api/packages/ilot/alpine'
+        FORGE_REPO_TOKEN: ${{ secrets.FORGE_REPO_TOKEN }}
+        FORGE_REPO_USER: ${{ vars.FORGE_REPO_USER }}
+        GITHUB_EVENT_NUMBER: ${{ github.event.number }}
+    steps:
+      - name: Setting up environment
+        run: apk add nodejs curl findutils git gawk jq
+      - name: Repo pull
+        uses: actions/checkout@v4
+      - name: Package download
+        uses: forgejo/download-artifact@v3
+      - name: Package deployment
+        run: ${{ github.workspace }}/.forgejo/bin/deploy.sh
diff --git a/.forgejo/workflows/check-ilot.yml b/.forgejo/workflows/check-ilot.yml
new file mode 100644
index 0000000..6a3e2a7
--- /dev/null
+++ b/.forgejo/workflows/check-ilot.yml
@@ -0,0 +1,27 @@
+on:
+  workflow_dispatch:
+
+  schedule:
+    - cron: '0 5 * * *'
+
+jobs:
+  check-user:
+    name: Check user repo
+    runs-on: x86_64
+    container:
+      image: alpine:latest
+      env:
+        downstream: https://forge.ilot.io/api/packages/ilot/alpine/v3.21/ilot
+        FORGEJO_TOKEN: ${{ secrets.forgejo_token }}
+        LABEL_NUMBER: 8
+    steps:
+      - name: Environment setup
+        run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
+      - name: Get scripts
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 1
+      - name: Check out-of-date packages
+        run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
+      - name: Create issues
+        run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh
diff --git a/.forgejo/workflows/lint.yaml b/.forgejo/workflows/lint.yaml
new file mode 100644
index 0000000..3614deb
--- /dev/null
+++ b/.forgejo/workflows/lint.yaml
@@ -0,0 +1,21 @@
+on:
+  pull_request:
+    types: [ assigned, opened, synchronize, reopened ]
+
+jobs:
+  lint:
+    run-name: lint
+    runs-on: x86_64
+    container:
+      image: alpinelinux/apkbuild-lint-tools:latest
+      env:
+        CI_PROJECT_DIR: ${{ github.workspace }}
+        CI_DEBUG_BUILD: ${{ runner.debug }}
+        CI_MERGE_REQUEST_PROJECT_URL: ${{ github.server_url }}/${{ github.repository }}
+        CI_MERGE_REQUEST_TARGET_BRANCH_NAME: ${{ github.base_ref }}
+    steps:
+      - run: doas apk add nodejs git
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 500
+      - run: lint
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
deleted file mode 100644
index dd8afae..0000000
--- a/.gitlab-ci.yml
+++ /dev/null
@@ -1,109 +0,0 @@
-stages:
-  - verify
-  - build
-  - deploy
-
-variables:
-  GIT_STRATEGY: clone
-  GIT_DEPTH: "500"
-
-lint:
-  stage: verify
-  interruptible: true
-  script:
-    - |
-      sudo apk add shellcheck atools sudo abuild
-      export PATH="$PATH:$CI_PROJECT_DIR/.gitlab/bin"
-      lint
-  allow_failure: true
-  only:
-    - merge_requests
-  tags:
-    - apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-x86_64
-
-.build:
-  stage: build
-  interruptible: true
-  script:
-    - |
-      sudo apk add alpine-sdk lua-aports sudo
-      sudo addgroup $USER abuild
-      export PATH="$PATH:$CI_PROJECT_DIR/.gitlab/bin"
-      sudo -Eu $USER build.sh
-  artifacts:
-    paths:
-      - packages/
-      - keys/
-      - logs/
-    expire_in: 7 days
-    when: always
-  only:
-    - merge_requests
-
-.cross:
-  stage: build
-  interruptible: true
-  script:
-    - |
-      sudo apk add alpine-sdk lua-aports sudo gzip xz qemu-$CI_QEMU_TARGET_ARCH
-      sudo addgroup $USER abuild
-      export PATH="$PATH:$CI_PROJECT_DIR/.gitlab/bin"
-      build-rootfs.sh alpine${CI_MERGE_REQUEST_TARGET_BRANCH_NAME/v} $CI_ALPINE_TARGET_ARCH --rootfsdir $HOME/sysroot-$CI_ALPINE_TARGET_ARCH
-      cp /etc/apk/repositories $HOME/sysroot-$CI_ALPINE_TARGET_ARCH/etc/apk/.
-      sudo -Eu $USER CHOST=$CI_TARGET_ALPINE_ARCH build.sh
-  artifacts:
-    paths:
-      - packages/
-      - keys/
-      - logs/
-    expire_in: 7 days
-    when: always
-  only:
-    - merge_requests
-
-build-x86_64:
-  extends: .build
-  when: always
-  tags:
-    - apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-x86_64
-
-build-aarch64:
-  extends: .build
-  when: always
-  tags:
-    - apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-aarch64
-
-build-ppc64le:
-  extends: .build
-  when: manual
-  tags:
-    - apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-ppc64le
-
-build-s390x:
-  extends: .build
-  when: manual
-  tags:
-    - apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-s390x
-
-build-armv7:
-  extends: .cross
-  when: manual
-  tags:
-    - apk-$CI_MERGE_REQUEST_TARGET_BRANCH_NAME-x86_64
-  variables:
-    CI_ALPINE_TARGET_ARCH: armv7
-    CI_QEMU_TARGET_ARCH: arm
-
-push:
-  interruptible: true
-  stage: deploy
-  script:
-    - |
-      sudo apk add abuild git-lfs findutils
-      export PATH="$PATH:$CI_PROJECT_DIR/.gitlab/bin"
-      push.sh
-  rules:
-    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
-      when: manual
-  tags:
-    - repo
diff --git a/.gitlab/bin/APKBUILD_SHIM b/.gitlab/bin/APKBUILD_SHIM
deleted file mode 100755
index 76577ff..0000000
--- a/.gitlab/bin/APKBUILD_SHIM
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/bin/sh
-
-set -e
-
-arch=
-builddir=
-checkdepends=
-depends=
-depends_dev=
-depends_doc=
-depends_libs=
-depends_openrc=
-depends_static=
-install=
-install_if=
-langdir=
-ldpath=
-license=
-makedepends=
-makedepends_build=
-makedepends_host=
-md5sums=
-options=
-patch_args=
-pkgbasedir=
-pkgdesc=
-pkgdir=
-pkgname=
-pkgrel=
-pkgver=
-pkggroups=
-pkgusers=
-provides=
-provider_priority=
-replaces=
-sha256sums=
-sha512sums=
-sonameprefix=
-source=
-srcdir=
-startdir=
-subpackages=
-subpkgdir=
-subpkgname=
-triggers=
-url=
-
-# abuild.conf
-
-CFLAGS=
-CXXFLAGS=
-CPPFLAGS=
-LDFLAGS=
-JOBS=
-MAKEFLAGS=
-CMAKE_CROSSOPTS=
-
-. ./APKBUILD
-
-: "$arch"
-: "$builddir"
-: "$checkdepends"
-: "$depends"
-: "$depends_dev"
-: "$depends_doc"
-: "$depends_libs"
-: "$depends_openrc"
-: "$depends_static"
-: "$install"
-: "$install_if"
-: "$langdir"
-: "$ldpath"
-: "$license"
-: "$makedepends"
-: "$makedepends_build"
-: "$makedepends_host"
-: "$md5sums"
-: "$options"
-: "$patch_args"
-: "$pkgbasedir"
-: "$pkgdesc"
-: "$pkgdir"
-: "$pkgname"
-: "$pkgrel"
-: "$pkgver"
-: "$pkggroups"
-: "$pkgusers"
-: "$provides"
-: "$provider_priority"
-: "$replaces"
-: "$sha256sums"
-: "$sha512sums"
-: "$sonameprefix"
-: "$source"
-: "$srcdir"
-: "$startdir"
-: "$subpackages"
-: "$subpkgdir"
-: "$subpkgname"
-: "$triggers"
-: "$url"
-
-# abuild.conf
-
-: "$CFLAGS"
-: "$CXXFLAGS"
-: "$CPPFLAGS"
-: "$LDFLAGS"
-: "$JOBS"
-: "$MAKEFLAGS"
-: "$CMAKE_CROSSOPTS"
diff --git a/.gitlab/bin/apkbuild-shellcheck b/.gitlab/bin/apkbuild-shellcheck
deleted file mode 100755
index 3126684..0000000
--- a/.gitlab/bin/apkbuild-shellcheck
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/sh
-
-shellcheck -s ash \
-	-e SC3043 \
-	-e SC3057 \
-	-e SC3060 \
-	-e SC2016 \
-	-e SC2086 \
-	-e SC2169 \
-	-e SC2155 \
-	-e SC2100 \
-	-e SC2209 \
-	-e SC2030 \
-	-e SC2031 \
-	-e SC1090 \
-	-xa $CI_PROJECT_DIR/.gitlab/bin/APKBUILD_SHIM
diff --git a/.gitlab/bin/build-rootfs.sh b/.gitlab/bin/build-rootfs.sh
deleted file mode 100755
index 44c4372..0000000
--- a/.gitlab/bin/build-rootfs.sh
+++ /dev/null
@@ -1,556 +0,0 @@
-#!/usr/bin/env bash
-# Availabl here: https://lab.ilot.io/dotnet/arcade/-/blob/7f6d9796cc7f594772f798358dbdd8c69b6a97af/eng/common/cross/build-rootfs.sh
-# Only modification: qemu-$arch-static becomes qemu-$arch
-
-set -e
-
-usage()
-{
-    echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir <directory>]"
-    echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86"
-    echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine"
-    echo "           for alpine can be specified with version: alpineX.YY or alpineedge"
-    echo "           for FreeBSD can be: freebsd12, freebsd13"
-    echo "           for illumos can be: illumos"
-    echo "           for Haiku can be: haiku."
-    echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD"
-    echo "llvmx[.y] - optional, LLVM version for LLVM related packages."
-    echo "--skipunmount - optional, will skip the unmount of rootfs folder."
-    echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
-    echo "--jobs N - optional, restrict to N jobs."
-    exit 1
-}
-
-__CodeName=xenial
-__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
-__BuildArch=arm
-__AlpineArch=armv7
-__FreeBSDArch=arm
-__FreeBSDMachineArch=armv7
-__IllumosArch=arm7
-__QEMUArch=arm
-__UbuntuArch=armhf
-__UbuntuRepo="http://ports.ubuntu.com/"
-__LLDB_Package="liblldb-3.9-dev"
-__SkipUnmount=0
-
-# base development support
-__UbuntuPackages="build-essential"
-
-__AlpinePackages="alpine-base"
-__AlpinePackages+=" build-base"
-
-# symlinks fixer
-__UbuntuPackages+=" symlinks"
-
-# runtime dependencies
-__UbuntuPackages+=" libicu-dev"
-__UbuntuPackages+=" liblttng-ust-dev"
-__UbuntuPackages+=" libunwind8-dev"
-__UbuntuPackages+=" libnuma-dev"
-
-# runtime libraries' dependencies
-__UbuntuPackages+=" libcurl4-openssl-dev"
-__UbuntuPackages+=" libkrb5-dev"
-__UbuntuPackages+=" libssl-dev"
-__UbuntuPackages+=" zlib1g-dev"
-
-__FreeBSDBase="12.3-RELEASE"
-__FreeBSDPkg="1.17.0"
-__FreeBSDABI="12"
-__FreeBSDPackages="libunwind"
-__FreeBSDPackages+=" icu"
-__FreeBSDPackages+=" libinotify"
-__FreeBSDPackages+=" openssl"
-__FreeBSDPackages+=" krb5"
-__FreeBSDPackages+=" terminfo-db"
-
-__IllumosPackages="icu"
-__IllumosPackages+=" mit-krb5"
-__IllumosPackages+=" openssl"
-__IllumosPackages+=" zlib"
-
-__HaikuPackages="gmp"
-__HaikuPackages+=" gmp_devel"
-__HaikuPackages+=" krb5"
-__HaikuPackages+=" krb5_devel"
-__HaikuPackages+=" libiconv"
-__HaikuPackages+=" libiconv_devel"
-__HaikuPackages+=" llvm12_libunwind"
-__HaikuPackages+=" llvm12_libunwind_devel"
-__HaikuPackages+=" mpfr"
-__HaikuPackages+=" mpfr_devel"
-
-# ML.NET dependencies
-__UbuntuPackages+=" libomp5"
-__UbuntuPackages+=" libomp-dev"
-
-__Keyring=
-__UseMirror=0
-
-__UnprocessedBuildArgs=
-while :; do
-    if [[ "$#" -le 0 ]]; then
-        break
-    fi
-
-    lowerI="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
-    case $lowerI in
-        -\?|-h|--help)
-            usage
-            exit 1
-            ;;
-        arm)
-            __BuildArch=arm
-            __UbuntuArch=armhf
-            __AlpineArch=armv7
-            __QEMUArch=arm
-            ;;
-        arm64)
-            __BuildArch=arm64
-            __UbuntuArch=arm64
-            __AlpineArch=aarch64
-            __QEMUArch=aarch64
-            __FreeBSDArch=arm64
-            __FreeBSDMachineArch=aarch64
-            ;;
-        armel)
-            __BuildArch=armel
-            __UbuntuArch=armel
-            __UbuntuRepo="http://ftp.debian.org/debian/"
-            __CodeName=jessie
-            ;;
-        armv6)
-            __BuildArch=armv6
-            __UbuntuArch=armhf
-            __QEMUArch=arm
-            __UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/"
-            __CodeName=buster
-            __LLDB_Package="liblldb-6.0-dev"
-
-            if [[ -e "/usr/share/keyrings/raspbian-archive-keyring.gpg" ]]; then
-                __Keyring="--keyring /usr/share/keyrings/raspbian-archive-keyring.gpg"
-            fi
-            ;;
-        riscv64)
-            __BuildArch=riscv64
-            __AlpineArch=riscv64
-            __QEMUArch=riscv64
-            __UbuntuArch=riscv64
-            __UbuntuRepo="http://deb.debian.org/debian-ports"
-            __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
-            unset __LLDB_Package
-
-            if [[ -e "/usr/share/keyrings/debian-ports-archive-keyring.gpg" ]]; then
-                __Keyring="--keyring /usr/share/keyrings/debian-ports-archive-keyring.gpg --include=debian-ports-archive-keyring"
-            fi
-            ;;
-        ppc64le)
-            __BuildArch=ppc64le
-            __AlpineArch=ppc64le
-            __QEMUArch=ppc64le
-            __UbuntuArch=ppc64el
-            __UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/"
-            __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
-            __UbuntuPackages="${__UbuntuPackages// libomp-dev/}"
-            __UbuntuPackages="${__UbuntuPackages// libomp5/}"
-            unset __LLDB_Package
-            ;;
-        s390x)
-            __BuildArch=s390x
-            __AlpineArch=s390x
-            __QEMUArch=s390x
-            __UbuntuArch=s390x
-            __UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/"
-            __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}"
-            __UbuntuPackages="${__UbuntuPackages// libomp-dev/}"
-            __UbuntuPackages="${__UbuntuPackages// libomp5/}"
-            unset __LLDB_Package
-            ;;
-        x64)
-            __BuildArch=x64
-            __AlpineArch=x86_64
-            __QEMUArch=x86_64
-            __UbuntuArch=amd64
-            __FreeBSDArch=amd64
-            __FreeBSDMachineArch=amd64
-            __illumosArch=x86_64
-            __UbuntuRepo=
-            ;;
-        x86)
-            __BuildArch=x86
-            __AlpineArch=i386
-            __QEMUArch=i386
-            __UbuntuArch=i386
-            __AlpineArch=x86
-            __UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
-            ;;
-        lldb*)
-            version="${lowerI/lldb/}"
-            parts=(${version//./ })
-
-            # for versions > 6.0, lldb has dropped the minor version
-            if [[ "${parts[0]}" -gt 6 ]]; then
-                version="${parts[0]}"
-            fi
-
-            __LLDB_Package="liblldb-${version}-dev"
-            ;;
-        no-lldb)
-            unset __LLDB_Package
-            ;;
-        llvm*)
-            version="${lowerI/llvm/}"
-            parts=(${version//./ })
-            __LLVM_MajorVersion="${parts[0]}"
-            __LLVM_MinorVersion="${parts[1]}"
-
-            # for versions > 6.0, llvm has dropped the minor version
-            if [[ -z "$__LLVM_MinorVersion" && "$__LLVM_MajorVersion" -le 6 ]]; then
-                __LLVM_MinorVersion=0;
-            fi
-            ;;
-        xenial) # Ubuntu 16.04
-            if [[ "$__CodeName" != "jessie" ]]; then
-                __CodeName=xenial
-            fi
-            ;;
-        zesty) # Ubuntu 17.04
-            if [[ "$__CodeName" != "jessie" ]]; then
-                __CodeName=zesty
-            fi
-            ;;
-        bionic) # Ubuntu 18.04
-            if [[ "$__CodeName" != "jessie" ]]; then
-                __CodeName=bionic
-            fi
-            ;;
-        focal) # Ubuntu 20.04
-            if [[ "$__CodeName" != "jessie" ]]; then
-                __CodeName=focal
-            fi
-            ;;
-        jammy) # Ubuntu 22.04
-            if [[ "$__CodeName" != "jessie" ]]; then
-                __CodeName=jammy
-            fi
-            ;;
-        jessie) # Debian 8
-            __CodeName=jessie
-
-            if [[ -z "$__UbuntuRepo" ]]; then
-                __UbuntuRepo="http://ftp.debian.org/debian/"
-            fi
-            ;;
-        stretch) # Debian 9
-            __CodeName=stretch
-            __LLDB_Package="liblldb-6.0-dev"
-
-            if [[ -z "$__UbuntuRepo" ]]; then
-                __UbuntuRepo="http://ftp.debian.org/debian/"
-            fi
-            ;;
-        buster) # Debian 10
-            __CodeName=buster
-            __LLDB_Package="liblldb-6.0-dev"
-
-            if [[ -z "$__UbuntuRepo" ]]; then
-                __UbuntuRepo="http://ftp.debian.org/debian/"
-            fi
-            ;;
-        bullseye) # Debian 11
-            __CodeName=bullseye
-
-            if [[ -z "$__UbuntuRepo" ]]; then
-                __UbuntuRepo="http://ftp.debian.org/debian/"
-            fi
-            ;;
-        sid) # Debian sid
-            __CodeName=sid
-
-            if [[ -z "$__UbuntuRepo" ]]; then
-                __UbuntuRepo="http://ftp.debian.org/debian/"
-            fi
-            ;;
-        tizen)
-            __CodeName=
-            __UbuntuRepo=
-            __Tizen=tizen
-            ;;
-        alpine*)
-            __CodeName=alpine
-            __UbuntuRepo=
-            version="${lowerI/alpine/}"
-
-            if [[ "$version" == "edge" ]]; then
-                __AlpineVersion=edge
-            else
-                parts=(${version//./ })
-                __AlpineMajorVersion="${parts[0]}"
-                __AlpineMinoVersion="${parts[1]}"
-                __AlpineVersion="$__AlpineMajorVersion.$__AlpineMinoVersion"
-            fi
-            ;;
-        freebsd12)
-            __CodeName=freebsd
-            __SkipUnmount=1
-            ;;
-        freebsd13)
-            __CodeName=freebsd
-            __FreeBSDBase="13.0-RELEASE"
-            __FreeBSDABI="13"
-            __SkipUnmount=1
-            ;;
-        illumos)
-            __CodeName=illumos
-            __SkipUnmount=1
-            ;;
-        haiku)
-            __CodeName=haiku
-            __BuildArch=x64
-            __SkipUnmount=1
-            ;;
-        --skipunmount)
-            __SkipUnmount=1
-            ;;
-        --rootfsdir|-rootfsdir)
-            shift
-            __RootfsDir="$1"
-            ;;
-        --use-mirror)
-            __UseMirror=1
-            ;;
-        --use-jobs)
-            shift
-            MAXJOBS=$1
-            ;;
-        *)
-            __UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
-            ;;
-    esac
-
-    shift
-done
-
-if [[ "$__BuildArch" == "armel" ]]; then
-    __LLDB_Package="lldb-3.5-dev"
-fi
-
-__UbuntuPackages+=" ${__LLDB_Package:-}"
-
-if [[ -n "$__LLVM_MajorVersion" ]]; then
-    __UbuntuPackages+=" libclang-common-${__LLVM_MajorVersion}${__LLVM_MinorVersion:+.$__LLVM_MinorVersion}-dev"
-fi
-
-if [[ -z "$__RootfsDir" && -n "$ROOTFS_DIR" ]]; then
-    __RootfsDir="$ROOTFS_DIR"
-fi
-
-if [[ -z "$__RootfsDir" ]]; then
-    __RootfsDir="$__CrossDir/../../../.tools/rootfs/$__BuildArch"
-fi
-
-if [[ -d "$__RootfsDir" ]]; then
-    if [[ "$__SkipUnmount" == "0" ]]; then
-        umount "$__RootfsDir"/* || true
-    fi
-    rm -rf "$__RootfsDir"
-fi
-
-mkdir -p "$__RootfsDir"
-__RootfsDir="$( cd "$__RootfsDir" && pwd )"
-
-if [[ "$__CodeName" == "alpine" ]]; then
-    __ApkToolsVersion=2.12.11
-    __ApkToolsDir="$(mktemp -d)"
-
-    wget "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic//v$__ApkToolsVersion/x86_64/apk.static" -P "$__ApkToolsDir"
-    chmod +x "$__ApkToolsDir/apk.static"
-
-    mkdir -p "$__RootfsDir"/usr/bin
-    cp -v "/usr/bin/qemu-$__QEMUArch" "$__RootfsDir/usr/bin"
-
-    if [[ "$__AlpineVersion" == "edge" ]]; then
-        version=edge
-    else
-        version="v$__AlpineVersion"
-    fi
-
-    # initialize DB
-    "$__ApkToolsDir/apk.static" \
-        -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-        -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-        -U --allow-untrusted --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add
-
-    if [[ "$__AlpineLlvmLibsLookup" == 1 ]]; then
-        __AlpinePackages+=" $("$__ApkToolsDir/apk.static" \
-            -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-            -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-            -U --allow-untrusted --root "$__RootfsDir" --arch "$__AlpineArch" \
-            search 'llvm*-libs' | sort | tail -1 | sed 's/-[^-]*//2g')"
-    fi
-
-    # install all packages in one go
-    "$__ApkToolsDir/apk.static" \
-        -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-        -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
-        -U --allow-untrusted --no-scripts --root "$__RootfsDir" --arch "$__AlpineArch" \
-        add $__AlpinePackages
-
-    rm -r "$__ApkToolsDir"
-elif [[ "$__CodeName" == "freebsd" ]]; then
-    mkdir -p "$__RootfsDir"/usr/local/etc
-    JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
-    wget -O - "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
-    echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf
-    echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf
-    mkdir -p "$__RootfsDir"/tmp
-    # get and build package manager
-    wget -O - "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
-    cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
-    # needed for install to succeed
-    mkdir -p "$__RootfsDir"/host/etc
-    ./autogen.sh && ./configure --prefix="$__RootfsDir"/host && make -j "$JOBS" && make install
-    rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
-    # install packages we need.
-    INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update
-    INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
-elif [[ "$__CodeName" == "illumos" ]]; then
-    mkdir "$__RootfsDir/tmp"
-    pushd "$__RootfsDir/tmp"
-    JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
-    echo "Downloading sysroot."
-    wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
-    echo "Building binutils. Please wait.."
-    wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
-    mkdir build-binutils && cd build-binutils
-    ../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir"
-    make -j "$JOBS" && make install && cd ..
-    echo "Building gcc. Please wait.."
-    wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
-    CFLAGS="-fPIC"
-    CXXFLAGS="-fPIC"
-    CXXFLAGS_FOR_TARGET="-fPIC"
-    CFLAGS_FOR_TARGET="-fPIC"
-    export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET
-    mkdir build-gcc && cd build-gcc
-    ../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \
-        --with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \
-        --disable-libquadmath-support --disable-shared --enable-tls
-    make -j "$JOBS" && make install && cd ..
-    BaseUrl=https://pkgsrc.smartos.org
-    if [[ "$__UseMirror" == 1 ]]; then
-        BaseUrl=https://pkgsrc.smartos.skylime.net
-    fi
-    BaseUrl="$BaseUrl/packages/SmartOS/trunk/${__illumosArch}/All"
-    echo "Downloading manifest"
-    wget "$BaseUrl"
-    echo "Downloading dependencies."
-    read -ra array <<<"$__IllumosPackages"
-    for package in "${array[@]}"; do
-        echo "Installing '$package'"
-        # find last occurrence of package in listing and extract its name
-        package="$(sed -En '/.*href="('"$package"'-[0-9].*).tgz".*/h;$!d;g;s//\1/p' All)"
-        echo "Resolved name '$package'"
-        wget "$BaseUrl"/"$package".tgz
-        ar -x "$package".tgz
-        tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null
-    done
-    echo "Cleaning up temporary files."
-    popd
-    rm -rf "$__RootfsDir"/{tmp,+*}
-    mkdir -p "$__RootfsDir"/usr/include/net
-    mkdir -p "$__RootfsDir"/usr/include/netpacket
-    wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
-    wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
-    wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
-    wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
-elif [[ "$__CodeName" == "haiku" ]]; then
-    JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
-
-    echo "Building Haiku sysroot for x86_64"
-    mkdir -p "$__RootfsDir/tmp"
-    cd "$__RootfsDir/tmp"
-    git clone -b hrev56235 https://review.haiku-os.org/haiku
-    git clone -b btrev43195 https://review.haiku-os.org/buildtools
-    cd "$__RootfsDir/tmp/buildtools" && git checkout 7487388f5110021d400b9f3b88e1a7f310dc066d
-
-    # Fetch some unmerged patches
-    cd "$__RootfsDir/tmp/haiku"
-    ## Add development build profile (slimmer than nightly)
-    git fetch origin refs/changes/64/4164/1 && git -c commit.gpgsign=false cherry-pick FETCH_HEAD
-
-    # Build jam
-    cd "$__RootfsDir/tmp/buildtools/jam"
-    make
-
-    # Configure cross tools
-    echo "Building cross-compiler"
-    mkdir -p "$__RootfsDir/generated"
-    cd "$__RootfsDir/generated"
-    "$__RootfsDir/tmp/haiku/configure" -j"$JOBS" --sysroot "$__RootfsDir" --cross-tools-source "$__RootfsDir/tmp/buildtools" --build-cross-tools x86_64
-
-    # Build Haiku packages
-    echo "Building Haiku"
-    echo 'HAIKU_BUILD_PROFILE = "development-raw" ;' > UserProfileConfig
-    "$__RootfsDir/tmp/buildtools/jam/jam0" -j"$JOBS" -q 'package' 'Haiku'
-
-    BaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg"
-
-    # Download additional packages
-    echo "Downloading additional required packages"
-    read -ra array <<<"$__HaikuPackages"
-    for package in "${array[@]}"; do
-        echo "Downloading $package..."
-        # API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60
-        # The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598
-        hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_x86_64","versionType":"LATEST","naturalLanguageCode":"en"}' \
-            --header='Content-Type:application/json' "$BaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
-        wget -P "$__RootfsDir/generated/download" "$hpkgDownloadUrl"
-    done
-
-    # Setup the sysroot
-    echo "Setting up sysroot and extracting needed packages"
-    mkdir -p "$__RootfsDir/boot/system"
-    for file in "$__RootfsDir/generated/objects/haiku/x86_64/packaging/packages/"*.hpkg; do
-        "$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file"
-    done
-    for file in "$__RootfsDir/generated/download/"*.hpkg; do
-        "$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file"
-    done
-
-    # Cleaning up temporary files
-    echo "Cleaning up temporary files"
-    rm -rf "$__RootfsDir/tmp"
-    for name in "$__RootfsDir/generated/"*; do
-        if [[ "$name" =~ "cross-tools-" ]]; then
-            : # Keep the cross-compiler
-        else
-            rm -rf "$name"
-        fi
-    done
-elif [[ -n "$__CodeName" ]]; then
-    qemu-debootstrap $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
-    cp "$__CrossDir/$__BuildArch/sources.list.$__CodeName" "$__RootfsDir/etc/apt/sources.list"
-    chroot "$__RootfsDir" apt-get update
-    chroot "$__RootfsDir" apt-get -f -y install
-    chroot "$__RootfsDir" apt-get -y install $__UbuntuPackages
-    chroot "$__RootfsDir" symlinks -cr /usr
-    chroot "$__RootfsDir" apt-get clean
-
-    if [[ "$__SkipUnmount" == "0" ]]; then
-        umount "$__RootfsDir"/* || true
-    fi
-
-    if [[ "$__BuildArch" == "armel" && "$__CodeName" == "jessie" ]]; then
-        pushd "$__RootfsDir"
-        patch -p1 < "$__CrossDir/$__BuildArch/armel.jessie.patch"
-        popd
-    fi
-elif [[ "$__Tizen" == "tizen" ]]; then
-    ROOTFS_DIR="$__RootfsDir" "$__CrossDir/tizen-build-rootfs.sh" "$__BuildArch"
-else
-    echo "Unsupported target platform."
-    usage;
-    exit 1
-fi
diff --git a/.gitlab/bin/changed-aports b/.gitlab/bin/changed-aports
deleted file mode 100755
index 4541230..0000000
--- a/.gitlab/bin/changed-aports
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/sh
-
-if [ $# -lt 1 ]; then
-	echo "Usage: $0 <basebranch>"
-	exit 1
-fi
-
-if ! git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
-	echo "Fatal: not inside a git repository"
-	exit 2
-fi
-
-basebranch=$1
-
-if ! git rev-parse --verify --quiet $basebranch >/dev/null; then
-	# The base branch does not eixst, probably due to a shallow clone
-	git fetch -v $CI_MERGE_REQUEST_PROJECT_URL.git +refs/heads/$basebranch:refs/heads/$basebranch
-fi
-
-git --no-pager diff --diff-filter=ACMR --name-only $basebranch...HEAD -- "*/APKBUILD" | xargs -r -n1 dirname
diff --git a/.gitlab/bin/functions.sh b/.gitlab/bin/functions.sh
deleted file mode 100755
index 44de1fe..0000000
--- a/.gitlab/bin/functions.sh
+++ /dev/null
@@ -1,74 +0,0 @@
-# shellcheck disable=SC3043
-
-:
-
-# shellcheck disable=SC3040
-set -eu -o pipefail
-
-changed_repos() {
-	: "${APORTSDIR?APORTSDIR missing}"
-	: "${BASEBRANCH?BASEBRANCH missing}"
-
-	cd "$APORTSDIR"
-	for repo in $REPOS; do
-		git diff --diff-filter=ACMR --exit-code "$BASEBRANCH"...HEAD -- "$repo" >/dev/null \
-			|| echo "$repo"
-	done
-}
-
-changed_aports() {
-	: "${APORTSDIR?APORTSDIR missing}"
-	: "${BASEBRANCH?BASEBRANCH missing}"
-
-	cd "$APORTSDIR"
-	local repo="$1"
-	local aports
-
-	aports=$(git diff --name-only --diff-filter=ACMR --relative="$repo" \
-		"$BASEBRANCH"...HEAD -- "*/APKBUILD" | xargs -rn1 dirname)
-
-	# shellcheck disable=2086
-	ap builddirs -d "$APORTSDIR/$repo" $aports 2>/dev/null | xargs -rn1 basename
-}
-
-section_start() {
-	name=${1?arg 1 name missing}
-	header=${2?arg 2 header missing}
-	collapsed=$2
-	timestamp=$(date +%s)
-
-	options=""
-	case $collapsed in
-		yes|on|collapsed|true) options="[collapsed=true]";;
-	esac
-
-	printf "\e[0Ksection_start:%d:%s%s\r\e[0K%s\n" "$timestamp" "$name" "$options" "$header"
-}
-
-section_end() {
-	name=$1
-	timestamp=$(date +%s)
-
-	printf "\e[0Ksection_end:%d:%s\r\e[0K" "$timestamp" "$name"
-}
-
-gitlab_key_to_rsa() {
-	KEY=$1
-	TYPE=$2
-	TGT=$3
-	TGT_DIR=${TGT%/*}
-	if [ "$TGT" == "$TGT_DIR" ]; then
-		TGT_DIR="./"
-	fi
-	if [ ! -d "$TGT_DIR" ]; then
-		mkdir -p "$TGT_DIR"
-	fi
-	case $TYPE in
-		rsa-public) local type="PUBLIC";;
-		rsa-private) local type="RSA PRIVATE";;
-	esac
-	echo "-----BEGIN $type KEY-----" > "$TGT"
-	echo $1 | sed 's/.\{64\}/&\
-/g' >> "$TGT"
-	echo "-----END $type KEY-----" >> "$TGT"
-}
diff --git a/.gitlab/bin/lint b/.gitlab/bin/lint
deleted file mode 100755
index c1edcfb..0000000
--- a/.gitlab/bin/lint
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/bin/sh
-
-BLUE="\e[34m"
-MAGENTA="\e[35m"
-RESET="\e[0m"
-
-readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
-
-verbose() {
-	echo "> " "$@"
-	# shellcheck disable=SC2068
-	$@
-}
-
-debugging() {
-	[ -n "$CI_DEBUG_BUILD" ]
-}
-
-debug() {
-	if debugging; then
-		verbose "$@"
-	fi
-}
-
-# git no longer allows to execute in repositories owned by different users
-sudo chown -R gitlab-runner: .
-
-fetch_flags="-qn"
-debugging && fetch_flags="-v"
-
-git fetch $fetch_flags "$CI_MERGE_REQUEST_PROJECT_URL" \
-	"+refs/heads/$BASEBRANCH:refs/heads/$BASEBRANCH"
-
-if debugging; then
-	merge_base=$(git merge-base "$BASEBRANCH" HEAD)
-	echo "$merge_base"
-	git --version
-	git config -l
-	git tag merge-base "$merge_base" || { echo "Could not determine merge-base"; exit 50; }
-	git log --oneline --graph --decorate --all
-fi
-
-has_problems=0
-
-for PKG in $(changed-aports "$BASEBRANCH"); do
-	printf "$BLUE==>$RESET Linting $PKG\n"
-
-	(
-		cd "$PKG"
-
-		repo=$(basename $(dirname $PKG));
-
-		if [ "$repo" == "backports" ]; then
-			echo "Skipping $PKG as backports (we don't care)"
-			continue
-		fi
-
-		printf "\n\n"
-		printf "$BLUE"
-		printf '======================================================\n'
-		printf " parse APKBUILD:\n"
-		printf '======================================================'
-		printf "$RESET\n\n"
-		( . ./APKBUILD ) || has_problems=1
-
-		printf "\n\n"
-		printf "$BLUE"
-		printf '======================================================\n'
-		printf " abuild sanitycheck:\n"
-		printf '======================================================'
-		printf "$RESET\n\n"
-		abuild sanitycheck || has_problems=1
-
-		printf "\n\n"
-		printf "$BLUE"
-		printf '======================================================\n'
-		printf " apkbuild-shellcheck:\n"
-		printf '======================================================'
-		printf "$RESET\n"
-		apkbuild-shellcheck || has_problems=1
-
-		printf "\n\n"
-		printf "$BLUE"
-		printf '======================================================\n'
-		printf " apkbuild-lint:\n"
-		printf '======================================================'
-		printf "$RESET\n\n"
-		apkbuild-lint APKBUILD || has_problems=1
-
-		return $has_problems
-	) || has_problems=1
-
-	echo
-done
-
-exit $has_problems
diff --git a/.gitlab/bin/push.sh b/.gitlab/bin/push.sh
deleted file mode 100755
index e93101a..0000000
--- a/.gitlab/bin/push.sh
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/bin/sh
-
-# shellcheck disable=SC3043
-
-. $CI_PROJECT_DIR/.gitlab/bin/functions.sh
-
-# shellcheck disable=SC3040
-set -eu -o pipefail
-
-readonly APORTSDIR=$CI_PROJECT_DIR
-readonly REPOS="backports user"
-readonly BASEBRANCH=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
-
-export GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
-
-gitlab_key_to_rsa $ABUILD_KEY rsa-private $HOME/.abuild/$ABUILD_KEY_NAME.rsa
-gitlab_key_to_rsa $ABUILD_KEY_PUB rsa-public $HOME/.abuild/$ABUILD_KEY_NAME.rsa.pub
-gitlab_key_to_rsa $SSH_KEY rsa-private $HOME/.ssh/id_rsa
-chmod 700 "$HOME"/.ssh/id_rsa
-chmod 700 "$HOME"/.abuild/$ABUILD_KEY_NAME.rsa
-
-echo "PACKAGER_PRIVKEY=$HOME/.abuild/$ABUILD_KEY_NAME.rsa" > $HOME/.abuild/abuild.conf
-echo "REPODEST=$HOME/repo-apk" >> $HOME/.abuild/abuild.conf
-sudo cp $HOME/.abuild/$ABUILD_KEY_NAME.rsa.pub /etc/apk/keys/.
-
-if [ -d $HOME/repo-apk ]; then
-	git -C $HOME/repo-apk fetch
-	git -C $HOME/repo-apk checkout $BASEBRANCH
-	git -C $HOME/repo-apk pull --rebase
-else
-	git clone git@lab.ilot.io:ayakael/repo-apk -b $BASEBRANCH $HOME/repo-apk
-fi
-
-for i in $(find packages -type f -name "*.apk"); do
-	install -vDm644 $i ${i/packages/$HOME\/repo-apk}
-done
-
-fetch_flags="-qn"
-git fetch $fetch_flags "$CI_MERGE_REQUEST_PROJECT_URL" \
-	"+refs/heads/$BASEBRANCH:refs/heads/$BASEBRANCH"
-
-for repo in $(changed_repos); do
-	rm $HOME/repo-apk/$repo/*/APKINDEX.tar.gz | true
-	mkdir -p $repo/DUMMY
-	echo "pkgname=DUMMY" > $repo/DUMMY/APKBUILD
-	cd $repo/DUMMY
-	for i in $(find $HOME/repo-apk/$repo -maxdepth 1 -mindepth 1 -printf '%P '); do
-		CHOST=$i abuild index
-	done
-	cd "$CI_PROJECT_DIR"
-	rm -R $repo/DUMMY
-done
-
-git -C $HOME/repo-apk add .
-git -C $HOME/repo-apk commit -m "Update from $CI_MERGE_REQUEST_IID - $CI_MERGE_REQUEST_TITLE"
-git -C $HOME/repo-apk push
diff --git a/.gitlab/patches/abuild-cross.patch b/.gitlab/patches/abuild-cross.patch
deleted file mode 100644
index 50afd23..0000000
--- a/.gitlab/patches/abuild-cross.patch
+++ /dev/null
@@ -1,17 +0,0 @@
-diff --git a/usr/bin/abuild.orig b/usr/bin/abuild
-index 71e0681..d4ae3dd 100755
---- a/usr/bin/abuild.orig
-+++ b/usr/bin/abuild
-@@ -2231,7 +2231,11 @@ calcdeps() {
- 		list_has $i $builddeps && continue
- 		subpackages_has ${i%%[<>=]*} || builddeps="$builddeps $i"
- 	done
--	hostdeps="$EXTRADEPENDS_TARGET"
-+	for i in $EXTRADEPENDS_HOST $EXTRADEPENDS_TARGET $depends $makedepends; do
-+		[ "$pkgname" = "${i%%[<>=]*}" ] && continue
-+		list_has $i $hostdeps && continue
-+		subpackages_has ${i%%[<>=]*} || hostdeps="$hostdeps $i"
-+	done
- 	fi
- }
-
diff --git a/README.md b/README.md
index ae24788..24fdd2a 100644
--- a/README.md
+++ b/README.md
@@ -1,44 +1,43 @@
-# user-aports
-Upstream: https://lab.ilot.io/ayakael/user-aports
+# iports
+Upstream: https://forge.ilot.io/ilot/iports
 
 ## Description
 
 This repository contains aports that are not yet merged in the official Alpine
 Linux repository or don’t adhere to Alpine polices. Packages are automatically
-built using GitLab CI on my own GitLab instance. Once built, they are deployed
-to a git-lfs repository, making them available to apk.
-
-Branches are matched to Alpine releases.
+built using CI. Once built, they are deployed to a Forgejo-backed Alpine
+repository.
+Branches are matched to the latest Alpine releases.
 
 ## Repositories
 
-You can browse all the repositories at https://lab.ilot.io/ayakael/repo-apk.
+You can browse all the repositories at https://forge.ilot.io/ilot/iports/packages
 Affixed to each repository description is the appropriate link for use in
 `/etc/apk/repositories`.
 
 #### Backports
 
 ```
-https://lab.ilot.io/ayakael/repo-apk/-/raw/edge/backports
+https://forge.ilot.io/api/packages/ilot/alpine/v3.20/backports
 ```
 
 Aports from the official Alpine repositories backported from edge.
 
-#### User
+#### Ilot
 
 ```
-https://lab.ilot.io/ayakael/repo-apk/-/raw/edge/user
+https://forge.ilot.io/api/packages/ilot/alpine/v3.20/ilot
 ```
 
 Aports that have yet to be (or may never be) upstreamed to the official
-aports.
+aports and that are used by ilot coop.
 
 ## How to use
 
-Add security key of the repo-apk repository to your /etc/apk/keys:
+Add the security key of the apk repository to your /etc/apk/keys:
 
 ```shell
 cd /etc/apk/keys
-wget https://lab.ilot.io/ayakael/repo-apk/-/raw/edge/antoine.martin@protonmail.com-5b3109ad.rsa.pub
+curl -JO https://forge.ilot.io/api/packages/ilot/alpine/key
 ```
 Add repositories that you want to use (see above) to `/etc/apk/repositories`.
@@ -52,10 +51,10 @@ they will work for you.
 
 ## Contribution & bug reports
 
 If you wish to contribute to this aports collection, or wish to report a bug,
-you can do so on Alpine's GitLab instance here:
-https://gitlab.alpinelinux.org/ayakael/user-aports
+you can do so on Forge here:
+https://forge.ilot.io/ilot/iports/issues
 
-For packages that are in testing/community, bug reports and merge requests
+For packages that are in backports, bug reports and merge requests
 should be done on Alpine's aports repo instance:
 https://gitlab.alpinelinux.org/alpine/aports
diff --git a/archives/gitaly/APKBUILD b/archives/gitaly/APKBUILD
new file mode 100644
index 0000000..1b400ca
--- /dev/null
+++ b/archives/gitaly/APKBUILD
@@ -0,0 +1,86 @@
+# Maintainer: Antoine Martin (ayakael) <antoine.martin@protonmail.com>
+# Contributor: Antoine Martin (ayakael) <antoine.martin@protonmail.com>
+# Contributor: Jakub Jirutka <jakub@jirutka.cz>
+pkgname=gitaly
+pkgver=17.0.4
+pkgrel=0
+pkgdesc="A Git RPC service for handling all the git calls made by GitLab"
+url="https://gitlab.com/gitlab-org/gitaly/"
+arch="all"
+# GPL-2.0-only WITH GCC-exception-2.0: bundled libgit2
+license="MIT AND GPL-2.0-only WITH GCC-exception-2.0"
+depends="
+	git>=2.42
+	"
+makedepends="
+	bash
+	cmake
+	go
+	icu-dev
+	libssh2-dev
+	libxml2-dev
+	libxslt-dev
+	"
+subpackages="
+	$pkgname-backup
+	$pkgname-blackbox
+	$pkgname-praefect
+	$pkgname-openrc
+	"
+source="https://gitlab.com/gitlab-org/gitaly/-/archive/v$pkgver/gitaly-v$pkgver.tar.gz
+	config.patch
+	$pkgname.initd
+	"
+builddir="$srcdir/$pkgname-v$pkgver"
+options="!check"
+
+build() {
+	make V=1 BUILD_TAGS="tracer_static tracer_static_jaeger"
+}
+
+package() {
+	## Go part
+
+	make install DESTDIR="$pkgdir" PREFIX=/usr
+
+	# Not very useful for us.
+	rm "$pkgdir"/usr/bin/gitaly-debug
+	rm "$pkgdir"/usr/bin/gitaly-wrapper
+
+	install -m644 -D config.toml.example "$pkgdir"/etc/gitlab/gitaly.toml
+	install -m644 -D config.praefect.toml.example "$pkgdir"/etc/gitlab/praefect.toml
+	install -m644 -D cmd/gitaly-blackbox/config.toml.example "$pkgdir"/etc/gitlab/gitaly-blackbox.toml
+
+	install -m755 -D "$srcdir"/gitaly.initd "$pkgdir"/etc/init.d/gitlab.gitaly
+}
+
+backup() {
+	pkgdesc="Utility used by the backup Rake task to create/restore repository backups from Gitaly"
+	depends=""
+
+	amove usr/bin/gitaly-backup
+}
+
+# TODO: Add init script.
+blackbox() {
+	pkgdesc="Prometheus exporter that measures GitLab server performance by performing a Git HTTP clone"
+	depends=""
+
+	amove etc/gitlab/gitaly-blackbox.toml
+	amove usr/bin/gitaly-blackbox
+}
+
+# TODO: Add init script.
+praefect() {
+	pkgdesc="A reverse-proxy for Gitaly to manage a cluster of Gitaly nodes for HA"
+	depends=""
+
+	amove etc/gitlab/praefect.toml
+	amove usr/bin/praefect
+}
+
+sha512sums="
+2d06498c519c20804dd592cac3214cf8124ece1dda0d15342f8ccc6d9c9d2715dad24f9940e4d87b824320483c9882004bcef3747a8de347c1d48ec983a9f5cb  gitaly-v17.0.4.tar.gz
+7685330e637c3a34db941c9e6b8776d0611ec16297e8be998a3eb4716c455d9f015d433a4d27720c24e520d489dd56bdab7c0e4264f2852b4b0bfd6ecaa7f773  config.patch
+c32105d921be16eaf559cf21d6840bc346cd92b5e37974cedecdb5a2d2ca1eb5e8fbb144f5fc8a1289bf9415102b313cf2d61ee510c80f08ab33a799f5ac7122  gitaly.initd
+"
diff --git a/archives/gitaly/config.patch b/archives/gitaly/config.patch
new file mode 100644
index 0000000..9df7db0
--- /dev/null
+++ b/archives/gitaly/config.patch
@@ -0,0 +1,91 @@
+diff --git a/config.toml.example.orig b/config.toml.example
+index 82b8502..9982087 100644
+--- a/config.toml.example.orig
++++ b/config.toml.example
+@@ -2,19 +2,24 @@
+ # For Gitaly documentation, see https://docs.gitlab.com/ee/administration/gitaly/.
+ 
+ # A path which Gitaly should open a Unix socket.
+-socket_path = "/home/git/gitlab/tmp/sockets/private/gitaly.socket"
++socket_path = "/run/gitlab/gitaly.socket"
+ 
+ # Directory containing Gitaly executables.
+-bin_dir = "/home/git/gitaly/_build/bin"
++bin_dir = "/usr/bin"
+ 
+ # # Optional. The directory where Gitaly can create all files required to
+ # # properly operate at runtime. If not set, Gitaly will create a directory in
+ # # the global temporary directory. This directory must exist.
+-# runtime_dir = "/home/git/gitaly/run"
++runtime_dir = "/run/gitaly"
+ 
+ # # Optional if socket_path is set. TCP address for Gitaly to listen on. This is insecure (unencrypted connection).
+ # listen_addr = "localhost:9999"
+ 
++# # Optional: configure where the Gitaly creates the sockets for internal connections. If unset, Gitaly will create a randomly
++# # named temp directory each time it boots.
++# # Non Gitaly clients should never connect to these sockets.
++internal_socket_dir = "/run/gitaly/internal"
++
+ # # Optional. TCP over TLS address for Gitaly to listen on.
+ # tls_listen_addr = "localhost:8888"
+ 
+@@ -35,9 +40,9 @@ bin_dir = "/home/git/gitaly/_build/bin"
+ # # Gitaly supports TLS encryption. You must bring your own certificates because this isn’t provided automatically.
+ # [tls]
+ # # Path to the certificate.
+-# certificate_path = '/home/git/cert.cert'
++# certificate_path = '/etc/gitlab/ssl/gitaly.crt'
+ # # Path to the key.
+-# key_path = '/home/git/key.pem'
++# key_path = '/etc/gitlab/ssl/gitaly.key'
+ 
+ # # Git settings
+ # [git]
+@@ -58,7 +63,7 @@ bin_dir = "/home/git/gitaly/_build/bin"
+ # # The name of the storage
+ name = "default"
+ # # The path to the storage.
+-path = "/home/git/repositories"
++path = "/var/lib/gitlab/repositories"
+ 
+ # # You can optionally configure more storages for this Gitaly instance to serve up
+ #
+@@ -70,12 +75,12 @@ path = "/home/git/repositories"
+ # # Optional. Configure Gitaly to output JSON-formatted log messages to stdout.
+ # [logging]
+ # # Directory where Gitaly stores extra log files.
+-dir = "/home/git/gitlab/log"
++dir = "/vat/log/gitlab"
+ # # Log format. Either 'text' or 'json'.
+-# format = "json"
++format = "text"
+ # # Optional. Set log level to only log entries with that severity or above.
+ # # Valid values are, in order, 'debug', 'info', 'warn', 'error', 'fatal', and 'panic'. Defaults to 'info'.
+-# level = "warn"
++level = "warn"
+ # # Additionally, exceptions from the Go server can be reported to Sentry. Sentry DSN (Data Source Name)
+ # # for exception monitoring.
+ # sentry_dsn = "https://<key>:<secret>@sentry.io/<project>"
+@@ -91,18 +96,18 @@ sentry_environment = ""
+ # # Custom Git hooks that are used to perform tasks based on changes performed in any repository.
+ [hooks]
+ # # Directory where custom Git hooks are installed. If left unset, no custom hooks are used.
+-custom_hooks_dir = "/home/git/custom_hooks"
++custom_hooks_dir = "/etc/gitlab/custom_hooks"
+ 
+ # # Gitaly must connect to the GitLab application to perform access checks when a user performs a change.
+ [gitlab]
+ # # URL of the GitLab server.
+-url = "http+unix://%2Fhome%2Fgit%2Fgitlab%2Ftmp%2Fsockets%2Fgitlab-workhorse.socket"
++url = "http+unix://%2Frun%2Fgitlab%2Fworkhorse.socket"
+ # # 'relative_url_root' is only needed if a UNIX socket is used in 'url' and GitLab is configured to
+ # # use a relative path. For example, '/gitlab'.
+ # relative_url_root = '/'
+ # # Path of the file containing the secret token used to authenticate with GitLab. Use either 'secret_token' or 'secret'
+ # # but not both.
+-secret_file = "/home/git/gitlab-shell/.gitlab_shell_secret"
++secret_file = "/etc/gitlab/gitlab_shell_secret"
+ # # Secret token used to authenticate with GitLab.
+ # secret = ""
+ 
diff --git a/archives/gitaly/gitaly.initd b/archives/gitaly/gitaly.initd
new file mode 100644
index 0000000..290c922
--- /dev/null
+++ b/archives/gitaly/gitaly.initd
@@ -0,0 +1,39 @@
+#!/sbin/openrc-run
+
+name="Gitaly"
+description="A Git RPC service for handling all the git calls made by GitLab"
+
+: ${gitaly_config:="/etc/gitlab/gitaly.toml"}
+: ${gitaly_logfile:="/var/log/gitlab/gitaly.log"}
+
+command="/usr/bin/gitaly"
+command_args="$gitaly_config"
+command_background="yes"
+command_user="git"
+
+output_log="$gitaly_logfile"
+error_log="$gitaly_logfile"
+pidfile="/run/gitaly.pid"
+supervise_daemon_args="--env TZ=:/etc/localtime"
+start_stop_daemon_args="$supervise_daemon_args"
+
+rc_ulimit="-n 15000"
+
+required_files="$gitaly_config"
+
+depend() {
+	use net
+}
+
+start_pre() {
+	local socket_path=$(sed -En "s/^\s*socket_path\s*=\s*[\"']([^\"']+)[\"']/\1/p" "$gitaly_config")
+	local runtime_dir=$(sed -En "s/^\s*runtime_dir\s*=\s*[\"']([^\"']+)[\"']/\1/p" "$gitaly_config")
+
+	if [ "$socket_path" ]; then
+		checkpath -q -d -m 755 -o $command_user "${socket_path%/*}" || return 1
+	fi
+	if [ "$runtime_dir" ]; then
+		checkpath -q -d -m 750 -o $command_user "$runtime_dir" || return 1
+	fi
+	checkpath -f -m 640 -o $command_user "$gitaly_logfile"
+}
diff --git a/archives/gitlab-foss/APKBUILD b/archives/gitlab-foss/APKBUILD
new file mode 100644
index 0000000..21331a4
--- /dev/null
+++ b/archives/gitlab-foss/APKBUILD
@@ -0,0 +1,375 @@
+# Maintainer: Antoine Martin (ayakael) <antoine.martin@protonmail.com>
+# Contributor: Jakub Jirutka <jakub@jirutka.cz>
+# Contributor: Antoine Martin (ayakael) <antoine.martin@protonmail.com>
+pkgname=gitlab-foss
+_pkgname=${pkgname%-foss}
+pkgver=17.0.4
+_gittag=v$pkgver
+pkgrel=0
+pkgdesc="A version control for your server"
+url="https://gitlab.com/gitlab-org/gitlab-foss"
+arch="x86_64 aarch64"
+license="MIT"
+# ruby-irb is needed only for Rails console (gitlab-rails console)
+depends="
+	$pkgname-assets=$pkgver-r$pkgrel
+	ca-certificates
+	cmd:dpkg-deb
+	exiftool
+	git>=2.42.0
+	gitaly~=17.0
+	gitlab-shell>=14.35
+	graphicsmagick
+	http-parser
+	procps
+	py-docutils
+	python3
+	redis>=2.8
+	ruby3.2
+	ruby3.2-bigdecimal
+	ruby3.2-bundler
+	ruby3.2-fiddle
+	ruby3.2-io-console
+	ruby3.2-irb
+	ruby3.2-json
+	ruby3.2-rake
+	ruby3.2-rdoc
+	ruby3.2-webrick
+	shared-mime-info
+	tzdata
+	"
+makedepends="
+	cargo
+	clang-dev
+	cmd:chrpath
+	cmake
+	file-dev
+	go
+	gpgme-dev
+	icu-dev
+	libffi-dev
+	libgcrypt-dev
+	libpq-dev
+	libxml2-dev
+	libxslt-dev
+	linux-headers
+	llvm
+	nodejs
+	openssl-dev
+	protobuf-dev
+	re2-dev
+	ruby3.2-dev
+	rust
+	yarn>=1.2.0
+	"
+pkgusers="git"
+pkggroups="git www-data"
+install="$pkgname.pre-install $pkgname.post-install $pkgname.post-upgrade"
+subpackages="$pkgname-assets::noarch $pkgname-openrc"
+source="https://gitlab.com/gitlab-org/gitlab-foss/-/archive/$_gittag/gitlab-foss-$_gittag.tar.gz
+	database-config.patch
+	$_pkgname.initd
+	$_pkgname.mailroom.initd
+	$_pkgname.rails.initd
+	$_pkgname.sidekiq.initd
+	$_pkgname.workhorse.initd
+	$_pkgname.confd
+	$_pkgname.logrotate
+	bin-wrapper.in
+	upgrade-sys-filesystem-depend.patch
+	"
+builddir="$srcdir/gitlab-foss-$_gittag"
+
+_prefix="usr/lib/bundles/$_pkgname"
+
+export BUNDLE_DEPLOYMENT=true
+export BUNDLE_FORCE_RUBY_PLATFORM=true
+export BUNDLE_FROZEN=true
+# Should be tied to $JOBS, but Rust native code fails to build in parallel.
+export BUNDLE_JOBS=1
+
+prepare() {
+	default_prepare
+
+	# The default log level is very chatty.
+	sed -i 's/^\(\s*config.log_level\s*=\).*$/\1 :warn/' \
+		config/environments/production.rb
+
+	# This is not needed, the secret_token is generated by the
+	# gitlab-shell package. It also causes problems in the build phase.
+	rm config/initializers/gitlab_shell_secret_token.rb
+
+	# Remove all locale files except en.
+	find locale -mindepth 1 -type d ! -name en -exec rm -rf {} +
+
+	# Allow use of any bundler
+	sed -i -e '/BUNDLED/,+1d' Gemfile.lock
+}
+
+build() {
+	local bundle_without='exclude development kerberos mysql test'
+
+	cd "$builddir"/workhorse
+
+	make
+
+	cd "$builddir"
+
+	msg "Installing Ruby gems..."
+	bundle config --local without "$bundle_without"
+	bundle config --local build.ffi --enable-system-libffi
+	bundle config --local build.gpgme --use-system-libraries
+	bundle config --local build.re2 --enable-system-libraries
+	bundle config --local build.nokogiri --use-system-libraries \
+		--with-xml2-include=/usr/include/libxml2 \
+		--with-xslt-include=/usr/include/libxslt
+	bundle config --local build.ruby-magic --enable-system-libraries
+	bundle config --local build.google-protobuf '-- --with-cflags=-D__va_copy=va_copy'
+	bundle config --local path "vendor/bundle"
+
+	bundle install --no-cache
+
+	# Replace the bundled CA bundle with a symlink.
+	(
+		cd vendor/bundle/ruby/*/gems/aws-sdk-core-*/
+		rm ca-bundle.crt
+		ln -s /etc/ssl/certs/ca-certificates.crt ca-bundle.crt
+	)
+
+	# Remove faulty RPATH.
+	chrpath -d vendor/bundle/ruby/*/extensions/*/*/ruby-magic-*/magic/magic.so
+
+	# Patch the installed gem gitlab-markup to use python3.
+	# With option "-S", Python cannot find the docutils module.
+	sed -i 's/python2 -S/python3/g' \
+		vendor/bundle/ruby/*/gems/gitlab-markup-*/lib/github/markups.rb
+
+	# Remove a nonsense require of test-only code from the top-level module
+	# (we're gonna delete tests from the package).
+	sed -i '/require .carrierwave\/test\/matchers./d' \
+		vendor/bundle/ruby/*/gems/carrierwave-*/lib/carrierwave.rb
+
+	msg "Installing npm modules..."
+	yarn install --production --frozen-lockfile
+
+	# Since we have moved the assets gems into a group, they are no longer
+	# loaded by default. This will be reverted after compiling assets.
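+	# Illustrative effect of the sed below (assuming the stock source line):
+	#   Bundler.require(*Rails.groups)  ->  Bundler.require(*Rails.groups, :assets)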
+	sed -i.bak '/Bundler.require(\*Rails.groups/s/)/, :assets)/' \
+		config/application.rb
+
+	# assets:precompile and gettext:compile bootstrap the app,
+	# so they need configs.
+	cp config/gitlab.yml.example config/gitlab.yml
+	cp config/database.yml.postgresql config/database.yml
+	cp config/secrets.yml.example config/secrets.yml
+
+	# The configured path is not readable for the user building
+	# the package, so we must remove it; GitLab will use the default path.
+	sed -i '/^\s*secret_file:.*/d' config/gitlab.yml
+
+	(
+		export NODE_ENV=production
+		export RAILS_ENV=production
+		export SKIP_STORAGE_VALIDATION=true
+		export USE_DB=false
+		export NO_SOURCEMAPS=true
+		export NODE_OPTIONS="--max_old_space_size=3584"
+
+		msg "Compiling GetText PO files..."
+		bundle exec rake gettext:compile
+
+		msg "Compiling assets (this will take a few minutes)..."
+		bundle exec rake gitlab:assets:compile
+	)
+
+	# Revert changes.
+	mv config/application.rb.bak config/application.rb
+
+	msg "Cleaning assets gems..."
+	bundle config --local without 'exclude development kerberos mysql test assets'
+	bundle clean
+
+	# Create executables in bin/*.
+	# See also https://github.com/bundler/bundler/issues/6149.
+	bundle binstubs --force bundler gitlab-mail_room puma sidekiq
+
+	# Cleanup
+	rm config/database.yml config/gitlab.yml config/secrets.yml
+}
+
+package() {
+	local destdir="$pkgdir/$_prefix"
+	local datadir="$pkgdir/var/lib/gitlab"
+	local file dest
+
+	install -d -m755 "$destdir" "$destdir"/bin
+
+	install -d -m755 -o git -g git \
+		"$datadir" \
+		"$pkgdir"/etc/gitlab \
+		"$pkgdir"/var/log/gitlab \
+		"$datadir"/pages
+
+	install -d -m700 -o git -g git \
+		"$datadir"/artifacts \
+		"$datadir"/builds \
+		"$datadir"/ci_secure_files \
+		"$datadir"/dependency_proxy \
+		"$datadir"/encrypted_settings \
+		"$datadir"/external-diffs \
+		"$datadir"/lfs-objects \
+		"$datadir"/packages \
+		"$datadir"/pages \
+		"$datadir"/terraform_state \
+		"$datadir"/uploads
+
+	install -d -m0750 -o git -g www-data \
+		"$datadir"/pages
+
+	install -d -m02770 -o git -g git \
+		"$datadir"/repositories
+
+	# Install application files.
+	# Note: *VERSION files and doc directory are required (Help in GitLab
+	# menu refers to the doc directory).
+	cp -rl .bundle config.ru Gemfile* INSTALLATION_TYPE Rakefile ./*VERSION \
+		app data db doc fixtures config lib locale metrics_server public sidekiq_cluster vendor gems \
+		"$destdir"/
+
+	install -m755 -t "$destdir"/bin/ \
+		bin/bundle \
+		bin/mail_room \
+		bin/metrics-server \
+		bin/rails \
+		bin/rake \
+		bin/sidekiq \
+		bin/sidekiq-cluster \
+		bin/sidekiqmon \
+		bin/puma
+
+	cd "$destdir"
+
+	# Not needed at runtime since we have already compiled all assets.
+	rm -r app/assets
+	rm -r vendor/assets
+	find public/assets -name '*.vue' -delete
+	find public/assets -type d -exec rmdir --ignore-fail-on-non-empty '{}' \;
+	# These load gems in the assets group.
+	rm config/initializers/sprockets.rb
+
+	# Remove more stuff not needed in production.
+	rm -r lib/support
+	rm -r db/fixtures/development
+	find lib/tasks -maxdepth 1 -type f ! -name cache.rake ! -name setup.rake -delete
+	find lib/tasks/gitlab \( -name 'generate_docs.*' \
+		-o -name 'shell.*' \
+		-o -name 'test.*' \) -delete
+
+
+	cd "$destdir"/vendor/bundle/ruby/*/
+
+	# Remove tests, documentation, and other useless files.
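+	# (illustrative) this prunes paths such as gems/<name>-<ver>/{doc,spec,test};
+	# -maxdepth 2 avoids deleting deeper directories that gems may load at runtime.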
+	find gems/ \( -name 'doc' \
+		-o -name 'spec' \
+		-o -name 'test' \) \
+		-type d -maxdepth 2 -exec rm -fr "{}" +
+	find gems/ \( -name 'README*' \
+		-o -name 'CHANGELOG*' \
+		-o -name 'CONTRIBUT*' \
+		-o -name '*LICENSE*' \
+		-o -name 'Rakefile' \
+		-o -name '.*' \) \
+		-type f -delete
+
+	# Remove bundled libgit2 sources.
+	rm -r gems/rugged-*/vendor/libgit2
+
+	# Remove assets; they are already compiled.
+	rm -r gems/tanuki_emoji-*/app/assets
+
+	# Remove build logs and cache.
+	rm -rf build_info/ cache/
+	find extensions/ \( -name gem_make.out -o -name mkmf.log \) -delete
+
+
+	cd "$destdir"
+
+	# Install and symlink config files.
+	for file in cable.yml.example \
+		database.yml.postgresql \
+		gitlab.yml.example \
+		puma.rb.example \
+		resque.yml.example \
+		sidekiq.yml.example \
+		initializers/smtp_settings.rb.sample
+	do
+		dest="$(basename "${file%.*}")"
+		install -m640 -g git -D config/$file "$pkgdir"/etc/gitlab/$dest
+		ln -sf /etc/gitlab/$dest "$pkgdir"/$_prefix/config/${file%.*}
+	done
+
+	# This file will be generated by the post-install script; just prepare the symlink.
+	ln -sf /etc/gitlab/secrets.yml config/secrets.yml
+	# These shouldn't be necessary, they are all configurable, but Omnibus
+	# creates them too, so just to be sure...
+	ln -sf /etc/gitlab/gitlab_kas_secret .gitlab_kas_secret
+	ln -sf /etc/gitlab/gitlab_pages_secret .gitlab_pages_secret
+	ln -sf /etc/gitlab/gitlab_shell_secret .gitlab_shell_secret
+	ln -sf /etc/gitlab/gitlab_workhorse_secret .gitlab_workhorse_secret
+
+	# Some paths are hard-coded in GitLab, so we must make symlinks. :(
+	ln -sf /var/lib/gitlab/uploads public/uploads
+	ln -sf /var/log/gitlab log
+	ln -sf /var/tmp/gitlab tmp
+
+	cat > "$datadir"/.profile <<-EOF
+		export RAILS_ENV=production
+		export NODE_ENV=production
+		export EXECJS_RUNTIME=Disabled
+	EOF
+
+	# Install wrapper scripts to /usr/bin.
+	local name; for name in rake rails; do
+		sed "s/__COMMAND__/$name/g" "$srcdir"/bin-wrapper.in \
+			> "$builddir"/gitlab-$name
+		install -m755 -D "$builddir"/gitlab-$name "$pkgdir"/usr/bin/gitlab-$name
+	done
+
+
+	cd "$builddir"/workhorse
+
+	# Install workhorse.
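+	# (informative) workhorse's Makefile honours DESTDIR/PREFIX, so this puts
+	# gitlab-workhorse and its helper binaries under $pkgdir/usr/bin/.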
+ make install DESTDIR="$pkgdir" PREFIX=/usr + install -m644 config.toml.example "$pkgdir"/etc/gitlab/workhorse.toml + + + for file in $_pkgname $_pkgname.rails $_pkgname.sidekiq $_pkgname.mailroom $_pkgname.workhorse; do + install -m755 -D "$srcdir"/$file.initd "$pkgdir"/etc/init.d/$file + done + + install -m644 -D "$srcdir"/$_pkgname.confd \ + "$pkgdir"/etc/conf.d/$_pkgname + + install -m644 -D "$srcdir"/$_pkgname.logrotate \ + "$pkgdir"/etc/logrotate.d/$_pkgname +} + +assets() { + depends="" + + amove $_prefix/public/assets +} + +sha512sums=" +e09cfbbe4237f42bd8509c551031fd3526b75762beae7dac5164ecc4056ae07890a3ddb8500f1573f0ca9d697150654d1fcab3b3d0a3b93e5382addcee298c5b gitlab-foss-v17.0.4.tar.gz +daa496f3d9146f9dbddff62477bf49d5c7bd2f2a4cdbadc70ee51c8230f3ef01dc950ef157154b31c7e7bef0beecc5cbac50fbac65a79d6d9099b27bcba8b2ab database-config.patch +80d9bf2d064c1d4310566e087e14220e075430c46d9a6c4641c1141fbdc05381ae14a3ae7dfcb7dcb75dbf7af17a136f81764c7a4d109f248a81033782dce23b gitlab.initd +1f451b67a5d5e58650b0fe862a2b65cfb8bff5502b37d94ae90619c1ff9affbecf24428303a2849bebce5f94bef37078f0e5710e344bbab616134e910938384a gitlab.mailroom.initd +d8cdeb54c46f8204936bf5750833649e4586d3dd1942eed45955ed1661ae5f5080f59184fcb59a8f73c1405faccbf02b3db3d2c12fc2a4a81424cd35ce390768 gitlab.rails.initd +cb4ec100f0ea7ffcbb37aead8423e636629e2f4848b2974a7b2468e96cb1081ca732ac336417b08dd943afb961df888c73af1334dcbe054dfd361e74f492fd86 gitlab.sidekiq.initd +85c4e257a030832bd70ad1e257ae7cb568b31e01201fc845abac02d00f02492ca694be1fa2bf743dd8c8623e6a79d36adee3f4de02040134c11158a6001c064b gitlab.workhorse.initd +4dc00b16462f30591297fcb535fc364185d3ed76e9956597f0423a8dfd8a9a351f6ac29d9f0c73052c11324fba4768eb89a21c6bef4da99f15baaea8c9ab8407 gitlab.confd +57f258246925fbef0780caebdf005983c72fe3db1ab3242a1e00137bd322f5ec6c0fd958db7178b8fc22103d071f550d6f71f08422bcd9e859d2a734b2ecef00 gitlab.logrotate +a944c3886388ba1574bf8c96b6de4d9f24ef4a83f553c31a224e17a3b01f2a5c65b60c59b7ed7ca4b25670c60ea8dd41b96a8a623d909d2bb09bdf2520ed7f23 bin-wrapper.in +0eaa7de9a906ddb0fe84b7afbaec893a134bbbdb9e71da75cf4095ef40404643e51447aee88d3cad6e565bc709b34ffd8901cc93061e4a2a410838aed42d3644 upgrade-sys-filesystem-depend.patch +" diff --git a/archives/gitlab-foss/bin-wrapper.in b/archives/gitlab-foss/bin-wrapper.in new file mode 100644 index 0000000..aa1d411 --- /dev/null +++ b/archives/gitlab-foss/bin-wrapper.in @@ -0,0 +1,15 @@ +#!/bin/sh + +BUNDLE_DIR='/usr/lib/bundles/gitlab' +export RAILS_ENV='production' +export NODE_ENV='production' +export EXECJS_RUNTIME='Disabled' + +cd $BUNDLE_DIR +install -m 700 -o git -g git -d "$(readlink ./tmp)" + +if [ "$(id -un)" != 'git' ]; then + exec su git -c '"$0" "$@"' -- bin/__COMMAND__ "$@" +else + exec bin/__COMMAND__ "$@" +fi diff --git a/archives/gitlab-foss/database-config.patch b/archives/gitlab-foss/database-config.patch new file mode 100644 index 0000000..9b113e1 --- /dev/null +++ b/archives/gitlab-foss/database-config.patch @@ -0,0 +1,66 @@ +diff --git a/config/database.yml.postgresql.orig b/config/database.yml.postgresql +index da9f458..2d6d44e 100644 +--- a/config/database.yml.postgresql.orig ++++ b/config/database.yml.postgresql +@@ -26,13 +26,6 @@ production: + username: git + password: "secure password" + host: localhost +- geo: +- adapter: postgresql +- encoding: unicode +- database: gitlabhq_geo_production +- username: git +- password: "secure password" +- host: localhost + + # + # Development specific +@@ -57,13 +50,6 @@ development: + host: localhost + variables: + statement_timeout: 15s 
+-  geo:
+-    adapter: postgresql
+-    encoding: unicode
+-    database: gitlabhq_geo_development
+-    username: postgres
+-    password: "secure password"
+-    host: localhost
+ 
+ #
+ # Staging specific
+@@ -84,13 +70,6 @@ staging:
+     username: git
+     password: "secure password"
+     host: localhost
+-  geo:
+-    adapter: postgresql
+-    encoding: unicode
+-    database: gitlabhq_geo_staging
+-    username: git
+-    password: "secure password"
+-    host: localhost
+ 
+ # Warning: The database defined as "test" will be erased and
+ # re-generated from your development database when you run "rake".
+@@ -119,19 +98,3 @@ test: &test
+     reaping_frequency: nil
+     variables:
+       statement_timeout: 15s
+-  geo:
+-    adapter: postgresql
+-    encoding: unicode
+-    database: gitlabhq_geo_test
+-    username: postgres
+-    password:
+-    host: localhost
+-    reaping_frequency: nil
+-  embedding:
+-    adapter: postgresql
+-    encoding: unicode
+-    database: gitlabhq_embedding_test
+-    username: postgres
+-    password:
+-    host: localhost
+-    reaping_frequency: nil
diff --git a/archives/gitlab-foss/gitlab-foss.post-install b/archives/gitlab-foss/gitlab-foss.post-install
new file mode 100644
index 0000000..65d05cc
--- /dev/null
+++ b/archives/gitlab-foss/gitlab-foss.post-install
@@ -0,0 +1,108 @@
+#!/bin/sh
+set -eu
+
+group='git'
+data_dir='/var/lib/gitlab'
+secrets_file='/etc/gitlab/secrets.yml'
+shell_secret_file='/etc/gitlab/gitlab_shell_secret'
+workhorse_secret_file='/etc/gitlab/gitlab_workhorse_secret'
+kas_secret_file='/etc/gitlab/gitlab_kas_secret'
+
+gen_random_b64() {
+	local nbytes="$1"
+	ruby <<-EOF
+		require 'securerandom'
+		require 'base64'
+		puts Base64.strict_encode64(SecureRandom.random_bytes($nbytes))
+	EOF
+}
+
+
+echo "* Checking $secrets_file" >&2
+
+ruby <<-EOF
+	require 'openssl'
+	require 'securerandom'
+	require 'yaml'
+
+	secrets_file = '$secrets_file'
+	changed = false
+
+	secrets = YAML.load_file(secrets_file) if File.exist?(secrets_file)
+	secrets ||= {}
+	prod = secrets['production'] ||= {}
+	prod['db_key_base'] ||= ( changed = true; SecureRandom.hex(64) )
+	prod['secret_key_base'] ||= ( changed = true; SecureRandom.hex(64) )
+	prod['otp_key_base'] ||= ( changed = true; SecureRandom.hex(64) )
+	prod['encrypted_settings_key_base'] ||= ( changed = true; SecureRandom.hex(64) )
+	prod['openid_connect_signing_key'] ||= begin
+		changed = true
+		prod.delete('jws_private_key') || OpenSSL::PKey::RSA.new(2048).to_pem
+	end
+	# db/fixtures/production/010_settings.rb
+	prod['ci_jwt_signing_key'] ||= ( changed = true; OpenSSL::PKey::RSA.new(2048).to_pem )
+
+	if changed
+		STDERR.puts "* Generating random secrets into #{secrets_file}"
+		File.write(secrets_file, YAML.dump(secrets), mode: 'w', perm: 0640)
+	end
+EOF
+chown root:$group "$secrets_file"
+
+if [ ! -f "$shell_secret_file" ]; then
+	echo "* Generating random secret in $shell_secret_file" >&2
+
+	head -c 512 /dev/urandom | LC_CTYPE=C tr -cd 'a-zA-Z0-9' | head -c 64 > "$shell_secret_file"
+	chown root:$group "$shell_secret_file"
+	chmod 0640 "$shell_secret_file"
+fi
+
+if [ ! -f "$workhorse_secret_file" ]; then
+	echo "* Generating random secret in $workhorse_secret_file" >&2
+
+	# Sync with lib/gitlab/workhorse.rb.
+	gen_random_b64 32 > "$workhorse_secret_file"
+	chown root:$group "$workhorse_secret_file"
+	chmod 0640 "$workhorse_secret_file"
+fi
+
+if [ ! -f "$kas_secret_file" ]; then
+	echo "* Generating random secret in $kas_secret_file" >&2
+
+	# Sync with lib/gitlab/kas.rb.
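+	# (informative) 32 random bytes base64-encode to one 44-character line,
+	# the same length GitLab expects for the workhorse and KAS secrets.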
+	gen_random_b64 32 > "$kas_secret_file"
+	chown root:$group "$kas_secret_file"
+	chmod 0640 "$kas_secret_file"
+fi
+
+# NOTE: We create this symlink in the post-install script instead of the APKBUILD,
+# so the user can decide to have the tmp dir inside $data_dir (e.g. if it's on a bigger disk).
+if [ ! -e "$data_dir"/tmp ]; then
+	ln -s /var/tmp/gitlab "$data_dir"/tmp
+fi
+
+
+if [ "${0##*.}" = 'post-upgrade' ]; then
+	cat >&2 <<-EOF
+	*
+	* To finish GitLab upgrade run:
+	*
+	*   gitlab-rake gitlab:db:configure
+	*
+	EOF
+else
+	cat >&2 <<-EOF
+	*
+	* 1. Adjust settings in /etc/gitlab/database.yml and gitlab.yml.
+	*
+	* 2. Create database for GitLab:
+	*
+	*      psql -c "CREATE ROLE gitlab PASSWORD 'top-secret' INHERIT LOGIN;"
+	*      psql -c "CREATE DATABASE gitlab OWNER gitlab ENCODING 'UTF-8';"
+	*      psql -d gitlab -c "CREATE EXTENSION pg_trgm; CREATE EXTENSION btree_gist;"
+	*
+	* 3. Run "gitlab-rake gitlab:setup", or "gitlab-rake gitlab:db:configure" if
+	*    you are updating an existing database.
+	*
+	EOF
+fi
diff --git a/archives/gitlab-foss/gitlab-foss.post-upgrade b/archives/gitlab-foss/gitlab-foss.post-upgrade
new file mode 120000
index 0000000..20d2b0c
--- /dev/null
+++ b/archives/gitlab-foss/gitlab-foss.post-upgrade
@@ -0,0 +1 @@
+gitlab-foss.post-install
\ No newline at end of file
diff --git a/archives/gitlab-foss/gitlab-foss.pre-install b/archives/gitlab-foss/gitlab-foss.pre-install
new file mode 100644
index 0000000..66ad895
--- /dev/null
+++ b/archives/gitlab-foss/gitlab-foss.pre-install
@@ -0,0 +1,53 @@
+#!/bin/sh
+# It's very important to set user/group correctly.
+
+git_dir='/var/lib/gitlab'
+
+if ! getent group git 1>/dev/null; then
+	echo '* Creating group git' 1>&2
+
+	addgroup -S git
+fi
+
+if ! id git 2>/dev/null 1>&2; then
+	echo '* Creating user git' 1>&2
+
+	adduser -DHS -G git -h "$git_dir" -s /bin/sh \
+		-g "added by apk for gitlab-foss" git
+	passwd -u git 1>/dev/null  # unlock
+fi
+
+if ! id -Gn git | grep -Fq redis; then
+	echo '* Adding user git to group redis' 1>&2
+
+	addgroup git redis
+fi
+
+if [ "$(id -gn git)" != 'git' ]; then
+	cat >&2 <<-EOF
+	!!
+	!! User git has primary group $(id -gn git). We strongly recommend changing
+	!! git's primary group to git, otherwise GitLab may not work correctly.
+	!!
+	EOF
+
+	# Add it at least as a supplementary group.
+	adduser git git
+fi
+
+user_home="$(getent passwd git | cut -d: -f6)"
+
+if [ "$user_home" != "$git_dir" ]; then
+	cat >&2 <<-EOF
+	!!
+	!! User git has home directory in $user_home, but this package assumes
+	!! $git_dir. Although it's possible to use a different directory,
+	!! it's really not easy.
+	!!
+	!! Please change git's home directory to $git_dir, or adjust settings
+	!! and move files yourself. Otherwise GitLab will not work!
+	!!
+	EOF
+fi
+
+exit 0
diff --git a/archives/gitlab-foss/gitlab-rails.confd b/archives/gitlab-foss/gitlab-rails.confd
new file mode 100644
index 0000000..d85aa9c
--- /dev/null
+++ b/archives/gitlab-foss/gitlab-rails.confd
@@ -0,0 +1,20 @@
+# Configuration for /etc/init.d/gitlab.rails
+
+# Path to the Puma configuration file.
+#puma_config="/etc/gitlab/puma.rb"
+
+# IP address and port for Puma server to listen on.
+#puma_listen_tcp="127.0.0.1:8080"
+
+# Absolute path of unix socket for Puma server to listen on.
+#puma_listen_unix="/run/gitlab/gitlab.socket"
+
+# Path to the file to redirect stdout from Puma server to.
+#puma_stdout_file="/var/log/gitlab/puma_stdout.log"
+
+# Path to the file to redirect stderr from Puma server to.
+#puma_stderr_file="/var/log/gitlab/puma_stderr.log"
+
+# Action Cable uses a separate thread pool per Puma worker. This configures
+# the number of threads in the pool.
+#action_cable_worker_pool_size=4
diff --git a/archives/gitlab-foss/gitlab.confd b/archives/gitlab-foss/gitlab.confd
new file mode 100644
index 0000000..ade6bcc
--- /dev/null
+++ b/archives/gitlab-foss/gitlab.confd
@@ -0,0 +1,85 @@
+# Configuration file for /etc/init.d/gitlab and
+# /etc/init.d/gitlab.{mailroom,rails,sidekiq,workhorse}
+
+
+# Path to the base directory for the Prometheus metrics used by Puma and
+# Sidekiq.
+#metrics_dir=/dev/shm/gitlab
+
+
+# How many Puma worker processes to create (0 to disable cluster mode).
+#puma_workers=3
+
+# IP address and port for Puma server to listen on.
+#puma_listen_tcp="127.0.0.1:8080"
+
+# Absolute path of unix socket for Puma server to listen on.
+#puma_listen_unix="/run/gitlab/gitlab.socket"
+
+# Action Cable uses a separate thread pool per Puma worker. This configures
+# the number of threads in the pool.
+#action_cable_worker_pool_size=4
+
+
+# IP address and port, or absolute path of the unix socket, where Workhorse
+# should listen for connections from a web server.
+#workhorse_listen="/run/gitlab/workhorse.socket"
+
+# How long to wait for response headers when proxying the request.
+#workhorse_proxy_header_timeout="1m0s"
+
+# Number of API requests allowed at a single time.
+#workhorse_api_limit=
+
+# Maximum queueing duration of requests (default 30s).
+#workhorse_api_queue_duration=
+
+# Number of API requests allowed to be queued.
+#workhorse_api_queue_limit=
+
+# Long polling duration for runners requesting jobs (default 0s - disabled).
+#workhorse_ci_long_polling_duration=
+
+# Log format to use: text, json, structured, none. Defaults to "text".
+#workhorse_log_format=
+
+# Prometheus listening address.
+#workhorse_prometheus_listen_addr=
+
+# Sentry DSN for Workhorse.
+#workhorse_sentry_dsn=
+
+
+# Specify how many processes to create using sidekiq-cluster and which queues
+# they should handle. Each whitespace-separated item equates to one additional
+# Sidekiq process, and comma-separated values in each item determine the queues
+# it works on. The special queue name "*" means all queues.
+# Example: "* gitlab_shell process_commit,post_receive"
+# See https://docs.gitlab.com/ee/administration/sidekiq/extra_sidekiq_processes.html.
+#sidekiq_queue_groups="*"
+
+# Maximum threads to use with Sidekiq (default: 50, 0 to disable).
+#sidekiq_max_concurrency=
+
+# Minimum threads to use with Sidekiq (default: 0).
+#sidekiq_min_concurrency=
+
+# The number of seconds to wait between worker checks.
+#sidekiq_interval=
+
+# Graceful timeout for all running processes.
+#sidekiq_shutdown_timeout=
+
+# Run workers for all queues in sidekiq_queues.yml except the given ones.
+#sidekiq_negate=no
+
+# Run workers based on the provided selector.
+#sidekiq_queue_selector=no
+
+# Memory limit (in MiB) for the Sidekiq process. If the RSS (Resident Set Size)
+# of the Sidekiq process exceeds this limit, a delayed shutdown is triggered.
+#sidekiq_memkiller_max_rss=2000
+
+
+# Enable mail_room to handle incoming mails?
+#mailroom_enabled="no" diff --git a/archives/gitlab-foss/gitlab.initd b/archives/gitlab-foss/gitlab.initd new file mode 100644 index 0000000..cdf212e --- /dev/null +++ b/archives/gitlab-foss/gitlab.initd @@ -0,0 +1,50 @@ +#!/sbin/openrc-run + +name="GitLab" +description="Meta script for starting/stopping all the GitLab components" + +: ${mailroom_enabled:="no"} +: ${pages_enabled:="yes"} + +subservices="gitlab.rails gitlab.gitaly gitlab.sidekiq gitlab.workhorse" +if yesno "$mailroom_enabled"; then + subservices="$subservices gitlab.mailroom" +fi +if yesno "$pages_enabled" && [ -e /etc/init.d/gitlab.pages ]; then + subservices="$subservices gitlab.pages" +fi + +depend() { + need redis postgresql + use net +} + +start() { + local ret=0 + + ebegin "Starting all GitLab components" + local svc; for svc in $subservices; do + service $svc start || ret=1 + done + eend $ret +} + +stop() { + local ret=0 + + ebegin "Stopping all GitLab components" + local svc; for svc in $subservices; do + service $svc stop || ret=1 + done + eend $ret +} + +status() { + local ret=0 + + local svc; for svc in $subservices; do + echo "$svc:" + service $svc status || ret=1 + done + eend $ret +} diff --git a/archives/gitlab-foss/gitlab.logrotate b/archives/gitlab-foss/gitlab.logrotate new file mode 100644 index 0000000..721ff49 --- /dev/null +++ b/archives/gitlab-foss/gitlab.logrotate @@ -0,0 +1,24 @@ +/var/log/gitlab/workhorse.log { + compress + maxsize 10M + minsize 1M + missingok + postrotate + /etc/init.d/gitlab.workhorse --quiet --ifstarted reopen + endscript + sharedscripts + rotate 5 + weekly +} + +/var/log/gitlab/*.log { + compress + copytruncate + delaycompress + maxsize 10M + minsize 1M + missingok + sharedscripts + rotate 10 + weekly +} diff --git a/archives/gitlab-foss/gitlab.mailroom.initd b/archives/gitlab-foss/gitlab.mailroom.initd new file mode 100644 index 0000000..e6d6a64 --- /dev/null +++ b/archives/gitlab-foss/gitlab.mailroom.initd @@ -0,0 +1,40 @@ +#!/sbin/openrc-run + +supervisor=supervise-daemon + +name="GitLab (mailroom)" +description="GitLab service for processing incoming mails." 
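+# With supervisor=supervise-daemon (set above), OpenRC runs mail_room under
+# supervise-daemon and uses supervise_daemon_args; start_stop_daemon_args
+# below only applies in the classic start-stop-daemon fallback mode.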
+ +: ${gitlab_base:="/usr/lib/bundles/gitlab"} +: ${gitlab_config:="/etc/gitlab/gitlab.yml"} +: ${mailroom_logfile:="/var/log/gitlab/mail_room.log"} +: ${mailroom_config:="$gitlab_base/config/mail_room.yml"} + +command="$gitlab_base/bin/mail_room" +command_args="-c $mailroom_config" +command_background="yes" +command_user="git" + +directory="$gitlab_base" +error_log="$mailroom_logfile" +output_log="$mailroom_logfile" + +supervise_daemon_args=" + --env RAILS_ENV=production + --env TZ=:/etc/localtime + --env MAIL_ROOM_GITLAB_CONFIG_FILE=$gitlab_config + " +start_stop_daemon_args="--interpreted $supervise_daemon_args" +pidfile="/run/gitlab/mail_room.pid" + +required_files="$mailroom_config $gitlab_config" + +depend() { + need redis + use net +} + +start_pre() { + checkpath -d -m 755 -o $command_user -q "${pidfile%/*}" || return 1 + checkpath -f -m 640 -o $command_user "$mailroom_logfile" +} diff --git a/archives/gitlab-foss/gitlab.rails.initd b/archives/gitlab-foss/gitlab.rails.initd new file mode 100644 index 0000000..4c824d9 --- /dev/null +++ b/archives/gitlab-foss/gitlab.rails.initd @@ -0,0 +1,119 @@ +#!/sbin/openrc-run + +name="GitLab Rails" +description="GitLab application" + +extra_started_commands="reload reopen" +description_reload="Reload configuration" +description_reopen="Reopen log files" + +: ${gitlab_base:="/usr/lib/bundles/gitlab"} +: ${metrics_dir:="/dev/shm/gitlab"} + +: ${action_cable_worker_pool_size:=4} +: ${gitlab_config:="/etc/gitlab/gitlab.yml"} +: ${puma_workers:=3} +: ${puma_listen_unix:="/run/gitlab/gitlab.socket"} +: ${puma_listen_tcp:="127.0.0.1:8080"} +: ${puma_stdout_file:="/var/log/gitlab/puma_stdout.log"} +: ${puma_stderr_file:="/var/log/gitlab/puma_stderr.log"} +: ${puma_config:="/etc/gitlab/puma.rb"} +: ${puma_metrics_dir:="$metrics_dir/puma"} + +command="$gitlab_base/bin/puma" +command_args=" + --config $puma_config + --workers $puma_workers + --bind tcp://$puma_listen_tcp + --bind unix://$puma_listen_unix + --redirect-stdout $puma_stdout_file + --redirect-stderr $puma_stderr_file + --redirect-append + --state /run/gitlab/puma.state + " +command_background="yes" +command_user="git" +directory="$gitlab_base" + +supervise_daemon_args=" + --env ACTION_CABLE_WORKER_POOL_SIZE=$action_cable_worker_pool_size + --env RAILS_ENV=production + --env NODE_ENV=production + --env EXECJS_RUNTIME=Disabled + --env GITLAB_BASE=$gitlab_base + --env TZ=:/etc/localtime + --env prometheus_multiproc_dir=$puma_metrics_dir + ${supervise_daemon_args:-} + " +start_stop_daemon_args=" + --interpreted + $supervise_daemon_args + $start_stop_daemon_args + " +pidfile="/run/gitlab/puma.pid" + +required_files="$gitlab_config $puma_config" + +depend() { + need redis + want sshd postgresql docker-registry + use net +} + +start_pre() { + checkpath -d -m 755 -o $command_user -q "${pidfile%/*}" || return 1 + checkpath -d -m 700 -o $command_user -q "$(readlink -f "$gitlab_base"/tmp)" || return 1 + checkpath -d -m 700 -o $command_user -q "$metrics_dir" || return 1 + checkpath -d -m 700 -o $command_user --directory-truncate "$puma_metrics_dir" || return 1 + checkpath -f -m 644 -o $command_user "$puma_stdout_file" || return 1 + checkpath -f -m 644 -o $command_user "$puma_stderr_file" || return 1 + + # Ruby requires sticky bit on TMP directory. 
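+	# (i.e. mode 1777, drwxrwxrwt: world-writable with the delete-protection bit)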
+	checkpath -d -m 1777 /tmp
+
+	local downloads_path="$(_parse_yaml "$gitlab_config" \
+		production.gitlab.repository_downloads_path)"
+
+	if [ -n "$downloads_path" ]; then
+		checkpath -d -m 700 -o $command_user -q "$downloads_path"
+	fi
+
+	checkpath --directory --owner $command_user --mode 0775 \
+		/var/tmp/gitlab/downloads \
+		/var/tmp/gitlab/backups
+}
+
+reload() {
+	ebegin "Reloading $name"
+
+	if [ "$supervisor" ]; then
+		$supervisor "$RC_SVCNAME" --signal USR2
+	else
+		start-stop-daemon --pidfile "$pidfile" --signal USR2
+	fi
+	eend $?
+}
+
+reopen() {
+	ebegin "Telling $name to reopen log files"
+
+	if [ "$supervisor" ]; then
+		$supervisor "$RC_SVCNAME" --signal USR1
+	else
+		start-stop-daemon --pidfile "$pidfile" --signal USR1
+	fi
+	eend $?
+}
+
+_parse_yaml() {
+	local file="$1"
+	local key="$2"
+	local default="${3:-}"
+	local key_path="$(echo "[\"$key\"]" | sed 's/\./"]["/g')"
+
+	ruby <<-EOF
+		require "yaml"
+		puts YAML.load_file("$file")$key_path rescue puts "$default"
+	EOF
+}
diff --git a/archives/gitlab-foss/gitlab.sidekiq.initd b/archives/gitlab-foss/gitlab.sidekiq.initd
new file mode 100644
index 0000000..eb30f4c
--- /dev/null
+++ b/archives/gitlab-foss/gitlab.sidekiq.initd
@@ -0,0 +1,76 @@
+#!/sbin/openrc-run
+
+extra_started_commands="finish"
+
+name="GitLab Sidekiq"
+description="GitLab background workers"
+description_finish="Stop fetching new jobs and finish current ones"
+
+: ${gitlab_base:="/usr/lib/bundles/gitlab"}
+: ${metrics_dir:="/dev/shm/gitlab"}
+
+: ${sidekiq_logfile:="/var/log/gitlab/sidekiq.log"}
+: ${sidekiq_memkiller_max_rss:="2000"}  # default per Omnibus
+: ${sidekiq_metrics_dir:="$metrics_dir/sidekiq"}
+: ${sidekiq_negate:="no"}
+: ${sidekiq_queue_groups:="*"}
+: ${sidekiq_queue_selector:="no"}
+
+command="$gitlab_base/bin/sidekiq-cluster"
+# Note: The rest of the options are set in start_pre().
+command_args="-r $gitlab_base -e production ${command_args:-}"
+command_background="yes"
+command_user="git"
+
+directory="$gitlab_base"
+error_log="$sidekiq_logfile"
+output_log="$sidekiq_logfile"
+
+supervise_daemon_args="
+	--env RAILS_ENV=production
+	--env NODE_ENV=production
+	--env EXECJS_RUNTIME=Disabled
+	--env TZ=:/etc/localtime
+	--env SIDEKIQ_MEMORY_KILLER_MAX_RSS=$(( sidekiq_memkiller_max_rss * 1024 ))
+	--env prometheus_multiproc_dir=$sidekiq_metrics_dir
+	"
+start_stop_daemon_args="--interpreted $supervise_daemon_args"
+pidfile="/run/gitlab/sidekiq.pid"
+
+depend() {
+	need redis
+	use net postgresql
+}
+
+start_pre() {
+	yesno "$sidekiq_queue_selector" && command_args="$command_args --queue-selector"
+
+	command_args="$command_args
+		$(optif --max-concurrency ${sidekiq_max_concurrency:-})
+		$(optif --min-concurrency ${sidekiq_min_concurrency:-})
+		$(optif --interval ${sidekiq_interval:-})
+		$(optif --timeout ${sidekiq_shutdown_timeout:-})
+		$(set -f; printf "'%s' " $sidekiq_queue_groups)
+		"
+	yesno "$sidekiq_negate" && command_args="$command_args --negate"
+
+	checkpath -d -m 755 -o $command_user -q "${pidfile%/*}" || return 1
+	checkpath -d -m 700 -o $command_user -q "$metrics_dir" || return 1
+	checkpath -d -m 700 -o $command_user --directory-truncate "$sidekiq_metrics_dir" || return 1
+	checkpath -f -m 644 -o $command_user "$sidekiq_logfile"
+}
+
+finish() {
+	ebegin "Telling $name to stop fetching new jobs"
+
+	if [ "$supervisor" ]; then
+		$supervisor "$RC_SVCNAME" --signal TSTP
+	else
+		start-stop-daemon --pidfile "$pidfile" --signal TSTP
+	fi
+	eend $?
+}
+
+optif() {
+	test -n "$2" && printf '%s\n' "$1=$2" || true
+}
diff --git a/archives/gitlab-foss/gitlab.workhorse.initd b/archives/gitlab-foss/gitlab.workhorse.initd
new file mode 100644
index 0000000..4b04d7c
--- /dev/null
+++ b/archives/gitlab-foss/gitlab.workhorse.initd
@@ -0,0 +1,75 @@
+#!/sbin/openrc-run
+
+extra_started_commands="reopen"
+
+name="GitLab Workhorse"
+description="A reverse proxy for GitLab."
+description_reopen="Reopen log files"
+
+: ${gitlab_base:="/usr/lib/bundles/gitlab"}
+: ${workhorse_logfile:="/var/log/gitlab/workhorse.log"}
+: ${workhorse_access_log:="no"}
+
+command="/usr/bin/gitlab-workhorse"
+# Note: The rest of the options are set in start_pre().
+command_args="
+	-authBackend=http://${puma_listen_tcp:="127.0.0.1:8080"}
+	-config=${workhorse_config:="/etc/gitlab/workhorse.toml"}
+	-documentRoot=${gitlab_public_dir:="$gitlab_base/public"}
+	-listenAddr=${workhorse_listen:="/run/gitlab/workhorse.socket"}
+	-listenUmask=${workhorse_listen_umask:="000"}
+	-logFile=$workhorse_logfile
+	-secretPath=${workhorse_secret_path:="/etc/gitlab/gitlab_workhorse_secret"}
+	"
+command_background="yes"
+command_user="git"
+directory="$gitlab_base"
+pidfile="/run/gitlab/workhorse.pid"
+
+depend() {
+	use net
+}
+
+start_pre() {
+	local listen_net="tcp"
+	[ "${workhorse_listen:0:1}" = '/' ] && listen_net="unix"
+
+	command_args="$command_args
+		-listenNetwork=$listen_net
+		$(optif -apiCiLongPollingDuration "$workhorse_ci_long_polling_duration")
+		$(optif -apiLimit "$workhorse_api_limit")
+		$(optif -apiQueueDuration "$workhorse_api_queue_duration")
+		$(optif -apiQueueLimit "$workhorse_api_queue_limit")
+		$(optif -authSocket "$puma_listen_unix")
+		$(optif -logFormat "$workhorse_log_format")
+		$(optif -prometheusListenAddr "$workhorse_prometheus_listen_addr")
+		$(optif -proxyHeadersTimeout "$workhorse_proxy_header_timeout")"
+	# FIXME: not implemented
+	#yesno "$workhorse_access_log" || command_args="$command_args -disableAccessLog"
+
+	start_stop_daemon_args="$start_stop_daemon_args
+		$(optif '--env GITLAB_WORKHORSE_SENTRY_DSN' "$workhorse_sentry_dsn")"
+	supervise_daemon_args="$supervise_daemon_args
+		$(optif '--env GITLAB_WORKHORSE_SENTRY_DSN' "$workhorse_sentry_dsn")"
+
+	checkpath -d -m 755 -o $command_user -q "${pidfile%/*}" || return 1
+	if [ "$listen_net" = "unix" ]; then
+		checkpath -d -m 755 -o $command_user -q "${workhorse_listen%/*}" || return 1
+	fi
+	checkpath -f -m 640 -o $command_user "$workhorse_logfile"
+}
+
+reopen() {
+	ebegin "Telling $name to reopen log files"
+
+	if [ "$supervisor" ]; then
+		$supervisor "$RC_SVCNAME" --signal HUP
+	else
+		start-stop-daemon --pidfile "$pidfile" --signal HUP
+	fi
+	eend $?
+}
+
+optif() {
+	test -n "$2" && printf '%s\n' "$1=$2" || true
+}
diff --git a/archives/gitlab-foss/upgrade-sys-filesystem-depend.patch b/archives/gitlab-foss/upgrade-sys-filesystem-depend.patch
new file mode 100644
index 0000000..d608191
--- /dev/null
+++ b/archives/gitlab-foss/upgrade-sys-filesystem-depend.patch
@@ -0,0 +1,35 @@
+diff --git a/Gemfile.orig b/Gemfile
+index c1e9e34..a4448b7 100644
+--- a/Gemfile.orig
++++ b/Gemfile
+@@ -525,7 +525,7 @@ gem 'health_check', '~> 3.0' # rubocop:todo Gemfile/MissingFeatureCategory
+ 
+ # System information
+ gem 'vmstat', '~> 2.3.0' # rubocop:todo Gemfile/MissingFeatureCategory
+-gem 'sys-filesystem', '~> 1.4.3' # rubocop:todo Gemfile/MissingFeatureCategory
++gem 'sys-filesystem', '~> 1.4.5' # rubocop:todo Gemfile/MissingFeatureCategory
+ 
+ # NTP client
+ gem 'net-ntp' # rubocop:todo Gemfile/MissingFeatureCategory
+diff --git a/Gemfile.lock.orig b/Gemfile.lock
+index bb66169..a4da10b 100644
+--- a/Gemfile.lock.orig
++++ b/Gemfile.lock
+@@ -1657,7 +1657,7 @@ GEM
+       attr_required (>= 0.0.5)
+       httpclient (>= 2.4)
+     sync (0.5.0)
+-    sys-filesystem (1.4.3)
++    sys-filesystem (1.4.5)
+       ffi (~> 1.1)
+     sysexits (1.2.0)
+     table_print (1.5.7)
+@@ -2123,7 +2123,7 @@ DEPENDENCIES
+   stackprof (~> 0.2.25)
+   state_machines-activerecord (~> 0.8.0)
+   static_holmes (~> 0.7.7)
+-  sys-filesystem (~> 1.4.3)
++  sys-filesystem (~> 1.4.5)
+   tanuki_emoji (~> 0.9)
+   telesignenterprise (~> 2.2)
+   terser (= 1.0.2)
diff --git a/archives/gitlab-pages/APKBUILD b/archives/gitlab-pages/APKBUILD
new file mode 100644
index 0000000..6ab7745
--- /dev/null
+++ b/archives/gitlab-pages/APKBUILD
@@ -0,0 +1,35 @@
+# Maintainer: Antoine Martin (ayakael)
+# Contributor: Antoine Martin (ayakael)
+# Contributor: Jakub Jirutka
+pkgname=gitlab-pages
+pkgver=17.0.4
+_gittag="v$pkgver"
+pkgrel=0
+pkgdesc="A daemon used to serve static websites for GitLab users"
+url="https://gitlab.com/gitlab-org/gitlab-pages/"
+arch="all"
+license="MIT"
+makedepends="go>=1.5"
+source="
+	https://gitlab.com/gitlab-org/gitlab-pages/-/archive/$_gittag/gitlab-pages-$_gittag.tar.gz
+	ungit-makefile.patch
+	$pkgname.initd
+	"
+subpackages="$pkgname-openrc"
+builddir="$srcdir"/$pkgname-$_gittag
+
+build() {
+	make VERSION=$pkgver REVISION=$pkgrel GOPATH="$srcdir" CGO_ENABLED=0
+}
+
+package() {
+	install -D -m 755 $pkgname "$pkgdir"/usr/bin/$pkgname
+	install -m755 -D "$srcdir"/$pkgname.initd \
+		"$pkgdir"/etc/init.d/gitlab.pages
+}
+
+sha512sums="
+fde33d01f7b3810a9a094c09fce19976c41a2ccc9eaf720a0f4dd285eb2d0f35de8d2d607cdbaa670221711919043d681fd3fda6e14d67ae1454619746c1e453  gitlab-pages-v17.0.4.tar.gz
+710a9b652327e57e620c2bdb02bf912a6f61044eaaf61d36c6612284e9b951d2ac6f5eef77dfea16a0cde328bd4c556d9e47791c560139c27cb9659076f809b1  ungit-makefile.patch
+20bc66c1c3548568ed353ca8d584f9108b9688f9375f212a18efc7b8386fdaafb3b2dc9e865f21c7f8fd31ada6e91842a8bb8d397f64851d853bb0de3e0e60bb  gitlab-pages.initd
+"
diff --git a/archives/gitlab-pages/gitlab-pages.initd b/archives/gitlab-pages/gitlab-pages.initd
new file mode 100644
index 0000000..4a34507
--- /dev/null
+++ b/archives/gitlab-pages/gitlab-pages.initd
@@ -0,0 +1,55 @@
+#!/sbin/openrc-run
+
+name="GitLab Pages"
+description="A daemon used to serve static websites for GitLab users"
+
+: ${pages_user:=${user:-"git"}}
+: ${pages_root:="/var/lib/gitlab/pages"}
+: ${pages_logfile:="/var/log/gitlab/pages.log"}
+
+command="/usr/bin/gitlab-pages"
+# Note: The rest of the options are set in start_pre().
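+# (illustrative) a conf.d entry like pages_listen_http="127.0.0.1:8090"
+# becomes "-listen-http=127.0.0.1:8090" via the loops in start_pre() below.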
+command_args=" + -pages-domain=$pages_domain + -pages-root=$pages_root + -redirect-http=${pages_redirect_http:-true} + -use-http2=${pages_use_http2:-true} + " +command_background="yes" + +start_stop_daemon_args=" + --chdir $pages_root + --user $pages_user + --stdout $pages_logfile + --stderr $pages_logfile" +pidfile="/run/gitlab-pages.pid" + + +depend() { + use net +} + +start_pre() { + local item + + for item in $pages_listen_http; do + command_args="$command_args -listen-http=$item" + done + for item in $pages_listen_https; do + command_args="$command_args -listen-https=$item" + done + for item in $pages_listen_proxy; do + command_args="$command_args -listen-proxy=$item" + done + + command_args="$command_args + $(optif -metrics-address "$pages_metrics_address") + $(optif -root-cert "$pages_root_cert") + $(optif -root-key "$pages_root_key")" + + checkpath -m 640 -o $pages_user -f "$pages_logfile" +} + +optif() { + test -n "$2" && printf '%s/n' "$1=$2" || true +} diff --git a/archives/gitlab-pages/ungit-makefile.patch b/archives/gitlab-pages/ungit-makefile.patch new file mode 100644 index 0000000..4cbc132 --- /dev/null +++ b/archives/gitlab-pages/ungit-makefile.patch @@ -0,0 +1,18 @@ +diff --git a/Makefile.internal.mk.orig b/Makefile.internal.mk +index 6dfaa1b..207bdaf 100644 +--- a/Makefile.internal.mk.orig ++++ b/Makefile.internal.mk +@@ -1,13 +1,3 @@ +-REVISION := $(shell git rev-parse --short HEAD || echo unknown) +-LAST_TAG := $(shell git describe --tags --abbrev=0) +-COMMITS := $(shell echo `git log --oneline $(LAST_TAG)..HEAD | wc -l`) +-VERSION := $(shell cat VERSION) +-BRANCH := $(shell git rev-parse --abbrev-ref HEAD) +- +-ifneq (v$(VERSION),$(LAST_TAG)) +- VERSION := $(shell echo $(VERSION)~beta.$(COMMITS).g$(REVISION)) +-endif +- + VERSION_FLAGS :=-X "main.VERSION=$(VERSION)" -X "main.REVISION=$(REVISION)" + + export GOBIN := $(CURDIR)/bin diff --git a/archives/gitlab-shell/APKBUILD b/archives/gitlab-shell/APKBUILD new file mode 100644 index 0000000..b1d202a --- /dev/null +++ b/archives/gitlab-shell/APKBUILD @@ -0,0 +1,66 @@ +# Maintainer: Antoine Martin (ayakael) +# Contributor: Antoine Martin (ayakael) +# Contributor: Jakub Jirutka +pkgname=gitlab-shell +pkgver=14.36.0 +pkgrel=0 +pkgdesc="GitLab Shell handles git SSH sessions for GitLab" +url="https://gitlab.com/gitlab-org/gitlab-shell" +arch="all" +license="MIT" +depends="git openssh" +makedepends="go krb5-dev" +pkgusers="git" +pkggroups="git" +install="$pkgname.pre-install $pkgname.post-install" +# NOTE: user vs system gitconfig, see https://gitlab.com/gitlab-org/omnibus-gitlab/-/merge_requests/6166 +source="https://gitlab.com/gitlab-org/gitlab-shell/-/archive/v$pkgver/gitlab-shell-v$pkgver.tar.gz + config.patch + change-config-path.patch + gitconfig + " +builddir="$srcdir/$pkgname-v$pkgver" +options="!check" + +build() { + # BUILD_TAGS - build without tracing libs, + # see https://gitlab.com/gitlab-org/labkit/-/merge_requests/2 + make build \ + VERSION_STRING="$pkgver" \ + BUILD_TAGS="" +} + +package() { + local datadir="$pkgdir/var/lib/gitlab" + local libdir="$pkgdir/usr/lib/gitlab-shell" + + # XXX: I couldn't figure out how/where is gitlab-shell called, + # so I kept /usr/lib/gitlab-shell. It should be changed to /usr. 
+ make install DESTDIR="$pkgdir" PREFIX=/usr/lib/gitlab-shell + + install -m644 VERSION "$libdir"/ + install -m644 -D config.yml.example "$pkgdir"/etc/gitlab/gitlab-shell.yml + + cd "$pkgdir" + + rm "$libdir"/bin/gitlab-sshd + + install -d -m755 -o git -g git \ + "$pkgdir"/var/log/gitlab \ + "$datadir" + + install -d -m02770 -o git -g git \ + "$datadir"/repositories + + install -m644 -o git -g git "$srcdir"/gitconfig "$datadir"/.gitconfig + + ln -s /etc/gitlab/gitlab-shell.yml "$libdir"/config.yml + ln -s /etc/gitlab/gitlab_shell_secret "$libdir"/.gitlab_shell_secret +} + +sha512sums=" +6b302be3630e60e3c9f76e58c61674bf08c3fe1395c9af5f354b9a557ecd1ddb43d27c9a995f868c4e4e2e734dd424a37c73e78d26b00f1f6a78f8670b45c371 gitlab-shell-v14.36.0.tar.gz +e9dd69c57c65197493f75bdde682075c6ab22892ed07d37c7a73129fb42a8349a676d5986bfd17f1df331645334248383845f21ce08d1e9664c38e4bbf5343ba config.patch +499b3a46ea94a33a23b01f6a7509d74f5a6781b930619b3b8ae42bdeae8a052cc636578744d7992b4ae4f9b9f72b11ee3d3c0f5e50986fa3f7e35b979b08aada change-config-path.patch +c53da7f145593693392d9fa880ad5a1909bfc7504fd1c93d94a468c3e0f5cc80f712f41ee1dc8bf38105b410c1165658f208bd88a70c4674104c78af33d8d09c gitconfig +" diff --git a/archives/gitlab-shell/change-config-path.patch b/archives/gitlab-shell/change-config-path.patch new file mode 100644 index 0000000..52d44ce --- /dev/null +++ b/archives/gitlab-shell/change-config-path.patch @@ -0,0 +1,11 @@ +--- a/support/gitlab_config.rb ++++ b/support/gitlab_config.rb +@@ -4,7 +4,7 @@ class GitlabConfig + attr_reader :config + + def initialize +- @config = YAML.load_file(File.join(ROOT_PATH, 'config.yml')) ++ @config = YAML.load_file(ENV.fetch('GITLAB_SHELL_CONFIG', '/etc/gitlab/gitlab-shell.yml')) + end + + def home diff --git a/archives/gitlab-shell/config.patch b/archives/gitlab-shell/config.patch new file mode 100644 index 0000000..6dabe44 --- /dev/null +++ b/archives/gitlab-shell/config.patch @@ -0,0 +1,112 @@ +diff --git a/config.yml.example.orig b/config.yml.example +index fb147c4..98eb0e3 100644 +--- a/config.yml.example.orig ++++ b/config.yml.example +@@ -13,7 +13,7 @@ user: git + # only listen on a Unix domain socket. For Unix domain sockets use + # "http+unix://", e.g. + # "http+unix://%2Fpath%2Fto%2Fsocket" +-gitlab_url: "http+unix://%2Fhome%2Fgit%2Fgitlab%2Ftmp%2Fsockets%2Fgitlab-workhorse.socket" ++gitlab_url: "http+unix://%2Frun%2Fgitlab%2Fworkhorse.socket" + + # When a http+unix:// is used in gitlab_url, this is the relative URL root to GitLab. + # Not used if gitlab_url is http:// or https://. +@@ -29,15 +29,15 @@ http_settings: + # + + # File used as authorized_keys for gitlab user +-auth_file: "/home/git/.ssh/authorized_keys" ++auth_file: "/var/lib/gitlab/.ssh/authorized_keys" + + # SSL certificate dir where custom certificates can be placed + # https://golang.org/pkg/crypto/x509/ +-# ssl_cert_dir: /opt/gitlab/embedded/ssl/certs/ ++# ssl_cert_dir: /etc/gitlab/ssl/certs/ + + # File that contains the secret key for verifying access to GitLab. + # Default is .gitlab_shell_secret in the gitlab-shell directory. +-# secret_file: "/home/git/gitlab-shell/.gitlab_shell_secret" ++secret_file: "/etc/gitlab/gitlab_shell_secret" + # + # The secret field supersedes the secret_file, and if set that + # file will not be read. +@@ -45,13 +45,13 @@ auth_file: "/home/git/.ssh/authorized_keys" + + # Log file. + # Default is gitlab-shell.log in the root directory. +-# log_file: "/home/git/gitlab-shell/gitlab-shell.log" ++log_file: "/var/log/gitlab/gitlab-shell.log" + + # Log level. 
INFO by default +-log_level: INFO ++log_level: WARN + + # Log format. 'json' by default, can be changed to 'text' if needed +-# log_format: json ++log_format: text + + # Audit usernames. + # Set to true to see real usernames in the logs instead of key ids, which is easier to follow, but +@@ -62,62 +62,6 @@ audit_usernames: false + # For more details, visit https://docs.gitlab.com/ee/development/distributed_tracing.html + # gitlab_tracing: opentracing://driver + +-# This section configures the built-in SSH server. Ignored when running on OpenSSH. +-sshd: +- # Address which the SSH server listens on. Defaults to [::]:22. +- listen: "[::]:22" +- # Set to true if gitlab-sshd is being fronted by a load balancer that implements +- # the PROXY protocol. +- proxy_protocol: false +- # Proxy protocol policy ("use", "require", "reject", "ignore"), "use" is the default value +- # Values: https://github.com/pires/go-proxyproto/blob/195fedcfbfc1be163f3a0d507fac1709e9d81fed/policy.go#L20 +- proxy_policy: "use" +- # Proxy allowed IP addresses. Takes precedent over proxy_policy. Disabled by default. +- # proxy_allowed: +- # - "192.168.0.1" +- # - "192.168.1.0/24" +- # Address which the server listens on HTTP for monitoring/health checks. Defaults to localhost:9122. +- web_listen: "localhost:9122" +- # Maximum number of concurrent sessions allowed on a single SSH connection. Defaults to 10. +- concurrent_sessions_limit: 10 +- # Sets an interval after which server will send keepalive message to a client. Defaults to 15s. +- client_alive_interval: 15 +- # The server waits for this time for the ongoing connections to complete before shutting down. Defaults to 10s. +- grace_period: 10 +- # The server disconnects after this time if the user has not successfully logged in. Defaults to 60s. +- login_grace_time: 60 +- # A short timeout to decide to abort the connection if the protocol header is not seen within it. Defaults to 500ms +- proxy_header_timeout: 500ms +- # The endpoint that returns 200 OK if the server is ready to receive incoming connections; otherwise, it returns 503 Service Unavailable. Defaults to "/start". +- readiness_probe: "/start" +- # The endpoint that returns 200 OK if the server is alive. Defaults to "/health". +- liveness_probe: "/health" +- # Specifies the available message authentication code algorithms that are used for protecting data integrity +- macs: [hmac-sha2-256-etm@openssh.com, hmac-sha2-512-etm@openssh.com, hmac-sha2-256, hmac-sha2-512, hmac-sha1] +- # Specifies the available Key Exchange algorithms +- kex_algorithms: [curve25519-sha256, curve25519-sha256@libssh.org, ecdh-sha2-nistp256, ecdh-sha2-nistp384, ecdh-sha2-nistp521, diffie-hellman-group14-sha256, diffie-hellman-group14-sha1] +- # Specified the ciphers allowed +- ciphers: [aes128-gcm@openssh.com, chacha20-poly1305@openssh.com, aes256-gcm@openssh.com, aes128-ctr, aes192-ctr,aes256-ctr] +- # Specified the available Public Key algorithms +- public_key_algorithms: [ssh-rsa, ssh-dss, ecdsa-sha2-nistp256, sk-ecdsa-sha2-nistp256@openssh.com, ecdsa-sha2-nistp384, ecdsa-sha2-nistp521, ssh-ed25519, sk-ssh-ed25519@openssh.com, rsa-sha2-256, rsa-sha2-512] +- # SSH host key files. 
+-  host_key_files:
+-    - /run/secrets/ssh-hostkeys/ssh_host_rsa_key
+-    - /run/secrets/ssh-hostkeys/ssh_host_ecdsa_key
+-    - /run/secrets/ssh-hostkeys/ssh_host_ed25519_key
+-  host_key_certs:
+-    - /run/secrets/ssh-hostkeys/ssh_host_rsa_key-cert.pub
+-    - /run/secrets/ssh-hostkeys/ssh_host_ecdsa_key-cert.pub
+-    - /run/secrets/ssh-hostkeys/ssh_host_ed25519_key-cert.pub
+-  # GSSAPI-related settings
+-  gssapi:
+-    # Enable the gssapi-with-mic authentication method. Defaults to false.
+-    enabled: false
+-    # Keytab path. Defaults to "", system default (usually /etc/krb5.keytab).
+-    keytab: ""
+-    # The Kerberos service name to be used by sshd. Defaults to "", accepts any service name in keytab file.
+-    service_principal_name: ""
+-
+ lfs:
+   # https://gitlab.com/groups/gitlab-org/-/epics/11872, disabled by default.
+   pure_ssh_protocol: false
diff --git a/archives/gitlab-shell/gitconfig b/archives/gitlab-shell/gitconfig
new file mode 100644
index 0000000..ccf8053
--- /dev/null
+++ b/archives/gitlab-shell/gitconfig
@@ -0,0 +1,17 @@
+# Based on files/gitlab-cookbooks/gitlab/templates/default/gitconfig.erb
+# in omnibus-gitlab.
+
+[user]
+name = GitLab
+email = gitlab@local.host
+
+[core]
+# Needed for the web editor.
+autocrlf = input
+alternateRefsCommand="exit 0 #"
+# This option is unnecessary on journaled file systems and it's not recognized
+# by git >= 2.36.
+# fsyncObjectFiles = true
+
+[gc]
+auto = 0
diff --git a/archives/gitlab-shell/gitlab-shell.post-install b/archives/gitlab-shell/gitlab-shell.post-install
new file mode 100644
index 0000000..01c425c
--- /dev/null
+++ b/archives/gitlab-shell/gitlab-shell.post-install
@@ -0,0 +1,23 @@
+#!/bin/sh
+set -eu
+
+keys_file='/var/lib/gitlab/.ssh/authorized_keys'
+
+if [ ! -f "$keys_file" ]; then
+	keys_dir="$(dirname "$keys_file")"
+	echo "* Initializing authorized_keys file in $keys_dir" 1>&2
+
+	mkdir -m0700 -p "$keys_dir"
+	chown git:git "$keys_dir"
+
+	touch "$keys_file"
+	chmod 0600 "$keys_file"
+	chown git:git "$keys_file"
+fi
+
+cat >&2 <<-EOF
+*
+* GitLab Shell has been initialized. Read /etc/gitlab/gitlab-shell.yml and
+* modify settings as needed.
+*
+EOF
diff --git a/archives/gitlab-shell/gitlab-shell.pre-install b/archives/gitlab-shell/gitlab-shell.pre-install
new file mode 100644
index 0000000..9421862
--- /dev/null
+++ b/archives/gitlab-shell/gitlab-shell.pre-install
@@ -0,0 +1,41 @@
+#!/bin/sh
+# It's very important to set user/group correctly.
+
+git_dir='/var/lib/gitlab'
+
+if ! getent group git >/dev/null; then
+	echo '* Creating group git' >&2
+
+	addgroup -S git
+fi
+
+if ! id git 2>/dev/null 1>&2; then
+	echo '* Creating user git' >&2
+
+	adduser -DHS -G git -h "$git_dir" -s /bin/sh \
+		-g "added by apk for gitlab-shell" git
+	passwd -u git >/dev/null  # unlock
+fi
+
+if ! id -Gn git | grep -Fq redis; then
+	echo '* Adding user git to group redis' >&2
+
+	addgroup git redis
+fi
+
+user_home="$(getent passwd git | cut -d: -f6)"
+
+if [ "$user_home" != "$git_dir" ]; then
+	cat >&2 <<-EOF
+	!!
+	!! User git has home directory in $user_home, but this package and the
+	!! gitlab-ce package assume $git_dir. Although it's possible to use a
+	!! different directory, it's really not easy.
+	!!
+	!! Please change git's home directory to $git_dir, or adjust settings
+	!! and move files yourself. Otherwise GitLab will not work!
+	!!
+	EOF
+fi
+
+exit 0
diff --git a/archives/mastodon/APKBUILD b/archives/mastodon/APKBUILD
new file mode 100644
index 0000000..954ff0b
--- /dev/null
+++ b/archives/mastodon/APKBUILD
@@ -0,0 +1,202 @@
+# Contributor: Antoine Martin (ayakael)
+# Maintainer: Antoine Martin (ayakael)
+pkgname=mastodon
+_pkgname=$pkgname
+pkgver=4.2.10
+_gittag=v$pkgver
+pkgrel=1
+pkgdesc="Self-hosted social media and network server based on ActivityPub and OStatus"
+arch="x86_64"
+url="https://github.com/mastodon/mastodon"
+license="AGPL-3.0-only"
+depends="
+	$pkgname-assets=$pkgver-r$pkgrel
+	ffmpeg
+	file
+	gcompat
+	imagemagick
+	nodejs
+	npm
+	protobuf
+	py3-elasticsearch
+	redis
+	ruby3.2
+	ruby3.2-bundler
+	yarn
+	"
+makedepends="
+	gnu-libiconv-dev
+	icu-dev
+	libffi-dev
+	libidn-dev
+	libxml2-dev
+	libxslt-dev
+	openssl-dev
+	postgresql-dev
+	protobuf-dev
+	ruby3.2-dev
+	yaml-dev
+	zlib-dev
+	"
+install="
+	$pkgname.pre-install
+	$pkgname.post-upgrade
+	$pkgname.post-install
+	"
+source="
+	mastodon-$_gittag.tar.gz::https://github.com/mastodon/mastodon/archive/$_gittag.tar.gz
+	mastodon.initd
+	mastodon.web.initd
+	mastodon.sidekiq.initd
+	mastodon.streaming.initd
+	mastodon.logrotate
+	bin-wrapper.in
+	"
+subpackages="$pkgname-openrc $pkgname-assets::noarch"
+options="!check"  # No test suite
+
+_prefix="usr/lib/bundles/$_pkgname"
+
+export BUNDLE_DEPLOYMENT=true
+export BUNDLE_FORCE_RUBY_PLATFORM=true
+export BUNDLE_FROZEN=true
+export BUNDLE_JOBS=${JOBS:-2}
+
+prepare() {
+	default_prepare
+
+	# Allow use of any bundler
+	sed -i -e '/BUNDLED/,+1d' Gemfile.lock
+
+	# Allow use of higher Node versions
+	sed -i 's/"node": .*"/"node": ">=14.15"/' package.json
+
+	mkdir -p "$srcdir"/gem-cache
+}
+
+build() {
+	local bundle_without='exclude development'
+
+	msg "Installing Ruby gems..."
+	bundle config --local build.nokogiri --use-system-libraries \
+		--with-xml2-include=/usr/include/libxml2 \
+		--with-xslt-include=/usr/include/libxslt
+	bundle config --local build.ffi --enable-system-libffi
+	bundle config --local build.idn --enable-system-libidn
+	bundle config --local path "vendor/bundle"
+	bundle config --local set deployment 'false'
+	bundle config --local set without "$bundle_without"
+
+	bundle install --no-cache -j"$(getconf _NPROCESSORS_ONLN)"
+
+	msg "Installing npm modules..."
+	yarn install --production --frozen-lockfile
+
+	(
+		msg "Compiling assets..."
+		export NODE_ENV=production
+		export RAILS_ENV=production
+		export NODE_OPTIONS="--openssl-legacy-provider"
+
+		OTP_SECRET=precompile_placeholder SECRET_KEY_BASE=precompile_placeholder bundle exec rails assets:precompile
+	)
+
+	msg "Cleaning assets gems..."
+	bundle config --local without "$bundle_without"
+	bundle clean
+
+	# Create executables in bin/*.
+	# See also https://github.com/bundler/bundler/issues/6149.
+	bundle binstubs --force bundler puma sidekiq
+}
+
+package() {
+	local destdir="$pkgdir"/$_prefix
+	local datadir="$pkgdir/var/lib/mastodon"
+
+	# Directory creation
+	install -dm 755 \
+		"$destdir" \
+		"$datadir" \
+		"$pkgdir"/etc/init.d
+
+
+	# Install application files.
+	rmdir "$destdir"
+	cp -a "$builddir" "$destdir"
+
+	install -m755 -t "$destdir"/bin/ \
+		bin/bundle \
+		bin/rails \
+		bin/rake \
+		bin/sidekiq \
+		bin/sidekiqmon \
+		bin/tootctl \
+		bin/puma
+
+	cd "$destdir"/vendor/bundle/ruby/*/
+
+	# Remove tests, documentation, and other useless files.
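+	# (informative) vendor/bundle/ruby/<abi>/ contains one tree per Ruby ABI
+	# (e.g. 3.2.0); the glob in the cd above selects it without hardcoding it.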
+	find gems/ \( -name 'doc' \
+		-o -name 'spec' \
+		-o -name 'test' \) \
+		-type d -maxdepth 2 -exec rm -fr "{}" +
+	find gems/ \( -name 'README*' \
+		-o -name 'CHANGELOG*' \
+		-o -name 'CONTRIBUT*' \
+		-o -name '*LICENSE*' \
+		-o -name 'Rakefile' \
+		-o -name '.*' \) \
+		-type f -delete
+
+	# Remove assets; they are already compiled.
+	rm -r gems/doorkeeper-*/app/assets
+	rm -r gems/pghero-*/app/assets
+
+	# Remove build logs and cache.
+	rm -rf build_info/ cache/
+	find extensions/ \( -name gem_make.out -o -name mkmf.log \) -delete
+
+	cat > "$datadir"/.profile <<-EOF
+		export RAILS_ENV=production
+		export NODE_ENV=production
+		export EXECJS_RUNTIME=Disabled
+	EOF
+
+	# Install wrapper scripts to /usr/bin.
+	local name; for name in rake rails tootctl; do
+		sed "s/__COMMAND__/$name/g" "$srcdir"/bin-wrapper.in \
+			> "$builddir"/mastodon-$name
+		install -m755 -D "$builddir"/mastodon-$name "$pkgdir"/usr/bin/mastodon-$name
+	done
+
+	# Put the config file in /etc and link to it
+	touch "$pkgdir"/etc/mastodon.conf
+	ln -s /etc/mastodon.conf "$destdir"/.env.production
+	ln -s /usr/bin/node "$destdir"/node
+
+	for file in $_pkgname $_pkgname.sidekiq $_pkgname.web $_pkgname.streaming; do
+		install -m755 -D "$srcdir"/$file.initd "$pkgdir"/etc/init.d/$file
+	done
+
+	# Remove all prebuilt artifacts.
+	rm -R "$destdir"/node_modules/*/prebuilds 2>/dev/null || true
+
+	install -m644 -D "$srcdir"/$_pkgname.logrotate \
+		"$pkgdir"/etc/logrotate.d/$_pkgname
+}
+
+assets() {
+	depends=""
+
+	amove $_prefix/public/assets
+}
+
+sha512sums="
+1fe5417136bc020a83b83eaccef7f1f46c13fc8318681f12ba556b1b6b03e25ef7b6335c28f4e6722101e97b63020cbd0d3fbacdaf9b3b5a4b73c3cf3e230813  mastodon-v4.2.10.tar.gz
+d49fea9451c97ccefe5e35b68e4274aeb427f9d1e910b89c1f6c810489c3bec1ccff72952fdaef95abf944b8aff0da84a52347540d36ff1fba5ccc19e1d935c6  mastodon.initd
+eefe12a31268245f802222c0001dac884e03adb0d301e53a1512a3cd204836ca03ad083908cd14d146cf0dce99e3a4366570efd0e40a9a490ccd381d4c63c32f  mastodon.web.initd
+8fc9249c01693bb02b8d1a6177288d5d3549addde8c03eb35cc7a32dde669171872ebc2b5deb8019dc7a12970098f1af707171fa41129be31b04e1dc1651a777  mastodon.sidekiq.initd
+03433a2f58600ca0d58e7c3713df2146ccdfc92033ccfe801dbd38bac39b66d6297f2b5ca02300caa36455b484eab2caa68c912c2f72150203bfa0e106c375fc  mastodon.streaming.initd
+83b3bae5b6fdb4d0dbc1cbe546c62c0aa77397b97d1a5d5377af032466677de188065b556710c0d96576bbae89cc76800f1ffb8cd718155eb2784da818f27619  mastodon.logrotate
+dfd0e43ac6c28387bd4aa57fd98ae41aeb5a098b6deb3e44b89f07818e2470773b025364afee7ef6fd0f664cb86bbbbe8796c9f222f5436c256a787282fbe3e1  bin-wrapper.in
+"
diff --git a/archives/mastodon/bin-wrapper.in b/archives/mastodon/bin-wrapper.in
new file mode 100644
index 0000000..eb1d637
--- /dev/null
+++ b/archives/mastodon/bin-wrapper.in
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+BUNDLE_DIR='/usr/lib/bundles/mastodon'
+export RAILS_ENV='production'
+export NODE_ENV='production'
+export EXECJS_RUNTIME='Disabled'
+
+cd $BUNDLE_DIR
+
+if [ "$(id -un)" != 'mastodon' ]; then
+	exec su mastodon -c '"$0" "$@"' -- bin/__COMMAND__ "$@"
+else
+	exec bin/__COMMAND__ "$@"
+fi
diff --git a/archives/mastodon/mastodon.initd b/archives/mastodon/mastodon.initd
new file mode 100644
index 0000000..1454603
--- /dev/null
+++ b/archives/mastodon/mastodon.initd
@@ -0,0 +1,41 @@
+#!/sbin/openrc-run
+
+name="Mastodon"
+description="Meta script for starting/stopping all the Mastodon components"
+
+subservices="mastodon.sidekiq mastodon.streaming mastodon.web"
+
+depend() {
+	need redis postgresql
+	use net
+}
+
+start() {
+	local ret=0
+
diff --git a/archives/mastodon/mastodon.initd b/archives/mastodon/mastodon.initd
new file mode 100644
index 0000000..1454603
--- /dev/null
+++ b/archives/mastodon/mastodon.initd
@@ -0,0 +1,41 @@
+#!/sbin/openrc-run
+
+name="Mastodon"
+description="Meta script for starting/stopping all the Mastodon components"
+
+subservices="mastodon.sidekiq mastodon.streaming mastodon.web"
+
+depend() {
+	need redis postgresql
+	use net
+}
+
+start() {
+	local ret=0
+
+	ebegin "Starting all Mastodon components"
+	local svc; for svc in $subservices; do
+		service $svc start || ret=1
+	done
+	eend $ret
+}
+
+stop() {
+	local ret=0
+
+	ebegin "Stopping all Mastodon components"
+	local svc; for svc in $subservices; do
+		service $svc stop || ret=1
+	done
+	eend $ret
+}
+
+status() {
+	local ret=0
+
+	local svc; for svc in $subservices; do
+		echo "$svc:"
+		service $svc status || ret=1
+	done
+	eend $ret
+}
diff --git a/archives/mastodon/mastodon.logrotate b/archives/mastodon/mastodon.logrotate
new file mode 100644
index 0000000..cbfecfc
--- /dev/null
+++ b/archives/mastodon/mastodon.logrotate
@@ -0,0 +1,11 @@
+/var/log/mastodon/*.log {
+	compress
+	copytruncate
+	delaycompress
+	maxsize 10M
+	minsize 1M
+	missingok
+	sharedscripts
+	rotate 10
+	weekly
+}
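The rotation policy above can be exercised without waiting for the scheduled run; both flags are standard logrotate options:

    logrotate -d /etc/logrotate.d/mastodon   # dry run: report what would be rotated
    logrotate -f /etc/logrotate.d/mastodon   # force one rotation to check paths and permissions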
diff --git a/archives/mastodon/mastodon.post-install b/archives/mastodon/mastodon.post-install
new file mode 100644
index 0000000..9387d45
--- /dev/null
+++ b/archives/mastodon/mastodon.post-install
@@ -0,0 +1,26 @@
+#!/bin/sh
+set -eu
+
+if [ "${0##*.}" = 'post-upgrade' ]; then
+	cat >&2 <<-EOF
+	*
+	* To finish Mastodon upgrade run:
+	*
+	*     mastodon-rake db:migrate
+	*
+	EOF
+else
+	cat >&2 <<-EOF
+	*
+	* 1. Adjust settings in /etc/mastodon.conf
+	*
+	* 2. Create database for Mastodon:
+	*
+	*      psql -c "CREATE ROLE mastodon PASSWORD 'top-secret' INHERIT LOGIN;"
+	*      psql -c "CREATE DATABASE mastodon OWNER mastodon ENCODING 'UTF-8';"
+	*      psql -d mastodon -c "CREATE EXTENSION pg_trgm; CREATE EXTENSION btree_gist;"
+	*
+	* 3. Run "mastodon-rake db:migrate"
+	*
+	EOF
+fi
diff --git a/archives/mastodon/mastodon.post-upgrade b/archives/mastodon/mastodon.post-upgrade
new file mode 120000
index 0000000..0fcc8b2
--- /dev/null
+++ b/archives/mastodon/mastodon.post-upgrade
@@ -0,0 +1 @@
+mastodon.post-install
\ No newline at end of file
diff --git a/archives/mastodon/mastodon.pre-install b/archives/mastodon/mastodon.pre-install
new file mode 100644
index 0000000..c869177
--- /dev/null
+++ b/archives/mastodon/mastodon.pre-install
@@ -0,0 +1,54 @@
+#!/bin/sh
+# It's very important to set user/group correctly.
+
+mastodon_dir='/var/lib/mastodon'
+
+if ! getent group mastodon 1>/dev/null; then
+	echo '* Creating group mastodon' 1>&2
+
+	addgroup -S mastodon
+fi
+
+if ! id mastodon 2>/dev/null 1>&2; then
+	echo '* Creating user mastodon' 1>&2
+
+	adduser -DHS -G mastodon -h "$mastodon_dir" -s /bin/sh \
+		-g "added by apk for mastodon" mastodon
+	passwd -u mastodon 1>/dev/null  # unlock
+fi
+
+if ! id -Gn mastodon | grep -Fq redis; then
+	echo '* Adding user mastodon to group redis' 1>&2
+
+	addgroup mastodon redis
+fi
+
+if [ "$(id -gn mastodon)" != 'mastodon' ]; then
+	cat >&2 <<-EOF
+	!!
+	!! User mastodon has primary group $(id -gn mastodon). We strongly recommend
+	!! changing mastodon's primary group to mastodon.
+	!!
+	EOF
+
+	# Add it at least as a supplementary group.
+	adduser mastodon mastodon
+fi
+
+user_home="$(getent passwd mastodon | cut -d: -f6)"
+
+if [ "$user_home" != "$mastodon_dir" ]; then
+	cat >&2 <<-EOF
+	!!
+	!! User mastodon has its home directory in $user_home, but this package assumes
+	!! $mastodon_dir. Although it's possible to use a different directory,
+	!! it's really not easy.
+	!!
+	!! Please change mastodon's home directory to $mastodon_dir, or adjust settings
+	!! and move files yourself. Otherwise Mastodon will not work!
+	!!
+	EOF
+fi
+
+exit 0
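The expectations encoded in pre-install above can be re-checked by hand on an installed system, using the same getent/id tools the script itself uses:

    getent passwd mastodon   # sixth field must be /var/lib/mastodon
    id -Gn mastodon          # should include mastodon and redis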
diff --git a/archives/mastodon/mastodon.sidekiq.initd b/archives/mastodon/mastodon.sidekiq.initd
new file mode 100644
index 0000000..98c0377
--- /dev/null
+++ b/archives/mastodon/mastodon.sidekiq.initd
@@ -0,0 +1,32 @@
+#!/sbin/openrc-run
+
+name="Mastodon background workers service"
+root="/usr/lib/bundles/mastodon"
+pidfile="/run/mastodon-sidekiq.pid"
+logfile="/var/log/mastodon/sidekiq.log"
+
+depend() {
+	use net
+	need redis
+}
+
+start() {
+	ebegin "Starting Mastodon background workers"
+
+	cd $root
+
+	start-stop-daemon --start --background \
+		--chdir "${root}" \
+		--user="mastodon" \
+		--make-pidfile --pidfile="${pidfile}" \
+		-1 "${logfile}" -2 "${logfile}" \
+		--exec /usr/bin/env -- RAILS_ENV=production DB_POOL=25 MALLOC_ARENA_MAX=2 bundle exec sidekiq -c 25
+	eend $?
+}
+
+stop() {
+	ebegin "Stopping Mastodon background workers"
+	start-stop-daemon --stop \
+		--pidfile=${pidfile}
+	eend $?
+}
diff --git a/archives/mastodon/mastodon.streaming.initd b/archives/mastodon/mastodon.streaming.initd
new file mode 100644
index 0000000..b41adc2
--- /dev/null
+++ b/archives/mastodon/mastodon.streaming.initd
@@ -0,0 +1,33 @@
+#!/sbin/openrc-run
+
+name="Mastodon streaming API service"
+root="/usr/lib/bundles/mastodon"
+pidfile="/run/mastodon-streaming.pid"
+logfile="/var/log/mastodon/streaming.log"
+
+depend() {
+	use net
+}
+
+start() {
+	ebegin "Starting Mastodon streaming API"
+
+	cd $root
+
+	start-stop-daemon --start \
+		--background --quiet \
+		--chdir "${root}" \
+		--user="mastodon" \
+		--make-pidfile --pidfile="${pidfile}" \
+		--stdout "${logfile}" --stderr "${logfile}" \
+		--exec /usr/bin/env -- NODE_ENV=production PORT=4000 /usr/bin/node ./streaming/index.js
+	eend $?
+}
+
+stop() {
+	ebegin "Stopping Mastodon streaming API"
+	start-stop-daemon --stop \
+		--pidfile="${pidfile}"
+	eend $?
+}
+
diff --git a/archives/mastodon/mastodon.web.initd b/archives/mastodon/mastodon.web.initd
new file mode 100644
index 0000000..42eace6
--- /dev/null
+++ b/archives/mastodon/mastodon.web.initd
@@ -0,0 +1,29 @@
+#!/sbin/openrc-run
+
+name="Mastodon Web Service"
+root="/usr/lib/bundles/mastodon"
+pidfile="/run/mastodon-web.pid"
+logfile="/var/log/mastodon/web.log"
+
+depend() {
+	use net
+}
+
+start() {
+	ebegin "Starting Mastodon web workers"
+	cd $root
+	start-stop-daemon --start --background \
+		--chdir "${root}" \
+		--user="mastodon" \
+		--pidfile="${pidfile}" --make-pidfile \
+		--stdout="${logfile}" --stderr="${logfile}" \
+		--exec /usr/bin/env -- RAILS_ENV=production PORT=3000 bundle exec puma -C config/puma.rb
+	eend $?
+}
+
+stop() {
+	ebegin "Stopping Mastodon web workers"
+	start-stop-daemon --stop \
+		--pidfile=${pidfile}
+	eend $?
+}
diff --git a/archives/ruby3.2-bundler/APKBUILD b/archives/ruby3.2-bundler/APKBUILD
new file mode 100644
index 0000000..b21a7d8
--- /dev/null
+++ b/archives/ruby3.2-bundler/APKBUILD
@@ -0,0 +1,51 @@
+# Maintainer: Jakub Jirutka
+pkgname=ruby3.2-bundler
+_gemname=bundler
+pkgver=2.3.26
+pkgrel=0
+pkgdesc="Manage an application's gem dependencies"
+url="https://bundler.io/"
+arch="noarch"
+license="MIT"
+depends="ruby3.2"
+makedepends="ruby3.2-rake"
+subpackages="$pkgname-doc"
+source="https://github.com/rubygems/rubygems/archive/bundler-v$pkgver.tar.gz
+	manpages.patch
+	"
+builddir="$srcdir/rubygems-bundler-v$pkgver/bundler"
+options="!check"  # tests require deps not available in main repo
+
+build() {
+	rake build_metadata
+	gem build $_gemname.gemspec
+}
+
+package() {
+	local gemdir="$pkgdir/$(ruby -e 'puts Gem.default_dir')"
+
+	gem install \
+		--local \
+		--install-dir "$gemdir" \
+		--bindir "$pkgdir/usr/bin" \
+		--ignore-dependencies \
+		--no-document \
+		--verbose \
+		$_gemname
+
+	local n; for n in 1 5; do
+		mkdir -p "$pkgdir"/usr/share/man/man$n
+		mv "$gemdir"/gems/$_gemname-$pkgver/lib/bundler/man/*.$n "$pkgdir"/usr/share/man/man$n/
+	done
+
+	rm -rf "$gemdir"/cache \
+		"$gemdir"/build_info \
+		"$gemdir"/doc \
+		"$gemdir"/gems/$_gemname-$pkgver/man \
+		"$gemdir"/gems/$_gemname-$pkgver/*.md
+}
+
+sha512sums="
+0a02d5130ecb8ca96e1850fc409a55d9f07481bbb8ec9b20554cdc6f3b3d3aada67717ab17dd30835615e4c228f39f895bd9b6f55bc22d4dbd88caef9cc105ba  bundler-v2.3.26.tar.gz
+77a36e61ed205aeea6114b1039dfbe29fcaf916eeae3f91785aa53b3ac534e004aa257e218534d927f39e3673eebbfb3ef9ee17f04ed81f74117799b88e53cf4  manpages.patch
+"
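All gem aports in this series stage files the same way; spelling out the path composition (the concrete directory assumes Alpine's ruby3.2 layout, where the ABI version is 3.2.0):

    ruby -e 'puts Gem.default_dir'   # prints /usr/lib/ruby/gems/3.2.0
    # hence in package():
    #   gemdir="$pkgdir/usr/lib/ruby/gems/3.2.0"
    # and `gem install --install-dir "$gemdir"` stages everything under $pkgdir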
diff --git a/archives/ruby3.2-bundler/manpages.patch b/archives/ruby3.2-bundler/manpages.patch
new file mode 100644
index 0000000..cc11b02
--- /dev/null
+++ b/archives/ruby3.2-bundler/manpages.patch
@@ -0,0 +1,37 @@
+From: Jakub Jirutka
+Date: Fri, 26 Mar 2021 23:17:29 +0100
+Subject: [PATCH] Fix --help when man pages are moved out
+
+* Allow moving man pages from the gem's directory to the standard
+  system location (/usr/share/man) without breaking `bundler --help`.
+* Fall back to the bundled ronn pages when the man command is available
+  but the bundler man pages are not (i.e. ruby-bundler-doc is not
+  installed).
+* Execute man with the '-c' option to print the man page to the terminal
+  instead of using a pager.
+
+--- a/lib/bundler/cli.rb
++++ b/lib/bundler/cli.rb
+@@ -118,16 +118,17 @@
+     end
+
+     man_path = File.expand_path("man", __dir__)
+-    man_pages = Hash[Dir.glob(File.join(man_path, "**", "*")).grep(/.*\.\d*\Z/).collect do |f|
+-      [File.basename(f, ".*"), f]
++    man_pages = Hash[Dir.glob(File.join(man_path, "**", "*")).grep(/.*\.\d*\.ronn\Z/).collect do |f|
++      man_name = File.basename(f, ".ronn")
++      [File.basename(man_name, ".*"), man_name]
+     end]
+
+     if man_pages.include?(command)
+       man_page = man_pages[command]
+-      if Bundler.which("man") && man_path !~ %r{^file:/.+!/META-INF/jruby.home/.+}
+-        Kernel.exec "man #{man_page}"
++      if Bundler.which("man") && Kernel.system("man -w #{command} >/dev/null 2>&1") && man_path !~ %r{^file:/.+!/META-INF/jruby.home/.+}
++        Kernel.exec "man -c #{command}"
+       else
+-        puts File.read("#{man_path}/#{File.basename(man_page)}.ronn")
++        puts File.read("#{man_path}/#{man_page}.ronn")
+       end
+     elsif command_path = Bundler.which("bundler-#{cli}")
+       Kernel.exec(command_path, "--help")
diff --git a/archives/ruby3.2-minitest/APKBUILD b/archives/ruby3.2-minitest/APKBUILD
new file mode 100644
index 0000000..a3193fb
--- /dev/null
+++ b/archives/ruby3.2-minitest/APKBUILD
@@ -0,0 +1,66 @@
+# Contributor: Jakub Jirutka
+# Maintainer: Jakub Jirutka
+pkgname=ruby3.2-minitest
+_gemname=minitest
+# Keep version in sync with "Bundled gems" (https://stdgems.org) for the
+# packaged Ruby version.
+pkgver=5.15.0
+pkgrel=1
+pkgdesc="Suite of testing facilities supporting TDD, BDD, mocking, and benchmarking for Ruby"
+url="https://github.com/minitest/minitest"
+arch="noarch"
+license="MIT"
+depends="ruby3.2"
+makedepends="ruby3.2-rdoc"
+subpackages="$pkgname-doc"
+source="https://github.com/minitest/minitest/archive/v$pkgver/$_gemname-$pkgver.tar.gz
+	https://rubygems.org/downloads/$_gemname-$pkgver.gem
+	"
+builddir="$srcdir/$_gemname-$pkgver"
+
+prepare() {
+	default_prepare
+
+	# Generate gemspec (there's no gemspec in the source).
+	gem specification -l --ruby "$srcdir"/$_gemname-$pkgver.gem \
+		> "$builddir"/$_gemname.gemspec
+}
+
+build() {
+	gem build $_gemname.gemspec
+}
+
+check() {
+	ruby -Ilib -Itest -e "Dir.glob('./test/**/test_*.rb', &method(:require))"
+}
+
+package() {
+	local gemdir="$pkgdir/$(ruby -e 'puts Gem.default_dir')"
+	local geminstdir="$gemdir/gems/$_gemname-$pkgver"
+
+	gem install \
+		--local \
+		--install-dir "$gemdir" \
+		--ignore-dependencies \
+		--document ri \
+		--verbose \
+		$_gemname
+
+	# Remove unnecessary files.
+	cd "$gemdir"
+	rm -rf build_info cache extensions plugins
+
+	cd "$geminstdir"
+	rm -rf History.* Manifest.* README.* Rakefile test/
+}
+
+doc() {
+	pkgdesc="$pkgdesc (ri docs)"
+
+	amove "$(ruby -e 'puts Gem.default_dir')"/doc
+}
+
+sha512sums="
+194d074fa83a87b21f551f86d2bb682bcbac53d5a23d4e0f81fbf570427c5cdfcb27e10618bea69037f9e55bea637ed96e52a10808c586ab4020d788556bda71  minitest-5.15.0.tar.gz
+5e97a7aa616966ffc60e10cdc0ba123a7e793f10283ec3b6bf36066177036788cb950ad566fbac49e613b93f08b9846534f463017cde966b4890c3a34a2286be  minitest-5.15.0.gem
+"
diff --git a/archives/ruby3.2-minitest/gemspec.patch b/archives/ruby3.2-minitest/gemspec.patch
new file mode 100644
index 0000000..a21a0c5
--- /dev/null
+++ b/archives/ruby3.2-minitest/gemspec.patch
@@ -0,0 +1,15 @@
+--- a/webrick.gemspec
++++ b/webrick.gemspec
+@@ -14,12 +14,6 @@
+
+   s.require_path = %w{lib}
+   s.files = [
+-    "Gemfile",
+-    "LICENSE.txt",
+-    "README.md",
+-    "Rakefile",
+-    "bin/console",
+-    "bin/setup",
+     "lib/webrick.rb",
+     "lib/webrick/accesslog.rb",
+     "lib/webrick/cgi.rb",
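The doc() helpers in these aports lean on abuild's amove, which moves a path out of $pkgdir into $subpkgdir; a minimal sketch of its effect (not abuild's actual implementation):

    # `amove usr/lib/ruby/gems/3.2.0/doc` is roughly:
    mkdir -p "$subpkgdir"/usr/lib/ruby/gems/3.2.0
    mv "$pkgdir"/usr/lib/ruby/gems/3.2.0/doc "$subpkgdir"/usr/lib/ruby/gems/3.2.0/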
diff --git a/archives/ruby3.2-power_assert/APKBUILD b/archives/ruby3.2-power_assert/APKBUILD
new file mode 100644
index 0000000..24d62b2
--- /dev/null
+++ b/archives/ruby3.2-power_assert/APKBUILD
@@ -0,0 +1,62 @@
+# Contributor: Jakub Jirutka
+# Maintainer: Jakub Jirutka
+pkgname=ruby3.2-power_assert
+_gemname=power_assert
+# Keep version in sync with "Bundled gems" (https://stdgems.org) for the
+# packaged Ruby version.
+pkgver=2.0.3
+pkgrel=0
+pkgdesc="Debug tool for Ruby that displays intermediate results of a method chain"
+url="https://github.com/ruby/power_assert"
+arch="noarch"
+license="BSD-2-Clause AND Ruby"
+depends="ruby3.2"
+checkdepends="ruby3.2-pry ruby3.2-rake ruby3.2-test-unit"
+makedepends="ruby3.2-rdoc"
+subpackages="$pkgname-doc"
+source="https://github.com/ruby/power_assert/archive/v$pkgver/$_gemname-$pkgver.tar.gz
+	gemspec.patch
+	"
+builddir="$srcdir/$_gemname-$pkgver"
+# Avoid circular dependency with ruby-test-unit.
+options="!check"
+
+prepare() {
+	default_prepare
+	sed -i '/require .bundler/d' Rakefile
+}
+
+build() {
+	gem build $_gemname.gemspec
+}
+
+check() {
+	rake test
+}
+
+package() {
+	local gemdir="$pkgdir/$(ruby -e 'puts Gem.default_dir')"
+
+	gem install \
+		--local \
+		--install-dir "$gemdir" \
+		--ignore-dependencies \
+		--document ri \
+		--verbose \
+		$_gemname
+
+	# Remove unnecessary files.
+	cd "$gemdir"
+	rm -rf build_info cache extensions plugins
+}
+
+doc() {
+	pkgdesc="$pkgdesc (ri docs)"
+
+	amove "$(ruby -e 'puts Gem.default_dir')"/doc
+}
+
+sha512sums="
+f5658d18b3b78e7757ddfc1ccdabc011076c009a7343eaad2748ca7aeb4d112bf19c70621cb938e7dcf1582c8bb7c5512017885ea51503b3ed274980b7d7c0b1  power_assert-2.0.3.tar.gz
+eb4321b8ce33476e21f0cd6da92f1f2be93e0892f5e6043d6d5f5578160f1793993b10645c0b06b3b2df3e8190a10c83e5325c367001e222d98b290222c2edfe  gemspec.patch
+"
diff --git a/archives/ruby3.2-power_assert/gemspec.patch b/archives/ruby3.2-power_assert/gemspec.patch
new file mode 100644
index 0000000..ace46ba
--- /dev/null
+++ b/archives/ruby3.2-power_assert/gemspec.patch
@@ -0,0 +1,9 @@
+--- a/power_assert.gemspec
++++ b/power_assert.gemspec
+@@ -15,5 +15 @@
+-  s.files = `git ls-files -z`.split("\x0").reject do |f|
+-    f.match(%r{\A(?:test|spec|features|benchmark|bin)/})
+-  end
+-  s.bindir = 'exe'
+-  s.executables = s.files.grep(%r{^exe/}) { |f| File.basename(f) }
++  s.files = Dir['lib/**/*.rb']
diff --git a/archives/ruby3.2-rake/APKBUILD b/archives/ruby3.2-rake/APKBUILD
new file mode 100644
index 0000000..6c34011
--- /dev/null
+++ b/archives/ruby3.2-rake/APKBUILD
@@ -0,0 +1,58 @@
+# Contributor: Jakub Jirutka
+# Maintainer: Jakub Jirutka
+pkgname=ruby3.2-rake
+_gemname=rake
+# Keep version in sync with "Bundled gems" (https://stdgems.org) for the
+# packaged Ruby version.
+pkgver=13.0.6
+pkgrel=1
+pkgdesc="Ruby task runner, inspired by make"
+url="https://github.com/ruby/rake"
+arch="noarch"
+license="MIT"
+depends="ruby3.2"
+checkdepends="ruby3.2-minitest"
+makedepends="ruby3.2-rdoc"
+subpackages="$pkgname-doc"
+source="https://github.com/ruby/rake/archive/v$pkgver/$_gemname-$pkgver.tar.gz"
+builddir="$srcdir/$_gemname-$pkgver"
+
+build() {
+	gem build $_gemname.gemspec
+}
+
+check() {
+	# FIXME: Fix test_signal_propagation_in_tests
+	ruby -Ilib -Itest -e "Dir.glob('./test/**/test_*.rb', &method(:require))" -- \
+		--exclude=test_signal_propagation_in_tests
+}
+
+package() {
+	local gemdir="$pkgdir/$(ruby -e 'puts Gem.default_dir')"
+
+	gem install \
+		--local \
+		--install-dir "$gemdir" \
+		--bindir "$pkgdir/usr/bin" \
+		--ignore-dependencies \
+		--document ri \
+		--verbose \
+		$_gemname
+
+	# Remove unnecessary files.
+	cd "$gemdir"
+	rm -rf build_info cache extensions plugins
+
+	cd gems/rake-*
+	rm -rf doc ./*.rdoc MIT-LICENSE
+}
+
+doc() {
+	pkgdesc="$pkgdesc (ri docs)"
+
+	amove "$(ruby -e 'puts Gem.default_dir')"/doc
+}
+
+sha512sums="
+1b438be96d8cedaf70e961b0bbd2217692b0b5537b2e1d5f599158e7da3e300cf6ab0c5f0c52fea89be0beb675effbbf563d24e55c84fb673b4982013355e03c  rake-13.0.6.tar.gz
+"
diff --git a/archives/ruby3.2-test-unit/APKBUILD b/archives/ruby3.2-test-unit/APKBUILD
new file mode 100644
index 0000000..6e30887
--- /dev/null
+++ b/archives/ruby3.2-test-unit/APKBUILD
@@ -0,0 +1,54 @@
+# Contributor: Jakub Jirutka
+# Maintainer: Jakub Jirutka
+pkgname=ruby3.2-test-unit
+_gemname=test-unit
+# Keep version in sync with "Bundled gems" (https://stdgems.org) for the
+# packaged Ruby version.
+pkgver=3.5.7
+pkgrel=0
+pkgdesc="An xUnit family unit testing framework for Ruby"
+url="https://test-unit.github.io"
+arch="noarch"
+license="BSD-2-Clause AND Python-2.0 AND Ruby"
+depends="ruby3.2 ruby3.2-power_assert"
+makedepends="ruby3.2-rdoc"
+subpackages="$pkgname-doc"
+source="https://github.com/test-unit/test-unit/archive/$pkgver/$_gemname-$pkgver.tar.gz
+	gemspec.patch
+	"
+builddir="$srcdir/$_gemname-$pkgver"
+
+build() {
+	gem build $_gemname.gemspec
+}
+
+check() {
+	ruby test/run-test.rb
+}
+
+package() {
+	local gemdir="$pkgdir/$(ruby -e 'puts Gem.default_dir')"
+
+	gem install \
+		--local \
+		--install-dir "$gemdir" \
+		--ignore-dependencies \
+		--document ri \
+		--verbose \
+		$_gemname
+
+	# Remove unnecessary files.
+	cd "$gemdir"
+	rm -rf build_info cache extensions plugins
+}
+
+doc() {
+	pkgdesc="$pkgdesc (ri docs)"
+
+	amove "$(ruby -e 'puts Gem.default_dir')"/doc
+}
+
+sha512sums="
+af678a89590c9305eeac3a4e5c7e99354df5b49157de573ee3ff312dad9f12dbcaef3dfe7ffc256194e39e0438625acdd9ab3e9686d7e2c58b2cf225f7f1f74c  test-unit-3.5.7.tar.gz
+22f54fcf272856a9455d5a7276896ec329377b106ab47e3d376158eee72cf570f4487dd87606d730d061e7b06e5d7a0ff561cd8d279a64d8af0ac04e0f2dba92  gemspec.patch
+"
diff --git a/archives/ruby3.2-test-unit/gemspec.patch b/archives/ruby3.2-test-unit/gemspec.patch
new file mode 100644
index 0000000..f2beca1
--- /dev/null
+++ b/archives/ruby3.2-test-unit/gemspec.patch
@@ -0,0 +1,8 @@
+--- a/test-unit.gemspec
++++ b/test-unit.gemspec
+@@ -27,4 +27 @@
+-  spec.files = ["README.md", "Rakefile"]
+-  spec.files += ["COPYING", "BSDL", "PSFL"]
+-  spec.files += Dir.glob("{lib,sample}/**/*.rb")
+-  spec.files += Dir.glob("doc/text/**/*.*")
++  spec.files += Dir.glob("lib/**/*.rb")
diff --git a/archives/ruby3.2-webrick/APKBUILD b/archives/ruby3.2-webrick/APKBUILD
new file mode 100644
index 0000000..a0c0b1e
--- /dev/null
+++ b/archives/ruby3.2-webrick/APKBUILD
@@ -0,0 +1,58 @@
+# Contributor: omni
+# Maintainer: Jakub Jirutka
+pkgname=ruby3.2-webrick
+_gemname=webrick
+pkgver=1.8.1
+pkgrel=0
+pkgdesc="HTTP server toolkit for Ruby"
+url="https://github.com/ruby/webrick"
+arch="noarch"
+license="BSD-2-Clause"
+depends="ruby3.2"
+checkdepends="ruby3.2-rake ruby3.2-test-unit"
+makedepends="ruby3.2-rdoc"
+subpackages="$pkgname-doc"
+source="https://github.com/ruby/webrick/archive/v$pkgver/ruby-webrick-$pkgver.tar.gz
+	gemspec.patch
+	"
+builddir="$srcdir/$_gemname-$pkgver"
+
+prepare() {
+	default_prepare
+	sed -i '/require .bundler/d' Rakefile
+}
+
+build() {
+	gem build $_gemname.gemspec
+}
+
+check() {
+	rake test
+}
+
+package() {
+	local gemdir="$pkgdir/$(ruby -e 'puts Gem.default_dir')"
+
+	gem install \
+		--local \
+		--install-dir "$gemdir" \
+		--ignore-dependencies \
+		--document ri \
+		--verbose \
+		$_gemname
+
+	# Remove unnecessary files.
+	cd "$gemdir"
+	rm -rf build_info cache extensions plugins
+}
+
+doc() {
+	pkgdesc="$pkgdesc (ri docs)"
+
+	amove "$(ruby -e 'puts Gem.default_dir')"/doc
+}
+
+sha512sums="
+21cb396887025f85cfe04868e7fa7ef039809ca42a3acadfe1decb4dcd02eeeb3c9163e970324b56a9e0eb6202d971370af56e200c69de2d224c1941f866400c  ruby-webrick-1.8.1.tar.gz
+5c657602228ba5aef4c272b75bc5d7c42855876811a49a7736bfa72b00d65a2bb550ea76ffcc2bc1e2ef9575796f5981eadd97cc92b1f3bf06c0105b8d166222  gemspec.patch
+"
diff --git a/archives/ruby3.2-webrick/gemspec.patch b/archives/ruby3.2-webrick/gemspec.patch
new file mode 100644
index 0000000..db18f02
--- /dev/null
+++ b/archives/ruby3.2-webrick/gemspec.patch
@@ -0,0 +1,13 @@
+--- a/webrick.gemspec
++++ b/webrick.gemspec
+@@ -14,10 +14,6 @@
+
+   s.require_path = %w{lib}
+   s.files = [
+-    "Gemfile",
+-    "LICENSE.txt",
+-    "README.md",
+-    "Rakefile",
+     "lib/webrick.rb",
+     "lib/webrick/accesslog.rb",
+     "lib/webrick/cgi.rb",
diff --git a/archives/ruby3.2/APKBUILD b/archives/ruby3.2/APKBUILD
new file mode 100644
index 0000000..59e7332
--- /dev/null
+++ b/archives/ruby3.2/APKBUILD
@@ -0,0 +1,253 @@
+# Contributor: Carlo Landmeter
+# Contributor: Jakub Jirutka
+# Maintainer: Jakub Jirutka
+#
+# secfixes:
+#   3.1.4-r0:
+#     - CVE-2023-28755
+#     - CVE-2023-28756
+#   3.1.3-r0:
+#     - CVE-2021-33621
+#   3.1.2-r0:
+#     - CVE-2022-28738
+#     - CVE-2022-28739
+#   3.0.3-r0:
+#     - CVE-2021-41817
+#     - CVE-2021-41816
+#     - CVE-2021-41819
+#   2.7.4-r0:
+#     - CVE-2021-31799
+#     - CVE-2021-31810
+#     - CVE-2021-32066
+#   2.7.3-r0:
+#     - CVE-2021-28965
+#     - CVE-2021-28966
+#   2.7.2-r0:
+#     - CVE-2020-25613
+#   2.6.6-r0:
+#     - CVE-2020-10663
+#     - CVE-2020-10933
+#   2.6.5-r0:
+#     - CVE-2019-16255
+#     - CVE-2019-16254
+#     - CVE-2019-15845
+#     - CVE-2019-16201
+#   2.5.2-r0:
+#     - CVE-2018-16395
+#     - CVE-2018-16396
+#   2.5.1-r0:
+#     - CVE-2017-17742
+#     - CVE-2018-6914
+#     - CVE-2018-8777
+#     - CVE-2018-8778
+#     - CVE-2018-8779
+#     - CVE-2018-8780
+#   2.4.2-r0:
+#     - CVE-2017-0898
+#     - CVE-2017-10784
+#     - CVE-2017-14033
+#     - CVE-2017-14064
+#     - CVE-2017-0899
+#     - CVE-2017-0900
+#     - CVE-2017-0901
+#     - CVE-2017-0902
+#   2.4.3-r0:
+#     - CVE-2017-17405
+#
+pkgname=ruby3.2
+# When upgrading, also upgrade each ruby-* aport listed in the file
+# gems/bundled_gems. If some aport is missing or not in the main repo,
+# create/move it.
+pkgver=3.2.2
+_abiver="${pkgver%.*}.0"
+pkgrel=0
+pkgdesc="An object-oriented language for quick and easy programming"
+url="https://www.ruby-lang.org/"
+arch="all"
+license="Ruby AND BSD-2-Clause AND MIT"
+depends="ca-certificates"
+depends_dev="
+	$pkgname=$pkgver-r$pkgrel
+	$pkgname-rdoc=$pkgver-r$pkgrel
+	gmp-dev
+	libucontext-dev
+	"
+makedepends="$depends_dev
+	autoconf
+	gdbm-dev
+	libffi-dev
+	linux-headers
+	openssl-dev>3
+	readline-dev
+	yaml-dev
+	zlib-dev
+	"
+install="$pkgname.post-upgrade"
+subpackages="$pkgname-dbg $pkgname-doc $pkgname-dev
+	$pkgname-rdoc::noarch
+	$pkgname-libs
+	$pkgname-full::noarch
+	"
+source="https://cache.ruby-lang.org/pub/ruby/${pkgver%.*}/ruby-$pkgver.tar.gz
+	test_insns-lower-recursion-depth.patch
+	fix-get_main_stack.patch
+	dont-install-bundled-gems.patch
+	fix-riscv64-build.patch
+	"
+replaces="ruby3.2-gems"
+builddir="$srcdir"/ruby-$pkgver
+
+# For backward compatibility (pre 3.x).
+for _i in bigdecimal etc fiddle gdbm io-console irb json; do
+	provides="$provides ruby3.2-$_i=$pkgver-r$pkgrel"
+done
+
+_gemdir="/usr/lib/ruby/gems/$_abiver"
+_rubydir="/usr/lib/ruby/$_abiver"
+_chost="${CHOST/-alpine-/-}"
+
+case "$CARCH" in
+	x86) _arch="i386";;
+	*) _arch="$CARCH";;
+esac
+
+prepare() {
+	default_prepare
+	autoconf
+
+	# v2.7.1 - Of all the bootstraptest only test_fiber fails on s390x:
+	#   test_fiber.rb bootstraptest.tmp.rb:8: [BUG] vm_call_cfunc: cfp consistency error (0x000003ffb63fefb0, 0x000003ffb42f5f58)
+	case "$CARCH" in
+		s390x) rm bootstraptest/test_fiber.rb;;
+	esac
+
+	local name ver; while read -r name ver _; do
+		case "$name=$ver" in
+		[a-z]*=[0-9]*.[0-9]*)
+			if ! apk add -qs "ruby-$name>=$ver" >/dev/null 2>&1; then
+				warning "bump package ruby-$name to version $ver"
+			fi
+			echo "ruby-$name>=$ver" >> "$srcdir"/.ruby-full.depends
+		esac
+	done < "$builddir"/gems/bundled_gems
+}
+
+build() {
+	# -fomit-frame-pointer makes ruby segfault, see gentoo bug #150413
+	# In many places aliasing rules are broken; play it safe
+	# as it's risky with newer compilers to leave it as it is.
+	# -O2 - ruby is a language runtime, so performance is crucial. Moreover,
+	# ruby 3.1.1 fails with Bus Error when compiled with -Os on armhf/armv7.
+	# This makes ruby-libs 7% bigger (13.4 -> 14.4 MiB).
+	export CFLAGS="${CFLAGS/-Os/-O2} -fno-omit-frame-pointer -fno-strict-aliasing"
+	export CPPFLAGS="${CPPFLAGS/-Os/-O2} -fno-omit-frame-pointer -fno-strict-aliasing"
+
+	# Needed for coroutine stuff
+	export LIBS="-lucontext"
+
+	# ruby saves the path used at install time; we want it to use $PATH
+	export INSTALL=install
+
+	# the configure script does not detect isnan/isinf as macros
+	export ac_cv_func_isnan=yes
+	export ac_cv_func_isinf=yes
+
+	./configure \
+		--build=$CBUILD \
+		--host=$CHOST \
+		--prefix=/usr \
+		--sysconfdir=/etc \
+		--mandir=/usr/share/man \
+		--infodir=/usr/share/info \
+		--with-sitedir=/usr/local/lib/site_ruby \
+		--with-search-path="/usr/lib/site_ruby/\$(ruby_ver)/$_arch-linux" \
+		--enable-pthread \
+		--disable-rpath \
+		--enable-shared \
+		--with-mantype=man
+	make
+}
+
+check() {
+	# https://bugs.ruby-lang.org/issues/18380
+	local disable_tests="-n !/TestAddressResolve#test_socket_getnameinfo_domain_blocking/"
+
+	case "$CARCH" in
+	x86 | armhf | armv7)
+		# TestReadline#test_interrupt_in_other_thread fails on 32 bit arches according
+		# to upstream, but the test is disabled just on Travis, not in the test suite.
+		# https://bugs.ruby-lang.org/issues/18393
+		disable_tests="$disable_tests -n !/TestReadline#test_interrupt_in_other_thread/"
+		;;
+	esac
+
+	make test TESTS="$disable_tests"
+}
+
+package() {
+	make DESTDIR="$pkgdir" SUDO="" install
+
+	install -m 644 -D COPYING "$pkgdir"/usr/share/licenses/$pkgname/COPYING
+
+	cd "$pkgdir"
+
+	# Remove bundled gem bundler; it's provided by a separate aport/package
+	# ruby-bundler.
+	rm -rf ./$_rubydir/bundler
+	rm ./$_rubydir/bundler.rb
+	rm -rf ./$_gemdir/gems/bundler-*
+	rm ./$_gemdir/specifications/default/bundler-*.gemspec
+	rm usr/bin/bundle usr/bin/bundler
+
+	# Remove bundled CA certificates; they are provided by ca-certificates.
+	rm ./$_rubydir/rubygems/ssl_certs/*/*.pem
+	rmdir ./$_rubydir/rubygems/ssl_certs/* || true
+
+	rm -Rf ./$_gemdir/cache/*
+
+	if [ -d usr/local ]; then
+		local f=$(find usr/local -type f)
+		if [ -n "$f" ]; then
+			error "Found files in /usr/local:"
+			echo "$f"
+			return 1
+		fi
+		rm -r usr/local
+	fi
+}
+
+rdoc() {
+	pkgdesc="Ruby documentation tool"
+	license="Ruby"
+	depends="$pkgname"
+
+	amove $_rubydir/rdoc
+	amove $_gemdir/gems/rdoc-*
+	amove $_gemdir/specifications/default/rdoc-*
+	amove usr/bin/ri
+	amove usr/bin/rdoc
+}
+
+libs() {
+	pkgdesc="Libraries necessary to run Ruby"
+	depends=""
+
+	amove usr/lib
+}
+
+full() {
+	pkgdesc="Ruby with all bundled gems"
+	# bundler is bundled since Ruby 2.6, so include it in ruby-full despite
+	# being provided by a separate aport/package.
+	depends="ruby ruby-rdoc ruby-bundler $(cat "$srcdir"/.ruby-full.depends)"
+
+	mkdir -p "$subpkgdir"
+}
+
+sha512sums="
+bcc68f3f24c1c8987d9c80b57332e5791f25b935ba38daf5addf60dbfe3a05f9dcaf21909681b88e862c67c6ed103150f73259c6e35c564f13a00f432e3c1e46  ruby-3.2.2.tar.gz
+16fc1f35aee327d1ecac420b091beaa53c675e0504d5a6932004f17ca68a2c38f57b053b0a3903696f2232c5add160d363e3972a962f7f7bcb52e4e998c7315d  test_insns-lower-recursion-depth.patch
+42cd45c1db089a1ae57834684479a502e357ddba82ead5fa34e64c13971e7ab7ad2919ddd60a104a817864dd3e2e35bdbedb679210eb41d82cab36a0687e43d4  fix-get_main_stack.patch
+a77da5e5eb7d60caf3f1cabb81e09b88dc505ddd746e34efd1908c0096621156d81cc65095b846ba9bdb66028891aefce883a43ddec6b56b5beb4aac5e4ee33f  dont-install-bundled-gems.patch
+000530316af1fca007fe8cee694b59e2e801674bcc1a2ebea95e67745d4afc0ce66c902fdbc88ee847a4fbf55115b183cd803cbf7c98ef685938efb3e2b7c991  fix-riscv64-build.patch
+"
diff --git a/archives/ruby3.2/dont-install-bundled-gems.patch b/archives/ruby3.2/dont-install-bundled-gems.patch
new file mode 100644
index 0000000..b125fa0
--- /dev/null
+++ b/archives/ruby3.2/dont-install-bundled-gems.patch
@@ -0,0 +1,20 @@
+Don't install bundled gems - we package them separately.
+
+--- a/tool/rbinstall.rb
++++ b/tool/rbinstall.rb
+@@ -990,6 +990,7 @@
+     end
+   end
+
++=begin XXX-Patched
+ install?(:ext, :comm, :gem, :'bundled-gems') do
+   gem_dir = Gem.default_dir
+   install_dir = with_destdir(gem_dir)
+@@ -1057,6 +1058,7 @@
+     puts "skipped bundled gems: #{gems.join(' ')}"
+   end
+ end
++=end
+
+ parse_args()
+
diff --git a/archives/ruby3.2/fix-get_main_stack.patch b/archives/ruby3.2/fix-get_main_stack.patch
new file mode 100644
index 0000000..864a314
--- /dev/null
+++ b/archives/ruby3.2/fix-get_main_stack.patch
@@ -0,0 +1,68 @@
+--- a/thread_pthread.c
++++ b/thread_pthread.c
+@@ -858,9 +858,6 @@
+ #   define MAINSTACKADDR_AVAILABLE 0
+ # endif
+ #endif
+-#if MAINSTACKADDR_AVAILABLE && !defined(get_main_stack)
+-# define get_main_stack(addr, size) get_stack(addr, size)
+-#endif
+
+ #ifdef STACKADDR_AVAILABLE
+ /*
+@@ -942,6 +939,55 @@
+     return 0;
+ #undef CHECK_ERR
+ }
++
++#if defined(__linux__) && !defined(__GLIBC__) && defined(HAVE_GETRLIMIT)
++
++#ifndef PAGE_SIZE
++#include <unistd.h>
++#define PAGE_SIZE sysconf(_SC_PAGE_SIZE)
++#endif
++
++static int
++get_main_stack(void **addr, size_t *size)
++{
++    size_t start, end, limit, prevend = 0;
++    struct rlimit r;
++    FILE *f;
++    char buf[PATH_MAX+80], s[8];
++    int n;
++    STACK_GROW_DIR_DETECTION;
++
++    f = fopen("/proc/self/maps", "re");
++    if (!f)
++        return -1;
++    n = 0;
++    while (fgets(buf, sizeof buf, f)) {
++        n = sscanf(buf, "%zx-%zx %*s %*s %*s %*s %7s", &start, &end, s);
++        if (n >= 2) {
++            if (n == 3 && strcmp(s, "[stack]") == 0)
++                break;
++            prevend = end;
++        }
++        n = 0;
++    }
++    fclose(f);
++    if (n == 0)
++        return -1;
++
++    limit = 100 << 20; /* 100MB stack limit */
++    if (getrlimit(RLIMIT_STACK, &r)==0 && r.rlim_cur < limit)
++        limit = r.rlim_cur & -PAGE_SIZE;
++    if (limit > end) limit = end;
++    if (prevend < end - limit) prevend = end - limit;
++    if (start > prevend) start = prevend;
++    *addr = IS_STACK_DIR_UPPER() ? (void *)start : (void *)end;
++    *size = end - start;
++    return 0;
++}
++#else
++# define get_main_stack(addr, size) get_stack(addr, size)
++#endif
++
+ #endif
+
+ static struct {
diff --git a/archives/ruby3.2/fix-riscv64-build.patch b/archives/ruby3.2/fix-riscv64-build.patch
new file mode 100644
index 0000000..e81e8b6
--- /dev/null
+++ b/archives/ruby3.2/fix-riscv64-build.patch
@@ -0,0 +1,38 @@
+Patch-Source: https://lists.openembedded.org/g/openembedded-core/message/161168
+partially extracted to actually apply onto a release tarball
+
+---
+From dfb22e4d6662bf72879eda806eaa78c7b52b519e Mon Sep 17 00:00:00 2001
+From: Khem Raj
+Date: Tue, 25 Jan 2022 20:29:14 -0800
+Subject: [PATCH] vm_dump.c: Define REG_S1 and REG_S2 for musl/riscv
+
+These defines are missing in musl, there is a possible
+patch to add them to musl, but we need a full list of
+these names for mcontext that can be added once for all
+
+Upstream-Status: Inappropriate [musl bug]
+Signed-off-by: Khem Raj
+---
+ vm_dump.c | 5 +++++
+ 1 file changed, 5 insertions(+)
+
+diff --git a/vm_dump.c b/vm_dump.c
+index a98f5aa..957b785 100644
+--- a/vm_dump.c
++++ b/vm_dump.c
+@@ -39,6 +39,11 @@
+
+ #define MAX_POSBUF 128
+
++#if defined(__riscv) && !defined(__GLIBC__)
++# define REG_S1 9
++# define REG_S2 18
++#endif
++
+ #define VM_CFP_CNT(ec, cfp) \
+     ((rb_control_frame_t *)((ec)->vm_stack + (ec)->vm_stack_size) - \
+      (rb_control_frame_t *)(cfp))
+--
+2.35.0
+
diff --git a/archives/ruby3.2/ruby3.2.post-upgrade b/archives/ruby3.2/ruby3.2.post-upgrade
new file mode 100644
index 0000000..6cba787
--- /dev/null
+++ b/archives/ruby3.2/ruby3.2.post-upgrade
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+ver_new="$1"
+ver_old="$2"
+
+if [ "$(apk version -t "$ver_old" "2.5.0-r0")" = "<" ]; then
+	cat >&2 <<-EOF
+	*
+	* In Ruby 2.5 more parts of the stdlib have been split into standalone
+	* gems, yet still installed with Ruby by default. We have moved some of
+	* them into separate subpackages. If you don't know which subpackages you
+	* need, you may install meta-package "ruby-full".
+	*
+	EOF
+fi
+
+exit 0
diff --git a/archives/ruby3.2/test_insns-lower-recursion-depth.patch b/archives/ruby3.2/test_insns-lower-recursion-depth.patch
new file mode 100644
index 0000000..0069720
--- /dev/null
+++ b/archives/ruby3.2/test_insns-lower-recursion-depth.patch
@@ -0,0 +1,47 @@
+The patched test is a recursive function. We have a lower stack size,
+so we hit SystemStackError sooner than on other platforms.
+
+  #361 test_insns.rb:389:in `block in <main>':
+  # recursive once
+  def once n
+    return %r/#{
+      if n == 0
+        true
+      else
+        once(n-1) # here
+      end
+    }/ox
+  end
+  x = once(128); x = once(7); x = once(16);
+  x =~ "true" && $~
+  #=> "" (expected "true") once
+  Stderr output is not empty
+    bootstraptest.tmp.rb:3:in `once': stack level too deep (SystemStackError)
+        from bootstraptest.tmp.rb:7:in `block in once'
+        from bootstraptest.tmp.rb:3:in `once'
+        from bootstraptest.tmp.rb:7:in `block in once'
+        from bootstraptest.tmp.rb:3:in `once'
+        from bootstraptest.tmp.rb:7:in `block in once'
+        from bootstraptest.tmp.rb:3:in `once'
+        from bootstraptest.tmp.rb:7:in `block in once'
+        from bootstraptest.tmp.rb:3:in `once'
+         ... 125 levels...
+        from bootstraptest.tmp.rb:3:in `once'
+        from bootstraptest.tmp.rb:7:in `block in once'
+        from bootstraptest.tmp.rb:3:in `once'
+        from bootstraptest.tmp.rb:11:in `<main>'
+  test_insns.rb FAIL 1/187
+  FAIL 1/1197 tests failed
+  make: *** [uncommon.mk:666: yes-btest-ruby] Error 1
+
+--- a/bootstraptest/test_insns.rb
++++ b/bootstraptest/test_insns.rb
+@@ -274,7 +274,7 @@
+       end
+     }/ox
+   end
+-  x = once(128); x = once(7); x = once(16);
++  x = once(32); x = once(7); x = once(16);
+   x =~ "true" && $~
+ },
+ [ 'once', <<-'},', ], # {
diff --git a/ilot/authentik/APKBUILD b/ilot/authentik/APKBUILD
new file mode 100644
index 0000000..42b271d
--- /dev/null
+++ b/ilot/authentik/APKBUILD
@@ -0,0 +1,295 @@
+# Contributor: Antoine Martin (ayakael)
+# Maintainer: Antoine Martin (ayakael)
+pkgname=authentik
+pkgver=2024.10.5
+pkgrel=0
+pkgdesc="An open-source Identity Provider focused on flexibility and versatility"
+url="https://github.com/goauthentik/authentik"
+# s390x: missing py3-celery py3-flower and py3-kombu
+# armhf/armv7/x86: out of memory error when building goauthentik
+# ppc64le: not supported by Rollup build
+arch="aarch64 x86_64"
+license="MIT"
+# following depends aren't direct dependencies, but are needed:
+# py3-asn1crypto, py3-cbor2, py3-email-validator, py3-websockets
+# py3-openssl, py3-uvloop, py3-httptools
+depends="
+	bash
+	libcap-setcap
+	nginx
+	postgresql
+	procps
+	pwgen
+	py3-asn1crypto
+	py3-cbor2
+	py3-celery
+	py3-cffi
+	py3-channels
+	py3-channels_redis
+	py3-cryptography
+	py3-dacite
+	py3-daphne
+	py3-deepmerge
+	py3-defusedxml
+	py3-docker-py
+	py3-django
+	py3-django-countries
+	py3-django-cte
+	py3-django-filter
+	py3-django-guardian
+	py3-django-model-utils
+	py3-django-prometheus
+	py3-django-pglock
+	py3-django-redis
+	py3-django-rest-framework~3.14.0
+	py3-django-rest-framework-guardian
+	py3-django-storages
+	py3-django-tenants
+	py3-dumb-init
+	py3-duo-client
+	py3-drf-orjson-renderer
+	py3-drf-spectacular
+	py3-email-validator
+	py3-fido2
+	py3-flower
+	py3-geoip2
+	py3-google-api-python-client
+	py3-gunicorn
+	py3-httptools
+	py3-jsonpatch
+	py3-jwt
+	py3-jwcrypto
+	py3-kadmin
+	py3-kubernetes
+	py3-ldap3
+	py3-lxml
+	py3-maxminddb
+	py3-msgpack
+	py3-msgraph-sdk
+	py3-opencontainers
+	py3-openssl
+	py3-paramiko
+	py3-psycopg
+	py3-psycopg-c
+	py3-pydantic
+	py3-pydantic-scim
+	py3-pyrad
+	py3-python-gssapi
+	py3-requests-oauthlib
+	py3-scim2-filter-parser
+	py3-setproctitle
+	py3-sentry-sdk
+	py3-service_identity
+	py3-six
+	py3-sniffio
+	py3-sqlparse
+	py3-structlog
+	py3-swagger-spec-validator
+	py3-twilio
+	py3-tenant-schemas-celery
+	py3-ua-parser
+	py3-unidecode
+	py3-urllib3-secure-extra
+	py3-uvloop
+	py3-watchdog
+	py3-webauthn
+	py3-websockets
+	py3-wsproto
+	py3-xmlsec
+	py3-yaml
+	py3-zxcvbn
+	valkey
+	uvicorn
+	"
+makedepends="go npm py3-packaging"
+checkdepends="
+	py3-pip
+	py3-coverage
+	py3-codespell
+	py3-colorama
+	py3-pytest
+	py3-pytest-django
+	py3-pytest-randomly
+	py3-pytest-timeout
+	py3-freezegun
+	py3-boto3
+	py3-requests-mock
+	py3-k5test
+	"
+install="$pkgname.post-install $pkgname.post-upgrade $pkgname.pre-install"
+source="
+	$pkgname-$pkgver.tar.gz::https://github.com/goauthentik/authentik/archive/refs/tags/version/$pkgver.tar.gz
+	authentik.openrc
+	authentik-worker.openrc
+	authentik-ldap.openrc
+	authentik-ldap.conf
+	authentik-manage.sh
+	fix-ak-bash.patch
+	root-settings-csrf_trusted_origins.patch
+	go-downgrade-1.22.patch
+	"
+builddir="$srcdir/"authentik-version-$pkgver
+subpackages="$pkgname-openrc $pkgname-doc $pkgname-pyc"
+pkgusers="authentik"
+pkggroups="authentik"
+
+export GOPATH=$srcdir/go
+export GOCACHE=$srcdir/go-build
+export GOTMPDIR=$srcdir
+
+build() {
+	msg "Building authentik-ldap"
+	go build -o ldap cmd/ldap/main.go
+	msg "Building authentik-proxy"
+	go build -o proxy cmd/proxy/main.go
+	msg "Building authentik-radius"
+	go build -o radius cmd/radius/main.go
+	msg "Building authentik-server"
+	go build -o server cmd/server/*.go
+
+	msg "Building authentik-web"
+	cd web
+	npm ci --no-audit
+	npm run build
+	cd ..
+
+	msg "Building website"
+	cd website
+	npm ci --no-audit
+	npm run build
+}
+
+# test failure neutralized due to:
+#   relation authentik_core_user_pb_groups_id_seq does not exist
+check() {
+	msg "Setting up test environments"
+	export POSTGRES_DB=authentik
+	export POSTGRES_USER=authentik
+	export POSTGRES_PASSWORD="EK-5jnKfjrGRm<77"
+	export AUTHENTIK_POSTGRESQL__TEST__NAME=authentik
+
+	rm -Rf "$srcdir"/tmp
+	initdb -D "$srcdir"/tmp
+	postgres -D "$srcdir"/tmp --unix-socket-directories="$srcdir" > "$srcdir"/tmp/psql.log 2>&1 &
+	valkey-server > "$srcdir"/tmp/valkey.log 2>&1 &
+	trap "pkill valkey-server; pkill postgres" EXIT
+	sleep 5
+	psql -h "$srcdir" -d postgres -c "CREATE ROLE $POSTGRES_USER PASSWORD '$POSTGRES_PASSWORD' INHERIT LOGIN;"
+	psql -h "$srcdir" -d postgres -c "CREATE DATABASE $POSTGRES_DB OWNER $POSTGRES_USER ENCODING 'UTF-8';"
+	psql -h "$srcdir" -d postgres -c "CREATE DATABASE test_$POSTGRES_DB OWNER $POSTGRES_USER ENCODING 'UTF-8';"
+
+	# .github/actions/setup/action.yml: Generate config + csrf
+	python3 -c "
+from authentik.lib.generators import generate_id
+from yaml import safe_dump
+
+with open(\"local.env.yml\", \"w\") as _config:
+    safe_dump(
+        {
+            \"log_level\": \"debug\",
+            \"secret_key\": generate_id(),
+            \"csrf\": { \"trusted_origins\": ['https://*']},
+        },
+        _config,
+        default_flow_style=False,
+    )
+"
+	python -m lifecycle.migrate
+
+	# no selenium package
+	pip install selenium drf_jsonschema_serializer pdoc --break-system-packages
+
+	msg "Starting tests"
+	make test || true
+
+	# TODO: Fix go-tests
+	# make go-test
+
+	pkill valkey-server
+	pkill postgres
+}
+
+package() {
+	msg "Packaging $pkgname"
+	local prefix="/usr/share/webapps"
+	local destdir="$pkgdir"$prefix/authentik
+
+	# authentik install
+	install -d -m755 \
+		"$destdir" \
+		"$destdir"/web \
+		"$pkgdir"/usr/bin \
+		"$pkgdir"/usr/share/doc \
+		"$pkgdir"/var/lib/authentik
+
+	cp -rl authentik lifecycle locale tests \
+		"$destdir"/
+
+	cp -rl blueprints \
+		"$pkgdir"/var/lib/authentik/
+
+	cp -rl web/dist web/authentik \
+		"$destdir"/web/
+
+	install -m755 -t "$destdir" \
+		"$builddir"/server \
+		"$builddir"/ldap \
+		"$builddir"/radius \
+		"$builddir"/proxy \
+		"$builddir"/manage.py
+
+	cp -rl website/build/ "$pkgdir"/usr/share/doc/authentik/
+
+	# symbolic bin links to usr/bin
+	for i in server proxy ldap radius; do
+		ln -s $prefix/authentik/$i "$pkgdir"/usr/bin/authentik-$i
+	done
+
+	# openrc install
+	for i in $pkgname $pkgname-worker $pkgname-ldap; do
+		install -Dm755 "$srcdir"/$i.openrc "$pkgdir"/etc/init.d/$i
+	done
+
+	# config file setup
+	install -Dm640 "$builddir"/authentik/lib/default.yml \
+		"$pkgdir"/etc/authentik/config.yml
+	ln -s "/etc/authentik/config.yml" "$pkgdir"/usr/share/webapps/authentik/local.env.yml
+	chown root:www-data "$pkgdir"/etc/authentik/config.yml
+
+	sed -i 's|cert_discovery_dir.*|cert_discovery_dir: /var/lib/authentik/certs|' "$pkgdir"/etc/authentik/config.yml
+	sed -i 's|blueprints_dir.*|blueprints_dir: /var/lib/authentik/blueprints|' "$pkgdir"/etc/authentik/config.yml
+	sed -i 's|template_dir.*|template_dir: /var/lib/authentik/templates|' "$pkgdir"/etc/authentik/config.yml
+	printf "\ncsrf:\n  trusted_origins: ['auth.example.com']" >> "$pkgdir"/etc/authentik/config.yml
+	printf "\nsecret_key: '@@SECRET_KEY@@'" >> "$pkgdir"/etc/authentik/config.yml
+
+	# custom css location change
+	mv "$pkgdir"/usr/share/webapps/authentik/web/dist/custom.css "$pkgdir"/etc/authentik/custom.css
+	ln -s "/etc/authentik/custom.css" "$pkgdir"/usr/share/webapps/authentik/web/dist/custom.css
+	chown root:www-data "$pkgdir"/etc/authentik/custom.css
+
+	# Install wrapper script to /usr/bin.
+	install -m755 -D "$srcdir"/authentik-manage.sh "$pkgdir"/usr/bin/authentik-manage
+}
+
+pyc() {
+	default_pyc
+
+	cd "$pkgdir"
+	# shellcheck disable=SC3003
+	local IFS=$'\n'
+	# shellcheck disable=SC2046
+	amove $(find usr/share/webapps/authentik -type d -name __pycache__)
+}
+
+sha512sums="
+f6e04ac1d1ac3a46e6d0f89548c0c2748f2214c551157e65f9071721dfdccac53c98b1664ecd1bc70650b4fceec47c5a5ab805da34e82ccc86d6a64087441702  authentik-2024.10.5.tar.gz
+4defb4fe3a4230f4aa517fbecd5e5b8bcef2a64e1b40615660ae9eec33597310a09df5e126f4d39ce7764bd1716c0a7040637699135c103cbc1879593c6c06f1  authentik.openrc
+6cb03b9b69df39bb4539fe05c966536314d766b2e9307a92d87070ba5f5b7e7ab70f1b5ee1ab3c0c50c23454f9c5a4caec29e63fdf411bbb7a124ad687569b89  authentik-worker.openrc
+351e6920d987861f8bf0d7ab2f942db716a8dbdad1f690ac662a6ef29ac0fd46cf817cf557de08f1c024703503d36bc8b46f0d9eb1ecaeb399dce4c3bb527d17  authentik-ldap.openrc
+89ee5f0ffdade1c153f3a56ff75b25a7104aa81d8c7a97802a8f4b0eab34850cee39f874dabe0f3c6da3f71d6a0f938f5e8904169e8cdd34d407c8984adee6b0  authentik-ldap.conf
+f1a3cb215b6210fa7d857a452a9f2bc4dc0520e49b9fa7027547cff093d740a7e2548f1bf1f8831f7d5ccb80c8e523ee0c8bafcc4dc42d2788725f2137d21bee  authentik-manage.sh
+3e47db684a3f353dcecdb7bab8836b9d5198766735d77f676a51d952141a0cf9903fcb92e6306c48d2522d7a1f3028b37247fdc1dc74d4d6e043da7eb4f36d49  fix-ak-bash.patch
+5c60e54b6a7829d611af66f5cb8184a002b5ae927efbd024c054a7c176fcb9efcfbe5685279ffcf0390b0f0abb3bb03e02782c6867c2b38d1ad2d508aae83fa0  root-settings-csrf_trusted_origins.patch
+badff70b19aad79cf16046bd46cb62db25c2a8b85b2673ce7c44c42eb60d42f6fcb1b9a7a7236c00f24803b25d3c66a4d64423f7ce14a59763b8415db292a5b9  go-downgrade-1.22.patch
+"
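check() above brings up a throwaway PostgreSQL and Valkey rather than touching system services; the same pattern is convenient when reproducing a test failure by hand (directories are arbitrary, flags as used in check()):

    initdb -D /tmp/ak-pg
    postgres -D /tmp/ak-pg --unix-socket-directories=/tmp > /tmp/ak-pg.log 2>&1 &
    valkey-server > /tmp/valkey.log 2>&1 &
    psql -h /tmp -d postgres -c "CREATE ROLE authentik PASSWORD 'secret' INHERIT LOGIN;"
    # ...run tests, then:
    pkill valkey-server; pkill postgres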
diff --git a/ilot/authentik/authentik-ldap.conf b/ilot/authentik/authentik-ldap.conf
new file mode 100644
index 0000000..c31e819
--- /dev/null
+++ b/ilot/authentik/authentik-ldap.conf
@@ -0,0 +1,3 @@
+AUTHENTIK_HOST=https://example.com
+AUTHENTIK_TOKEN=your-authentik-token
+AUTHENTIK_INSECURE=true
diff --git a/ilot/authentik/authentik-ldap.openrc b/ilot/authentik/authentik-ldap.openrc
new file mode 100644
index 0000000..fc033be
--- /dev/null
+++ b/ilot/authentik/authentik-ldap.openrc
@@ -0,0 +1,24 @@
+#!/sbin/openrc-run
+
+name="$RC_SVCNAME"
+cfgfile="/etc/conf.d/$RC_SVCNAME"
+pidfile="/run/$RC_SVCNAME.pid"
+working_directory="/usr/share/webapps/authentik"
+command="/usr/bin/authentik-ldap"
+command_user="authentik"
+command_group="authentik"
+start_stop_daemon_args=""
+command_background="yes"
+output_log="/var/log/authentik/$RC_SVCNAME.log"
+error_log="/var/log/authentik/$RC_SVCNAME.err"
+
+depend() {
+	need authentik
+}
+
+start_pre() {
+	cd "$working_directory"
+	checkpath --directory --owner $command_user:$command_group --mode 0775 \
+		/var/log/authentik
+	export AUTHENTIK_HOST AUTHENTIK_TOKEN AUTHENTIK_INSECURE AUTHENTIK_DEBUG
+}
diff --git a/ilot/authentik/authentik-manage.sh b/ilot/authentik/authentik-manage.sh
new file mode 100644
index 0000000..ef7357d
--- /dev/null
+++ b/ilot/authentik/authentik-manage.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+BUNDLE_DIR='/usr/share/webapps/authentik'
+
+cd $BUNDLE_DIR
+
+if [ "$(id -un)" != 'authentik' ]; then
+	exec su authentik -c '"$0" "$@"' -- ./manage.py "$@"
+else
+	exec ./manage.py "$@"
+fi
diff --git a/ilot/authentik/authentik-worker.openrc b/ilot/authentik/authentik-worker.openrc
new file mode 100644
index 0000000..f0fa964
--- /dev/null
+++ b/ilot/authentik/authentik-worker.openrc
@@ -0,0 +1,32 @@
+#!/sbin/openrc-run
+
+name="$RC_SVCNAME"
+cfgfile="/etc/conf.d/$RC_SVCNAME.conf"
+pidfile="/run/$RC_SVCNAME.pid"
+working_directory="/usr/share/webapps/authentik"
+command="/usr/bin/authentik-manage"
+command_args="worker"
+command_user="authentik"
+command_group="authentik"
+start_stop_daemon_args=""
+command_background="yes"
+output_log="/var/log/authentik/$RC_SVCNAME.log"
+error_log="/var/log/authentik/$RC_SVCNAME.err"
+
+depend() {
+	need redis
+	need postgresql
+}
+
+start_pre() {
+	cd "$working_directory"
+	checkpath --directory --owner $command_user:$command_group --mode 0775 \
+		/var/log/authentik \
+		/var/lib/authentik/certs \
+		/var/lib/authentik/blueprints
+}
+
+stop_pre() {
+	ebegin "Killing child processes"
+	kill $(ps -o pid= --ppid $(cat $pidfile)) || true
+}
diff --git a/ilot/authentik/authentik.openrc b/ilot/authentik/authentik.openrc
new file mode 100644
index 0000000..a036393
--- /dev/null
+++ b/ilot/authentik/authentik.openrc
@@ -0,0 +1,30 @@
+#!/sbin/openrc-run
+
+name="$RC_SVCNAME"
+cfgfile="/etc/conf.d/$RC_SVCNAME.conf"
+pidfile="/run/$RC_SVCNAME.pid"
+working_directory="/usr/share/webapps/authentik"
+command="/usr/share/webapps/authentik/server"
+command_user="authentik"
+command_group="authentik"
+start_stop_daemon_args=""
+command_background="yes"
+output_log="/var/log/authentik/$RC_SVCNAME.log"
+error_log="/var/log/authentik/$RC_SVCNAME.err"
+
+depend() {
+	need redis
+	need postgresql
+}
+
+start_pre() {
+	cd "$working_directory"
+	checkpath --directory --owner $command_user:$command_group --mode 0775 \
+		/var/log/authentik \
+		/var/lib/authentik/certs
+}
+
+stop_pre() {
+	ebegin "Killing child processes"
+	kill $(ps -o pid= --ppid $(cat $pidfile)) || true
+}
diff --git a/ilot/authentik/authentik.post-install b/ilot/authentik/authentik.post-install
new file mode 100755
index 0000000..a715d20
--- /dev/null
+++ b/ilot/authentik/authentik.post-install
@@ -0,0 +1,39 @@
+#!/bin/sh
+set -eu
+
+group=authentik
+config_file='/etc/authentik/config.yml'
+
+setcap 'cap_net_bind_service=+ep' /usr/share/webapps/authentik/server
+
+if grep -q '@@SECRET_KEY@@' "$config_file"; then
+	echo "* Generating random secret in $config_file" >&2
+
+	secret_key="$(pwgen -s 50 1)"
+	sed -i "s|@@SECRET_KEY@@|$secret_key|" "$config_file"
+	chown root:$group "$config_file"
+fi
+
+if [ "${0##*.}" = 'post-upgrade' ]; then
+	cat >&2 <<-EOF
+	*
+	* To finish Authentik upgrade run:
+	*
+	*     authentik-manage migrate
+	*
+	EOF
+else
+	cat >&2 <<-EOF
+	*
+	* 1. Adjust settings in /etc/authentik/config.yml.
+	*
+	* 2. Create database for Authentik:
+	*
+	*      psql -c "CREATE ROLE authentik PASSWORD 'top-secret' INHERIT LOGIN;"
+	*      psql -c "CREATE DATABASE authentik OWNER authentik ENCODING 'UTF-8';"
+	*
+	* 3. Run "authentik-manage migrate"
+	* 4. Set up the admin user at https://<hostname>/if/flow/initial-setup/
+	*
+	EOF
+fi
diff --git a/ilot/authentik/authentik.post-upgrade b/ilot/authentik/authentik.post-upgrade
new file mode 120000
index 0000000..d310dd8
--- /dev/null
+++ b/ilot/authentik/authentik.post-upgrade
@@ -0,0 +1 @@
+authentik.post-install
\ No newline at end of file
diff --git a/ilot/authentik/authentik.pre-install b/ilot/authentik/authentik.pre-install
new file mode 100644
index 0000000..792f304
--- /dev/null
+++ b/ilot/authentik/authentik.pre-install
@@ -0,0 +1,26 @@
+#!/bin/sh
+# It's very important to set user/group correctly.
+
+authentik_dir='/var/lib/authentik'
+
+if ! getent group authentik 1>/dev/null; then
+	echo '* Creating group authentik' 1>&2
+
+	addgroup -S authentik
+fi
+
+if ! id authentik 2>/dev/null 1>&2; then
+	echo '* Creating user authentik' 1>&2
+
+	adduser -DHS -G authentik -h "$authentik_dir" -s /bin/sh \
+		-g "added by apk for authentik" authentik
+	passwd -u authentik 1>/dev/null  # unlock
+fi
+
+if ! id -Gn authentik | grep -Fq redis; then
+	echo '* Adding user authentik to group redis' 1>&2
+
+	addgroup authentik redis
+fi
+
+exit 0
diff --git a/ilot/authentik/fix-ak-bash.patch b/ilot/authentik/fix-ak-bash.patch
new file mode 100644
index 0000000..c6afafb
--- /dev/null
+++ b/ilot/authentik/fix-ak-bash.patch
@@ -0,0 +1,10 @@
+diff --git a/lifecycle/ak.orig b/lifecycle/ak
+index 615bfe9..1646274 100755
+--- a/lifecycle/ak.orig
++++ b/lifecycle/ak
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env -S bash -e
++#!/usr/bin/env bash
+ MODE_FILE="${TMPDIR}/authentik-mode"
+
+ function log {
diff --git a/ilot/authentik/go-downgrade-1.22.patch b/ilot/authentik/go-downgrade-1.22.patch
new file mode 100644
index 0000000..eeae018
--- /dev/null
+++ b/ilot/authentik/go-downgrade-1.22.patch
@@ -0,0 +1,38 @@
+diff --git a/go.mod.orig b/go.mod
+index 65490a2..13a611e 100644
+--- a/go.mod.orig
++++ b/go.mod
+@@ -1,8 +1,6 @@
+ module goauthentik.io
+
+-go 1.23
+-
+-toolchain go1.23.0
++go 1.22.2
+
+ require (
+ 	beryju.io/ldap v0.1.0
+@@ -16,7 +14,7 @@ require (
+ 	github.com/gorilla/handlers v1.5.2
+ 	github.com/gorilla/mux v1.8.1
+ 	github.com/gorilla/securecookie v1.1.2
+-	github.com/gorilla/sessions v1.4.0
++	github.com/gorilla/sessions v1.3.0
+ 	github.com/gorilla/websocket v1.5.3
+ 	github.com/jellydator/ttlcache/v3 v3.2.1
+ 	github.com/mitchellh/mapstructure v1.5.0
+diff --git a/go.sum.orig b/go.sum
+index 94edf9c..856c2ee 100644
+--- a/go.sum.orig
++++ b/go.sum
+@@ -175,8 +175,8 @@ github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+
+ github.com/gorilla/securecookie v1.1.2 h1:YCIWL56dvtr73r6715mJs5ZvhtnY73hBvEF8kXD8ePA=
+ github.com/gorilla/securecookie v1.1.2/go.mod h1:NfCASbcHqRSY+3a8tlWJwsQap2VX5pwzwo4h3eOamfo=
+ github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
+-github.com/gorilla/sessions v1.4.0 h1:kpIYOp/oi6MG/p5PgxApU8srsSw9tuFbt46Lt7auzqQ=
+-github.com/gorilla/sessions v1.4.0/go.mod h1:FLWm50oby91+hl7p/wRxDth9bWSuk0qVL2emc7lT5ik=
++github.com/gorilla/sessions v1.3.0 h1:XYlkq7KcpOB2ZhHBPv5WpjMIxrQosiZanfoy1HLZFzg=
++github.com/gorilla/sessions v1.3.0/go.mod h1:ePLdVu+jbEgHH+KWw8I1z2wqd0BAdAQh/8LRvBeoNcQ=
+ github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
+ github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
+ github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
diff --git a/ilot/authentik/root-settings-csrf_trusted_origins.patch b/ilot/authentik/root-settings-csrf_trusted_origins.patch
new file mode 100644
index 0000000..4c235f9
--- /dev/null
+++ b/ilot/authentik/root-settings-csrf_trusted_origins.patch
@@ -0,0 +1,12 @@
+diff --git a/authentik/root/settings.py b/authentik/root/settings.py
+index 15e689b06..8b0c1d744 100644
+--- a/authentik/root/settings.py
++++ b/authentik/root/settings.py
+@@ -33,6 +33,7 @@ AUTH_USER_MODEL = "authentik_core.User"
+
+ CSRF_COOKIE_NAME = "authentik_csrf"
+ CSRF_HEADER_NAME = "HTTP_X_AUTHENTIK_CSRF"
++CSRF_TRUSTED_ORIGINS = CONFIG.get("csrf.trusted_origins")
+ LANGUAGE_COOKIE_NAME = "authentik_language"
+ SESSION_COOKIE_NAME = "authentik_session"
+ SESSION_COOKIE_DOMAIN = CONFIG.get("cookie_domain", None)
diff --git a/ilot/certbot-dns-gandi/APKBUILD b/ilot/certbot-dns-gandi/APKBUILD
new file mode 100644
index 0000000..d6845b5
--- /dev/null
+++ b/ilot/certbot-dns-gandi/APKBUILD
@@ -0,0 +1,40 @@
+# Contributor: Antoine Martin (ayakael)
+# Maintainer: Antoine Martin (ayakael)
+pkgname=certbot-dns-gandi
+pkgdesc="Gandi DNS authenticator plugin for certbot"
+pkgver=1.5.0
+pkgrel=0
+arch="noarch"
+url="https://github.com/obynio/certbot-plugin-gandi"
+license="MIT"
+depends="certbot"
+makedepends="
+	py3-setuptools
+	py3-gpep517
+	py3-wheel
+"
+subpackages="$pkgname-pyc"
+options="!check"  # No test suite
+source="
+	$pkgname-$pkgver.tar.gz::https://github.com/obynio/certbot-plugin-gandi/archive/refs/tags/$pkgver.tar.gz
+	gandi.ini
+"
+builddir="$srcdir"/certbot-plugin-gandi-$pkgver
+
+build() {
+	gpep517 build-wheel \
+		--wheel-dir .dist \
+		--output-fd 3 3>&1 >&2
+}
+
+package() {
+	python3 -m installer -d "$pkgdir" \
+		.dist/*.whl
+	mkdir -p "$pkgdir"/etc/letsencrypt/gandi
+	install -m 0600 "$srcdir"/gandi.ini "$pkgdir"/etc/letsencrypt/gandi/example.ini
+}
+
+sha512sums="
+0688baec8e6de429eed12f9d85b28f47384a5bd8cd01615d94e55e38fdaf35c01707ee1ef1ec3e9196c1de06df7087798f3f5a19f07bd446f1d3fd2442b2d702  certbot-dns-gandi-1.5.0.tar.gz
+7bdfd769c8a7256a8c2d171f1c8fa4c16bea7c1abcd3442603face90834efb5f9c0d9aec54f57fc83421588c0349acbc3554d4987cb7498a7e833481b01dd712  gandi.ini
+"
diff --git a/ilot/certbot-dns-gandi/gandi.ini b/ilot/certbot-dns-gandi/gandi.ini
new file mode 100644
index 0000000..f1d20c3
--- /dev/null
+++ b/ilot/certbot-dns-gandi/gandi.ini
@@ -0,0 +1,6 @@
+# Uncomment the following lines as needed:
+# Live DNS v5 api key
+#dns_gandi_api_key=APIKEY
+
+# Optional organization id, remove it if not used
+#dns_gandi_sharing_id=SHARINGID
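A typical invocation of the plugin packaged above, following its upstream README (the domain is a placeholder; the credentials file is the example installed to /etc/letsencrypt/gandi):

    certbot certonly --authenticator dns-gandi \
        --dns-gandi-credentials /etc/letsencrypt/gandi/example.ini \
        -d example.com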
diff --git a/ilot/codeberg-pages-server/APKBUILD b/ilot/codeberg-pages-server/APKBUILD
new file mode 100644
index 0000000..0fd0f1f
--- /dev/null
+++ b/ilot/codeberg-pages-server/APKBUILD
@@ -0,0 +1,42 @@
+# Contributor: Antoine Martin (ayakael)
+# Maintainer: Antoine Martin (ayakael)
+pkgname=codeberg-pages-server
+pkgver=6.2
+pkgrel=0
+pkgdesc="The Codeberg Pages Server – with custom domain support, per-repo pages using the \"pages\" branch, caching and more."
+url="https://codeberg.org/Codeberg/pages-server"
+arch="all"
+license="EUPL-1.2"
+depends="nginx"
+makedepends="go just"
+# tests disabled for now
+options="!check"
+source="
+	$pkgname-$pkgver.tar.gz::https://codeberg.org/Codeberg/pages-server/archive/v$pkgver.tar.gz
+	codeberg-pages-server.openrc
+	"
+builddir="$srcdir/"pages-server
+subpackages="$pkgname-openrc"
+pkgusers="git"
+pkggroups="www-data"
+
+export GOPATH=$srcdir/go
+export GOCACHE=$srcdir/go-build
+export GOTMPDIR=$srcdir
+
+build() {
+	just build
+}
+
+package() {
+	msg "Packaging $pkgname"
+	install -Dm755 "$builddir"/build/codeberg-pages-server "$pkgdir"/usr/bin/codeberg-pages-server
+
+	install -Dm755 "$srcdir"/$pkgname.openrc \
+		"$pkgdir"/etc/init.d/$pkgname
+}
+
+sha512sums="
+d48e10262e94eb2e36696646e3431da066d2f820e037ab713f4446dd72c2e3895c9bf153fcbf702e05b21ec5750aa15ed9b71e2fb383f9357aeeef61073a721a  codeberg-pages-server-6.2.tar.gz
+4defb4fe3a4230f4aa517fbecd5e5b8bcef2a64e1b40615660ae9eec33597310a09df5e126f4d39ce7764bd1716c0a7040637699135c103cbc1879593c6c06f1  codeberg-pages-server.openrc
+"
diff --git a/ilot/codeberg-pages-server/codeberg-pages-server.openrc b/ilot/codeberg-pages-server/codeberg-pages-server.openrc
new file mode 100644
index 0000000..a036393
--- /dev/null
+++ b/ilot/codeberg-pages-server/codeberg-pages-server.openrc
@@ -0,0 +1,30 @@
+#!/sbin/openrc-run
+
+name="$RC_SVCNAME"
+cfgfile="/etc/conf.d/$RC_SVCNAME.conf"
+pidfile="/run/$RC_SVCNAME.pid"
+working_directory="/usr/share/webapps/authentik"
+command="/usr/share/webapps/authentik/server"
+command_user="authentik"
+command_group="authentik"
+start_stop_daemon_args=""
+command_background="yes"
+output_log="/var/log/authentik/$RC_SVCNAME.log"
+error_log="/var/log/authentik/$RC_SVCNAME.err"
+
+depend() {
+	need redis
+	need postgresql
+}
+
+start_pre() {
+	cd "$working_directory"
+	checkpath --directory --owner $command_user:$command_group --mode 0775 \
+		/var/log/authentik \
+		/var/lib/authentik/certs
+}
+
+stop_pre() {
+	ebegin "Killing child processes"
+	kill $(ps -o pid= --ppid $(cat $pidfile)) || true
+}
diff --git a/ilot/forgejo-aneksajo/APKBUILD b/ilot/forgejo-aneksajo/APKBUILD
new file mode 100644
index 0000000..efb6091
--- /dev/null
+++ b/ilot/forgejo-aneksajo/APKBUILD
@@ -0,0 +1,112 @@
+# Contributor: Carlo Landmeter
+# Contributor: 6543 <6543@obermui.de>
+# Contributor: techknowlogick
+# Contributor: Patrycja Rosa
+# Maintainer: Antoine Martin (ayakael)
+pkgname=forgejo-aneksajo
+pkgver=9.0.3_git0
+_gittag=v${pkgver/_git/-git-annex}
+pkgrel=0
+pkgdesc="Self-hosted Git service written in Go with git-annex support"
+url="https://forgejo.org"
+# riscv64: builds fail https://codeberg.org/forgejo/forgejo/issues/3025
+arch="all !riscv64"
+license="GPL-3.0-or-later"
+depends="git git-lfs gnupg"
+makedepends="go nodejs npm"
+checkdepends="bash openssh openssh-keygen sqlite tzdata"
+install="$pkgname.pre-install"
+pkgusers="forgejo"
+pkggroups="www-data"
+subpackages="$pkgname-openrc"
+source="$pkgname-$_gittag.tar.gz::https://codeberg.org/matrss/forgejo-aneksajo/archive/$_gittag.tar.gz
+	$pkgname.initd
+	$pkgname.ini
+	"
+builddir="$srcdir/forgejo-aneksajo"
+options="!check net chmod-clean"  # broken with GIT_CEILING
+
+# secfixes:
+#   7.0.4-r0:
+#     - CVE-2024-24789
+#   7.0.3-r0:
+#     - CVE-2024-24788
+#   1.21.10.0-r0:
+#     - CVE-2023-45288
+#   1.21.3.0-r0:
+#     - CVE-2023-48795
+
+export GOCACHE="${GOCACHE:-"$srcdir/go-cache"}"
+export GOTMPDIR="${GOTMPDIR:-"$srcdir"}"
+export GOMODCACHE="${GOMODCACHE:-"$srcdir/go"}"
+
+# Skip tests for arches that fail for unrelated reasons in CI
+case "$CARCH" in
+s390x|x86|armhf|armv7) options="$options !check" ;;
+esac
+
+prepare() {
+	default_prepare
+
+	npm ci
+}
+
+build() {
+	# XXX: LARGEFILE64
+	export CGO_CFLAGS="$CFLAGS -O2 -D_LARGEFILE64_SOURCE"
+	export TAGS="bindata sqlite sqlite_unlock_notify"
+	export GITEA_VERSION="${pkgver/_git/-git-annex}"
+	export EXTRA_GOFLAGS="$GOFLAGS"
+	export CGO_LDFLAGS="$LDFLAGS"
+	unset LDFLAGS
+	## make FHS compliant
+	local setting="code.gitea.io/gitea/modules/setting"
+	export LDFLAGS="$LDFLAGS -X $setting.CustomConf=/etc/forgejo/app.ini"
+	export LDFLAGS="$LDFLAGS -X $setting.AppWorkPath=/var/lib/forgejo/"
+
+	make -j1 build
+}
+
+check() {
+	local home="$srcdir"/home
+	mkdir -p "$home"
+	install -d -m700 "$home"/.ssh
+	touch "$home"/.gitconfig
+
+	env GITEA_ROOT="$home" HOME="$home" GITEA_WORK_DIR="$(pwd)" timeout -s ABRT 20m make -j1 test-sqlite
+	## "make test" - modified (exclude broken tests)
+	## 'code.gitea.io/gitea/modules/migrations': github has rate limits! 403 API
+	local tests=$(go list ./... | grep -v /vendor/ |
+		grep -v 'code.gitea.io/gitea/modules/migrations' |
+		grep -v 'code.gitea.io/gitea/modules/charset' |
+		grep -v 'code.gitea.io/gitea/models/migrations' |
+		grep -v 'code.gitea.io/gitea/services/migrations' |
+		grep -v 'code.gitea.io/gitea/integrations')
+	env GITEA_CONF="$PWD/tests/sqlite.ini" GITEA_ROOT="$home" HOME="$home" GO111MODULE=on go test -mod=vendor -tags='sqlite sqlite_unlock_notify' $tests
+
+}
+
+package() {
+	for dir in $pkgname $pkgname/git $pkgname/data $pkgname/db $pkgname/custom; do
+		install -dm750 -o forgejo -g www-data \
+			"$pkgdir"/var/lib/$dir
+	done
+
+	install -dm755 -o forgejo -g www-data "$pkgdir"/var/log/forgejo
+
+	# TODO: rename when upstream does
+	install -Dm755 -g www-data gitea "$pkgdir"/usr/bin/forgejo
+
+	install -Dm644 -o forgejo -g www-data "$srcdir"/forgejo-aneksajo.ini \
+		"$pkgdir"/etc/forgejo/app.ini
+	chown forgejo:www-data "$pkgdir"/etc/forgejo
+
+	install -Dm755 "$srcdir"/forgejo-aneksajo.initd \
+		"$pkgdir"/etc/init.d/forgejo
+}
+
+sha512sums="
+2c2493c0011e83994c12c11859c2153d855a2265d234a671d2ce855e4f45b8e1b7d7f257e9c7ffa6284b844e0068a6184ef39b88800a1d79f399ce11c7cb23b7  forgejo-aneksajo-v9.0.3-git-annex0.tar.gz
+eb93a9f6c8f204de5c813f58727015f53f9feaab546589e016c60743131559f04fc1518f487b6d2a0e7fa8fab6d4a67cd0cd9713a7ccd9dec767a8c1ddebe129  forgejo-aneksajo.initd
+b537b41b6b3a945274a6028800f39787b48c318425a37cf5d40ace0d1b305444fd07f17b4acafcd31a629bedd7d008b0bb3e30f82ffeb3d7e7e947bdbe0ff4f3  forgejo-aneksajo.ini
+"
diff --git a/ilot/forgejo-aneksajo/forgejo-aneksajo.ini b/ilot/forgejo-aneksajo/forgejo-aneksajo.ini
new file mode 100644
index 0000000..3b46259
--- /dev/null
+++ b/ilot/forgejo-aneksajo/forgejo-aneksajo.ini
@@ -0,0 +1,26 @@
+# Configuration cheat sheet: https://forgejo.org/docs/latest/admin/config-cheat-sheet/
+
+RUN_USER = forgejo
+RUN_MODE = prod
+
+[repository]
+ROOT = /var/lib/forgejo/git
+SCRIPT_TYPE = sh
+
+[server]
+STATIC_ROOT_PATH = /usr/share/webapps/forgejo
+APP_DATA_PATH = /var/lib/forgejo/data
+LFS_START_SERVER = true
+
+[database]
+DB_TYPE = sqlite3
+PATH = /var/lib/forgejo/db/forgejo.db
+SSL_MODE = disable
+
+[session]
+PROVIDER = file
+
+[log]
+ROOT_PATH = /var/log/forgejo
+MODE = file
+LEVEL = Info
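Before enabling the service defined below, the shipped configuration can be sanity-checked as the service user; `forgejo doctor check` exists in current Forgejo releases, though the exact subcommand set varies by version:

    su forgejo -s /bin/sh -c 'forgejo doctor check --config /etc/forgejo/app.ini'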
+supervisor=supervise-daemon
+name=forgejo
+command="/usr/bin/forgejo"
+command_user="${FORGEJO_USER:-forgejo}:www-data"
+command_args="web --config '${FORGEJO_CONF:-/etc/forgejo/app.ini}'"
+supervise_daemon_args="--env FORGEJO_WORK_DIR='${FORGEJO_WORK_DIR:-/var/lib/forgejo}' --chdir '${FORGEJO_WORK_DIR:-/var/lib/forgejo}' --stdout '${FORGEJO_LOG_FILE:-/var/log/forgejo/http.log}' --stderr '${FORGEJO_LOG_FILE:-/var/log/forgejo/http.log}'"
+pidfile="/run/forgejo.pid"
+
+depend() {
+ use logger dns
+ need net
+ after firewall mysql postgresql
+}
diff --git a/ilot/forgejo-aneksajo/forgejo-aneksajo.pre-install b/ilot/forgejo-aneksajo/forgejo-aneksajo.pre-install
new file mode 100644
index 0000000..c7e8b7b
--- /dev/null
+++ b/ilot/forgejo-aneksajo/forgejo-aneksajo.pre-install
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+addgroup -S -g 82 www-data 2>/dev/null
+adduser -S -D -h /var/lib/forgejo -s /bin/sh -G www-data -g forgejo forgejo 2>/dev/null \
+ && passwd -u forgejo 2>/dev/null
+
+exit 0
diff --git a/ilot/freescout/APKBUILD b/ilot/freescout/APKBUILD
new file mode 100644
index 0000000..0093ab0
--- /dev/null
+++ b/ilot/freescout/APKBUILD
@@ -0,0 +1,86 @@
+# Maintainer: Antoine Martin (ayakael)
+# Contributor: Antoine Martin (ayakael)
+pkgname=freescout
+pkgver=1.8.160
+pkgrel=0
+pkgdesc="Free self-hosted help desk & shared mailbox"
+arch="noarch"
+url="https://freescout.net"
+license="AGPL-3.0"
+_php=php83
+_php_mods="-fpm -mbstring -xml -imap -zip -gd -curl -intl -tokenizer -pdo_pgsql -openssl -session -iconv -fileinfo -dom -pcntl"
+depends="$_php ${_php_mods//-/$_php-} nginx postgresql pwgen bash"
+makedepends="composer pcre"
+install="$pkgname.post-install $pkgname.post-upgrade $pkgname.pre-install"
+source="
+ $pkgname-$pkgver.tar.gz::https://github.com/freescout-helpdesk/freescout/archive/refs/tags/$pkgver.tar.gz
+ freescout.nginx
+ freescout-manage.sh
+ rename-client-to-membre-fr-en.patch
+ fix-laravel-log-viewer.patch
+ "
+pkgusers="freescout"
+pkggroups="freescout"
+
+build() {
+ composer install --ignore-platform-reqs
+}
+
+package() {
+ local logdir="/var/log/$pkgname"
+ local datadir="/var/lib/$pkgname"
+ local wwwdir="/usr/share/webapps/$pkgname"
+ local confdir="/etc/$pkgname"
+
+ # Make directories
+ install -dm 755 \
+ "$pkgdir"/$wwwdir \
+ "$pkgdir"/$confdir \
+ "$pkgdir"/$logdir \
+ "$pkgdir"/$datadir
+
+ # Copy and ln operations
+ cp -R "$builddir"/* "$pkgdir"/$wwwdir/.
+ for i in storage/app storage/framework bootstrap/cache \
+ public/css/builds public/js/builds public/modules Modules; do
+
+ if [ -d "$pkgdir"$wwwdir/$i ]; then
+ if [ ! -d "$pkgdir"/$datadir/${i%/*} ]; then
+ mkdir -p "$pkgdir"/$datadir/${i%/*}
+ fi
+ mv "$pkgdir"$wwwdir/$i "$pkgdir"/$datadir/$i
+ else
+ mkdir -p "$pkgdir"/$datadir/$i
+ fi
+ ln -s $datadir/$i "$pkgdir"/$wwwdir/$i
+ done
+ ln -s /etc/freescout/freescout.conf "$pkgdir"/usr/share/webapps/freescout/.env
+ ln -s $wwwdir/storage/app/public "$pkgdir"/$wwwdir/public/storage
+
+ # log dir
+ rm -R "$pkgdir"/$wwwdir/storage/logs
+ ln -s "$logdir" "$pkgdir"/$wwwdir/storage/logs
+
+ # Permission settings
+ chown -R freescout:www-data "$pkgdir"/$datadir "$pkgdir"/$logdir
+
+ # config files
+ install -Dm644 "$srcdir"/freescout.nginx \
+ "$pkgdir"/etc/nginx/http.d/freescout.conf
+ install -Dm640 "$builddir"/.env.example \
+ "$pkgdir"/etc/freescout/freescout.conf
+ sed -i 's|APP_KEY.*|APP_KEY=@@SECRET_KEY@@|' "$pkgdir"/etc/freescout/freescout.conf
+ chown root:www-data "$pkgdir"/etc/freescout/freescout.conf
+
+ # Install wrapper script to /usr/bin.
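+ # (freescout-manage wraps "php artisan" from the app directory and drops
+ # to the freescout user when run as root, e.g. "freescout-manage migrate")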
+ install -m755 -D "$srcdir"/freescout-manage.sh "$pkgdir"/usr/bin/freescout-manage +} +sha512sums=" +8441385a36d9ee5b542936f34e7700e86e1595d9a16b07afeac42bf48409ba0ecd1c542bc82b48afb0bb9201c7219bd146fe9455491ba40116dc66953b994488 freescout-1.8.160.tar.gz +e4af6c85dc12f694bef2a02e4664e31ed50b2c109914d7ffad5001c2bbd764ef25b17ecaa59ff55ef41bccf17169bf910d1a08888364bdedd0ecc54d310e661f freescout.nginx +7ce9b3ee3a979db44f5e6d7daa69431e04a5281f364ae7be23e5a0a0547f96abc858d2a8010346be2fb99bd2355fb529e7030ed20d54f310249e61ed5db4d0ba freescout-manage.sh +0cba00b7d945ce84f72a2812d40028a073a5278856f610e46dbfe0ac78deff6bf5eba7643635fa4bc64d070c4d49eb47d24ea0a05ba1e6ea76690bfd77906366 rename-client-to-membre-fr-en.patch +2c651db6adac6d53597ba36965d0c65e005293f9b030e6be167853e4089384920524737aa947c5066877ee8caefb46741ccba797f653e7c2678556063540d261 fix-laravel-log-viewer.patch +" diff --git a/ilot/freescout/fix-laravel-log-viewer.patch b/ilot/freescout/fix-laravel-log-viewer.patch new file mode 100644 index 0000000..8f29a36 --- /dev/null +++ b/ilot/freescout/fix-laravel-log-viewer.patch @@ -0,0 +1,13 @@ +diff --git a/vendor/composer/installed.json.orig b/vendor/composer/installed.json +index 0b826f5..9d14ec8 100644 +--- a/vendor/composer/installed.json.orig ++++ b/vendor/composer/installed.json +@@ -4494,7 +4494,7 @@ + "installation-source": "dist", + "autoload": { + "classmap": [ +- "src/controllers" ++ "src/" + ], + "psr-0": { + "Rap2hpoutre\\LaravelLogViewer\\": "src/" diff --git a/ilot/freescout/freescout-manage.sh b/ilot/freescout/freescout-manage.sh new file mode 100644 index 0000000..9367807 --- /dev/null +++ b/ilot/freescout/freescout-manage.sh @@ -0,0 +1,11 @@ +#!/bin/sh + +BUNDLE_DIR='/usr/share/webapps/freescout' + +cd $BUNDLE_DIR + +if [ "$(id -un)" != 'freescout' ]; then + exec su freescout -c '"$0" "$@"' -- php artisan "$@" +else + exec php artisan "$@" +fi diff --git a/ilot/freescout/freescout.nginx b/ilot/freescout/freescout.nginx new file mode 100644 index 0000000..15f2161 --- /dev/null +++ b/ilot/freescout/freescout.nginx @@ -0,0 +1,56 @@ +server { + listen 80; + listen [::]:80; + + server_name example.com www.example.com; + + root /usr/share/webapps/freescout/public; + + index index.php index.html index.htm; + + error_log /var/www/html/storage/logs/web-server.log; + + # Max. attachment size. + # It must be also set in PHP.ini via "upload_max_filesize" and "post_max_size" directives. + client_max_body_size 20M; + + location / { + try_files $uri $uri/ /index.php?$query_string; + } + location ~ \.php$ { + fastcgi_split_path_info ^(.+\.php)(/.+)$; + fastcgi_pass unix:/run/php/php8.0-fpm.sock; + fastcgi_index index.php; + fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name; + include fastcgi_params; + } + # Uncomment this location if you want to improve attachments downloading speed. + # Also make sure to set APP_DOWNLOAD_ATTACHMENTS_VIA=nginx in the .env file. 
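+ # (With that setting FreeScout answers with an X-Accel-Redirect header and
+ # nginx serves the file from the internal location below by itself,
+ # instead of streaming it through PHP-FPM.)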
+ #location ^~ /storage/app/attachment/ {
+ # internal;
+ # alias /var/lib/freescout/storage/app/attachment/;
+ #}
+ location ~* ^/storage/attachment/ {
+ expires 1M;
+ access_log off;
+ try_files $uri $uri/ /index.php?$query_string;
+ }
+ location ~* ^/(?:css|js)/.*\.(?:css|js)$ {
+ expires 2d;
+ access_log off;
+ add_header Cache-Control "public, must-revalidate";
+ }
+ # The list should be in sync with /storage/app/public/uploads/.htaccess and /config/app.php
+ location ~* ^/storage/.*\.((?!(jpg|jpeg|jfif|pjpeg|pjp|apng|bmp|gif|ico|cur|png|tif|tiff|webp|pdf|txt|diff|patch|json|mp3|wav|ogg|wma)).)*$ {
+ add_header Content-disposition "attachment; filename=$2";
+ default_type application/octet-stream;
+ }
+ location ~* ^/(?:css|fonts|img|installer|js|modules|[^\\\]+\..*)$ {
+ expires 1M;
+ access_log off;
+ add_header Cache-Control "public";
+ }
+ location ~ /\. {
+ deny all;
+ }
+}
diff --git a/ilot/freescout/freescout.post-install b/ilot/freescout/freescout.post-install
new file mode 100755
index 0000000..467962b
--- /dev/null
+++ b/ilot/freescout/freescout.post-install
@@ -0,0 +1,48 @@
+#!/bin/sh
+set -eu
+
+group=www-data
+config_file='/etc/freescout/freescout.conf'
+
+if grep -q '@@SECRET_KEY@@' "$config_file"; then
+ echo "* Generating random secret in $config_file" >&2
+
+ secret_key="$(freescout-manage key:generate --show)"
+ sed -i "s|@@SECRET_KEY@@|$secret_key|" "$config_file"
+fi
+
+if [ "${0##*.}" = 'post-upgrade' ]; then
+ cat >&2 <<-EOF
+ *
+ * To finish Freescout upgrade run:
+ *
+ * freescout-manage freescout:after-app-update
+ *
+ EOF
+else
+ cat >&2 <<-EOF
+ *
+ * 1. Adjust settings in /etc/freescout/freescout.conf
+ *
+ * 2. Make sure cgi.fix_pathinfo=0 is set in /etc/php83/php.ini
+ *
+ * 3. Create database for Freescout:
+ *
+ * psql -c "CREATE ROLE freescout PASSWORD 'top-secret' INHERIT LOGIN;"
+ * psql -c "CREATE DATABASE freescout OWNER freescout ENCODING 'UTF-8';"
+ *
+ * 4. Clear application cache and apply .env file changes:
+ *
+ * freescout-manage freescout:clear-cache
+ *
+ * 5. Create tables:
+ *
+ * freescout-manage migrate
+ *
+ * 6. Create admin user
+ *
+ * freescout-manage freescout:create-user
+ *
+ EOF
+fi
+
diff --git a/ilot/freescout/freescout.post-upgrade b/ilot/freescout/freescout.post-upgrade
new file mode 120000
index 0000000..d53f932
--- /dev/null
+++ b/ilot/freescout/freescout.post-upgrade
@@ -0,0 +1 @@
+freescout.post-install
\ No newline at end of file
diff --git a/ilot/freescout/freescout.pre-install b/ilot/freescout/freescout.pre-install
new file mode 100755
index 0000000..6332408
--- /dev/null
+++ b/ilot/freescout/freescout.pre-install
@@ -0,0 +1,25 @@
+#!/bin/sh
+
+freescout_dir='/var/lib/freescout'
+
+if ! getent group freescout 1>/dev/null; then
+ echo '* Creating group freescout' 1>&2
+
+ addgroup -S freescout
+fi
+
+if ! id freescout 2>/dev/null 1>&2; then
+ echo '* Creating user freescout' 1>&2
+
+ adduser -DHS -G freescout -h "$freescout_dir" -s /bin/sh \
+ -g "added by apk for freescout" freescout
+ passwd -u freescout 1>/dev/null # unlock
+fi
+
+if ! 
id -Gn freescout | grep -Fq www-data; then + echo '* Adding user freescout to group www-data' 1>&2 + + addgroup freescout www-data +fi + +exit 0 diff --git a/ilot/freescout/rename-client-to-membre-fr-en.patch b/ilot/freescout/rename-client-to-membre-fr-en.patch new file mode 100644 index 0000000..90e75b8 --- /dev/null +++ b/ilot/freescout/rename-client-to-membre-fr-en.patch @@ -0,0 +1,220 @@ +diff --git a/resources/lang/en.json b/resources/lang/en.json +new file mode 100644 +index 00000000..82d26052 +--- /dev/null ++++ b/resources/lang/en.json +@@ -0,0 +1,32 @@ ++{ ++ ":person changed the customer to :customer": ":person changed the member to :customer", ++ ":person changed the customer to :customer in conversation #:conversation_number": ":person changed the member to :customer in conversation #:conversation_number", ++ "Auto reply to customer": "Auto reply to member", ++ "Change Customer": "Change Member", ++ "Change the customer to :customer_email?": "Change the member to :customer_email?", ++ "Create a new customer": "Create a new member", ++ "Customer": "Member", ++ "Customer Name": "Member Name", ++ "Customer Profile": "Member Profile", ++ "Customer changed": "Member changed", ++ "Customer saved successfully.": "Member saved successfully", ++ "Customer viewed :when": "Member viewed :when", ++ "Customers": "Members", ++ "Customers email this address for help (e.g. support@domain.com)": "Members email this address for help (e.g. support@domain.com)", ++ "Email :tag_email_begin:email:tag_email_end has been moved from another customer: :a_begin:customer:a_end.": "Email :tag_email_begin:email:tag_email_end has been moved from another member: :a_begin:customer:a_end.", ++ "Email to customer": "Email to member", ++ "Emails to Customers": "Emails to Members", ++ "Error sending email to customer": "Error sending email to member", ++ "Message not sent to customer": "Message not sent to member", ++ "Name that will appear in the From<\/strong> field when a customer views your email.": "Name that will appear in the From<\/strong> field when a member views your email.", ++ "No customers found": "No members found", ++ "No customers found. Would you like to create one?": "No members found. Would you like to create one?", ++ "Notify :person when a customer replies…": "Notify :person when a member replies…", ++ "Notify me when a customer replies…": "Notify me when a member replies…", ++ "Search for a customer by name or email": "Search for a member by name or email", ++ "Sending emails need to be configured for the mailbox in order to send emails to customers and support agents": "Sending emails need to be configured for the mailbox in order to send emails to members and support agents", ++ "This number is not visible to customers. It is only used to track conversations within :app_name": "This number is not visible to members. It is only used to track conversations within :app_name", ++ "This reply will go to the customer. :%switch_start%Switch to a note:switch_end if you are replying to :user_name.": "This reply will go to the member. 
:%switch_start%Switch to a note:switch_end if you are replying to :user_name.", ++ "This text will be added to the beginning of each email reply sent to a customer.": "This text will be added to the beginning of each email reply sent to a member.", ++ "When a customer emails this mailbox, application can send an auto reply to the customer immediately.Only one auto reply is sent per new conversation.": "When a member emails this mailbox, application can send an auto reply to the member immediately.Only one auto reply is sent per new conversation." ++} +\ No newline at end of file +diff --git a/resources/lang/fr.json.orig b/resources/lang/fr.json +index 6264973..8a7037e 100644 +--- a/resources/lang/fr.json.orig ++++ b/resources/lang/fr.json +@@ -26,8 +26,8 @@ + ":person added a note to conversation #:conversation_number": ":person a ajouté une note à la conversation #:conversation_number", + ":person assigned :assignee conversation #:conversation_number": ":person a assigné :assignee à la conversation #:conversation_number", + ":person assigned to :assignee": ":person a assigné :assignee", +- ":person changed the customer to :customer": ":person a changé le client en :customer", +- ":person changed the customer to :customer in conversation #:conversation_number": ":person a changé le client en :customer dans la conversation #:conversation_number", ++ ":person changed the customer to :customer": ":person a changé le membre en :customer", ++ ":person changed the customer to :customer in conversation #:conversation_number": ":person a changé le membre en :customer dans la conversation #:conversation_number", + ":person created a draft": ":person a créé un brouillon", + ":person deleted": ":person supprimée", + ":person edited :creator's draft": ":person a modifié brouillon de :creator", +@@ -112,7 +112,7 @@ + "Auto Reply": "Réponse Automatique", + "Auto Reply status saved": "Statut de réponse automatique enregistré", + "Auto replies don't include your mailbox signature, so be sure to add your contact information if necessary.": "Les réponses automatiques n'incluent pas la signature de votre boîte aux lettres, assurez-vous d'ajouter vos coordonnées si nécessaire.", +- "Auto reply to customer": "Réponse automatique au client", ++ "Auto reply to customer": "Réponse automatique au membre", + "Back": "Retour", + "Back to folder": "Retour au dossier", + "Background Jobs": "Emplois d'arrière-plan", +@@ -123,10 +123,10 @@ + "Cancel": "Annuler", + "Cc": "Cc", + "Change": "Modifier", +- "Change Customer": "Changer de client", ++ "Change Customer": "Changer de membre", + "Change address in mailbox settings": "Modifier l'adresse dans les paramètres de la boîte aux lettres", + "Change default redirect": "Modifier la redirection par défaut", +- "Change the customer to :customer_email?": "Changer le client en :customer_email ?", ++ "Change the customer to :customer_email?": "Changer le membre en :customer_email ?", + "Change your password": "Changer votre mot de passe", + "Chat": "Tchat", + "Check Connection": "Vérifier la connexion", +@@ -182,7 +182,7 @@ + "Create a New User": "Créer un nouvel utilisateur", + "Create a Password": "Créer un mot de passe", + "Create a mailbox": "Créer une boîte de réception", +- "Create a new customer": "Créer un nouveau client", ++ "Create a new customer": "Créer un nouveau membre", + "Create symlink manually": "Créer un lien symbolique manuellement", + "Created At": "Créé à", + "Created by :person": "Créé par :person", +@@ -190,14 +190,14 @@ + "Current Password": "Mot de 
passe actuel", + "Custom From Name": "Nom de l'expéditeur personnalisé", + "Custom Name": "Nom personnalisé", +- "Customer": "Client", +- "Customer Name": "Nom du client", +- "Customer Profile": "Profil client", +- "Customer changed": "Client changé", +- "Customer saved successfully.": "Client enregistré avec succès.", +- "Customer viewed :when": "Client vu :when", +- "Customers": "Clients", +- "Customers email this address for help (e.g. support@domain.com)": "Les clients utilisent cette adresse par e-mail pour obtenir de l'aide (par exemple, support@domain.com)", ++ "Customer": "Membre", ++ "Customer Name": "Nom du membre", ++ "Customer Profile": "Profil membre", ++ "Customer changed": "Membre changé", ++ "Customer saved successfully.": "Membre enregistré avec succès.", ++ "Customer viewed :when": "Membre vu :when", ++ "Customers": "Membres", ++ "Customers email this address for help (e.g. support@domain.com)": "Les membres utilisent cette adresse par e-mail pour obtenir de l'aide (par exemple, support@domain.com)", + "Daily": "Quotidien", + "Dashboard": "Tableau de bord", + "Date": "Date", +@@ -247,15 +247,15 @@ + "Edit User": "Modifier l'utilisateur", + "Edited by :whom :when": "Édité par :whom :when", + "Email": "Email", +- "Email :tag_email_begin:email:tag_email_end has been moved from another customer: :a_begin:customer:a_end.": "Email :tag_email_begin:email:tag_email_end a été déplacé depuis un autre client : :a_begin:customer:a_end.", ++ "Email :tag_email_begin:email:tag_email_end has been moved from another customer: :a_begin:customer:a_end.": "Email :tag_email_begin:email:tag_email_end a été déplacé depuis un autre membre : :a_begin:customer:a_end.", + "Email Address": "Adresse e-mail", + "Email Alerts For Administrators": "Envoyez des alertes par e-mail aux administrateurs", + "Email Header": "En-tête de l'e-mail", + "Email Signature": "Signature e-mail", + "Email Template": "Modèle d'e-mail", + "Email passed for delivery. If you don't receive a test email, check your mail server logs.": "E-mail transmis pour livraison. Si vous ne recevez pas d'e-mail de test, consultez les journaux de votre serveur de messagerie.", +- "Email to customer": "Courriel au client", +- "Emails to Customers": "Emails aux clients", ++ "Email to customer": "Courriel au membre", ++ "Emails to Customers": "Emails aux membres", + "Empty Trash": "Vider la corbeille", + "Empty license key": "Clé de licence vide", + "Enable Auto Reply": "Activer la réponse automatique", +@@ -276,7 +276,7 @@ + "Error occurred. Please try again later.": "Erreur est survenue. Veuillez réessayer plus tard.", + "Error occurred. Please try again or try another :%a_start%update method:%a_end%": "Erreur est survenue. 
Veuillez réessayer ou en essayer une autre :%a_start% méthode de mise à jour:%a_end%", + "Error sending alert": "Erreur lors de l'envoi de l'alerte", +- "Error sending email to customer": "Erreur lors de l'envoi d'un e-mail au client", ++ "Error sending email to customer": "Erreur lors de l'envoi d'un e-mail au membre", + "Error sending email to the user who replied to notification from wrong email": "Erreur lors de l'envoi d'un e-mail à l'utilisateur qui a répondu à la notification d'un mauvais e-mail", + "Error sending email to user": "Erreur lors de l'envoi d'un e-mail à l'utilisateur", + "Error sending invitation email to user": "Erreur lors de l'envoi d'un e-mail d'invitation à l'utilisateur", +@@ -419,7 +419,7 @@ + "Message bounced (:link)": "Message renvoyé (:link)", + "Message cannot be empty": "Le message ne peut pas être vide", + "Message has been already sent. Please discard this draft.": "Le message a déjà été envoyé. Veuillez effacer ce brouillon.", +- "Message not sent to customer": "Message non envoyé au client", ++ "Message not sent to customer": "Message non envoyé au membre", + "Method": "Méthode", + "Migrate DB": "Migrer la base de données", + "Mine": "Mes conversations", +@@ -439,7 +439,7 @@ + "My Apps": "Mes Applications", + "My open conversations": "Mes conversations ouvertes", + "Name": "Nom", +- "Name that will appear in the From<\/strong> field when a customer views your email.": "Nom qui apparaîtra dans le champ De<\/strong> lorsqu'un client consulte votre e-mail.", ++ "Name that will appear in the From<\/strong> field when a customer views your email.": "Nom qui apparaîtra dans le champ De<\/strong> lorsqu'un membre consulte votre e-mail.", + "New Conversation": "Nouvelle conversation", + "New Mailbox": "Nouvelle boîte de réception", + "New Password": "Nouveau mot de passe", +@@ -451,8 +451,8 @@ + "Next active conversation": "Conversation active suivante", + "No": "Non", + "No activations left for this license key": "Il ne reste aucune activation pour cette clé de licence", +- "No customers found": "Aucun client trouvé", +- "No customers found. Would you like to create one?": "Aucun client trouvé. Souhaitez-vous en créer un?", ++ "No customers found": "Aucun membre trouvé", ++ "No customers found. Would you like to create one?": "Aucun membre trouvé. Souhaitez-vous en créer un?", + "No invite was found. Please contact your administrator to have a new invite email sent.": "Aucune invitation trouvée. 
Veuillez contacter votre administrateur pour qu'il envoie une nouvelle invitation par email.", + "Non-writable files found": "Fichiers non-inscriptibles trouvés", + "None": "Aucun", +@@ -471,10 +471,10 @@ + "Notifications": "Notifications", + "Notifications saved successfully": "Notifications enregistrées", + "Notifications will start showing up here soon": "Les notifications commenceront bientôt à apparaître ici", +- "Notify :person when a customer replies…": "Avertir :person lorsqu'un client répond…", ++ "Notify :person when a customer replies…": "Avertir :person lorsqu'un membre répond…", + "Notify :person when another :app_name user replies or adds a note…": "Notifier :person quand un autre utilisateur :app_name répond ou ajoute une note…", + "Notify :person when…": "Avertir :person lorsque…", +- "Notify me when a customer replies…": "M'avertir lorsqu'un client répond…", ++ "Notify me when a customer replies…": "M'avertir lorsqu'un membre répond…", + "Notify me when another :app_name user replies or adds a note…": "M'avertir lorsqu'un autre utilisateur :app_name répond ou ajoute une note…", + "Notify me when…": "Prévenez-moi quand…", + "Number": "Numéro", +@@ -587,7 +587,7 @@ + "Search": "Recherche", + "Search Conversation by Number": "Rechercher une conversation par identifiant", + "Search Users": "Rechercher des utilisateurs", +- "Search for a customer by name or email": "Rechercher un client par nom ou par e-mail", ++ "Search for a customer by name or email": "Rechercher un membre par nom ou par e-mail", + "See logs": "Voir les journaux", + "Select Mailbox": "Sélectionnez une boîte aux lettres", + "Selected Users have access to this mailbox:": "Les utilisateurs sélectionnés ont accès à cette boîte aux lettres:", +@@ -613,7 +613,7 @@ + "Sending": "Envoi en cours", + "Sending Emails": "Sending Emails", + "Sending can not be undone": "L'envoie ne peut être annulé", +- "Sending emails need to be configured for the mailbox in order to send emails to customers and support agents": "L'envoi d'e-mails doit être configuré pour la boîte aux lettres afin d'envoyer des e-mails aux clients et aux agents de support", ++ "Sending emails need to be configured for the mailbox in order to send emails to customers and support agents": "L'envoi d'e-mails doit être configuré pour la boîte aux lettres afin d'envoyer des e-mails aux membre et aux agents de support", + "Sendmail": "Exécutable Sendmail", + "Separate each email with a comma.": "Séparez chaque e-mail par une virgule", + "Server": "Serveur", +@@ -670,11 +670,11 @@ + "This is a test mail sent by :app_name. It means that outgoing email settings of your :mailbox mailbox are fine.": "Il s'agit d'un mail de test envoyé par :app_name. Cela signifie que les paramètres de courrier électronique sortant de votre boîte aux lettres :mailbox sont corrects.", + "This is a test system mail sent by :app_name. It means that mail settings are fine.": "Il s'agit d'un e-mail du système de test envoyé par :app_name. Cela signifie que les paramètres de messagerie sont corrects.", + "This may take several minutes": "Cela peut prendre plusieurs minutes", +- "This number is not visible to customers. It is only used to track conversations within :app_name": "Ce numéro n'est pas visible pour les clients. Il est uniquement utilisé pour suivre les conversations dans :app_name", ++ "This number is not visible to customers. It is only used to track conversations within :app_name": "Ce numéro n'est pas visible pour les membres. 
Il est uniquement utilisé pour suivre les conversations dans :app_name", + "This password is incorrect.": "Ce mot de passe est incorrect.", +- "This reply will go to the customer. :%switch_start%Switch to a note:%switch_end% if you are replying to :user_name.": "Cette réponse ira au client. :%switch_start%Passez à une note:%switch_end% si vous répondez à :user_name.", ++ "This reply will go to the customer. :%switch_start%Switch to a note:%switch_end% if you are replying to :user_name.": "Cette réponse ira au membre. :%switch_start%Passez à une note:%switch_end% si vous répondez à :user_name.", + "This setting gives you control over what page loads after you perform an action (send a reply, add a note, change conversation status or assignee).": "Ce paramètre vous permet de contrôler la page qui se charge après avoir effectué une action (envoyer une réponse, ajouter une note, etc.).", +- "This text will be added to the beginning of each email reply sent to a customer.": "Ce texte sera ajouté au début de chaque réponse par e-mail envoyée à un client.", ++ "This text will be added to the beginning of each email reply sent to a customer.": "Ce texte sera ajouté au début de chaque réponse par e-mail envoyée à un membre.", + "Thread is not in a draft state": "Le fil n'est pas à l'état de brouillon", + "Thread not found": "Fil non trouvé", + "Time Format": "Format de l'heure", +@@ -751,7 +751,7 @@ + "Welcome to :company_name!": "Bienvenue chez :company_name !", + "Welcome to :company_name, :first_name!": "Bienvenue chez :company_name, :first_name!", + "Welcome to the team!": "Bienvenue dans l'équipe !", +- "When a customer emails this mailbox, application can send an auto reply to the customer immediately.Only one auto reply is sent per new conversation.": "Lorsqu'un client envoie un e-mail à cette boîte aux lettres, l'application peut envoyer immédiatement une réponse automatique au client. Une seule réponse automatique est envoyée par nouvelle conversation.", ++ "When a customer emails this mailbox, application can send an auto reply to the customer immediately.Only one auto reply is sent per new conversation.": "Lorsqu'un membre envoie un e-mail à cette boîte aux lettres, l'application peut envoyer immédiatement une réponse automatique au membre. 
Une seule réponse automatique est envoyée par nouvelle conversation.", + "Which mailboxes will user use?": "Quelles boîtes aux lettres l'utilisateur utilisera-t-il?", + "Who Else Will Use This Mailbox": "Qui d'autre utilisera cette boîte aux lettres", + "Work": "Professionnel", diff --git a/ilot/listmonk/APKBUILD b/ilot/listmonk/APKBUILD new file mode 100644 index 0000000..1bf9721 --- /dev/null +++ b/ilot/listmonk/APKBUILD @@ -0,0 +1,73 @@ +# Contributor: Antoine Martin (ayakael) +# Maintainer: Antoine Martin (ayakael) +pkgname=listmonk +pkgver=4.1.0 +pkgrel=0 +pkgdesc='Self-hosted newsletter and mailing list manager with a modern dashboard' +arch="all" +url=https://listmonk.app +license="AGPL3" +depends=" + libcap-setcap + postgresql + postgresql-contrib + procps + " +makedepends="go npm nodejs yarn" +source=" + $pkgname-$pkgver.tar.gz::https://github.com/knadh/listmonk/archive/v$pkgver.tar.gz + listmonk.sh + listmonk.openrc + " +install="$pkgname.pre-install $pkgname.post-install $pkgname.post-upgrade" +subpackages="$pkgname-openrc" +pkgusers="listmonk" +pkggroups="listmonk" + +build() { + go build \ + -trimpath \ + -buildmode=pie \ + -mod=readonly \ + -modcacherw \ + -ldflags "-extldflags '$LDFLAGS' -X 'main.buildString=Alpine Linux v$pkgver-$pkgrel' -X 'main.versionString=v$pkgver'" \ + -o $pkgname \ + cmd/*.go + + ( + cd frontend + export YARN_CACHE_FOLDER="$srcdir/node_modules" + export VUE_APP_VERSION="v$pkgver" + yarn install --frozen-lockfile + yarn build + ) +} + +check() { + go test ./... +} + +package() { + install -Dm755 "$srcdir"/listmonk.sh "$pkgdir"/usr/bin/listmonk + install -Dm644 config.toml.sample "$pkgdir"/etc/listmonk/config.toml + install -Dm644 -t "$pkgdir"/usr/share/webapps/listmonk/ \ + schema.sql \ + queries.sql \ + permissions.json \ + config.toml.sample + install -Dm755 listmonk "$pkgdir"/usr/share/webapps/listmonk/ + install -Dm644 -t "$pkgdir"/usr/share/webapps/listmonk/frontend/dist/ \ + frontend/dist/static/favicon.png + cp -a frontend/dist/static "$pkgdir"/usr/share/webapps/listmonk/frontend/dist/static + cp -a frontend/dist/index.html "$pkgdir"/usr/share/webapps/listmonk/frontend/dist/index.html + cp -a static "$pkgdir"/usr/share/webapps/listmonk/ + cp -a i18n "$pkgdir"/usr/share/webapps/listmonk/ + install -Dm755 "$srcdir"/$pkgname.openrc \ + "$pkgdir"/etc/init.d/$pkgname + ln -s /etc/listmonk/config.toml "$pkgdir"/usr/share/webapps/listmonk/config.toml +} +sha512sums=" +936b33d6de1d69ee4e7f768810116ac997c516754aace0371089bc8106bebee944197864afc11b7bc5725afa9a4f195d6629957bfcdd37c847e3780aa34558ec listmonk-4.1.0.tar.gz +939450af4b23708e3d23a5a88fad4c24b957090bdd21351a6dd520959e52e45e5fcac117a3eafa280d9506616dae39ad3943589571f008cac5abe1ffd8062424 listmonk.sh +8e9c0b1f335c295fb741418246eb17c7566e5e4200a284c6483433e8ddbf5250aa692435211cf062ad1dfcdce3fae9148def28f03f2492d33fe5e66cbeebd4bd listmonk.openrc +" diff --git a/ilot/listmonk/listmonk.openrc b/ilot/listmonk/listmonk.openrc new file mode 100644 index 0000000..e2ccb5b --- /dev/null +++ b/ilot/listmonk/listmonk.openrc @@ -0,0 +1,29 @@ +#!/sbin/openrc-run + +name="$RC_SVCNAME" +cfgfile="/etc/conf.d/$RC_SVCNAME.conf" +pidfile="/run/$RC_SVCNAME.pid" +working_directory="/usr/share/webapps/listmonk" +command="/usr/share/webapps/listmonk/listmonk" +command_user="listmonk" +command_group="listmonk" +start_stop_daemon_args="" +command_background="yes" +output_log="/var/log/listmonk/$RC_SVCNAME.log" +error_log="/var/log/listmonk/$RC_SVCNAME.err" + +depend() { + need postgresql +} + +start_pre() { + cd 
"$working_directory" + checkpath --directory --owner $command_user:$command_group --mode 0775 \ + /var/log/listmonk \ + /var/lib/listmonk +} + +stop_pre() { + ebegin "Killing child processes" + kill $(ps -o pid= --ppid $(cat $pidfile)) || true +} diff --git a/ilot/listmonk/listmonk.post-install b/ilot/listmonk/listmonk.post-install new file mode 100644 index 0000000..3e25f91 --- /dev/null +++ b/ilot/listmonk/listmonk.post-install @@ -0,0 +1,33 @@ +#!/bin/sh +set -eu + +setcap 'cap_net_bind_service=+ep' /usr/share/webapps/listmonk/listmonk + +if [ "${0##*.}" = 'post-upgrade' ]; then + cat >&2 <<-EOF + * + * To finish Listmonk upgrade run: + * + * listmonk --upgrade + * + * If upgrading from v3.0.0, please first set the following env variables: + * + * export LISTMONK_ADMIN_USER=your-admin-user + * export LISTMONK_ADMIN_PASSWORD=your-admin-password + * listmonk --upgrade + * + EOF +else + cat >&2 <<-EOF + * + * 1. Adjust settings in /etc/listmonk/config.toml. + * + * 2. Create database for Listmonk: + * + * psql -c "CREATE ROLE listmonk PASSWORD 'top-secret' INHERIT LOGIN;" + * psql -c "CREATE DATABASE listmonk OWNER listmonk ENCODING 'UTF-8';" + * + * 3. Run "listmonk --install" + * + EOF +fi diff --git a/ilot/listmonk/listmonk.post-upgrade b/ilot/listmonk/listmonk.post-upgrade new file mode 120000 index 0000000..0b729b1 --- /dev/null +++ b/ilot/listmonk/listmonk.post-upgrade @@ -0,0 +1 @@ +listmonk.post-install \ No newline at end of file diff --git a/ilot/listmonk/listmonk.pre-install b/ilot/listmonk/listmonk.pre-install new file mode 100644 index 0000000..71eb3a0 --- /dev/null +++ b/ilot/listmonk/listmonk.pre-install @@ -0,0 +1,21 @@ +#!/bin/sh +# It's very important to set user/group correctly. + +listmonk_dir='/var/lib/listmonk' + +if ! getent group listmonk 1>/dev/null; then + echo '* Creating group listmonk' 1>&2 + + addgroup -S listmonk +fi + +if ! 
id listmonk 2>/dev/null 1>&2; then
+ echo '* Creating user listmonk' 1>&2
+
+ adduser -DHS -G listmonk -h "$listmonk_dir" -s /bin/sh \
+ -g "added by apk for listmonk" listmonk
+ passwd -u listmonk 1>/dev/null # unlock
+fi
+
+
+exit 0
diff --git a/ilot/listmonk/listmonk.sh b/ilot/listmonk/listmonk.sh
new file mode 100644
index 0000000..d89ca52
--- /dev/null
+++ b/ilot/listmonk/listmonk.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+BUNDLE_DIR='/usr/share/webapps/listmonk'
+
+cd $BUNDLE_DIR
+
+if [ "$(id -un)" != 'listmonk' ]; then
+ exec su listmonk -c '"$0" "$@"' -- ./listmonk "$@"
+else
+ exec ./listmonk "$@"
+fi
diff --git a/ilot/loomio/APKBUILD b/ilot/loomio/APKBUILD
new file mode 100644
index 0000000..1381afd
--- /dev/null
+++ b/ilot/loomio/APKBUILD
@@ -0,0 +1,199 @@
+# Maintainer: Antoine Martin (ayakael)
+# Contributor: Jakub Jirutka
+# Contributor: Antoine Martin (ayakael)
+pkgname=loomio
+pkgver=2.21.4
+_gittag=v$pkgver
+pkgrel=1
+pkgdesc="A collaborative decision making tool"
+url="https://github.com/loomio/loomio"
+# failing build
+#arch="x86_64"
+license="MIT"
+depends="
+ postgresql
+ postgresql-contrib
+ python3
+ redis
+ ruby3.2
+ ruby3.2-bundler
+ ruby3.2-grpc
+ vips
+ npm
+ procps-ng
+ "
+makedepends="
+ cmd:chrpath
+ ruby3.2-dev
+ nodejs
+ openssl-dev
+ readline-dev
+ zlib-dev
+ libpq-dev
+ libffi-dev
+ imagemagick-dev
+ "
+pkgusers="loomio"
+pkggroups="loomio www-data"
+install="$pkgname.pre-install $pkgname.post-install $pkgname.post-upgrade"
+subpackages="$pkgname-openrc"
+source="
+ $pkgname-$pkgver.tar.gz::https://github.com/loomio/loomio/archive/refs/tags/v$pkgver.tar.gz
+ bin-wrapper.in
+ loomio.confd
+ loomio.logrotate
+ loomio.sidekiq.initd
+ loomio.vue.initd
+ loomio.initd
+ "
+_prefix="usr/lib/webapps/loomio"
+
+export BUNDLE_DEPLOYMENT=true
+export BUNDLE_FORCE_RUBY_PLATFORM=true
+export BUNDLE_FROZEN=true
+export BUNDLE_JOBS=${JOBS:-2}
+
+prepare() {
+ local sysgemdir=$(ruby -e 'puts Gem.default_dir')
+
+ default_prepare
+
+ # Allow use of any bundler
+ sed -i -e '/BUNDLED/,+1d' Gemfile.lock
+
+ # Allow use of any platform
+ sed -i -e 's/PLATFORMS/PLATFORMS\n ruby/' Gemfile.lock
+
+ # Some gems are broken, so we copy our fixed version
+ # instead of installing it from RubyGems using Bundler.
+ mkdir -p vendor/gems/grpc/src/ruby/lib/grpc
+ cp -r "$sysgemdir"/gems/grpc-*/* vendor/gems/grpc/
+ cp "$sysgemdir"/specifications/grpc-*.gemspec \
+ vendor/gems/grpc/grpc.gemspec
+ cp "$sysgemdir"/extensions/*/*/grpc-*/grpc/*.so \
+ vendor/gems/grpc/src/ruby/lib/grpc/
+}
+
+build() {
+ local bundle_without='exclude development test'
+
+ bundle config --local build.ffi --enable-system-libffi
+ bundle config --local build.vips --enable-system-libraries
+ bundle config --local build.nokogiri --use-system-libraries \
+ --with-xml2-include=/usr/include/libxml2 \
+ --with-xslt-include=/usr/include/libxslt
+ bundle config --local build.google-protobuf '-- --with-cflags=-D__va_copy=va_copy'
+
+ msg "Installing Ruby gems..."
+ bundle config --local without "$bundle_without"
+ bundle config --local path "vendor/bundle"
+
+ bundle install --no-cache
+
+ msg "Precompiling static assets..."
+ bundle exec bootsnap precompile --gemfile app/ lib/
+
+ # Create executables in bin/*.
+ # See also https://github.com/bundler/bundler/issues/6149.
+ bundle binstubs --force bundler puma sidekiq
+
+ # Remove faulty RPATH.
+ chrpath -d vendor/bundle/ruby/*/gems/*/lib/nokogiri/*/nokogiri.so
+
+ # cp grpc so
+ cp vendor/gems/grpc/src/ruby/lib/grpc/grpc_c.so vendor/bundle/ruby/*/gems/grpc*/src/ruby/lib/grpc/.
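+ # remove the gem's prebuilt grpc_c.so copies for other Ruby series; only
+ # the library copied in above for the system Ruby is needed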
+ rm -R vendor/bundle/ruby/*/gems/grpc*/src/ruby/lib/grpc/3* vendor/bundle/ruby/*/gems/grpc*/src/ruby/lib/grpc/2*
+
+ msg "Installing npm modules..."
+ cd vue
+ # force as vite-plugin-yaml hasn't updated their peerDependencies list yet
+ npm ci --force
+ npm run build
+}
+
+package() {
+ local destdir="$pkgdir/$_prefix"
+ local datadir="$pkgdir/var/lib/loomio"
+ local file dest
+
+ # Make directories
+ install -dm 755 \
+ "$(dirname $destdir)" \
+ "$datadir"
+
+ mkdir -p "$(dirname $destdir)"
+ cp -R "$builddir" "$destdir"
+
+ cd "$destdir"/vendor/bundle/ruby/*/
+
+ # Remove tests, documentations and other useless files.
+ find gems/ \( -name 'doc' \
+ -o -name 'spec' \
+ -o -name 'test' \) \
+ -type d -maxdepth 2 -exec rm -fr "{}" +
+ find gems/ \( -name 'README*' \
+ -o -name 'CHANGELOG*' \
+ -o -name 'CONTRIBUT*' \
+ -o -name '*LICENSE*' \
+ -o -name 'Rakefile' \
+ -o -name '.*' \) \
+ -type f -delete
+
+ # Remove build logs and cache.
+ rm -rf build_info/ cache/
+ find extensions/ \( -name gem_make.out -o -name mkmf.log \) -delete
+
+ cd "$destdir"
+
+ # Install and symlink config files.
+ for file in database.yml.postgresql puma.rb sidekiq.yml; do
+ dest="$(basename "${file/.postgresql/}")"
+ install -m640 -g loomio -D config/$file "$pkgdir"/etc/loomio/$dest
+ ln -sf /etc/loomio/$dest "$pkgdir"/$_prefix/config/${file/.postgresql/}
+ done
+
+ # This file will be generated by the post-install script, just prepare symlink.
+ ln -sf /etc/loomio/secrets.yml config/secrets.yml
+ # These shouldn't be necessary, they are all configurable, but OmniBus
+
+ cat > "$datadir"/.profile <<-EOF
+ export RAILS_ENV=production
+ export NODE_ENV=production
+ export EXECJS_RUNTIME=Disabled
+ EOF
+
+ # Install wrapper scripts to /usr/bin.
+ local name; for name in rake rails; do
+ sed "s/__COMMAND__/$name/g" "$srcdir"/bin-wrapper.in \
+ > "$builddir"/loomio-$name
+ install -m755 -D "$builddir"/loomio-$name "$pkgdir"/usr/bin/loomio-$name
+ done
+
+ for file in $pkgname $pkgname.sidekiq $pkgname.vue; do
+ install -m755 -D "$srcdir"/$file.initd "$pkgdir"/etc/init.d/$file
+ done
+
+ install -m644 -D "$srcdir"/loomio.confd \
+ "$pkgdir"/etc/conf.d/loomio
+
+ install -m644 -D "$srcdir"/loomio.logrotate \
+ "$pkgdir"/etc/logrotate.d/loomio
+}
+
+assets() {
+ depends=""
+
+ amove $_prefix/public/assets
+}
+
+sha512sums="
+72a1238c1eaa3b963bd20a09d4fc2e52798264779bdf06d3f32891f2880d246059c77381329d1274bfa5979a35740017f0ced324f88b205369e77335b403ffba loomio-2.21.4.tar.gz
+6cd4bb030660a9f4697eeb7c6de3f7509558aab3651e68218583dfeea56634f3b9f58acb50c7c9a4188a38c19434a815dd6c347e30207c4c0ae028c8dcb6ccaf bin-wrapper.in
+0f1c91fbd4b8099f0a115705d5af799e4492fa2a0fd54175f3bfbfb5be1122bd7fd73a7709695c7caf2dcc667f3b8715051c24f424472e1115753e43a38fdf50 loomio.confd
+1ecb0717cd5f04b894467b21d226b98d8f83b8f62afbf8da7edd57973aeabb13d121e9061cc48aec7572b1c710e82c8b44a1cedc0a924efd4bc4a124b3afe9a8 loomio.logrotate
+c5dae2b6f9a23853c3c7ac068d97a7b0269b1775f6e0169c3d8999ec67c2baf3545515ea21037e882d900b15a7abf9061dd5a584bdc82c347b54d8c134f6d7a4 loomio.sidekiq.initd
+f774954d8b06aacab27af9593b1b12fbe18ec2d0593dd4f82e4d3dfbc7e325fb1a423347fd974a2ec6665776a6cfe85f255f4fd7493c97eb840f34eb7fbdb329 loomio.vue.initd
+645637c4112ec91ec2ea6022713e77a8ee76c0f0a81f9adf1f9210b52a578e94b5b02f0b6244b173905f580f72dc362b5434c714aae11e3619f73af223891bb8 loomio.initd
+"
diff --git a/ilot/loomio/bin-wrapper.in b/ilot/loomio/bin-wrapper.in
new file mode 100644
index 0000000..fad9737
--- /dev/null
+++ b/ilot/loomio/bin-wrapper.in
@@ -0,0 +1,15 @@
+#!/bin/sh + +BUNDLE_DIR='/usr/lib/webapps/loomio' +export RAILS_ENV='production' +export NODE_ENV='production' +export EXECJS_RUNTIME='Disabled' + +cd $BUNDLE_DIR +install -m 700 -o loomio -g loomio -d "$(readlink ./tmp)" + +if [ "$(id -un)" != 'loomio' ]; then + exec su loomio -c '"$0" "$@"' -- bin/__COMMAND__ "$@" +else + exec bin/__COMMAND__ "$@" +fi diff --git a/ilot/loomio/loomio.confd b/ilot/loomio/loomio.confd new file mode 100644 index 0000000..890ad21 --- /dev/null +++ b/ilot/loomio/loomio.confd @@ -0,0 +1,32 @@ +# Configuration file for /etc/init.d/loomio and +# /etc/init.d/loomio.{vue,sidekiq} + +# Specify how many processes to create using sidekiq-cluster and which queue +# they should handle. Each whitespace-separated item equates to one additional +# Sidekiq process, and comma-separated values in each item determine the queues +# it works on. The special queue name "*" means all queues. +# Example: "* gitlab_shell process_commit,post_receive" +# See https://docs.gitlab.com/ee/administration/sidekiq/extra_sidekiq_processes.html. +#sidekiq_queue_groups="*" + +# Maximum threads to use with Sidekiq (default: 50, 0 to disable). +#sidekiq_max_concurrency= + +# Minimum threads to use with Sidekiq (default: 0). +#sidekiq_min_concurrency= + +# The number of seconds to wait between worker checks. +#sidekiq_interval= + +# Graceful timeout for all running processes. +#sidekiq_shutdown_timeout= + +# Run workers for all queues in sidekiq_queues.yml except the given ones. +#sidekiq_negate=no + +# Run workers based on the provided selector. +#sidekiq_queue_selector=no + +# Memory limit (in MiB) for the Sidekiq process. If the RSS (Resident Set Size) +# of the Sidekiq process exceeds this limit, a delayed shutdown is triggered. +#sidekiq_memkiller_max_rss=2000 diff --git a/ilot/loomio/loomio.initd b/ilot/loomio/loomio.initd new file mode 100644 index 0000000..864d102 --- /dev/null +++ b/ilot/loomio/loomio.initd @@ -0,0 +1,39 @@ +#!/sbin/openrc-run + +name="Loomio" +description="Meta script for starting/stopping all the Loomio components" +subservices="loomio.sidekiq loomio.vue" + +depend() { + use net +} + +start() { + local ret=0 + + ebegin "Starting all Loomio components" + local svc; for svc in $subservices; do + service $svc start || ret=1 + done + eend $ret +} + +stop() { + local ret=0 + + ebegin "Stopping all Loomio components" + local svc; for svc in $subservices; do + service $svc stop || ret=1 + done + eend $ret +} + +status() { + local ret=0 + + local svc; for svc in $subservices; do + echo "$svc:" + service $svc status || ret=1 + done + eend $ret +} diff --git a/ilot/loomio/loomio.logrotate b/ilot/loomio/loomio.logrotate new file mode 100644 index 0000000..f7fd264 --- /dev/null +++ b/ilot/loomio/loomio.logrotate @@ -0,0 +1,11 @@ +/var/log/loomio/*.log { + compress + copytruncate + delaycompress + maxsize 10M + minsize 1M + missingok + sharedscripts + rotate 10 + weekly +} diff --git a/ilot/loomio/loomio.post-install b/ilot/loomio/loomio.post-install new file mode 100755 index 0000000..2e2fb10 --- /dev/null +++ b/ilot/loomio/loomio.post-install @@ -0,0 +1,32 @@ +#!/bin/sh +set -eu + +group=loomio +config_file='/etc/loomio/config.yml' + +#if [ $(grep '@@SECRET_KEY@@' "$config_file") ]; then +# echo "* Generating random secret in $config_file" >&2 + +# secret_key="$(pwgen -s 50 1)" +# sed -i "s|@@SECRET_KEY@@|$secret_key|" "$config_file" +#fi + +if [ "${0##*.}" = 'post-upgrade' ]; then + cat >&2 <<-EOF + * + * To finish Loomio upgrade run: + * + * + EOF +else + cat >&2 <<-EOF + * + * 
1. Adjust settings in /etc/loomio/config.yml.
+ *
+ * 2. Create database for loomio:
+ *
+ * psql -c "CREATE ROLE loomio PASSWORD 'top-secret' INHERIT LOGIN;"
+ * psql -c "CREATE DATABASE loomio OWNER loomio ENCODING 'UTF-8';"
+ *
+ EOF
+fi
diff --git a/ilot/loomio/loomio.post-upgrade b/ilot/loomio/loomio.post-upgrade
new file mode 120000
index 0000000..ec5bf9b
--- /dev/null
+++ b/ilot/loomio/loomio.post-upgrade
@@ -0,0 +1 @@
+loomio.post-install
\ No newline at end of file
diff --git a/ilot/loomio/loomio.pre-install b/ilot/loomio/loomio.pre-install
new file mode 100644
index 0000000..612ce4c
--- /dev/null
+++ b/ilot/loomio/loomio.pre-install
@@ -0,0 +1,26 @@
+#!/bin/sh
+# It's very important to set user/group correctly.
+
+loomio_dir='/var/lib/loomio'
+
+if ! getent group loomio 1>/dev/null; then
+ echo '* Creating group loomio' 1>&2
+
+ addgroup -S loomio
+fi
+
+if ! id loomio 2>/dev/null 1>&2; then
+ echo '* Creating user loomio' 1>&2
+
+ adduser -DHS -G loomio -h "$loomio_dir" -s /bin/sh \
+ -g "added by apk for loomio" loomio
+ passwd -u loomio 1>/dev/null # unlock
+fi
+
+if ! id -Gn loomio | grep -Fq www-data; then
+ echo '* Adding user loomio to group www-data' 1>&2
+
+ addgroup loomio www-data
+fi
+
+exit 0
diff --git a/ilot/loomio/loomio.sidekiq.initd b/ilot/loomio/loomio.sidekiq.initd
new file mode 100644
index 0000000..fd3dd2d
--- /dev/null
+++ b/ilot/loomio/loomio.sidekiq.initd
@@ -0,0 +1,32 @@
+#!/sbin/openrc-run
+
+name="Loomio background workers Service"
+root="/usr/lib/webapps/loomio"
+pidfile="/run/loomio-sidekiq.pid"
+logfile="/var/log/loomio/sidekiq.log"
+
+depend() {
+ use net
+ need redis
+}
+
+start() {
+ ebegin "Starting Loomio background workers"
+
+ cd $root
+
+ start-stop-daemon --start --background \
+ --chdir "${root}" \
+ --user="loomio" \
+ --make-pidfile --pidfile="${pidfile}" \
+ -1 "${logfile}" -2 "${logfile}" \
+ --exec /usr/bin/env -- RAILS_ENV=production bundle exec sidekiq
+ eend $?
+}
+
+stop() {
+ ebegin "Stopping Loomio background workers"
+ start-stop-daemon --stop \
+ --pidfile=${pidfile}
+ eend $?
+}
diff --git a/ilot/loomio/loomio.vue.initd b/ilot/loomio/loomio.vue.initd
new file mode 100644
index 0000000..8fffb40
--- /dev/null
+++ b/ilot/loomio/loomio.vue.initd
@@ -0,0 +1,31 @@
+#!/sbin/openrc-run
+
+name="$RC_SVCNAME"
+cfgfile="/etc/conf.d/$RC_SVCNAME.conf"
+pidfile="/run/$RC_SVCNAME.pid"
+working_directory="/usr/lib/webapps/loomio/vue"
+command="npm"
+command_args="run serve"
+command_user="loomio"
+command_group="loomio"
+start_stop_daemon_args=""
+command_background="yes"
+output_log="/var/log/loomio/$RC_SVCNAME.log"
+error_log="/var/log/loomio/$RC_SVCNAME.err"
+
+depend() {
+ need redis
+ need postgresql
+}
+
+start_pre() {
+ cd "$working_directory"
+ checkpath --directory --owner $command_user:$command_group --mode 0775 \
+ /var/log/loomio \
+ /var/lib/loomio
+}
+
+stop_pre() {
+ ebegin "Killing child processes"
+ kill $(ps -o pid= --ppid $(cat $pidfile)) || true
+}
diff --git a/ilot/peertube/APKBUILD b/ilot/peertube/APKBUILD
new file mode 100644
index 0000000..809936b
--- /dev/null
+++ b/ilot/peertube/APKBUILD
@@ -0,0 +1,82 @@
+# Maintainer: Antoine Martin (ayakael)
+# Contributor: Antoine Martin (ayakael)
+pkgname=peertube
+pkgver=6.0.2
+pkgrel=1
+pkgdesc="ActivityPub-federated video streaming platform using P2P directly in your web browser"
+# failing build
+# arch="x86_64"
+url="https://joinpeertube.org/"
+license="AGPL"
+depends="
+ nodejs
+ ffmpeg
+ postgresql
+ openssl
+ redis
+ npm
+ procps-ng
+ "
+makedepends="
+ yarn
+ "
+source="
+ $pkgname-$pkgver.tar.gz::https://github.com/Chocobozzz/PeerTube/archive/refs/tags/v$pkgver.tar.gz
+ peertube-manage.sh
+ peertube.conf
+ peertube.openrc
+ "
+builddir="$srcdir"/PeerTube-$pkgver
+install="$pkgname.post-install $pkgname.pre-install $pkgname.post-upgrade"
+subpackages="$pkgname-doc $pkgname-openrc"
+
+build() {
+ # need to fetch devel dependencies to build
+ yarn install --pure-lockfile
+ npm run build
+ rm -Rf "$builddir"/node_modules
+ yarn install --production --pure-lockfile
+}
+
+package() {
+ install -dm 755 \
+ "$pkgdir"/usr/share/webapps \
+ "$pkgdir"/usr/share/doc \
+ "$pkgdir"/usr/share/licenses/peertube \
+ "$pkgdir"/etc/init.d \
+ "$pkgdir"/etc/conf.d
+
+ # install
+ cp -a "$builddir" "$pkgdir/usr/share/webapps/peertube"
+
+ # wrapper script
+ install -Dm755 "$srcdir"/peertube-manage.sh "$pkgdir"/usr/bin/peertube-manage
+
+ # openrc
+ install -Dm755 "$srcdir"/peertube.openrc "$pkgdir"/etc/init.d/peertube
+ install -Dm644 "$srcdir"/peertube.conf "$pkgdir"/etc/conf.d/peertube
+
+ # config file setup
+ rm -R "$pkgdir"/usr/share/webapps/peertube/config
+ install -Dm644 "$builddir"/config/production.yaml.example "$pkgdir"/etc/peertube/production.yaml
+ install -Dm644 "$builddir"/config/default.yaml "$pkgdir"/etc/peertube/default.yaml
+ sed -i "s|/var/www/peertube/storage|/var/lib/peertube|g" "$pkgdir"/etc/peertube/production.yaml "$pkgdir"/etc/peertube/default.yaml
+ sed -i "s| tmp:.*| tmp: '/tmp/peertube/'|" "$pkgdir"/etc/peertube/production.yaml "$pkgdir"/etc/peertube/default.yaml
+ sed -i "s|tmp_persistent:.*|tmp_persistent: '/var/tmp/peertube/'|" "$pkgdir"/etc/peertube/production.yaml "$pkgdir"/etc/peertube/default.yaml
+ sed -i "s|logs:.*|logs: '/var/log/peertube/'|" "$pkgdir"/etc/peertube/production.yaml "$pkgdir"/etc/peertube/default.yaml
+ sed -i "s| peertube: ''| peertube: '@@SECRET_KEY@@'|" "$pkgdir"/etc/peertube/production.yaml
+
+ # docs and licenses
+ mv "$pkgdir"/usr/share/webapps/peertube/support/doc "$pkgdir"/usr/share/doc/$pkgname
+ mv "$pkgdir"/usr/share/webapps/peertube/*.md 
"$pkgdir"/usr/share/doc/peertube/. + mv "$pkgdir"/usr/share/webapps/peertube/LICENSE "$pkgdir"/usr/share/licenses/peertube/. + + # delete arm64 prebuild + rm "$pkgdir"/usr/share/webapps/$pkgname/node_modules/fs-native-extensions/prebuilds/linux-arm64/node.napi.node +} +sha512sums=" +91bcec34902f171ffe9ab3f27ab4422319f91430cab22965a5cf9887c5293152f7f85c6fc0f355820000daea0a49327aa66f20bb4cff3850e5e3d192f347c926 peertube-6.0.2.tar.gz +92de1155410848937eeff3bef480c4a074875b4236ce0b6bf4cd7213d00173e7766d130408419c85c4432a8445a03f5d4525e4283384d906d781510cc4fd8fc0 peertube-manage.sh +494bb4daf98fcd62b354eb6fae18ccff19bef1243de083a93e438680deef1d9039e30eff8870b6955c3c7b10638e6df6cbeb4fbdb7539979466f502bcc72c843 peertube.conf +5b4d3f47d0dc2ce991971ff61c604a1566811612cff91f7e6ed19b65d0830695649ddef9afff474d916a5e6764d74bb4fa6b5c12eb5e753d8fc381cdd38ab179 peertube.openrc +" diff --git a/ilot/peertube/peertube-manage.sh b/ilot/peertube/peertube-manage.sh new file mode 100644 index 0000000..70bc387 --- /dev/null +++ b/ilot/peertube/peertube-manage.sh @@ -0,0 +1,15 @@ +#!/bin/sh + +BUNDLE_DIR='/usr/share/webapps/peertube' + +cd $BUNDLE_DIR + +if [ "$(id -un)" != 'peertube' ]; then + source /etc/conf.d/peertube + export NODE_ENV NODE_CONFIG_DIR + exec su peertube -c '"$0" "$@"' -- npm run "$@" +else + source /etc/conf.d/peertube + export NODE_ENV NODE_CONFIG_DIR + exec npm run "$@" +fi diff --git a/ilot/peertube/peertube.conf b/ilot/peertube/peertube.conf new file mode 100644 index 0000000..8a7d014 --- /dev/null +++ b/ilot/peertube/peertube.conf @@ -0,0 +1,2 @@ +NODE_CONFIG_DIR=/etc/peertube +NODE_ENV=production diff --git a/ilot/peertube/peertube.openrc b/ilot/peertube/peertube.openrc new file mode 100644 index 0000000..8f03ba0 --- /dev/null +++ b/ilot/peertube/peertube.openrc @@ -0,0 +1,34 @@ +#!/sbin/openrc-run + +name="$RC_SVCNAME" +cfgfile="/etc/conf.d/$RC_SVCNAME.conf" +pidfile="/run/$RC_SVCNAME.pid" +working_directory="/usr/share/webapps/peertube" +command="/usr/bin/node" +command_args="dist/server.js" +command_user="peertube" +command_group="peertube" +start_stop_daemon_args="" +command_background="yes" +output_log="/var/log/peertube/$RC_SVCNAME.log" + +depend() { + need redis + need postgresql +} + +start_pre() { + cd "$working_directory" + checkpath --directory --owner $command_user:$command_group --mode 0775 \ + /var/log/peertube \ + /var/lib/peertube \ + /var/tmp/peertube \ + /tmp/peertube + + export NODE_ENV NODE_CONFIG_DIR +} + +stop_pre() { + ebegin "Killing child processes" + kill $(ps -o pid= --ppid $(cat $pidfile)) || true +} diff --git a/ilot/peertube/peertube.post-install b/ilot/peertube/peertube.post-install new file mode 100755 index 0000000..a83bb10 --- /dev/null +++ b/ilot/peertube/peertube.post-install @@ -0,0 +1,41 @@ +#!/bin/sh +set -eu + +group=www-data +config_file='/etc/peertube/production.yaml' + +if grep '@@SECRET_KEY@@' "$config_file" >/dev/null; then + echo "* Generating random secret in $config_file" >&2 + + secret_key="$(openssl rand -hex 32)" + sed -i "s|@@SECRET_KEY@@|$secret_key|" "$config_file" +fi + +if [ "${0##*.}" = 'post-upgrade' ]; then + cat >&2 <<-EOF + * + * To finish Peertube upgrade run: + * + * + EOF +else + cat >&2 <<-EOF + * + * 1. Adjust settings in /etc/peertube/production.yaml + * + * 2. Create database for Peertube: + * + * psql -c "CREATE ROLE peertube PASSWORD 'top-secret' INHERIT LOGIN;" + * psql -c "CREATE DATABASE peertube OWNER peertube ENCODING 'UTF-8';" + * + * 3. Start Peertube + * + * service peertube start + * + * 4. 
Create admin user
+ *
+ * peertube-manage reset-password -- -u root
+ *
+ EOF
+fi
+
diff --git a/ilot/peertube/peertube.post-upgrade b/ilot/peertube/peertube.post-upgrade
new file mode 120000
index 0000000..2dd117d
--- /dev/null
+++ b/ilot/peertube/peertube.post-upgrade
@@ -0,0 +1 @@
+peertube.post-install
\ No newline at end of file
diff --git a/ilot/peertube/peertube.pre-install b/ilot/peertube/peertube.pre-install
new file mode 100755
index 0000000..2572d9c
--- /dev/null
+++ b/ilot/peertube/peertube.pre-install
@@ -0,0 +1,25 @@
+#!/bin/sh
+
+DATADIR='/var/lib/peertube'
+
+if ! getent group peertube 1>/dev/null; then
+ echo '* Creating group peertube' 1>&2
+
+ addgroup -S peertube
+fi
+
+if ! id peertube 2>/dev/null 1>&2; then
+ echo '* Creating user peertube' 1>&2
+
+ adduser -DHS -G peertube -h "$DATADIR" -s /bin/sh \
+ -g "added by apk for peertube" peertube
+ passwd -u peertube 1>/dev/null # unlock
+fi
+
+if ! id -Gn peertube | grep -Fq www-data; then
+ echo '* Adding user peertube to group www-data' 1>&2
+
+ addgroup peertube www-data
+fi
+
+exit 0
diff --git a/ilot/php82-pecl-inotify/APKBUILD b/ilot/php82-pecl-inotify/APKBUILD
new file mode 100644
index 0000000..d2bb518
--- /dev/null
+++ b/ilot/php82-pecl-inotify/APKBUILD
@@ -0,0 +1,35 @@
+# Contributor: Fabio Ribeiro
+# Maintainer: Andy Postnikov
+pkgname=php82-pecl-inotify
+_extname=inotify
+pkgver=3.0.0
+pkgrel=1
+pkgdesc="Inotify bindings for PHP 8.2"
+url="https://pecl.php.net/package/inotify"
+arch="all"
+license="PHP-3.01"
+depends="php82-common"
+makedepends="php82-dev"
+source="php-pecl-$_extname-$pkgver.tgz::https://pecl.php.net/get/$_extname-$pkgver.tgz"
+builddir="$srcdir"/$_extname-$pkgver
+
+build() {
+ phpize82
+ ./configure --prefix=/usr --with-php-config=php-config82
+ make
+}
+
+check() {
+ make NO_INTERACTION=1 REPORT_EXIT_STATUS=1 test
+}
+
+package() {
+ make INSTALL_ROOT="$pkgdir" install
+ local _confdir="$pkgdir"/etc/php82/conf.d
+ install -d $_confdir
+ echo "extension=$_extname" > $_confdir/70_$_extname.ini
+}
+
+sha512sums="
+f8b29f8611f16b92136ab8de89181c254bba1abee1e61cac2344440567a3155aae4b9b54b10fdb1b0254fd7a96da8c14b7dc5c9f7f08a03db30ab1645aca1eee php-pecl-inotify-3.0.0.tgz
+"
diff --git a/ilot/php83-pecl-inotify/APKBUILD b/ilot/php83-pecl-inotify/APKBUILD
new file mode 100644
index 0000000..48f2bbf
--- /dev/null
+++ b/ilot/php83-pecl-inotify/APKBUILD
@@ -0,0 +1,35 @@
+# Contributor: Fabio Ribeiro
+# Maintainer: Andy Postnikov
+pkgname=php83-pecl-inotify
+_extname=inotify
+pkgver=3.0.0
+pkgrel=1
+pkgdesc="Inotify bindings for PHP 8.3"
+url="https://pecl.php.net/package/inotify"
+arch="all"
+license="PHP-3.01"
+depends="php83-common"
+makedepends="php83-dev"
+source="php-pecl-$_extname-$pkgver.tgz::https://pecl.php.net/get/$_extname-$pkgver.tgz"
+builddir="$srcdir"/$_extname-$pkgver
+
+build() {
+ phpize83
+ ./configure --prefix=/usr --with-php-config=php-config83
+ make
+}
+
+check() {
+ make NO_INTERACTION=1 REPORT_EXIT_STATUS=1 test
+}
+
+package() {
+ make INSTALL_ROOT="$pkgdir" install
+ local _confdir="$pkgdir"/etc/php83/conf.d
+ install -d $_confdir
+ echo "extension=$_extname" > $_confdir/70_$_extname.ini
+}
+
+sha512sums="
+f8b29f8611f16b92136ab8de89181c254bba1abee1e61cac2344440567a3155aae4b9b54b10fdb1b0254fd7a96da8c14b7dc5c9f7f08a03db30ab1645aca1eee php-pecl-inotify-3.0.0.tgz
+"
diff --git a/ilot/py3-azure-core/APKBUILD b/ilot/py3-azure-core/APKBUILD
new file mode 100644
index 0000000..6e76144
--- /dev/null
+++ b/ilot/py3-azure-core/APKBUILD
@@ -0,0 +1,39 @@
+# Contributor: Antoine Martin (ayakael)
+# Maintainer: Antoine Martin (ayakael)
+pkgname=py3-azure-core
+#_pkgreal is used by apkbuild-pypi to find modules at PyPI
+_pkgreal=azure-core
+pkgver=1.32.0
+pkgrel=0
+pkgdesc="Microsoft Azure Core Library for Python"
+url="https://pypi.org/project/azure-core/"
+arch="noarch"
+license="MIT"
+depends="py3-aiohttp py3-requests"
+checkdepends="py3-pytest-asyncio py3-trio"
+makedepends="py3-setuptools py3-gpep517 py3-wheel py3-flit"
+options="!check" #todo
+source="$pkgname-$pkgver.tar.gz::https://github.com/Azure/azure-sdk-for-python/archive/refs/tags/azure-core_$pkgver.tar.gz"
+builddir="$srcdir"/azure-sdk-for-python-azure-core_$pkgver/sdk/core/azure-core
+subpackages="$pkgname-pyc"
+
+build() {
+ gpep517 build-wheel \
+ --wheel-dir .dist \
+ --output-fd 3 3>&1 >&2
+}
+
+check() {
+ python3 -m venv --clear --without-pip --system-site-packages .testenv
+ .testenv/bin/python3 -m installer .dist/*.whl
+ .testenv/bin/python3 -m pytest -v
+}
+
+package() {
+ python3 -m installer -d "$pkgdir" \
+ .dist/*.whl
+}
+
+sha512sums="
+d258a2ca3bc2c9514dec91bf2dbb19c0ee4c0c0bec73a4301b47fb43be768be836f32621b70a8cdb0e39f1491a522191a82a00f318ee7c901e8861a62439e934 py3-azure-core-1.32.0.tar.gz
+"
diff --git a/ilot/py3-azure-identity/APKBUILD b/ilot/py3-azure-identity/APKBUILD
new file mode 100644
index 0000000..9341e11
--- /dev/null
+++ b/ilot/py3-azure-identity/APKBUILD
@@ -0,0 +1,44 @@
+# Contributor: Antoine Martin (ayakael)
+# Maintainer: Antoine Martin (ayakael)
+pkgname=py3-azure-identity
+#_pkgreal is used by apkbuild-pypi to find modules at PyPI
+_pkgreal=azure-identity
+pkgver=1.19.0
+pkgrel=0
+pkgdesc="Microsoft Azure Identity Library for Python"
+url="https://pypi.org/project/azure-identity/"
+arch="noarch"
+license="MIT"
+depends="
+ py3-azure-core
+ py3-cryptography
+ py3-msal-extensions
+ py3-typing-extensions
+"
+checkdepends="py3-pytest"
+makedepends="py3-setuptools py3-gpep517 py3-wheel py3-flit"
+options="!check" #todo
+source="$pkgname-$pkgver.tar.gz::https://github.com/Azure/azure-sdk-for-python/archive/refs/tags/azure-identity_$pkgver.tar.gz"
+builddir="$srcdir"/azure-sdk-for-python-azure-identity_$pkgver/sdk/identity/azure-identity
+subpackages="$pkgname-pyc"
+
+build() {
+ gpep517 build-wheel \
+ --wheel-dir .dist \
+ --output-fd 3 3>&1 >&2
+}
+
+check() {
+ python3 -m venv --clear --without-pip --system-site-packages .testenv
+ .testenv/bin/python3 -m installer .dist/*.whl
+ .testenv/bin/python3 -m pytest -v
+}
+
+package() {
+ python3 -m installer -d "$pkgdir" \
+ .dist/*.whl
+}
+
+sha512sums="
+090aed812a7a72c649ded2574dc0a05dd7d9db41675e3d86921ab0555f8af7c83999cb879a2f2e0984880874b3b6dfead6b8de0563d8a99d81775715640a9e01 py3-azure-identity-1.19.0.tar.gz
+"
diff --git a/ilot/py3-django-countries/APKBUILD b/ilot/py3-django-countries/APKBUILD
new file mode 100644
index 0000000..67e36b5
--- /dev/null
+++ b/ilot/py3-django-countries/APKBUILD
@@ -0,0 +1,42 @@
+# Contributor: Antoine Martin (ayakael)
+# Maintainer: Antoine Martin (ayakael)
+pkgname=py3-django-countries
+#_pkgreal is used by apkbuild-pypi to find modules at PyPI
+_pkgreal=django-countries
+pkgver=7.6.1
+pkgrel=0
+pkgdesc="Provides a country field for Django models."
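+# (the field stores ISO 3166-1 country codes; in Django apps it is imported
+# as django_countries.fields.CountryField)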
+url="https://pypi.python.org/project/django-countries" +arch="noarch" +license="MIT" +depends="py3-django py3-asgiref py3-typing-extensions" +# missing py3-graphene +checkdepends="py3-pytest-django py3-pytest-cov py3-django-rest-framework" +makedepends="py3-setuptools py3-gpep517 py3-wheel" +source="$pkgname-$pkgver.tar.gz::https://github.com/SmileyChris/django-countries/archive/refs/tags/v$pkgver.tar.gz" +options="!check" # TODO +builddir="$srcdir/$_pkgreal-$pkgver" +subpackages="$pkgname-pyc" + +build() { + gpep517 build-wheel \ + --wheel-dir .dist \ + --output-fd 3 3>&1 >&2 +} + +check() { + python3 -m venv --clear --without-pip --system-site-packages .testenv + .testenv/bin/python3 -m installer .dist/*.whl + .testenv/bin/python3 -m pytest -v +} + +package() { + python3 -m installer -d "$pkgdir" \ + .dist/*.whl +} + +sha512sums=" +53c7db02244aad196c141d1d04db5087c802d69d12de25e86fe0b2abdfb4ce9ed6ec84b6344c423dc6e7d2e57c2bb14a5324739c7cead54ec7d261e7e3fe6112 py3-django-countries-7.6.1.tar.gz +" diff --git a/ilot/py3-django-rest-framework/APKBUILD b/ilot/py3-django-rest-framework/APKBUILD new file mode 100644 index 0000000..82a1497 --- /dev/null +++ b/ilot/py3-django-rest-framework/APKBUILD @@ -0,0 +1,59 @@ +# Contributor: Leonardo Arena +# Contributor: Justin Berthault +# Maintainer: Antoine Martin (ayakael) +pkgname=py3-django-rest-framework +_pkgname=django-rest-framework +pkgver=3.14.0 +pkgrel=1 +pkgdesc="Web APIs for Django" +url="https://github.com/encode/django-rest-framework" +arch="noarch" +license="Custom" +depends=" + py3-django + py3-tz +" +makedepends=" + py3-setuptools + py3-gpep517 + py3-wheel +" +checkdepends=" + py3-pytest-django + py3-pytest-cov + py3-core-api + py3-jinja2 + py3-uritemplate + py3-django-guardian + py3-psycopg2 + py3-markdown + py3-yaml + py3-inflection +" +subpackages="$pkgname-pyc" +source="$pkgname-$pkgver.tar.gz::https://github.com/encode/$_pkgname/archive/$pkgver.tar.gz" +options="!check" # Failing tests +builddir="$srcdir"/$_pkgname-$pkgver + +build() { + gpep517 build-wheel \ + --wheel-dir .dist \ + --output-fd 3 3>&1 >&2 +} + +check() { + python3 -m venv --clear --without-pip --system-site-packages .testenv + .testenv/bin/python3 -m installer "$builddir"/.dist/*.whl + # test_urlpatterns: AssertionError: assert [] is not [] + # test_markdown: rather hard to decipher assertion error + .testenv/bin/python3 -m pytest -v -k 'not test_urlpatterns and not test_markdown' +} + +package() { + python3 -m installer -d "$pkgdir" \ + .dist/*.whl +} + +sha512sums=" +c1012c656b427e0318b2056e2f984ddc75a5b4e85f375c76fba165ad06e285848eee1bc6dc76c097daec57d780efb2551110199d62ce636a03951aec13ab4013 py3-django-rest-framework-3.14.0.tar.gz +" diff --git a/ilot/py3-django-tenant-schemas/APKBUILD b/ilot/py3-django-tenant-schemas/APKBUILD new file mode 100644 index 0000000..b309c54 --- /dev/null +++ b/ilot/py3-django-tenant-schemas/APKBUILD @@ -0,0 +1,48 @@ +# Contributor: Antoine Martin (ayakael) +# Maintainer: Antoine Martin (ayakael) +pkgname=py3-django-tenant-schemas +#_pkgreal is used by apkbuild-pypi to find modules at PyPI +_pkgreal=django-tenant-schemas +pkgver=1.12.0 +pkgrel=0 +pkgdesc="Tenant support for Django using PostgreSQL schemas." 
+url="https://pypi.python.org/project/django-tenant-schemas" +arch="noarch" +license="MIT" +depends=" + py3-django + py3-ordered-set + py3-six + py3-psycopg2 + " +checkdepends="py3-pytest" +makedepends="py3-setuptools py3-setuptools_scm py3-gpep517 py3-wheel" +source=" + $pkgname-$pkgver.tar.gz::https://github.com/bernardopires/django-tenant-schemas/archive/refs/tags/v$pkgver.tar.gz + " +options="!check" # requires pg +builddir="$srcdir/$_pkgreal-$pkgver" +subpackages="$pkgname-pyc" + +build() { + export SETUPTOOLS_SCM_PRETEND_VERSION=$pkgver + gpep517 build-wheel \ + --wheel-dir .dist \ + --output-fd 3 3>&1 >&2 +} + +check() { + python3 -m venv --clear --without-pip --system-site-packages .testenv + .testenv/bin/python3 -m installer .dist/*.whl + cd tenant_schemas + DJANGO_SETTINGS_MODULE=tests.settings ../.testenv/bin/python3 -m pytest -v +} + +package() { + python3 -m installer -d "$pkgdir" \ + .dist/*.whl +} + +sha512sums=" +758f68dc834d4c0074097b166d742a7d63c86b6426ad67d3ce2f56983d417666bf05ae9c46b3ee89a04dee2d888892463651355d26eda7c265ebee8971992319 py3-django-tenant-schemas-1.12.0.tar.gz +" diff --git a/ilot/py3-kadmin/APKBUILD b/ilot/py3-kadmin/APKBUILD new file mode 100644 index 0000000..894a945 --- /dev/null +++ b/ilot/py3-kadmin/APKBUILD @@ -0,0 +1,40 @@ +# Contributor: Antoine Martin (ayakael) +# Maintainer: Antoine Martin (ayakael) +pkgname=py3-kadmin +#_pkgreal is used by apkbuild-pypi to find modules at PyPI +_pkgreal=kadmin +pkgver=0.2.0 +pkgrel=0 +pkgdesc="Python module for kerberos admin (kadm5)" +url="https://github.com/authentik-community/python-kadmin" +arch="all" +license="MIT" +checkdepends="py3-pytest py3-k5test" +makedepends="py3-setuptools py3-gpep517 py3-wheel poetry python3-dev" +source=" + $pkgname-$pkgver.tar.gz::https://github.com/authentik-community/python-kadmin/archive/refs/tags/v$pkgver.tar.gz + fix-int-conversion-error.patch" +builddir="$srcdir"/python-kadmin-$pkgver +subpackages="$pkgname-pyc" + +build() { + gpep517 build-wheel \ + --wheel-dir .dist \ + --output-fd 3 3>&1 >&2 +} + +check() { + python3 -m venv --clear --without-pip --system-site-packages .testenv + .testenv/bin/python3 -m installer .dist/*.whl + .testenv/bin/python3 test/tests.py +} + +package() { + python3 -m installer -d "$pkgdir" \ + .dist/*.whl +} + +sha512sums=" +b405e914cb296f2bfe4f78d2791329804a0db02816182517b59ed1452a21d51dafe303609fddafbbeea57128bba4bcdfcd9b363f193ae0402cc52cf1b3b9020e py3-kadmin-0.2.0.tar.gz +e17223f8597d51ea099f5d4483dd72545b7d64ad76895553a6b7112416536aae93a59a2fd7aea044420495ab8146db7290abd826b268b2d6e518442c3c85c506 fix-int-conversion-error.patch +" diff --git a/ilot/py3-kadmin/fix-int-conversion-error.patch b/ilot/py3-kadmin/fix-int-conversion-error.patch new file mode 100644 index 0000000..445b76b --- /dev/null +++ b/ilot/py3-kadmin/fix-int-conversion-error.patch @@ -0,0 +1,13 @@ +diff --git a/src/PyKAdminPolicyObject.c.orig b/src/PyKAdminPolicyObject.c +index 0bf3ee8..68387c4 100644 +--- a/src/PyKAdminPolicyObject.c.orig ++++ b/src/PyKAdminPolicyObject.c +@@ -120,7 +120,7 @@ PyTypeObject PyKAdminPolicyObject_Type = { + sizeof(PyKAdminPolicyObject), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + (destructor)PyKAdminPolicyObject_dealloc, /*tp_dealloc*/ +- KAdminPolicyObject_print, /*tp_print*/ ++ 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + 0, /*tp_compare*/ diff --git a/ilot/py3-microsoft-kiota-abstractions/APKBUILD b/ilot/py3-microsoft-kiota-abstractions/APKBUILD new file mode 100644 index 0000000..24d7e1e --- /dev/null +++ 
b/ilot/py3-microsoft-kiota-abstractions/APKBUILD @@ -0,0 +1,44 @@ +# Contributor: Antoine Martin (ayakael) +# Maintainer: Antoine Martin (ayakael) +pkgname=py3-microsoft-kiota-abstractions +#_pkgreal is used by apkbuild-pypi to find modules at PyPI +_pkgreal=microsoft-kiota-abstractions +pkgver=1.6.8 +pkgrel=0 +pkgdesc="Abstractions library for Kiota generated Python clients" +url="https://pypi.python.org/project/microsoft-kiota-abstractions" +arch="noarch" +license="MIT" +depends=" + py3-std-uritemplate<2.0.0 + py3-opentelemetry-sdk + py3-importlib-metadata + " +checkdepends="py3-pytest py3-pytest-asyncio" +makedepends="poetry py3-gpep517 py3-wheel py3-flit" +source=" + $pkgname-$pkgver.tar.gz::https://github.com/microsoft/kiota-python/archive/refs/tags/microsoft-kiota-abstractions-v$pkgver.tar.gz + " +builddir="$srcdir/kiota-python-microsoft-kiota-abstractions-v$pkgver/packages/abstractions" +subpackages="$pkgname-pyc" + +build() { + gpep517 build-wheel \ + --wheel-dir .dist \ + --output-fd 3 3>&1 >&2 +} + +check() { + python3 -m venv --clear --without-pip --system-site-packages .testenv + .testenv/bin/python3 -m installer .dist/*.whl + .testenv/bin/python3 -m pytest -v +} + +package() { + python3 -m installer -d "$pkgdir" \ + .dist/*.whl +} + +sha512sums=" +55341b1ff3fb1a516ceb84817db991d6e6aa83b01326f64cf21690dee1ab84e9c9c4f7162f9f71ec1261b4e0380b73b13284128bd786b80da29faf968720b355 py3-microsoft-kiota-abstractions-1.6.8.tar.gz +" diff --git a/ilot/py3-microsoft-kiota-authentication-azure/APKBUILD b/ilot/py3-microsoft-kiota-authentication-azure/APKBUILD new file mode 100644 index 0000000..c84acdc --- /dev/null +++ b/ilot/py3-microsoft-kiota-authentication-azure/APKBUILD @@ -0,0 +1,45 @@ +# Contributor: Antoine Martin (ayakael) +# Maintainer: Antoine Martin (ayakael) +pkgname=py3-microsoft-kiota-authentication-azure +#_pkgreal is used by apkbuild-pypi to find modules at PyPI +_pkgreal=microsoft-kiota-authentication-azure +pkgver=1.6.8 +pkgrel=0 +pkgdesc="Authentication provider for Kiota using Azure Identity" +url="https://pypi.python.org/project/microsoft-kiota-authentication-azure" +arch="noarch" +license="MIT" +depends=" + py3-azure-core + py3-microsoft-kiota-abstractions + py3-importlib-metadata + " +checkdepends="py3-pytest" +makedepends="poetry py3-gpep517 py3-wheel py3-flit" +source=" + $pkgname-$pkgver.tar.gz::https://github.com/microsoft/kiota-python/archive/refs/tags/microsoft-kiota-authentication-azure-v$pkgver.tar.gz + " +options="!check" # TODO +builddir="$srcdir/kiota-python-microsoft-kiota-authentication-azure-v$pkgver/packages/authentication/azure" +subpackages="$pkgname-pyc" + +build() { + gpep517 build-wheel \ + --wheel-dir .dist \ + --output-fd 3 3>&1 >&2 +} + +check() { + python3 -m venv --clear --without-pip --system-site-packages .testenv + .testenv/bin/python3 -m installer .dist/*.whl + .testenv/bin/python3 -m pytest -v +} + +package() { + python3 -m installer -d "$pkgdir" \ + .dist/*.whl +} + +sha512sums=" +d661d379f036b45bf356e349e28d3478f4a10b351dfde2d1b11a429c0f2160cde9696990cc18d72a224cfd3cc4c90bdc2e6f07d9e4763bd126cd9f66a09b9bec py3-microsoft-kiota-authentication-azure-1.6.8.tar.gz +" diff --git a/ilot/py3-microsoft-kiota-http/APKBUILD b/ilot/py3-microsoft-kiota-http/APKBUILD new file mode 100644 index 0000000..bebb592 --- /dev/null +++ b/ilot/py3-microsoft-kiota-http/APKBUILD @@ -0,0 +1,44 @@ +# Contributor: Antoine Martin (ayakael) +# Maintainer: Antoine Martin (ayakael) +pkgname=py3-microsoft-kiota-http +#_pkgreal is used by apkbuild-pypi to find modules 
at PyPI +_pkgreal=microsoft-kiota-http +pkgver=1.6.8 +pkgrel=0 +pkgdesc="Kiota http request adapter implementation for httpx library" +url="https://pypi.python.org/project/microsoft-kiota-http" +arch="noarch" +license="MIT" +depends=" + py3-microsoft-kiota-abstractions + py3-httpx + " +checkdepends="py3-pytest" +makedepends="poetry py3-gpep517 py3-wheel py3-flit" +source=" + $pkgname-$pkgver.tar.gz::https://github.com/microsoft/kiota-python/archive/refs/tags/microsoft-kiota-http-v$pkgver.tar.gz + " +options="!check" # TODO +builddir="$srcdir/kiota-python-microsoft-kiota-http-v$pkgver/packages/http/httpx" +subpackages="$pkgname-pyc" + +build() { + gpep517 build-wheel \ + --wheel-dir .dist \ + --output-fd 3 3>&1 >&2 +} + +check() { + python3 -m venv --clear --without-pip --system-site-packages .testenv + .testenv/bin/python3 -m installer .dist/*.whl + .testenv/bin/python3 -m pytest -v +} + +package() { + python3 -m installer -d "$pkgdir" \ + .dist/*.whl +} + +sha512sums=" +c453c89d31cc062f2d8be4a28bda0666dbde6b5a8e42855892cda72e5d104e6bb5516db01d9feb7f619b8fa77237c9e3badd24b29326f627f95b69210835321d py3-microsoft-kiota-http-1.6.8.tar.gz +" diff --git a/ilot/py3-microsoft-kiota-serialization-form/APKBUILD b/ilot/py3-microsoft-kiota-serialization-form/APKBUILD new file mode 100644 index 0000000..fccfd62 --- /dev/null +++ b/ilot/py3-microsoft-kiota-serialization-form/APKBUILD @@ -0,0 +1,43 @@ +# Contributor: Antoine Martin (ayakael) +# Maintainer: Antoine Martin (ayakael) +pkgname=py3-microsoft-kiota-serialization-form +#_pkgreal is used by apkbuild-pypi to find modules at PyPI +_pkgreal=microsoft-kiota-serialization-form +pkgver=1.6.8 +pkgrel=0 +pkgdesc="Kiota Form encoded serialization implementation for Python" +url="https://pypi.python.org/project/microsoft-kiota-serialization-form" +arch="noarch" +license="MIT" +depends=" + py3-microsoft-kiota-abstractions + py3-pendulum + " +checkdepends="py3-pytest" +makedepends="poetry py3-gpep517 py3-wheel py3-flit" +source=" + $pkgname-$pkgver.tar.gz::https://github.com/microsoft/kiota-python/archive/refs/tags/microsoft-kiota-serialization-form-v$pkgver.tar.gz + " +builddir="$srcdir/kiota-python-microsoft-kiota-serialization-form-v$pkgver/packages/serialization/form" +subpackages="$pkgname-pyc" + +build() { + gpep517 build-wheel \ + --wheel-dir .dist \ + --output-fd 3 3>&1 >&2 +} + +check() { + python3 -m venv --clear --without-pip --system-site-packages .testenv + .testenv/bin/python3 -m installer .dist/*.whl + .testenv/bin/python3 -m pytest -v +} + +package() { + python3 -m installer -d "$pkgdir" \ + .dist/*.whl +} + +sha512sums=" +0e4fabe18980612ca3f55fd7350148d2393da3f35dc79cd4fe56b01f50bc2af147bde5e294580d83b97b4a549d77e6581ece8ddb19ea09ee92fd6cbfead0d3db py3-microsoft-kiota-serialization-form-1.6.8.tar.gz +" diff --git a/ilot/py3-microsoft-kiota-serialization-json/APKBUILD b/ilot/py3-microsoft-kiota-serialization-json/APKBUILD new file mode 100644 index 0000000..f59d827 --- /dev/null +++ b/ilot/py3-microsoft-kiota-serialization-json/APKBUILD @@ -0,0 +1,44 @@ +# Contributor: Antoine Martin (ayakael) +# Maintainer: Antoine Martin (ayakael) +pkgname=py3-microsoft-kiota-serialization-json +#_pkgreal is used by apkbuild-pypi to find modules at PyPI +_pkgreal=microsoft-kiota-serialization-json +pkgver=1.6.8 +pkgrel=0 +pkgdesc="JSON serialization implementation for Kiota clients in Python" +url="https://pypi.python.org/project/microsoft-kiota-serialization-json" +arch="noarch" +license="MIT" +depends=" + py3-microsoft-kiota-abstractions + py3-pendulum 
+ " +checkdepends="py3-pytest" +makedepends="poetry py3-gpep517 py3-wheel py3-flit" +source=" + $pkgname-$pkgver.tar.gz::https://github.com/microsoft/kiota-python/archive/refs/tags/microsoft-kiota-serialization-json-v$pkgver.tar.gz + " +options="!check" # TODO +builddir="$srcdir/kiota-python-microsoft-kiota-serialization-json-v$pkgver/packages/serialization/json" +subpackages="$pkgname-pyc" + +build() { + gpep517 build-wheel \ + --wheel-dir .dist \ + --output-fd 3 3>&1 >&2 +} + +check() { + python3 -m venv --clear --without-pip --system-site-packages .testenv + .testenv/bin/python3 -m installer .dist/*.whl + .testenv/bin/python3 -m pytest -v +} + +package() { + python3 -m installer -d "$pkgdir" \ + .dist/*.whl +} + +sha512sums=" +42b8e1d2bfb175e52876314a598647de7b70acb8140cefbfb20d0f8de241bbb03a1cfe6c7108a56047f2a8e3f8f781a23fe54d5612d68a5966340279ff0eb8bc py3-microsoft-kiota-serialization-json-1.6.8.tar.gz +" diff --git a/ilot/py3-microsoft-kiota-serialization-multipart/APKBUILD b/ilot/py3-microsoft-kiota-serialization-multipart/APKBUILD new file mode 100644 index 0000000..c0da7ff --- /dev/null +++ b/ilot/py3-microsoft-kiota-serialization-multipart/APKBUILD @@ -0,0 +1,40 @@ +# Contributor: Antoine Martin (ayakael) +# Maintainer: Antoine Martin (ayakael) +pkgname=py3-microsoft-kiota-serialization-multipart +#_pkgreal is used by apkbuild-pypi to find modules at PyPI +_pkgreal=microsoft-kiota-serialization-multipart +pkgver=1.6.8 +pkgrel=0 +pkgdesc="Multipart serialization implementation for python based kiota clients" +url="https://pypi.python.org/project/microsoft-kiota-serialization-multipart" +arch="noarch" +license="MIT" +depends="py3-microsoft-kiota-abstractions py3-microsoft-kiota-serialization-json" +checkdepends="py3-pytest" +makedepends="poetry py3-gpep517 py3-wheel py3-flit" +source=" + $pkgname-$pkgver.tar.gz::https://github.com/microsoft/kiota-python/archive/refs/tags/microsoft-kiota-serialization-multipart-v$pkgver.tar.gz + " +builddir="$srcdir/kiota-python-microsoft-kiota-serialization-multipart-v$pkgver/packages/serialization/multipart" +subpackages="$pkgname-pyc" + +build() { + gpep517 build-wheel \ + --wheel-dir .dist \ + --output-fd 3 3>&1 >&2 +} + +check() { + python3 -m venv --clear --without-pip --system-site-packages .testenv + .testenv/bin/python3 -m installer .dist/*.whl + .testenv/bin/python3 -m pytest -v +} + +package() { + python3 -m installer -d "$pkgdir" \ + .dist/*.whl +} + +sha512sums=" +d6d6d36fe55f4aa595d380e43f93f3de7674633edba676aec16fc26254a12e4f700427fedf1bedfddde30a7f708c93ccbbe586bb0e6950748a2debe609bf44c1 py3-microsoft-kiota-serialization-multipart-1.6.8.tar.gz +" diff --git a/ilot/py3-microsoft-kiota-serialization-text/APKBUILD b/ilot/py3-microsoft-kiota-serialization-text/APKBUILD new file mode 100644 index 0000000..3c38b26 --- /dev/null +++ b/ilot/py3-microsoft-kiota-serialization-text/APKBUILD @@ -0,0 +1,43 @@ +# Contributor: Antoine Martin (ayakael) +# Maintainer: Antoine Martin (ayakael) +pkgname=py3-microsoft-kiota-serialization-text +#_pkgreal is used by apkbuild-pypi to find modules at PyPI +_pkgreal=microsoft-kiota-serialization-text +pkgver=1.6.8 +pkgrel=0 +pkgdesc="Text serialization implementation for Kiota generated clients in Python" +url="https://pypi.python.org/project/microsoft-kiota-abstractions" +arch="noarch" +license="MIT" +depends=" + py3-microsoft-kiota-abstractions + py3-dateutil + " +checkdepends="py3-pytest" +makedepends="poetry py3-gpep517 py3-wheel py3-flit" +source=" + 
$pkgname-$pkgver.tar.gz::https://github.com/microsoft/kiota-python/archive/refs/tags/microsoft-kiota-serialization-text-v$pkgver.tar.gz + " +builddir="$srcdir/kiota-python-microsoft-kiota-serialization-text-v$pkgver/packages/serialization/text" +subpackages="$pkgname-pyc" + +build() { + gpep517 build-wheel \ + --wheel-dir .dist \ + --output-fd 3 3>&1 >&2 +} + +check() { + python3 -m venv --clear --without-pip --system-site-packages .testenv + .testenv/bin/python3 -m installer .dist/*.whl + .testenv/bin/python3 -m pytest -v +} + +package() { + python3 -m installer -d "$pkgdir" \ + .dist/*.whl +} + +sha512sums=" +55dbc87253819f496e2f25de2bf24b170761f335117da414bb35c6db9008e9ca8c6fd13d5e429914c322a850a57858d9abdee7dc209ad55e469182995290d568 py3-microsoft-kiota-serialization-text-1.6.8.tar.gz +" diff --git a/ilot/py3-msal-extensions/APKBUILD b/ilot/py3-msal-extensions/APKBUILD new file mode 100644 index 0000000..a2e26c4 --- /dev/null +++ b/ilot/py3-msal-extensions/APKBUILD @@ -0,0 +1,42 @@ +# Contributor: Antoine Martin (ayakael) +# Maintainer: Antoine Martin (ayakael) +pkgname=py3-msal-extensions +#_pkgreal is used by apkbuild-pypi to find modules at PyPI +_pkgreal=msal-extensions +pkgver=1.2.0 +pkgrel=0 +pkgdesc="Microsoft Authentication Library extensions (MSAL EX) provides a persistence API " +url="https://pypi.org/project/msal-extensions" +arch="noarch" +license="MIT" +depends=" + py3-msal + py3-portalocker +" +checkdepends="py3-pytest" +makedepends="py3-setuptools py3-gpep517 py3-wheel" +options="!check" #todo +source="$pkgname-$pkgver.tar.gz::https://github.com/AzureAD/microsoft-authentication-extensions-for-python/archive/refs/tags/$pkgver.tar.gz" +builddir="$srcdir"/microsoft-authentication-extensions-for-python-$pkgver +subpackages="$pkgname-pyc" + +build() { + gpep517 build-wheel \ + --wheel-dir .dist \ + --output-fd 3 3>&1 >&2 +} + +check() { + python3 -m venv --clear --without-pip --system-site-packages .testenv + .testenv/bin/python3 -m installer .dist/*.whl + .testenv/bin/python3 -m pytest -v +} + +package() { + python3 -m installer -d "$pkgdir" \ + .dist/*.whl +} + +sha512sums=" +847a87e2f7a7b71d47fb758bd3445666b2a9f1f2034c575f8a78ba687e1c5faa682b89ea78906d4afa1350bca608cd9452c7ad244c7ec456145c15c49ad46fb2 py3-msal-extensions-1.2.0.tar.gz +" diff --git a/ilot/py3-msal/APKBUILD b/ilot/py3-msal/APKBUILD new file mode 100644 index 0000000..02b267a --- /dev/null +++ b/ilot/py3-msal/APKBUILD @@ -0,0 +1,43 @@ +# Contributor: Antoine Martin (ayakael) +# Maintainer: Antoine Martin (ayakael) +pkgname=py3-msal +#_pkgreal is used by apkbuild-pypi to find modules at PyPI +_pkgreal=msal +pkgver=1.31.1 +pkgrel=0 +pkgdesc="Microsoft Authentication Library (MSAL) for Python" +url="https://pypi.org/project/msal" +arch="noarch" +license="MIT" +depends=" + py3-requests + py3-cryptography + py3-jwt +" +checkdepends="py3-pytest" +makedepends="py3-setuptools py3-gpep517 py3-wheel" +options="!check" #todo +source="$pkgname-$pkgver.tar.gz::https://github.com/AzureAD/microsoft-authentication-library-for-python/archive/refs/tags/$pkgver.tar.gz" +builddir="$srcdir"/microsoft-authentication-library-for-python-$pkgver +subpackages="$pkgname-pyc" + +build() { + gpep517 build-wheel \ + --wheel-dir .dist \ + --output-fd 3 3>&1 >&2 +} + +check() { + python3 -m venv --clear --without-pip --system-site-packages .testenv + .testenv/bin/python3 -m installer .dist/*.whl + .testenv/bin/python3 -m pytest -v +} + +package() { + python3 -m installer -d "$pkgdir" \ + .dist/*.whl +} + +sha512sums=" 
+f75541337f09ba29d4de13206346ad7793b3f2bdbdbf8fcb050ee7976b397ca666d61aee21121a4efdd7c150c9d2f87f75812e7b8aa96a5f8ac5219e7a946af2  py3-msal-1.31.1.tar.gz
+"
diff --git a/ilot/py3-msgraph-core/APKBUILD b/ilot/py3-msgraph-core/APKBUILD
new file mode 100644
index 0000000..e8d9cb5
--- /dev/null
+++ b/ilot/py3-msgraph-core/APKBUILD
@@ -0,0 +1,43 @@
+# Contributor: Antoine Martin (ayakael)
+# Maintainer: Antoine Martin (ayakael)
+pkgname=py3-msgraph-core
+#_pkgreal is used by apkbuild-pypi to find modules at PyPI
+_pkgreal=msgraph-core
+pkgver=1.1.8
+pkgrel=0
+pkgdesc="The Microsoft Graph Python SDK"
+url="https://pypi.python.org/project/msgraph-core"
+arch="noarch"
+license="MIT"
+depends="
+	py3-azure-identity
+	py3-microsoft-kiota-authentication-azure
+	py3-microsoft-kiota-http
+	"
+checkdepends="py3-pytest"
+makedepends="py3-setuptools py3-gpep517 py3-wheel py3-flit"
+source="$pkgname-$pkgver.tar.gz::https://github.com/microsoftgraph/msgraph-sdk-python-core/archive/refs/tags/v$pkgver.tar.gz"
+options="!check" # TODO
+builddir="$srcdir/msgraph-sdk-python-core-$pkgver"
+subpackages="$pkgname-pyc"
+
+build() {
+	gpep517 build-wheel \
+		--wheel-dir .dist \
+		--output-fd 3 3>&1 >&2
+}
+
+check() {
+	python3 -m venv --clear --without-pip --system-site-packages .testenv
+	.testenv/bin/python3 -m installer .dist/*.whl
+	.testenv/bin/python3 -m pytest -v
+}
+
+package() {
+	python3 -m installer -d "$pkgdir" \
+		.dist/*.whl
+}
+
+sha512sums="
+0cae6f76cb1373d1ef76448e47b9951e5076a144140c19edc14186f7bfd92930e50c9f6c459170e3362ef267903cdf261d1897566983a7302beab205f9d61389  py3-msgraph-core-1.1.8.tar.gz
+"
diff --git a/ilot/py3-msgraph-sdk/APKBUILD b/ilot/py3-msgraph-sdk/APKBUILD
new file mode 100644
index 0000000..f23f733
--- /dev/null
+++ b/ilot/py3-msgraph-sdk/APKBUILD
@@ -0,0 +1,44 @@
+# Contributor: Antoine Martin (ayakael)
+# Maintainer: Antoine Martin (ayakael)
+pkgname=py3-msgraph-sdk
+#_pkgreal is used by apkbuild-pypi to find modules at PyPI
+_pkgreal=msgraph-sdk
+pkgver=1.16.0
+pkgrel=0
+pkgdesc="The Microsoft Graph Python SDK"
+url="https://pypi.python.org/project/msgraph-sdk"
+arch="noarch"
+license="MIT"
+depends="
+	py3-microsoft-kiota-serialization-text
+	py3-microsoft-kiota-serialization-form
+	py3-microsoft-kiota-serialization-multipart
+	py3-msgraph-core
+	"
+checkdepends="py3-pytest"
+makedepends="py3-setuptools py3-gpep517 py3-wheel py3-flit"
+source="$pkgname-$pkgver.tar.gz::https://github.com/microsoftgraph/msgraph-sdk-python/archive/refs/tags/v$pkgver.tar.gz"
+options="!check" # TODO
+builddir="$srcdir/$_pkgreal-python-$pkgver"
+subpackages="$pkgname-pyc"
+
+build() {
+	gpep517 build-wheel \
+		--wheel-dir .dist \
+		--output-fd 3 3>&1 >&2
+}
+
+check() {
+	python3 -m venv --clear --without-pip --system-site-packages .testenv
+	.testenv/bin/python3 -m installer .dist/*.whl
+	.testenv/bin/python3 -m pytest -v
+}
+
+package() {
+	python3 -m installer -d "$pkgdir" \
+		.dist/*.whl
+}
+
+sha512sums="
+af930e5e470f6ac78724650885f70cf447482a53f90043d326b3e00dc7572fd0d476658ebb1677118010e38b54f1e4e609dcfb5fcef5664f05b25062786d11af  py3-msgraph-sdk-1.16.0.tar.gz
+"
diff --git a/ilot/py3-opentelemetry-sdk/APKBUILD b/ilot/py3-opentelemetry-sdk/APKBUILD
new file mode 100644
index 0000000..08bc2ad
--- /dev/null
+++ b/ilot/py3-opentelemetry-sdk/APKBUILD
@@ -0,0 +1,75 @@
+# Contributor: Antoine Martin (ayakael)
+# Maintainer: Antoine Martin (ayakael)
+pkgname=py3-opentelemetry-sdk
+#_pkgreal is used by apkbuild-pypi to find modules at PyPI
+_pkgreal=opentelemetry-sdk
+pkgver=1.29.0
+pkgrel=0
+pkgdesc="OpenTelemetry Python SDK" +url="https://github.com/open-telemetry/opentelemetry-python/tree/main" +arch="noarch" +license="Apache-2.0" +depends="py3-opentelemetry-semantic-conventions py3-typing-extensions" +checkdepends="py3-pytest" +makedepends="py3-setuptools py3-gpep517 py3-wheel py3-hatchling" +source="$pkgname-$pkgver.tar.gz::https://github.com/open-telemetry/opentelemetry-python/archive/refs/tags/v$pkgver.tar.gz" +builddir="$srcdir/opentelemetry-python-$pkgver" +options="!check" # TODO +# need to figure out -pyc +subpackages=" + $pkgname-pyc + py3-opentelemetry-api + py3-opentelemetry-semantic-conventions + py3-opentelemetry-proto + " + +build() { + for i in api semantic-conventions sdk proto; do + cd "$builddir"/opentelemetry-$i + gpep517 build-wheel \ + --wheel-dir .dist \ + --output-fd 3 3>&1 >&2 + done +} + +check() { + for i in api semantic-conventions sdk proto; do + python3 -m venv --clear --without-pip --system-site-packages "$builddir"/.testenv + "$builddir"/.testenv/bin/python3 -m installer .dist/*.whl + "$builddir"/.testenv/bin/python3 -m pytest -v + done +} + +package() { + cd "$builddir"/opentelemetry-sdk + python3 -m installer -d "$pkgdir" \ + .dist/*.whl +} + +api() { + depends="py3-deprecated" + pkgdesc="OpenTelemetry Python API" + cd "$builddir"/opentelemetry-api + python3 -m installer -d "$subpkgdir" \ + .dist/*.whl +} + +conventions() { + pkgdesc="OpenTelemetry Semantic Conventions" + depends="py3-opentelemetry-api py3-deprecated" + cd "$builddir"/opentelemetry-semantic-conventions + python3 -m installer -d "$subpkgdir" \ + .dist/*.whl +} + +proto() { + pkgdesc="OpenTelemetry Python Proto" + depends="py3-protobuf" + cd "$builddir"/opentelemetry-proto + python3 -m installer -d "$subpkgdir" \ + .dist/*.whl +} + +sha512sums=" +92c90e6a684d8cfab3bba4d72612ccf53ae54cdd9784e3434b25adc3730fe114f21fd7aa21da80edf6e0e7c80b39c64ee31fb16f68b04809289bbf5d49d4ca2e py3-opentelemetry-sdk-1.29.0.tar.gz +" diff --git a/ilot/py3-std-uritemplate/APKBUILD b/ilot/py3-std-uritemplate/APKBUILD new file mode 100644 index 0000000..caca02f --- /dev/null +++ b/ilot/py3-std-uritemplate/APKBUILD @@ -0,0 +1,41 @@ +# Contributor: Antoine Martin (ayakael) +# Maintainer: Antoine Martin (ayakael) +pkgname=py3-std-uritemplate +#_pkgreal is used by apkbuild-pypi to find modules at PyPI +_pkgreal=std-uritemplate +pkgver=2.0.1 +pkgrel=0 +pkgdesc="A complete and maintained cross-language implementation of the Uri Template specification RFC 6570 Level 4" +url="https://pypi.python.org/project/std-uritemplate" +arch="noarch" +license="Apache-2.0" +depends="python3" +checkdepends="py3-pytest" +makedepends="py3-setuptools py3-gpep517 py3-wheel poetry" +source="$pkgname-$pkgver.tar.gz::https://github.com/std-uritemplate/std-uritemplate/archive/refs/tags/$pkgver.tar.gz" +options="!check" # TODO +builddir="$srcdir"/$_pkgreal-$pkgver/python +subpackages="$pkgname-pyc" + +prepare() { + default_prepare + ln -s ../Readme.md Readme.md +} +build() { + gpep517 build-wheel \ + --wheel-dir .dist \ + --output-fd 3 3>&1 >&2 +} + +check() { + poetry run python test.py +} + +package() { + python3 -m installer -d "$pkgdir" \ + .dist/*.whl +} + +sha512sums=" +e073a1204d65bb639cc93480b0f68e1edfe5ac3cff607b72c8da8916b7660eea2b2b246b5db02979cd5c856087958c84dc3bc5e9d76a9540f2ac2a7da8cd18df py3-std-uritemplate-2.0.1.tar.gz +" diff --git a/ilot/uptime-kuma/APKBUILD b/ilot/uptime-kuma/APKBUILD new file mode 100644 index 0000000..6bc88c8 --- /dev/null +++ b/ilot/uptime-kuma/APKBUILD @@ -0,0 +1,49 @@ +# Contributor: Antoine 
Martin (ayakael) +# Maintainer: Antoine Martin (ayakael) +pkgname=uptime-kuma +pkgver=1.23.16 +pkgrel=0 +pkgdesc='A fancy self-hosted monitoring tool' +arch="all" +url="https://github.com/louislam/uptime-kuma" +license="MIT" +depends="nodejs" +makedepends="npm" +source=" + uptime-kuma-$pkgver.tar.gz::https://github.com/louislam/uptime-kuma/archive/refs/tags/$pkgver.tar.gz + uptime-kuma.openrc + uptime-kuma.conf + " +subpackages="$pkgname-doc $pkgname-openrc" +install="$pkgname.pre-install" + +build() { + npm ci + npm run build + rm -Rf "$builddir"/node_modules + npm ci --omit=dev +} + +package() { + install -dm 755 \ + "$pkgdir"/usr/share/webapps \ + "$pkgdir"/usr/share/doc \ + "$pkgdir"/usr/share/licenses/uptime-kuma \ + "$pkgdir"/etc/init.d \ + "$pkgdir"/etc/conf.d + + # install + cp -a "$builddir" "$pkgdir/usr/share/webapps/uptime-kuma" + + # openrc + install -Dm755 "$srcdir"/uptime-kuma.openrc "$pkgdir"/etc/init.d/uptime-kuma + install -Dm755 "$srcdir"/uptime-kuma.conf "$pkgdir"/etc/conf.d/uptime-kuma + + # docs and licenses + mv "$pkgdir"/usr/share/webapps/uptime-kuma/LICENSE "$pkgdir"/usr/share/licenses/uptime-kuma/. +} +sha512sums=" +a132d1cd796fbd868782627edfd45d2a6bd3d2fadece23e0bbf000e6a30482659062a43c4590c98e390cac9b8c1926efd8ff01c5b358b7ccea4438259b86f24e uptime-kuma-1.23.16.tar.gz +0ceddb98a6f318029b8bd8b5a49b55c883e77a5f8fffe2b9b271c9abf0ac52dc7a6ea4dbb4a881124a7857f1e43040f18755c1c2a034479e6a94d2b65a73d847 uptime-kuma.openrc +1dbae536b23e3624e139155abbff383bba3209ff2219983da2616b4376b1a5041df812d1e5164716fc6e967a8446d94baae3b96ee575d400813cc6fdc2cc274e uptime-kuma.conf +" diff --git a/ilot/uptime-kuma/uptime-kuma.conf b/ilot/uptime-kuma/uptime-kuma.conf new file mode 100644 index 0000000..f816a99 --- /dev/null +++ b/ilot/uptime-kuma/uptime-kuma.conf @@ -0,0 +1,47 @@ +# uptime-kuma config +# for more info +# see https://github.com/louislam/uptime-kuma/wiki/Environment-Variables + +# Set the directory where the data should be stored (could be relative) +# DATA_DIR=/var/lib/uptime-kuma + +# Host to bind to, could be an ip. +# UPTIME_KUMA_HOST=:: + +# Port to listen to +# UPTIME_KUMA_PORT=3001 + +# Path to SSL key +# UPTIME_KUMA_SSL_KEY= + +# Path to SSL certificate +# UPTIME_KUMA_SSL_CERT= + +# SSL Key Passphrase +# UPTIME_KUMA_SSL_KEY_PASSPHRASE= + +# Cloudflare Tunnel Token +# UPTIME_KUMA_CLOUDFLARED_TOKEN= + +# By default, Uptime Kuma is not allowed in iframe if the domain name is not +# the same as the parent. It protects your Uptime Kuma to be a phishing +# website. If you don't need this protection, you can set it to true +# UPTIME_KUMA_DISABLE_FRAME_SAMEORIGIN=false + +# By default, Uptime Kuma is verifying that the websockets ORIGIN-Header +# matches your servers hostname. If you don't need this protection, you can +# set it to bypass. See GHSA-mj22-23ff-2hrr for further context. +# UPTIME_KUMA_WS_ORIGIN_CHECK=cors-like + +# Allow to specify any executables as Chromium +# UPTIME_KUMA_ALLOW_ALL_CHROME_EXEC=0 + +# Add your self-signed ca certs. 
+# NODE_EXTRA_CA_CERTS=
+
+# Ignore all TLS errors
+# NODE_TLS_REJECT_UNAUTHORIZED=0
+
+# Set it to --insecure-http-parser, if you encountered error Invalid header
+# value char when your website using WAF
+# NODE_OPTIONS=
diff --git a/ilot/uptime-kuma/uptime-kuma.openrc b/ilot/uptime-kuma/uptime-kuma.openrc
new file mode 100644
index 0000000..ce7b00e
--- /dev/null
+++ b/ilot/uptime-kuma/uptime-kuma.openrc
@@ -0,0 +1,48 @@
+#!/sbin/openrc-run
+
+description="Uptime Kuma self-hosted monitoring tool"
+
+# Change $directory to path to uptime-kuma
+directory=${directory:-/usr/share/webapps/uptime-kuma}
+pidfile=${pidfile:-/run/$RC_SVCNAME.pid}
+DATA_DIR=${DATA_DIR:-/var/lib/uptime-kuma}
+
+log_dir="/var/log/$RC_SVCNAME"
+logfile=${logfile:-$log_dir/$RC_SVCNAME.log}
+output_log="${output_log:-$logfile}"
+error_log="${error_log:-$logfile}"
+
+command=${command:-/usr/bin/node}
+command_args="$directory/server/server.js"
+command_user=${command_user:-uptime-kuma:uptime-kuma}
+command_background=true
+
+depend() {
+	need net
+}
+
+start_pre() {
+	checkpath --owner=$command_user --directory $log_dir \
+		$DATA_DIR \
+		$DATA_DIR/upload
+	checkpath --owner=$command_user --file $logfile \
+		$DATA_DIR/error.log
+
+	[ ! -e $DATA_DIR/kuma.db ] &&
+		cp $directory/db/kuma.db $DATA_DIR
+
+	checkpath --owner=$command_user --mode 600 --file $DATA_DIR/kuma.db*
+
+	cd $directory
+
+	export DATA_DIR UPTIME_KUMA_HOST UPTIME_KUMA_PORT UPTIME_KUMA_SSL_KEY \
+		UPTIME_KUMA_SSL_CERT UPTIME_KUMA_SSL_KEY_PASSPHRASE \
+		UPTIME_KUMA_CLOUDFLARED_TOKEN UPTIME_KUMA_DISABLE_FRAME_SAMEORIGIN \
+		UPTIME_KUMA_WS_ORIGIN_CHECK UPTIME_KUMA_ALLOW_ALL_CHROME_EXEC \
+		NODE_EXTRA_CA_CERTS NODE_TLS_REJECT_UNAUTHORIZED NODE_OPTIONS
+}
+
+start_post() {
+	# Wait for the server to be started
+	sleep 10
+}
diff --git a/ilot/uptime-kuma/uptime-kuma.pre-install b/ilot/uptime-kuma/uptime-kuma.pre-install
new file mode 100755
index 0000000..0217278
--- /dev/null
+++ b/ilot/uptime-kuma/uptime-kuma.pre-install
@@ -0,0 +1,25 @@
+#!/bin/sh
+
+DATADIR='/var/lib/uptime-kuma'
+
+if ! getent group uptime-kuma 1>/dev/null; then
+	echo '* Creating group uptime-kuma' 1>&2
+
+	addgroup -S uptime-kuma
+fi
+
+if ! id uptime-kuma 2>/dev/null 1>&2; then
+	echo '* Creating user uptime-kuma' 1>&2
+
+	adduser -DHS -G uptime-kuma -h "$DATADIR" -s /bin/sh \
+		-g "added by apk for uptime-kuma" uptime-kuma
+	passwd -u uptime-kuma 1>/dev/null # unlock
+fi
+
+if ! id -Gn uptime-kuma | grep -Fq www-data; then
+	echo '* Adding user uptime-kuma to group www-data' 1>&2
+
+	addgroup uptime-kuma www-data
+fi
+
+exit 0
diff --git a/ilot/uvicorn/2540_add-websocketssansioprotocol.patch b/ilot/uvicorn/2540_add-websocketssansioprotocol.patch
new file mode 100644
index 0000000..0cb8db4
--- /dev/null
+++ b/ilot/uvicorn/2540_add-websocketssansioprotocol.patch
@@ -0,0 +1,618 @@
+diff --git a/docs/deployment.md b/docs/deployment.md
+index d69fcf8..99dfbf3 100644
+--- a/docs/deployment.md
++++ b/docs/deployment.md
+@@ -60,7 +60,7 @@ Options:
+ --loop [auto|asyncio|uvloop] Event loop implementation. [default: auto]
+ --http [auto|h11|httptools] HTTP protocol implementation. [default:
+ auto]
+- --ws [auto|none|websockets|wsproto]
++ --ws [auto|none|websockets|websockets-sansio|wsproto]
+ WebSocket protocol implementation.
+ [default: auto] + --ws-max-size INTEGER WebSocket max size message in bytes +diff --git a/docs/index.md b/docs/index.md +index bb6fc32..50e2ab9 100644 +--- a/docs/index.md ++++ b/docs/index.md +@@ -130,7 +130,7 @@ Options: + --loop [auto|asyncio|uvloop] Event loop implementation. [default: auto] + --http [auto|h11|httptools] HTTP protocol implementation. [default: + auto] +- --ws [auto|none|websockets|wsproto] ++ --ws [auto|none|websockets|websockets-sansio|wsproto] + WebSocket protocol implementation. + [default: auto] + --ws-max-size INTEGER WebSocket max size message in bytes +diff --git a/pyproject.toml b/pyproject.toml +index 0a89966..8771bfb 100644 +--- a/pyproject.toml ++++ b/pyproject.toml +@@ -92,6 +92,10 @@ filterwarnings = [ + "ignore:Uvicorn's native WSGI implementation is deprecated.*:DeprecationWarning", + "ignore: 'cgi' is deprecated and slated for removal in Python 3.13:DeprecationWarning", + "ignore: remove second argument of ws_handler:DeprecationWarning:websockets", ++ "ignore: websockets.legacy is deprecated.*:DeprecationWarning", ++ "ignore: websockets.server.WebSocketServerProtocol is deprecated.*:DeprecationWarning", ++ "ignore: websockets.client.connect is deprecated.*:DeprecationWarning", ++ "ignore: websockets.exceptions.InvalidStatusCode is deprecated", + ] + + [tool.coverage.run] +diff --git a/tests/conftest.py b/tests/conftest.py +index 1b0c0e8..7061a14 100644 +--- a/tests/conftest.py ++++ b/tests/conftest.py +@@ -233,9 +233,9 @@ def unused_tcp_port() -> int: + marks=pytest.mark.skipif(not importlib.util.find_spec("wsproto"), reason="wsproto not installed."), + id="wsproto", + ), ++ pytest.param("uvicorn.protocols.websockets.websockets_impl:WebSocketProtocol", id="websockets"), + pytest.param( +- "uvicorn.protocols.websockets.websockets_impl:WebSocketProtocol", +- id="websockets", ++ "uvicorn.protocols.websockets.websockets_sansio_impl:WebSocketsSansIOProtocol", id="websockets-sansio" + ), + ] + ) +diff --git a/tests/middleware/test_logging.py b/tests/middleware/test_logging.py +index f27633a..63d7daf 100644 +--- a/tests/middleware/test_logging.py ++++ b/tests/middleware/test_logging.py +@@ -49,7 +49,9 @@ async def app(scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable + await send({"type": "http.response.body", "body": b"", "more_body": False}) + + +-async def test_trace_logging(caplog: pytest.LogCaptureFixture, logging_config, unused_tcp_port: int): ++async def test_trace_logging( ++ caplog: pytest.LogCaptureFixture, logging_config: dict[str, typing.Any], unused_tcp_port: int ++): + config = Config( + app=app, + log_level="trace", +@@ -91,8 +93,8 @@ async def test_trace_logging_on_http_protocol(http_protocol_cls, caplog, logging + + async def test_trace_logging_on_ws_protocol( + ws_protocol_cls: WSProtocol, +- caplog, +- logging_config, ++ caplog: pytest.LogCaptureFixture, ++ logging_config: dict[str, typing.Any], + unused_tcp_port: int, + ): + async def websocket_app(scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable): +@@ -104,7 +106,7 @@ async def test_trace_logging_on_ws_protocol( + elif message["type"] == "websocket.disconnect": + break + +- async def open_connection(url): ++ async def open_connection(url: str): + async with websockets.client.connect(url) as websocket: + return websocket.open + +diff --git a/tests/middleware/test_proxy_headers.py b/tests/middleware/test_proxy_headers.py +index 0ade974..d300c45 100644 +--- a/tests/middleware/test_proxy_headers.py ++++ b/tests/middleware/test_proxy_headers.py +@@ -465,6 
+465,7 @@ async def test_proxy_headers_websocket_x_forwarded_proto( + host, port = scope["client"] + await send({"type": "websocket.accept"}) + await send({"type": "websocket.send", "text": f"{scheme}://{host}:{port}"}) ++ await send({"type": "websocket.close"}) + + app_with_middleware = ProxyHeadersMiddleware(websocket_app, trusted_hosts="*") + config = Config( +diff --git a/tests/protocols/test_websocket.py b/tests/protocols/test_websocket.py +index 15ccfdd..e728544 100644 +--- a/tests/protocols/test_websocket.py ++++ b/tests/protocols/test_websocket.py +@@ -7,6 +7,8 @@ from copy import deepcopy + import httpx + import pytest + import websockets ++import websockets.asyncio ++import websockets.asyncio.client + import websockets.client + import websockets.exceptions + from typing_extensions import TypedDict +@@ -601,12 +603,9 @@ async def test_connection_lost_before_handshake_complete( + await send_accept_task.wait() + disconnect_message = await receive() # type: ignore + +- response: httpx.Response | None = None +- + async def websocket_session(uri: str): +- nonlocal response + async with httpx.AsyncClient() as client: +- response = await client.get( ++ await client.get( + f"http://127.0.0.1:{unused_tcp_port}", + headers={ + "upgrade": "websocket", +@@ -623,9 +622,6 @@ async def test_connection_lost_before_handshake_complete( + send_accept_task.set() + await asyncio.sleep(0.1) + +- assert response is not None +- assert response.status_code == 500, response.text +- assert response.text == "Internal Server Error" + assert disconnect_message == {"type": "websocket.disconnect", "code": 1006} + await task + +@@ -920,6 +916,9 @@ async def test_server_reject_connection_with_body_nolength( + async def test_server_reject_connection_with_invalid_msg( + ws_protocol_cls: WSProtocol, http_protocol_cls: HTTPProtocol, unused_tcp_port: int + ): ++ if ws_protocol_cls.__name__ == "WebSocketsSansIOProtocol": ++ pytest.skip("WebSocketsSansIOProtocol sends both start and body messages in one message.") ++ + async def app(scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable): + assert scope["type"] == "websocket" + assert "extensions" in scope and "websocket.http.response" in scope["extensions"] +@@ -951,6 +950,9 @@ async def test_server_reject_connection_with_invalid_msg( + async def test_server_reject_connection_with_missing_body( + ws_protocol_cls: WSProtocol, http_protocol_cls: HTTPProtocol, unused_tcp_port: int + ): ++ if ws_protocol_cls.__name__ == "WebSocketsSansIOProtocol": ++ pytest.skip("WebSocketsSansIOProtocol sends both start and body messages in one message.") ++ + async def app(scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable): + assert scope["type"] == "websocket" + assert "extensions" in scope and "websocket.http.response" in scope["extensions"] +@@ -986,6 +988,8 @@ async def test_server_multiple_websocket_http_response_start_events( + The server should raise an exception if it sends multiple + websocket.http.response.start events. 
+ """ ++ if ws_protocol_cls.__name__ == "WebSocketsSansIOProtocol": ++ pytest.skip("WebSocketsSansIOProtocol sends both start and body messages in one message.") + exception_message: str | None = None + + async def app(scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable): +diff --git a/uvicorn/config.py b/uvicorn/config.py +index 664d191..cbfeea6 100644 +--- a/uvicorn/config.py ++++ b/uvicorn/config.py +@@ -25,7 +25,7 @@ from uvicorn.middleware.proxy_headers import ProxyHeadersMiddleware + from uvicorn.middleware.wsgi import WSGIMiddleware + + HTTPProtocolType = Literal["auto", "h11", "httptools"] +-WSProtocolType = Literal["auto", "none", "websockets", "wsproto"] ++WSProtocolType = Literal["auto", "none", "websockets", "websockets-sansio", "wsproto"] + LifespanType = Literal["auto", "on", "off"] + LoopSetupType = Literal["none", "auto", "asyncio", "uvloop"] + InterfaceType = Literal["auto", "asgi3", "asgi2", "wsgi"] +@@ -47,6 +47,7 @@ WS_PROTOCOLS: dict[WSProtocolType, str | None] = { + "auto": "uvicorn.protocols.websockets.auto:AutoWebSocketsProtocol", + "none": None, + "websockets": "uvicorn.protocols.websockets.websockets_impl:WebSocketProtocol", ++ "websockets-sansio": "uvicorn.protocols.websockets.websockets_sansio_impl:WebSocketsSansIOProtocol", + "wsproto": "uvicorn.protocols.websockets.wsproto_impl:WSProtocol", + } + LIFESPAN: dict[LifespanType, str] = { +diff --git a/uvicorn/protocols/websockets/websockets_sansio_impl.py b/uvicorn/protocols/websockets/websockets_sansio_impl.py +new file mode 100644 +index 0000000..994af07 +--- /dev/null ++++ b/uvicorn/protocols/websockets/websockets_sansio_impl.py +@@ -0,0 +1,405 @@ ++from __future__ import annotations ++ ++import asyncio ++import logging ++from asyncio.transports import BaseTransport, Transport ++from http import HTTPStatus ++from typing import Any, Literal, cast ++from urllib.parse import unquote ++ ++from websockets import InvalidState ++from websockets.extensions.permessage_deflate import ServerPerMessageDeflateFactory ++from websockets.frames import Frame, Opcode ++from websockets.http11 import Request ++from websockets.server import ServerProtocol ++ ++from uvicorn._types import ( ++ ASGIReceiveEvent, ++ ASGISendEvent, ++ WebSocketAcceptEvent, ++ WebSocketCloseEvent, ++ WebSocketDisconnectEvent, ++ WebSocketReceiveEvent, ++ WebSocketResponseBodyEvent, ++ WebSocketResponseStartEvent, ++ WebSocketScope, ++ WebSocketSendEvent, ++) ++from uvicorn.config import Config ++from uvicorn.logging import TRACE_LOG_LEVEL ++from uvicorn.protocols.utils import ( ++ ClientDisconnected, ++ get_local_addr, ++ get_path_with_query_string, ++ get_remote_addr, ++ is_ssl, ++) ++from uvicorn.server import ServerState ++ ++ ++class WebSocketsSansIOProtocol(asyncio.Protocol): ++ def __init__( ++ self, ++ config: Config, ++ server_state: ServerState, ++ app_state: dict[str, Any], ++ _loop: asyncio.AbstractEventLoop | None = None, ++ ) -> None: ++ if not config.loaded: ++ config.load() # pragma: no cover ++ ++ self.config = config ++ self.app = config.loaded_app ++ self.loop = _loop or asyncio.get_event_loop() ++ self.logger = logging.getLogger("uvicorn.error") ++ self.root_path = config.root_path ++ self.app_state = app_state ++ ++ # Shared server state ++ self.connections = server_state.connections ++ self.tasks = server_state.tasks ++ self.default_headers = server_state.default_headers ++ ++ # Connection state ++ self.transport: asyncio.Transport = None # type: ignore[assignment] ++ self.server: tuple[str, int] | None = None ++ 
self.client: tuple[str, int] | None = None ++ self.scheme: Literal["wss", "ws"] = None # type: ignore[assignment] ++ ++ # WebSocket state ++ self.queue: asyncio.Queue[ASGIReceiveEvent] = asyncio.Queue() ++ self.handshake_initiated = False ++ self.handshake_complete = False ++ self.close_sent = False ++ self.initial_response: tuple[int, list[tuple[str, str]], bytes] | None = None ++ ++ extensions = [] ++ if self.config.ws_per_message_deflate: ++ extensions = [ServerPerMessageDeflateFactory()] ++ self.conn = ServerProtocol( ++ extensions=extensions, ++ max_size=self.config.ws_max_size, ++ logger=logging.getLogger("uvicorn.error"), ++ ) ++ ++ self.read_paused = False ++ self.writable = asyncio.Event() ++ self.writable.set() ++ ++ # Buffers ++ self.bytes = b"" ++ ++ def connection_made(self, transport: BaseTransport) -> None: ++ """Called when a connection is made.""" ++ transport = cast(Transport, transport) ++ self.connections.add(self) ++ self.transport = transport ++ self.server = get_local_addr(transport) ++ self.client = get_remote_addr(transport) ++ self.scheme = "wss" if is_ssl(transport) else "ws" ++ ++ if self.logger.level <= TRACE_LOG_LEVEL: ++ prefix = "%s:%d - " % self.client if self.client else "" ++ self.logger.log(TRACE_LOG_LEVEL, "%sWebSocket connection made", prefix) ++ ++ def connection_lost(self, exc: Exception | None) -> None: ++ code = 1005 if self.handshake_complete else 1006 ++ self.queue.put_nowait({"type": "websocket.disconnect", "code": code}) ++ self.connections.remove(self) ++ ++ if self.logger.level <= TRACE_LOG_LEVEL: ++ prefix = "%s:%d - " % self.client if self.client else "" ++ self.logger.log(TRACE_LOG_LEVEL, "%sWebSocket connection lost", prefix) ++ ++ self.handshake_complete = True ++ if exc is None: ++ self.transport.close() ++ ++ def eof_received(self) -> None: ++ pass ++ ++ def shutdown(self) -> None: ++ if self.handshake_complete: ++ self.queue.put_nowait({"type": "websocket.disconnect", "code": 1012}) ++ self.conn.send_close(1012) ++ output = self.conn.data_to_send() ++ self.transport.write(b"".join(output)) ++ else: ++ self.send_500_response() ++ self.transport.close() ++ ++ def data_received(self, data: bytes) -> None: ++ self.conn.receive_data(data) ++ parser_exc = self.conn.parser_exc ++ if parser_exc is not None: ++ self.handle_parser_exception() ++ return ++ self.handle_events() ++ ++ def handle_events(self) -> None: ++ for event in self.conn.events_received(): ++ if isinstance(event, Request): ++ self.handle_connect(event) ++ if isinstance(event, Frame): ++ if event.opcode == Opcode.CONT: ++ self.handle_cont(event) ++ elif event.opcode == Opcode.TEXT: ++ self.handle_text(event) ++ elif event.opcode == Opcode.BINARY: ++ self.handle_bytes(event) ++ elif event.opcode == Opcode.PING: ++ self.handle_ping(event) ++ elif event.opcode == Opcode.CLOSE: ++ self.handle_close(event) ++ ++ # Event handlers ++ ++ def handle_connect(self, event: Request) -> None: ++ self.request = event ++ self.response = self.conn.accept(event) ++ self.handshake_initiated = True ++ if self.response.status_code != 101: ++ self.handshake_complete = True ++ self.close_sent = True ++ self.conn.send_response(self.response) ++ output = self.conn.data_to_send() ++ self.transport.write(b"".join(output)) ++ self.transport.close() ++ return ++ ++ headers = [ ++ (key.encode("ascii"), value.encode("ascii", errors="surrogateescape")) ++ for key, value in event.headers.raw_items() ++ ] ++ raw_path, _, query_string = event.path.partition("?") ++ self.scope: WebSocketScope = { ++ "type": 
"websocket", ++ "asgi": {"version": self.config.asgi_version, "spec_version": "2.3"}, ++ "http_version": "1.1", ++ "scheme": self.scheme, ++ "server": self.server, ++ "client": self.client, ++ "root_path": self.root_path, ++ "path": unquote(raw_path), ++ "raw_path": raw_path.encode("ascii"), ++ "query_string": query_string.encode("ascii"), ++ "headers": headers, ++ "subprotocols": event.headers.get_all("Sec-WebSocket-Protocol"), ++ "state": self.app_state.copy(), ++ "extensions": {"websocket.http.response": {}}, ++ } ++ self.queue.put_nowait({"type": "websocket.connect"}) ++ task = self.loop.create_task(self.run_asgi()) ++ task.add_done_callback(self.on_task_complete) ++ self.tasks.add(task) ++ ++ def handle_cont(self, event: Frame) -> None: ++ self.bytes += event.data ++ if event.fin: ++ self.send_receive_event_to_app() ++ ++ def handle_text(self, event: Frame) -> None: ++ self.bytes = event.data ++ self.curr_msg_data_type: Literal["text", "bytes"] = "text" ++ if event.fin: ++ self.send_receive_event_to_app() ++ ++ def handle_bytes(self, event: Frame) -> None: ++ self.bytes = event.data ++ self.curr_msg_data_type = "bytes" ++ if event.fin: ++ self.send_receive_event_to_app() ++ ++ def send_receive_event_to_app(self) -> None: ++ data_type = self.curr_msg_data_type ++ msg: WebSocketReceiveEvent ++ if data_type == "text": ++ msg = {"type": "websocket.receive", data_type: self.bytes.decode()} ++ else: ++ msg = {"type": "websocket.receive", data_type: self.bytes} ++ self.queue.put_nowait(msg) ++ if not self.read_paused: ++ self.read_paused = True ++ self.transport.pause_reading() ++ ++ def handle_ping(self, event: Frame) -> None: ++ output = self.conn.data_to_send() ++ self.transport.write(b"".join(output)) ++ ++ def handle_close(self, event: Frame) -> None: ++ if not self.close_sent and not self.transport.is_closing(): ++ disconnect_event: WebSocketDisconnectEvent = { ++ "type": "websocket.disconnect", ++ "code": self.conn.close_rcvd.code, # type: ignore[union-attr] ++ "reason": self.conn.close_rcvd.reason, # type: ignore[union-attr] ++ } ++ self.queue.put_nowait(disconnect_event) ++ output = self.conn.data_to_send() ++ self.transport.write(b"".join(output)) ++ self.transport.close() ++ ++ def handle_parser_exception(self) -> None: ++ disconnect_event: WebSocketDisconnectEvent = { ++ "type": "websocket.disconnect", ++ "code": self.conn.close_sent.code, # type: ignore[union-attr] ++ "reason": self.conn.close_sent.reason, # type: ignore[union-attr] ++ } ++ self.queue.put_nowait(disconnect_event) ++ output = self.conn.data_to_send() ++ self.transport.write(b"".join(output)) ++ self.close_sent = True ++ self.transport.close() ++ ++ def on_task_complete(self, task: asyncio.Task[None]) -> None: ++ self.tasks.discard(task) ++ ++ async def run_asgi(self) -> None: ++ try: ++ result = await self.app(self.scope, self.receive, self.send) ++ except ClientDisconnected: ++ self.transport.close() ++ except BaseException: ++ self.logger.exception("Exception in ASGI application\n") ++ self.send_500_response() ++ self.transport.close() ++ else: ++ if not self.handshake_complete: ++ msg = "ASGI callable returned without completing handshake." ++ self.logger.error(msg) ++ self.send_500_response() ++ self.transport.close() ++ elif result is not None: ++ msg = "ASGI callable should return None, but returned '%s'." 
++ self.logger.error(msg, result) ++ self.transport.close() ++ ++ def send_500_response(self) -> None: ++ if self.initial_response or self.handshake_complete: ++ return ++ response = self.conn.reject(500, "Internal Server Error") ++ self.conn.send_response(response) ++ output = self.conn.data_to_send() ++ self.transport.write(b"".join(output)) ++ ++ async def send(self, message: ASGISendEvent) -> None: ++ await self.writable.wait() ++ ++ message_type = message["type"] ++ ++ if not self.handshake_complete and self.initial_response is None: ++ if message_type == "websocket.accept": ++ message = cast(WebSocketAcceptEvent, message) ++ self.logger.info( ++ '%s - "WebSocket %s" [accepted]', ++ self.scope["client"], ++ get_path_with_query_string(self.scope), ++ ) ++ headers = [ ++ (name.decode("latin-1").lower(), value.decode("latin-1").lower()) ++ for name, value in (self.default_headers + list(message.get("headers", []))) ++ ] ++ accepted_subprotocol = message.get("subprotocol") ++ if accepted_subprotocol: ++ headers.append(("Sec-WebSocket-Protocol", accepted_subprotocol)) ++ self.response.headers.update(headers) ++ ++ if not self.transport.is_closing(): ++ self.handshake_complete = True ++ self.conn.send_response(self.response) ++ output = self.conn.data_to_send() ++ self.transport.write(b"".join(output)) ++ ++ elif message_type == "websocket.close": ++ message = cast(WebSocketCloseEvent, message) ++ self.queue.put_nowait({"type": "websocket.disconnect", "code": 1006}) ++ self.logger.info( ++ '%s - "WebSocket %s" 403', ++ self.scope["client"], ++ get_path_with_query_string(self.scope), ++ ) ++ response = self.conn.reject(HTTPStatus.FORBIDDEN, "") ++ self.conn.send_response(response) ++ output = self.conn.data_to_send() ++ self.close_sent = True ++ self.handshake_complete = True ++ self.transport.write(b"".join(output)) ++ self.transport.close() ++ elif message_type == "websocket.http.response.start" and self.initial_response is None: ++ message = cast(WebSocketResponseStartEvent, message) ++ if not (100 <= message["status"] < 600): ++ raise RuntimeError("Invalid HTTP status code '%d' in response." % message["status"]) ++ self.logger.info( ++ '%s - "WebSocket %s" %d', ++ self.scope["client"], ++ get_path_with_query_string(self.scope), ++ message["status"], ++ ) ++ headers = [ ++ (name.decode("latin-1"), value.decode("latin-1")) ++ for name, value in list(message.get("headers", [])) ++ ] ++ self.initial_response = (message["status"], headers, b"") ++ else: ++ msg = ( ++ "Expected ASGI message 'websocket.accept', 'websocket.close' " ++ "or 'websocket.http.response.start' " ++ "but got '%s'." 
++ ) ++ raise RuntimeError(msg % message_type) ++ ++ elif not self.close_sent and self.initial_response is None: ++ try: ++ if message_type == "websocket.send": ++ message = cast(WebSocketSendEvent, message) ++ bytes_data = message.get("bytes") ++ text_data = message.get("text") ++ if text_data: ++ self.conn.send_text(text_data.encode()) ++ elif bytes_data: ++ self.conn.send_binary(bytes_data) ++ output = self.conn.data_to_send() ++ self.transport.write(b"".join(output)) ++ ++ elif message_type == "websocket.close" and not self.transport.is_closing(): ++ message = cast(WebSocketCloseEvent, message) ++ code = message.get("code", 1000) ++ reason = message.get("reason", "") or "" ++ self.queue.put_nowait({"type": "websocket.disconnect", "code": code}) ++ self.conn.send_close(code, reason) ++ output = self.conn.data_to_send() ++ self.transport.write(b"".join(output)) ++ self.close_sent = True ++ self.transport.close() ++ else: ++ msg = "Expected ASGI message 'websocket.send' or 'websocket.close'," " but got '%s'." ++ raise RuntimeError(msg % message_type) ++ except InvalidState: ++ raise ClientDisconnected() ++ elif self.initial_response is not None: ++ if message_type == "websocket.http.response.body": ++ message = cast(WebSocketResponseBodyEvent, message) ++ body = self.initial_response[2] + message["body"] ++ self.initial_response = self.initial_response[:2] + (body,) ++ if not message.get("more_body", False): ++ response = self.conn.reject(self.initial_response[0], body.decode()) ++ response.headers.update(self.initial_response[1]) ++ self.queue.put_nowait({"type": "websocket.disconnect", "code": 1006}) ++ self.conn.send_response(response) ++ output = self.conn.data_to_send() ++ self.close_sent = True ++ self.transport.write(b"".join(output)) ++ self.transport.close() ++ else: ++ msg = "Expected ASGI message 'websocket.http.response.body' " "but got '%s'." ++ raise RuntimeError(msg % message_type) ++ ++ else: ++ msg = "Unexpected ASGI message '%s', after sending 'websocket.close'." ++ raise RuntimeError(msg % message_type) ++ ++ async def receive(self) -> ASGIReceiveEvent: ++ message = await self.queue.get() ++ if self.read_paused and self.queue.empty(): ++ self.read_paused = False ++ self.transport.resume_reading() ++ return message +diff --git a/uvicorn/server.py b/uvicorn/server.py +index cca2e85..50c5ed2 100644 +--- a/uvicorn/server.py ++++ b/uvicorn/server.py +@@ -23,9 +23,10 @@ if TYPE_CHECKING: + from uvicorn.protocols.http.h11_impl import H11Protocol + from uvicorn.protocols.http.httptools_impl import HttpToolsProtocol + from uvicorn.protocols.websockets.websockets_impl import WebSocketProtocol ++ from uvicorn.protocols.websockets.websockets_sansio_impl import WebSocketsSansIOProtocol + from uvicorn.protocols.websockets.wsproto_impl import WSProtocol + +- Protocols = Union[H11Protocol, HttpToolsProtocol, WSProtocol, WebSocketProtocol] ++ Protocols = Union[H11Protocol, HttpToolsProtocol, WSProtocol, WebSocketProtocol, WebSocketsSansIOProtocol] + + HANDLED_SIGNALS = ( + signal.SIGINT, # Unix signal 2. Sent by Ctrl+C. 
diff --git a/ilot/uvicorn/2541_bump-wesockets-on-requirements.patch b/ilot/uvicorn/2541_bump-wesockets-on-requirements.patch new file mode 100644 index 0000000..c1179f3 --- /dev/null +++ b/ilot/uvicorn/2541_bump-wesockets-on-requirements.patch @@ -0,0 +1,567 @@ +diff --git a/requirements.txt b/requirements.txt +index e26e6b3..b16569f 100644 +--- a/requirements.txt ++++ b/requirements.txt +@@ -7,7 +7,7 @@ h11 @ git+https://github.com/python-hyper/h11.git@master + # Explicit optionals + a2wsgi==1.10.7 + wsproto==1.2.0 +-websockets==13.1 ++websockets==14.1 + + # Packaging + build==1.2.2.post1 +diff --git a/tests/middleware/test_logging.py b/tests/middleware/test_logging.py +index 63d7daf..5aef174 100644 +--- a/tests/middleware/test_logging.py ++++ b/tests/middleware/test_logging.py +@@ -8,8 +8,7 @@ import typing + + import httpx + import pytest +-import websockets +-import websockets.client ++from websockets.asyncio.client import connect + + from tests.utils import run_server + from uvicorn import Config +@@ -107,8 +106,8 @@ async def test_trace_logging_on_ws_protocol( + break + + async def open_connection(url: str): +- async with websockets.client.connect(url) as websocket: +- return websocket.open ++ async with connect(url): ++ return True + + config = Config( + app=websocket_app, +diff --git a/tests/middleware/test_proxy_headers.py b/tests/middleware/test_proxy_headers.py +index d300c45..4b5f195 100644 +--- a/tests/middleware/test_proxy_headers.py ++++ b/tests/middleware/test_proxy_headers.py +@@ -5,7 +5,7 @@ from typing import TYPE_CHECKING + import httpx + import httpx._transports.asgi + import pytest +-import websockets.client ++from websockets.asyncio.client import connect + + from tests.response import Response + from tests.utils import run_server +@@ -479,7 +479,7 @@ async def test_proxy_headers_websocket_x_forwarded_proto( + async with run_server(config): + url = f"ws://127.0.0.1:{unused_tcp_port}" + headers = {X_FORWARDED_FOR: "1.2.3.4", X_FORWARDED_PROTO: forwarded_proto} +- async with websockets.client.connect(url, extra_headers=headers) as websocket: ++ async with connect(url, additional_headers=headers) as websocket: + data = await websocket.recv() + assert data == expected + +diff --git a/tests/protocols/test_websocket.py b/tests/protocols/test_websocket.py +index e728544..b9035ec 100644 +--- a/tests/protocols/test_websocket.py ++++ b/tests/protocols/test_websocket.py +@@ -12,6 +12,8 @@ import websockets.asyncio.client + import websockets.client + import websockets.exceptions + from typing_extensions import TypedDict ++from websockets.asyncio.client import ClientConnection, connect ++from websockets.exceptions import ConnectionClosed, ConnectionClosedError, InvalidHandshake, InvalidStatus + from websockets.extensions.permessage_deflate import ClientPerMessageDeflateFactory + from websockets.typing import Subprotocol + +@@ -130,8 +132,8 @@ async def test_accept_connection(ws_protocol_cls: WSProtocol, http_protocol_cls: + await self.send({"type": "websocket.accept"}) + + async def open_connection(url: str): +- async with websockets.client.connect(url) as websocket: +- return websocket.open ++ async with connect(url): ++ return True + + config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) + async with run_server(config): +@@ -146,7 +148,7 @@ async def test_shutdown(ws_protocol_cls: WSProtocol, http_protocol_cls: HTTPProt + + config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", 
port=unused_tcp_port) + async with run_server(config) as server: +- async with websockets.client.connect(f"ws://127.0.0.1:{unused_tcp_port}"): ++ async with connect(f"ws://127.0.0.1:{unused_tcp_port}"): + # Attempt shutdown while connection is still open + await server.shutdown() + +@@ -160,8 +162,8 @@ async def test_supports_permessage_deflate_extension( + + async def open_connection(url: str): + extension_factories = [ClientPerMessageDeflateFactory()] +- async with websockets.client.connect(url, extensions=extension_factories) as websocket: +- return [extension.name for extension in websocket.extensions] ++ async with connect(url, extensions=extension_factories) as websocket: ++ return [extension.name for extension in websocket.protocol.extensions] + + config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) + async with run_server(config): +@@ -180,8 +182,8 @@ async def test_can_disable_permessage_deflate_extension( + # enable per-message deflate on the client, so that we can check the server + # won't support it when it's disabled. + extension_factories = [ClientPerMessageDeflateFactory()] +- async with websockets.client.connect(url, extensions=extension_factories) as websocket: +- return [extension.name for extension in websocket.extensions] ++ async with connect(url, extensions=extension_factories) as websocket: ++ return [extension.name for extension in websocket.protocol.extensions] + + config = Config( + app=App, +@@ -203,8 +205,8 @@ async def test_close_connection(ws_protocol_cls: WSProtocol, http_protocol_cls: + + async def open_connection(url: str): + try: +- await websockets.client.connect(url) +- except websockets.exceptions.InvalidHandshake: ++ await connect(url) ++ except InvalidHandshake: + return False + return True # pragma: no cover + +@@ -224,8 +226,8 @@ async def test_headers(ws_protocol_cls: WSProtocol, http_protocol_cls: HTTPProto + await self.send({"type": "websocket.accept"}) + + async def open_connection(url: str): +- async with websockets.client.connect(url, extra_headers=[("username", "abraão")]) as websocket: +- return websocket.open ++ async with connect(url, additional_headers=[("username", "abraão")]): ++ return True + + config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) + async with run_server(config): +@@ -239,8 +241,9 @@ async def test_extra_headers(ws_protocol_cls: WSProtocol, http_protocol_cls: HTT + await self.send({"type": "websocket.accept", "headers": [(b"extra", b"header")]}) + + async def open_connection(url: str): +- async with websockets.client.connect(url) as websocket: +- return websocket.response_headers ++ async with connect(url) as websocket: ++ assert websocket.response ++ return websocket.response.headers + + config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) + async with run_server(config): +@@ -258,8 +261,8 @@ async def test_path_and_raw_path(ws_protocol_cls: WSProtocol, http_protocol_cls: + await self.send({"type": "websocket.accept"}) + + async def open_connection(url: str): +- async with websockets.client.connect(url) as websocket: +- return websocket.open ++ async with connect(url): ++ return True + + config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) + async with run_server(config): +@@ -276,7 +279,7 @@ async def test_send_text_data_to_client( + await self.send({"type": "websocket.send", "text": "123"}) + + async def 
get_data(url: str): +- async with websockets.client.connect(url) as websocket: ++ async with connect(url) as websocket: + return await websocket.recv() + + config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) +@@ -294,7 +297,7 @@ async def test_send_binary_data_to_client( + await self.send({"type": "websocket.send", "bytes": b"123"}) + + async def get_data(url: str): +- async with websockets.client.connect(url) as websocket: ++ async with connect(url) as websocket: + return await websocket.recv() + + config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) +@@ -313,7 +316,7 @@ async def test_send_and_close_connection( + await self.send({"type": "websocket.close"}) + + async def get_data(url: str): +- async with websockets.client.connect(url) as websocket: ++ async with connect(url) as websocket: + data = await websocket.recv() + is_open = True + try: +@@ -342,7 +345,7 @@ async def test_send_text_data_to_server( + await self.send({"type": "websocket.send", "text": _text}) + + async def send_text(url: str): +- async with websockets.client.connect(url) as websocket: ++ async with connect(url) as websocket: + await websocket.send("abc") + return await websocket.recv() + +@@ -365,7 +368,7 @@ async def test_send_binary_data_to_server( + await self.send({"type": "websocket.send", "bytes": _bytes}) + + async def send_text(url: str): +- async with websockets.client.connect(url) as websocket: ++ async with connect(url) as websocket: + await websocket.send(b"abc") + return await websocket.recv() + +@@ -387,7 +390,7 @@ async def test_send_after_protocol_close( + await self.send({"type": "websocket.send", "text": "123"}) + + async def get_data(url: str): +- async with websockets.client.connect(url) as websocket: ++ async with connect(url) as websocket: + data = await websocket.recv() + is_open = True + try: +@@ -407,14 +410,14 @@ async def test_missing_handshake(ws_protocol_cls: WSProtocol, http_protocol_cls: + async def app(scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable): + pass + +- async def connect(url: str): +- await websockets.client.connect(url) ++ async def open_connection(url: str): ++ await connect(url) + + config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) + async with run_server(config): +- with pytest.raises(websockets.exceptions.InvalidStatusCode) as exc_info: +- await connect(f"ws://127.0.0.1:{unused_tcp_port}") +- assert exc_info.value.status_code == 500 ++ with pytest.raises(InvalidStatus) as exc_info: ++ await open_connection(f"ws://127.0.0.1:{unused_tcp_port}") ++ assert exc_info.value.response.status_code == 500 + + + async def test_send_before_handshake( +@@ -423,14 +426,14 @@ async def test_send_before_handshake( + async def app(scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable): + await send({"type": "websocket.send", "text": "123"}) + +- async def connect(url: str): +- await websockets.client.connect(url) ++ async def open_connection(url: str): ++ await connect(url) + + config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) + async with run_server(config): +- with pytest.raises(websockets.exceptions.InvalidStatusCode) as exc_info: +- await connect(f"ws://127.0.0.1:{unused_tcp_port}") +- assert exc_info.value.status_code == 500 ++ with pytest.raises(InvalidStatus) as exc_info: ++ await 
open_connection(f"ws://127.0.0.1:{unused_tcp_port}") ++ assert exc_info.value.response.status_code == 500 + + + async def test_duplicate_handshake(ws_protocol_cls: WSProtocol, http_protocol_cls: HTTPProtocol, unused_tcp_port: int): +@@ -440,10 +443,10 @@ async def test_duplicate_handshake(ws_protocol_cls: WSProtocol, http_protocol_cl + + config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) + async with run_server(config): +- async with websockets.client.connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket: +- with pytest.raises(websockets.exceptions.ConnectionClosed): ++ async with connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket: ++ with pytest.raises(ConnectionClosed): + _ = await websocket.recv() +- assert websocket.close_code == 1006 ++ assert websocket.protocol.close_code == 1006 + + + async def test_asgi_return_value(ws_protocol_cls: WSProtocol, http_protocol_cls: HTTPProtocol, unused_tcp_port: int): +@@ -458,10 +461,10 @@ async def test_asgi_return_value(ws_protocol_cls: WSProtocol, http_protocol_cls: + + config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) + async with run_server(config): +- async with websockets.client.connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket: +- with pytest.raises(websockets.exceptions.ConnectionClosed): ++ async with connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket: ++ with pytest.raises(ConnectionClosed): + _ = await websocket.recv() +- assert websocket.close_code == 1006 ++ assert websocket.protocol.close_code == 1006 + + + @pytest.mark.parametrize("code", [None, 1000, 1001]) +@@ -493,13 +496,13 @@ async def test_app_close( + + config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) + async with run_server(config): +- async with websockets.client.connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket: ++ async with connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket: + await websocket.ping() + await websocket.send("abc") +- with pytest.raises(websockets.exceptions.ConnectionClosed): ++ with pytest.raises(ConnectionClosed): + await websocket.recv() +- assert websocket.close_code == (code or 1000) +- assert websocket.close_reason == (reason or "") ++ assert websocket.protocol.close_code == (code or 1000) ++ assert websocket.protocol.close_reason == (reason or "") + + + async def test_client_close(ws_protocol_cls: WSProtocol, http_protocol_cls: HTTPProtocol, unused_tcp_port: int): +@@ -518,7 +521,7 @@ async def test_client_close(ws_protocol_cls: WSProtocol, http_protocol_cls: HTTP + break + + async def websocket_session(url: str): +- async with websockets.client.connect(url) as websocket: ++ async with connect(url) as websocket: + await websocket.ping() + await websocket.send("abc") + await websocket.close(code=1001, reason="custom reason") +@@ -555,7 +558,7 @@ async def test_client_connection_lost( + port=unused_tcp_port, + ) + async with run_server(config): +- async with websockets.client.connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket: ++ async with connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket: + websocket.transport.close() + await asyncio.sleep(0.1) + got_disconnect_event_before_shutdown = got_disconnect_event +@@ -583,7 +586,7 @@ async def test_client_connection_lost_on_send( + config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) + async with run_server(config): + url = 
f"ws://127.0.0.1:{unused_tcp_port}" +- async with websockets.client.connect(url): ++ async with connect(url): + await asyncio.sleep(0.1) + disconnect.set() + +@@ -642,11 +645,11 @@ async def test_send_close_on_server_shutdown( + disconnect_message = message + break + +- websocket: websockets.client.WebSocketClientProtocol | None = None ++ websocket: ClientConnection | None = None + + async def websocket_session(uri: str): + nonlocal websocket +- async with websockets.client.connect(uri) as ws_connection: ++ async with connect(uri) as ws_connection: + websocket = ws_connection + await server_shutdown_event.wait() + +@@ -676,9 +679,7 @@ async def test_subprotocols( + await self.send({"type": "websocket.accept", "subprotocol": subprotocol}) + + async def get_subprotocol(url: str): +- async with websockets.client.connect( +- url, subprotocols=[Subprotocol("proto1"), Subprotocol("proto2")] +- ) as websocket: ++ async with connect(url, subprotocols=[Subprotocol("proto1"), Subprotocol("proto2")]) as websocket: + return websocket.subprotocol + + config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) +@@ -688,7 +689,7 @@ async def test_subprotocols( + + + MAX_WS_BYTES = 1024 * 1024 * 16 +-MAX_WS_BYTES_PLUS1 = MAX_WS_BYTES + 1 ++MAX_WS_BYTES_PLUS1 = MAX_WS_BYTES + 10 + + + @pytest.mark.parametrize( +@@ -731,15 +732,15 @@ async def test_send_binary_data_to_server_bigger_than_default_on_websockets( + port=unused_tcp_port, + ) + async with run_server(config): +- async with websockets.client.connect(f"ws://127.0.0.1:{unused_tcp_port}", max_size=client_size_sent) as ws: ++ async with connect(f"ws://127.0.0.1:{unused_tcp_port}", max_size=client_size_sent) as ws: + await ws.send(b"\x01" * client_size_sent) + if expected_result == 0: + data = await ws.recv() + assert data == b"\x01" * client_size_sent + else: +- with pytest.raises(websockets.exceptions.ConnectionClosedError): ++ with pytest.raises(ConnectionClosedError): + await ws.recv() +- assert ws.close_code == expected_result ++ assert ws.protocol.close_code == expected_result + + + async def test_server_reject_connection( +@@ -764,10 +765,10 @@ async def test_server_reject_connection( + disconnected_message = await receive() + + async def websocket_session(url: str): +- with pytest.raises(websockets.exceptions.InvalidStatusCode) as exc_info: +- async with websockets.client.connect(url): ++ with pytest.raises(InvalidStatus) as exc_info: ++ async with connect(url): + pass # pragma: no cover +- assert exc_info.value.status_code == 403 ++ assert exc_info.value.response.status_code == 403 + + config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) + async with run_server(config): +@@ -937,10 +938,10 @@ async def test_server_reject_connection_with_invalid_msg( + await send(message) + + async def websocket_session(url: str): +- with pytest.raises(websockets.exceptions.InvalidStatusCode) as exc_info: +- async with websockets.client.connect(url): ++ with pytest.raises(InvalidStatus) as exc_info: ++ async with connect(url): + pass # pragma: no cover +- assert exc_info.value.status_code == 404 ++ assert exc_info.value.response.status_code == 404 + + config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) + async with run_server(config): +@@ -971,10 +972,10 @@ async def test_server_reject_connection_with_missing_body( + # no further message + + async def websocket_session(url: str): +- with 
pytest.raises(websockets.exceptions.InvalidStatusCode) as exc_info: +- async with websockets.client.connect(url): ++ with pytest.raises(InvalidStatus) as exc_info: ++ async with connect(url): + pass # pragma: no cover +- assert exc_info.value.status_code == 404 ++ assert exc_info.value.response.status_code == 404 + + config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) + async with run_server(config): +@@ -1014,17 +1015,17 @@ async def test_server_multiple_websocket_http_response_start_events( + exception_message = str(exc) + + async def websocket_session(url: str): +- with pytest.raises(websockets.exceptions.InvalidStatusCode) as exc_info: +- async with websockets.client.connect(url): ++ with pytest.raises(InvalidStatus) as exc_info: ++ async with connect(url): + pass # pragma: no cover +- assert exc_info.value.status_code == 404 ++ assert exc_info.value.response.status_code == 404 + + config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) + async with run_server(config): + await websocket_session(f"ws://127.0.0.1:{unused_tcp_port}") + + assert exception_message == ( +- "Expected ASGI message 'websocket.http.response.body' but got " "'websocket.http.response.start'." ++ "Expected ASGI message 'websocket.http.response.body' but got 'websocket.http.response.start'." + ) + + +@@ -1053,7 +1054,7 @@ async def test_server_can_read_messages_in_buffer_after_close( + + config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) + async with run_server(config): +- async with websockets.client.connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket: ++ async with connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket: + await websocket.send(b"abc") + await websocket.send(b"abc") + await websocket.send(b"abc") +@@ -1070,8 +1071,9 @@ async def test_default_server_headers( + await self.send({"type": "websocket.accept"}) + + async def open_connection(url: str): +- async with websockets.client.connect(url) as websocket: +- return websocket.response_headers ++ async with connect(url) as websocket: ++ assert websocket.response ++ return websocket.response.headers + + config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port) + async with run_server(config): +@@ -1085,8 +1087,9 @@ async def test_no_server_headers(ws_protocol_cls: WSProtocol, http_protocol_cls: + await self.send({"type": "websocket.accept"}) + + async def open_connection(url: str): +- async with websockets.client.connect(url) as websocket: +- return websocket.response_headers ++ async with connect(url) as websocket: ++ assert websocket.response ++ return websocket.response.headers + + config = Config( + app=App, +@@ -1108,8 +1111,9 @@ async def test_no_date_header_on_wsproto(http_protocol_cls: HTTPProtocol, unused + await self.send({"type": "websocket.accept"}) + + async def open_connection(url: str): +- async with websockets.client.connect(url) as websocket: +- return websocket.response_headers ++ async with connect(url) as websocket: ++ assert websocket.response ++ return websocket.response.headers + + config = Config( + app=App, +@@ -1140,8 +1144,9 @@ async def test_multiple_server_header( + ) + + async def open_connection(url: str): +- async with websockets.client.connect(url) as websocket: +- return websocket.response_headers ++ async with connect(url) as websocket: ++ assert websocket.response ++ return websocket.response.headers + + 
config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
+     async with run_server(config):
+@@ -1176,8 +1181,8 @@ async def test_lifespan_state(ws_protocol_cls: WSProtocol, http_protocol_cls: HT
+             await self.send({"type": "websocket.accept"})
+
+     async def open_connection(url: str):
+-        async with websockets.client.connect(url) as websocket:
+-            return websocket.open
++        async with connect(url):
++            return True
+
+     async def app_wrapper(scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable):
+         if scope["type"] == "lifespan":
+diff --git a/uvicorn/protocols/websockets/websockets_impl.py b/uvicorn/protocols/websockets/websockets_impl.py
+index cd6c54f..685d6b6 100644
+--- a/uvicorn/protocols/websockets/websockets_impl.py
++++ b/uvicorn/protocols/websockets/websockets_impl.py
+@@ -13,8 +13,7 @@ from websockets.datastructures import Headers
+ from websockets.exceptions import ConnectionClosed
+ from websockets.extensions.base import ServerExtensionFactory
+ from websockets.extensions.permessage_deflate import ServerPerMessageDeflateFactory
+-from websockets.legacy.server import HTTPResponse
+-from websockets.server import WebSocketServerProtocol
++from websockets.legacy.server import HTTPResponse, WebSocketServerProtocol
+ from websockets.typing import Subprotocol
+
+ from uvicorn._types import (
+diff --git a/uvicorn/protocols/websockets/wsproto_impl.py b/uvicorn/protocols/websockets/wsproto_impl.py
+index 828afe5..5d84bff 100644
+--- a/uvicorn/protocols/websockets/wsproto_impl.py
++++ b/uvicorn/protocols/websockets/wsproto_impl.py
+@@ -149,12 +149,13 @@ class WSProtocol(asyncio.Protocol):
+             self.writable.set()  # pragma: full coverage
+
+     def shutdown(self) -> None:
+-        if self.handshake_complete:
+-            self.queue.put_nowait({"type": "websocket.disconnect", "code": 1012})
+-            output = self.conn.send(wsproto.events.CloseConnection(code=1012))
+-            self.transport.write(output)
+-        else:
+-            self.send_500_response()
++        if not self.response_started:
++            if self.handshake_complete:
++                self.queue.put_nowait({"type": "websocket.disconnect", "code": 1012})
++                output = self.conn.send(wsproto.events.CloseConnection(code=1012))
++                self.transport.write(output)
++            else:
++                self.send_500_response()
+         self.transport.close()
+
+     def on_task_complete(self, task: asyncio.Task[None]) -> None:
+@@ -221,13 +222,14 @@ class WSProtocol(asyncio.Protocol):
+     def send_500_response(self) -> None:
+         if self.response_started or self.handshake_complete:
+             return  # we cannot send responses anymore
++        reject_data = b"Internal Server Error"
+         headers: list[tuple[bytes, bytes]] = [
+             (b"content-type", b"text/plain; charset=utf-8"),
++            (b"content-length", str(len(reject_data)).encode()),
+             (b"connection", b"close"),
+-            (b"content-length", b"21"),
+         ]
+         output = self.conn.send(wsproto.events.RejectConnection(status_code=500, headers=headers, has_body=True))
+-        output += self.conn.send(wsproto.events.RejectData(data=b"Internal Server Error"))
++        output += self.conn.send(wsproto.events.RejectData(data=reject_data))
+         self.transport.write(output)
+
+     async def run_asgi(self) -> None:
diff --git a/ilot/uvicorn/APKBUILD b/ilot/uvicorn/APKBUILD
new file mode 100644
index 0000000..1f14918
--- /dev/null
+++ b/ilot/uvicorn/APKBUILD
@@ -0,0 +1,59 @@
+maintainer="Michał Polański "
+pkgname=uvicorn
+pkgver=0.34.0
+pkgrel=0
+pkgdesc="Lightning-fast ASGI server"
+url="https://www.uvicorn.org/"
+license="BSD-3-Clause"
+# disable due to lack of support for websockets 14
+# 
https://gitlab.alpinelinux.org/alpine/aports/-/issues/16646 +arch="noarch" +depends="py3-click py3-h11" +makedepends="py3-gpep517 py3-hatchling" +checkdepends=" + py3-a2wsgi + py3-dotenv + py3-httptools + py3-httpx + py3-pytest + py3-pytest-mock + py3-trustme + py3-typing-extensions + py3-watchfiles + py3-websockets + py3-wsproto + py3-yaml + " +subpackages="$pkgname-pyc" +source="https://github.com/encode/uvicorn/archive/$pkgver/uvicorn-$pkgver.tar.gz + test_multiprocess.patch + 2540_add-websocketssansioprotocol.patch + 2541_bump-wesockets-on-requirements.patch + fix-test-wsgi.patch + " + +build() { + gpep517 build-wheel \ + --wheel-dir .dist \ + --output-fd 3 3>&1 >&2 +} + +check() { + python3 -m venv --clear --without-pip --system-site-packages .testenv + .testenv/bin/python3 -m installer .dist/*.whl + .testenv/bin/python3 -m pytest \ + -k "not test_close_connection_with_multiple_requests" # a known issue +} + +package() { + python3 -m installer -d "$pkgdir" \ + .dist/uvicorn-$pkgver-py3-none-any.whl +} + +sha512sums=" +260782e385a2934049da8c474750958826afe1bfe23b38fe2f6420f355af7a537563f8fe6ac3830814c7469203703d10f4f9f3d6e53e79113bfd2fd34f7a7c72 uvicorn-0.34.0.tar.gz +cfad91dd84f8974362f52d754d7a29f09d07927a46acaa0eb490b6115a5729d84d6df94fead10ccd4cce7f5ea376f1348b0f59daede661dd8373a3851c313c46 test_multiprocess.patch +858e9a7baaf1c12e076aecd81aaaf622b35a59dcaabea4ee1bfc4cda704c9fe271b1cc616a5910d845393717e4989cecb3b04be249cb5d0df1001ec5224c293f 2540_add-websocketssansioprotocol.patch +f8a8c190981b9070232ea985880685bc801947cc7f673d59abf73d3e68bc2e13515ad200232a1de2af0808bc85da48a341f57d47caf87bcc190bfdc3c45718e0 2541_bump-wesockets-on-requirements.patch +379963f9ccbda013e4a0bc3441eee70a581c91f60206aedc15df6a8737950824b7cb8d867774fc415763449bb3e0bba66601e8551101bfc1741098acd035f0cc fix-test-wsgi.patch +" diff --git a/ilot/uvicorn/fix-test-wsgi.patch b/ilot/uvicorn/fix-test-wsgi.patch new file mode 100644 index 0000000..ed49e52 --- /dev/null +++ b/ilot/uvicorn/fix-test-wsgi.patch @@ -0,0 +1,13 @@ +diff --git a/tests/middleware/test_wsgi.py.orig b/tests/middleware/test_wsgi.py +index 6003f27..2750487 100644 +--- a/tests/middleware/test_wsgi.py.orig ++++ b/tests/middleware/test_wsgi.py +@@ -73,7 +73,7 @@ async def test_wsgi_post(wsgi_middleware: Callable) -> None: + async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client: + response = await client.post("/", json={"example": 123}) + assert response.status_code == 200 +- assert response.text == '{"example":123}' ++ assert response.text == '{"example": 123}' + + + @pytest.mark.anyio diff --git a/ilot/uvicorn/test_multiprocess.patch b/ilot/uvicorn/test_multiprocess.patch new file mode 100644 index 0000000..231526e --- /dev/null +++ b/ilot/uvicorn/test_multiprocess.patch @@ -0,0 +1,14 @@ +Wait a bit longer, otherwise the workers might +not have time to finish restarting. 
+ +--- a/tests/supervisors/test_multiprocess.py ++++ b/tests/supervisors/test_multiprocess.py +@@ -132,7 +132,7 @@ def test_multiprocess_sighup() -> None: + time.sleep(1) + pids = [p.pid for p in supervisor.processes] + supervisor.signal_queue.append(signal.SIGHUP) +- time.sleep(1) ++ time.sleep(3) + assert pids != [p.pid for p in supervisor.processes] + supervisor.signal_queue.append(signal.SIGINT) + supervisor.join_all() diff --git a/ilot/wikijs/APKBUILD b/ilot/wikijs/APKBUILD new file mode 100644 index 0000000..5b75746 --- /dev/null +++ b/ilot/wikijs/APKBUILD @@ -0,0 +1,62 @@ +# Maintainer: Antoine Martin (ayakael) +# Contributor: Antoine Martin (ayakael) +pkgname=wikijs +pkgver=2.5.305 +pkgrel=0 +pkgdesc="Wiki.js | A modern, lightweight and powerful wiki app built on Node.js" +license="AGPL-3.0" +arch="!armv7 x86_64" +options="!check" # No test suite +depends=" + libcap-setcap + nodejs>=10.12.0 + postgresql + python3 + " +makedepends=" + yarn + npm + " +url="https://github.com/Requarks/wiki" +subpackages="$pkgname-openrc" +install="$pkgname.post-install $pkgname.pre-install" +builddir="$srcdir"/wiki-$pkgver +pkgusers="wikijs" +pkggroups="wikijs" +source=" + $pkgname-$pkgver.tar.gz::https://github.com/requarks/wiki/archive/refs/tags/v$pkgver.tar.gz + wikijs.initd + config.sample.yml.patch +" + +prepare() { + default_prepare + sed -i "s|\"version.*|\"version\": \"$pkgver\",|" "$builddir"/package.json + sed -i 's|"dev": true.*|"dev": "false",|' "$builddir"/package.json +} +build() { + yarn --frozen-lockfile --non-interactive + yarn build + rm -rf node_modules + yarn --production --frozen-lockfile --non-interactive +} + +package() { + install -Dm755 "$srcdir"/wikijs.initd "$pkgdir"/etc/init.d/wikijs + + install -Dm644 -o 5494 -g 5494 "$builddir"/config.sample.yml "$pkgdir"/etc/wikijs/config.yml + + install -Dm644 "$builddir"/package.json -t "$pkgdir"/usr/lib/bundles/wikijs + cp -aR "$builddir"/assets "$builddir"/server "$builddir"/node_modules "$pkgdir"/usr/lib/bundles/wikijs + + # remove prebuilts + rm -Rf "$pkgdir"/usr/lib/bundles/wikijs/node_modules/*/prebuilds + + mkdir -p "$pkgdir"/var/lib/wikijs + chown 5494:5494 "$pkgdir"/var/lib/wikijs +} +sha512sums=" +e715e2d93fd176dc93676b3dd97d8dd745589552a7d67971fce0c1097f607fa44a3147534709a82b3ad13dda95d7c5833bc30ec37538c6cdef54ac309e6b44d1 wikijs-2.5.305.tar.gz +355131ee5617348b82681cb8543c784eea59689990a268ecd3b77d44fe9abcca9c86fb8b047f0a8faeba079c650faa7790c5dd65418d313cd7561f38bb590c03 wikijs.initd +07b536c20e370d2a926038165f0e953283259c213a80a8648419565f5359ab05f528ac310e81606914013da212270df6feddb22e514cbcb2464c8274c956e4af config.sample.yml.patch +" diff --git a/ilot/wikijs/config.sample.yml.patch b/ilot/wikijs/config.sample.yml.patch new file mode 100644 index 0000000..6532c25 --- /dev/null +++ b/ilot/wikijs/config.sample.yml.patch @@ -0,0 +1,13 @@ +diff --git a/config.sample.yml.orig b/config.sample.yml +index 47edd8d..458472a 100644 +--- a/config.sample.yml.orig ++++ b/config.sample.yml +@@ -136,7 +136,7 @@ ha: false + # Data Path + # --------------------------------------------------------------------- + # Writeable data path used for cache and temporary user uploads. 
+-dataPath: ./data
++dataPath: /var/lib/wikijs/data
+
+ # ---------------------------------------------------------------------
+ # Body Parser Limit
diff --git a/ilot/wikijs/wikijs.initd b/ilot/wikijs/wikijs.initd
new file mode 100644
index 0000000..680efbf
--- /dev/null
+++ b/ilot/wikijs/wikijs.initd
@@ -0,0 +1,24 @@
+#!/sbin/openrc-run
+name="$RC_SVCNAME"
+cfgfile="/etc/conf.d/$RC_SVCNAME"
+pidfile="/var/run/$RC_SVCNAME.pid"
+command="/usr/bin/node server"
+command_args=""
+command_user="wikijs"
+command_group="wikijs"
+supervisor="supervise-daemon"
+start_stop_daemon_args=""
+command_background="yes"
+output_log="/var/log/$RC_SVCNAME/$RC_SVCNAME.log"
+error_log="/var/log/$RC_SVCNAME/$RC_SVCNAME.err"
+working_directory="/usr/lib/bundles/wikijs"
+
+start_pre() {
+	checkpath --directory --owner $command_user:$command_user --mode 0775 \
+		/var/log/$RC_SVCNAME \
+		/var/lib/$RC_SVCNAME
+	export NODE_ENV=production
+	export CONFIG_FILE=/etc/wikijs/config.yml
+	cd "$working_directory"
+}
+
diff --git a/ilot/wikijs/wikijs.post-install b/ilot/wikijs/wikijs.post-install
new file mode 100755
index 0000000..748e847
--- /dev/null
+++ b/ilot/wikijs/wikijs.post-install
@@ -0,0 +1,19 @@
+#!/bin/sh
+set -eu
+
+group=wikijs
+config_file='/etc/wikijs/config.yml'
+
+# allow node to bind to privileged ports without running wiki.js as root
+setcap 'cap_net_bind_service=+ep' /usr/bin/node
+
+cat >&2 <<-EOF
+*
+* 1. Adjust settings in /etc/wikijs/config.yml.
+*
+* 2. Create database for wikijs:
+*
+*        psql -c "CREATE ROLE wikijs PASSWORD 'top-secret' INHERIT LOGIN;"
+*        psql -c "CREATE DATABASE wikijs OWNER wikijs ENCODING 'UTF-8';"
+*
+EOF
diff --git a/ilot/wikijs/wikijs.pre-install b/ilot/wikijs/wikijs.pre-install
new file mode 100644
index 0000000..579485d
--- /dev/null
+++ b/ilot/wikijs/wikijs.pre-install
@@ -0,0 +1,20 @@
+#!/bin/sh
+# It's very important to set user/group correctly.
+
+wikijs_dir='/var/lib/wikijs'
+
+if ! getent group wikijs 1>/dev/null; then
+	echo '* Creating group wikijs' 1>&2
+
+	addgroup -S wikijs -g 5494
+fi
+
+if ! id wikijs 2>/dev/null 1>&2; then
+	echo '* Creating user wikijs' 1>&2
+
+	adduser -DHS -G wikijs -h "$wikijs_dir" -u 5494 -s /bin/sh \
+		-g "added by apk for wikijs" wikijs
+	passwd -u wikijs 1>/dev/null # unlock
+fi
+
+exit 0