Compare commits

..

38 commits

Author SHA1 Message Date
fe5359e933
remove install files
Some checks failed
/ lint (pull_request) Failing after 29s
/ build-x86_64 (pull_request) Successful in 2m20s
/ deploy-x86_64 (pull_request) Successful in 26s
/ deploy-aarch64 (pull_request) Successful in 54s
/ build-aarch64 (pull_request) Successful in 8m10s
2024-08-25 15:16:10 -04:00
f6c84e562f
forge-ci: remove packages after WIP check
Some checks failed
/ lint (pull_request) Failing after 27s
/ build-x86_64 (pull_request) Successful in 2m18s
/ deploy-x86_64 (pull_request) Successful in 28s
/ build-aarch64 (pull_request) Successful in 8m9s
/ deploy-aarch64 (pull_request) Successful in 1m3s
2024-08-25 15:09:21 -04:00
655cadf71d
forgejo-ci: remove packages from all arches when deploying
Some checks failed
/ lint (pull_request) Failing after 25s
/ build-x86_64 (pull_request) Successful in 2m16s
/ deploy-x86_64 (pull_request) Successful in 27s
/ build-aarch64 (pull_request) Successful in 8m20s
/ deploy-aarch64 (pull_request) Successful in 54s
2024-08-25 14:58:11 -04:00
a882011e97 ilot/codeberg-pages-server: new aport
Some checks failed
/ lint (pull_request) Failing after 26s
/ build-x86_64 (pull_request) Successful in 2m12s
/ deploy-x86_64 (pull_request) Successful in 25s
/ build-aarch64 (pull_request) Successful in 8m30s
/ deploy-aarch64 (pull_request) Successful in 53s
2024-08-25 18:55:12 +00:00
357ebed98c ilot/forgejo-aneksajo: new aport 2024-08-25 15:01:10 +00:00
c9dc783fcb backports/forgejo-runner: new aport 2024-08-25 15:01:10 +00:00
61ef8d893c
ilot/py3-django-rest-framework: bump
All checks were successful
/ lint (pull_request) Successful in 31s
/ deploy-x86_64 (pull_request) Successful in 30s
/ build-x86_64 (pull_request) Successful in 7m17s
/ deploy-aarch64 (pull_request) Successful in 1m0s
/ build-aarch64 (pull_request) Successful in 29m53s
2024-08-25 09:42:24 -04:00
d985367f7b
ilot/authentik: upgrade to 2024.4.4
Some checks failed
/ lint (pull_request) Successful in 27s
/ deploy-aarch64 (pull_request) Has been skipped
/ build-aarch64 (pull_request) Failing after 1m1s
/ build-x86_64 (pull_request) Successful in 7m14s
/ deploy-x86_64 (pull_request) Successful in 28s
2024-08-25 09:38:13 -04:00
03803dabae
forgejo-ci: build.sh is now local rather than patched
All checks were successful
/ lint (pull_request) Successful in 28s
/ build-x86_64 (pull_request) Successful in 22m33s
/ deploy-x86_64 (pull_request) Successful in 36s
/ deploy-aarch64 (pull_request) Successful in 1m22s
/ build-aarch64 (pull_request) Successful in 1h2m32s
2024-08-24 21:23:33 -04:00
15acc98854
ilot/*: bump pkgrel, disable peertube and loomio
Some checks failed
/ lint (pull_request) Successful in 29s
/ deploy-aarch64 (pull_request) Has been skipped
/ build-aarch64 (pull_request) Failing after 57s
/ build-x86_64 (pull_request) Successful in 22m41s
/ deploy-x86_64 (pull_request) Failing after 29s
2024-08-24 21:12:45 -04:00
e97ff811f0
forgejo-ci: initial 2024-08-24 21:10:16 -04:00
7c15cbb47e
gitlab-ci: drop 2024-08-24 18:41:43 -04:00
f37777699f
ilot/wikijs: new aport 2024-08-09 22:28:49 -04:00
71cf1c997b
ilot/uptime-kuma: new aport 2024-08-09 22:28:47 -04:00
9410c4943e
ilot/py3-tenant-schemas-celery: new aport 2024-08-09 22:28:45 -04:00
2babe46d95
ilot/py3-scim2-filter-parser: new aport 2024-08-09 22:28:42 -04:00
ee490115e4
ilot/py3-django-tenants: new aport 2024-08-09 22:28:40 -04:00
3282acd59f
ilot/py3-django-rest-framework: new aport 2024-08-09 22:28:37 -04:00
1279f9642e
ilot/php83-pecl-inotify: new aport 2024-08-09 22:28:35 -04:00
732a6cc9da
ilot/php82-pecl-inotify: new aport 2024-08-09 22:28:32 -04:00
838f490ff0
ilot/peertube: new aport 2024-08-09 22:28:30 -04:00
9306c27137
ilot/loomio: new aport 2024-08-09 22:28:26 -04:00
cbd0cc098a
ilot/listmonk: new aport 2024-08-09 22:28:24 -04:00
7c7a4486cd
ilot/freescout: new aport 2024-08-09 22:28:22 -04:00
84fc5eb427
ilot/authentik: new aport 2024-08-09 22:28:19 -04:00
58be187bd1
archives/ruby3.2-webrick: new aport 2024-08-09 22:28:17 -04:00
b598842f29
archives/ruby3.2-test-unit: new aport 2024-08-09 22:28:14 -04:00
d696cbb525
archives/ruby3.2-rake: new aport 2024-08-09 22:28:11 -04:00
f7eea066ee
archives/ruby3.2-power_assert: new aport 2024-08-09 22:28:09 -04:00
8497be9439
archives/ruby3.2-minitest: new aport 2024-08-09 22:28:07 -04:00
cfdd98d12f
archives/ruby3.2-bundler: new aport 2024-08-09 22:28:04 -04:00
c434b5145e
archives/ruby3.2: new aport 2024-08-09 22:28:02 -04:00
cda65a1f03
archives/mastodon: new aport 2024-08-09 22:27:59 -04:00
fa80820d8e
archives/gitlab-shell: new aport 2024-08-09 22:27:57 -04:00
9f600f271f
archives/gitlab-pages: new aport 2024-08-09 22:27:54 -04:00
8403bcb534
archives/gitlab-foss: new aport 2024-08-09 22:27:52 -04:00
6496493030
archives/gitaly: new aport 2024-08-09 22:27:50 -04:00
4f090a8ad5
push 2024-08-09 22:27:01 -04:00
58 changed files with 4263 additions and 2622 deletions

View file

@ -1,34 +0,0 @@
#!/bin/bash
# expects the following env variables:
# downstream: downstream repo
repo=${downstream/*\/}
curl --silent $downstream/x86_64/APKINDEX.tar.gz | tar -O -zx APKINDEX > APKINDEX
owned_by_you=$(awk -v RS= -v ORS="\n\n" '/m:Antoine Martin \(ayakael\) <dev@ayakael.net>/' APKINDEX | awk -F ':' '{if($1=="o"){print $2}}' | sort | uniq)
echo "Found $(printf '%s\n' $owned_by_you | wc -l ) packages owned by you"
rm -f out_of_date not_in_anitya
for pkg in $owned_by_you; do
upstream_version=$(curl --fail -X GET -sS -H 'Content-Type: application/json' "https://release-monitoring.org/api/v2/packages/?name=$pkg&distribution=Alpine" | jq -r '.items.[].stable_version')
downstream_version=$(sed -n "/^P:$pkg$/,/^$/p" APKINDEX | awk -F ':' '{if($1=="V"){print $2}}' | sort -V | tail -n 1)
downstream_version=${downstream_version/-*}
# special case for forgejo-aneksajo:
upstream_version=${upstream_version/-git-annex/_git}
if [ -z "$upstream_version" ]; then
echo "$pkg not in anitya"
echo "$pkg" >> not_in_anitya
elif [ "$downstream_version" != "$(printf '%s\n' $upstream_version $downstream_version | sort -V | head -n 1)" ]; then
echo "$pkg higher downstream"
continue
elif [ "$upstream_version" != "$downstream_version" ]; then
echo "$pkg upstream version $upstream_version does not match downstream version $downstream_version"
echo "$pkg $downstream_version $upstream_version $repo" >> out_of_date
fi
done
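To see what this version check does in isolation, here is a minimal sketch of a manual run, assuming the same `downstream` URL the scheduled workflow below passes to it; the `out_of_date` and `not_in_anitya` files it leaves behind are what `create_issue.sh` consumes:

```sh
# hypothetical manual invocation; in CI the workflow exports `downstream` for us
export downstream=https://forge.ilot.io/api/packages/ilot/alpine/v3.21/ilot
./.forgejo/bin/check_ver.sh
cat out_of_date     # packages whose upstream release is newer than the apk in the repo
cat not_in_anitya   # packages release-monitoring.org does not know about
```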

View file

@ -1,165 +0,0 @@
#!/bin/bash
# expects:
# env variable FORGEJO_TOKEN
# file out_of_date
IFS='
'
repo=${downstream/*\/}
does_it_exist() {
name=$1
downstream_version=$2
upstream_version=$3
repo=$4
query="$repo/$name: upgrade to $upstream_version"
query="$(echo $query | sed 's| |%20|g' | sed 's|:|%3A|g' | sed 's|/|%2F|g' )"
result="$(curl --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN"
)"
if [ "$result" == "[]" ]; then
return 1
fi
}
is_it_old() {
name=$1
downstream_version=$2
upstream_version=$3
repo=$4
query="$repo/$name: upgrade to"
query="$(echo $query | sed 's| |%20|g' | sed 's|:|%3A|g' | sed 's|/|%2F|g' )"
result="$(curl --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN"
)"
result_title="$(echo $result | jq -r '.[].title' )"
result_id="$(echo $result | jq -r '.[].number' )"
result_upstream_version="$(echo $result_title | awk '{print $4}')"
if [ "$upstream_version" != "$result_upstream_version" ]; then
echo $result_id
else
echo 0
fi
}
update_title() {
name=$1
downstream_version=$2
upstream_version=$3
repo=$4
id=$5
result=$(curl --silent -X 'PATCH' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues/$id" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"title\": \"$repo/$name: upgrade to $upstream_version\"
}"
)
return 0
}
create_issue() {
name=$1
downstream_version=$2
upstream_version=$3
repo=$4
result=$(curl --silent -X 'POST' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"title\": \"$repo/$name: upgrade to $upstream_version\",
\"labels\": [
$LABEL_NUMBER
]
}")
return 0
}
if [ -f out_of_date ]; then
out_of_date="$(cat out_of_date)"
echo "Detected $(wc -l out_of_date) out-of-date packages, creating issues"
for pkg in $out_of_date; do
name="$(echo $pkg | awk '{print $1}')"
downstream_version="$(echo $pkg | awk '{print $2}')"
upstream_version="$(echo $pkg | awk '{print $3}')"
repo="$(echo $pkg | awk '{print $4}')"
if does_it_exist $name $downstream_version $upstream_version $repo; then
echo "Issue for $repo/$name already exists"
continue
fi
id=$(is_it_old $name $downstream_version $upstream_version $repo)
if [ "$id" != "0" ] && [ -n "$id" ]; then
echo "Issue for $repo/$name needs updating"
update_title $name $downstream_version $upstream_version $repo $id
continue
fi
echo "Creating issue for $repo/$name"
create_issue $name $downstream_version $upstream_version $repo
done
fi
if [ -f not_in_anitya ]; then
query="Add missing $repo packages to anitya"
query="$(echo $query | sed 's| |%20|g')"
result="$(curl --silent -X 'GET' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues?state=open&q=$query&type=issues" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN"
)"
if [ "$result" == "[]" ]; then
echo "Creating anitya issue"
result=$(curl --silent -X 'POST' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"title\": \"Add missing $repo packages to anitya\",
\"body\": \"- [ ] $(sed '{:q;N;s/\n/\\n- [ ] /g;t q}' not_in_anitya)\",
\"labels\": [
$LABEL_NUMBER
]
}")
else
echo "Updating anitya issue"
result_id="$(echo $result | jq -r '.[].number' )"
result=$(curl --silent -X 'PATCH' \
"$GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/issues/$result_id" \
-H 'accept: application/json' \
-H "authorization: Basic $FORGEJO_TOKEN" \
-H 'Content-Type: application/json' \
-d "{
\"body\": \"- [ ] $(sed '{:q;N;s/\n/\\n- [ ] /g;t q}' not_in_anitya)\"
}"
)
fi
fi
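For reference, the glue between the two scripts is the `out_of_date` file written by `check_ver.sh`: one line per package, read field by field above (values here are only illustrative):

```sh
# <name> <downstream_version> <upstream_version> <repo>
#  e.g.:  authentik 2024.4.4 2024.10.5 ilot
# which does_it_exist/create_issue turn into the issue title
#  "ilot/authentik: upgrade to 2024.10.5"
```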

View file

@ -3,7 +3,7 @@
# shellcheck disable=SC3040
set -eu -o pipefail
readonly REPOS="backports user"
readonly REPOS="backports ilot"
readonly BASEBRANCH=$GITHUB_BASE_REF
readonly TARGET_REPO=$CI_ALPINE_REPO
@ -14,12 +14,18 @@ for apk in $apkgs; do
arch=$(echo $apk | awk -F '/' '{print $3}')
name=$(echo $apk | awk -F '/' '{print $4}')
echo "Sending $name of arch $arch to $TARGET_REPO/$BASEBRANCH/$branch"
return=$(curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN --upload-file $apk $TARGET_REPO/$BASEBRANCH/$branch 2>&1)
echo $return
if [ "$return" == "package file already exists" ]; then
echo "Package already exists, refreshing..."
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN -X DELETE $TARGET_REPO/$BASEBRANCH/$branch/$arch/$name
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN --upload-file $apk $TARGET_REPO/$BASEBRANCH/$branch
if [ "$(curl -s $GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/pulls/$GITHUB_EVENT_NUMBER | jq .draft)" == "true" ]; then
# if draft, send to -testing branch
branch="$branch-testing"
fi
# always clear out package before deploying
for delarch in x86_64 aarch64 armv7 armhf s390x ppc64le riscv64 loongarch64 x86; do
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN -X DELETE $TARGET_REPO/$BASEBRANCH/$branch/$delarch/$name 2>&1 > /dev/null
done
echo "Sending $name of arch $arch to $TARGET_REPO/$BASEBRANCH/$branch"
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN --upload-file $apk $TARGET_REPO/$BASEBRANCH/$branch
done
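Concretely, for a draft pull request against `v3.20` that produced an `ilot` package, the calls above resolve to endpoints of roughly this shape (package name and version are placeholders):

```sh
# clear any stale copy on each arch, then upload the fresh apk to the -testing branch
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN -X DELETE \
    "$CI_ALPINE_REPO/v3.20/ilot-testing/x86_64/foo-1.0-r0.apk"
curl -s --user $FORGE_REPO_USER:$FORGE_REPO_TOKEN \
    --upload-file packages/ilot/x86_64/foo-1.0-r0.apk "$CI_ALPINE_REPO/v3.20/ilot-testing"
```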

View file

@ -19,8 +19,7 @@ jobs:
steps:
- name: Environment setup
run: |
doas apk add nodejs git patch curl net-tools
doas hostname host.docker.internal
doas apk add nodejs git patch curl
cd /etc/apk/keys
doas curl -JO https://forge.ilot.io/api/packages/ilot/alpine/key
- name: Repo pull

View file

@ -19,8 +19,7 @@ jobs:
steps:
- name: Environment setup
run: |
doas apk add nodejs git patch curl net-tools
doas hostname host.docker.internal
doas apk add nodejs git patch curl
cd /etc/apk/keys
doas curl -JO https://forge.ilot.io/api/packages/ilot/alpine/key
- name: Repo pull

View file

@ -1,27 +0,0 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * *'
jobs:
check-user:
name: Check user repo
runs-on: x86_64
container:
image: alpine:latest
env:
downstream: https://forge.ilot.io/api/packages/ilot/alpine/v3.21/ilot
FORGEJO_TOKEN: ${{ secrets.forgejo_token }}
LABEL_NUMBER: 8
steps:
- name: Environment setup
run: apk add grep coreutils gawk curl wget bash nodejs git jq sed
- name: Get scripts
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Check out-of-date packages
run: ${{ github.workspace }}/.forgejo/bin/check_ver.sh
- name: Create issues
run: ${{ github.workspace }}/.forgejo/bin/create_issue.sh

View file

@ -1,43 +1,44 @@
# iports
Upstream: https://forge.ilot.io/ilot/iports
# user-aports
Upstream: https://lab.ilot.io/ayakael/user-aports
## Description
This repository contains aports that are not yet merged in the official Alpine
Linux repository or don't adhere to Alpine policies. Packages are automatically
built using CI. Once built, they are deployed to a Forgejo-backed Alpine
repository.
built using GitLab CI on my own GitLab instance. Once built, they are deployed
to a git-lfs repository, making them available to apk.
Branches are matched to Alpine releases.
Branches are matched to Alpine latest released.
## Repositories
You can browse all the repositories at https://forge.ilot.io/ilot/iports/packages
You can browse all the repositories at https://lab.ilot.io/ayakael/repo-apk.
Affixed to each repository description is the appropriate link for use in
`/etc/apk/repositories`.
#### Backports
```
https://forge.ilot.io/api/packages/ilot/alpine/v3.20/backports
https://lab.ilot.io/ayakael/repo-apk/-/raw/edge/backports
```
Aports from the official Alpine repositories backported from edge.
#### Ilot
#### User
```
https://forge.ilot.io/api/packages/ilot/alpine/v3.20/ilot
https://lab.ilot.io/ayakael/repo-apk/-/raw/edge/user
```
Aports that have yet to be (or may never be) upstreamed to the official
aports and that are used by ilot coop.
aports.
## How to use
Add the security key of the apk repository to your /etc/apk/keys:
Add security key of the repo-apk repository to your /etc/apk/keys:
```shell
cd /etc/apk/keys
curl -JO https://forge.ilot.io/api/packages/ilot/alpine/key
wget https://lab.ilot.io/ayakael/repo-apk/-/raw/edge/antoine.martin@protonmail.com-5b3109ad.rsa.pub
```
Add repositories that you want to use (see above) to `/etc/apk/repositories`.
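For example, a host tracking `v3.20` that wants both repositories ends up with entries like these (a sketch; use the branch matching your Alpine release):

```shell
cat >> /etc/apk/repositories <<EOF
https://forge.ilot.io/api/packages/ilot/alpine/v3.20/backports
https://forge.ilot.io/api/packages/ilot/alpine/v3.20/ilot
EOF
apk update
```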
@ -51,10 +52,10 @@ they will work for you.
## Contribution & bug reports
If you wish to contribute to this aports collection, or wish to report a bug,
you can do so on Forge here:
https://forge.ilot.io/ilot/iports/issues
you can do so on Alpine's GitLab instance here:
https://gitlab.alpinelinux.org/ayakael/user-aports
For packages that are in backports, bug reports and merge requests
For packages that are in testing/community, bug reports and merge requests
should be done on Alpine's aports repo instance:
https://gitlab.alpinelinux.org/alpine/aports

View file

@ -0,0 +1,47 @@
# Contributor: Patrycja Rosa <alpine@ptrcnull.me>
# Maintainer: Patrycja Rosa <alpine@ptrcnull.me>
pkgname=forgejo-runner
pkgver=3.5.0
pkgrel=2
pkgdesc="CI/CD job runner for Forgejo"
url="https://code.forgejo.org/forgejo/runner"
arch="all"
license="MIT"
makedepends="go"
install="$pkgname.pre-install $pkgname.pre-upgrade"
subpackages="$pkgname-openrc"
source="$pkgname-$pkgver.tar.gz::https://code.forgejo.org/forgejo/runner/archive/v$pkgver.tar.gz
forgejo-runner.logrotate
forgejo-runner.initd
forgejo-runner.confd
"
builddir="$srcdir/runner"
options="!check" # tests require running forgejo
build() {
go build \
-o forgejo-runner \
-ldflags "-X gitea.com/gitea/act_runner/internal/pkg/ver.version=$pkgver"
./forgejo-runner generate-config > config.example.yaml
}
check() {
go test ./...
}
package() {
install -Dm755 forgejo-runner -t "$pkgdir"/usr/bin/
install -Dm644 config.example.yaml -t "$pkgdir"/etc/forgejo-runner/
install -Dm755 "$srcdir"/forgejo-runner.initd "$pkgdir"/etc/init.d/forgejo-runner
install -Dm644 "$srcdir"/forgejo-runner.confd "$pkgdir"/etc/conf.d/forgejo-runner
install -Dm644 "$srcdir"/forgejo-runner.logrotate "$pkgdir"/etc/logrotate.d/forgejo-runner
}
sha512sums="
e78968a5f9b6e797fb759a5c8cbf46a5c2fef2083dabc88599c9017729faface963576c63a948b0add424cb267902e864fb1a1b619202660296976d93e670713 forgejo-runner-3.5.0.tar.gz
a3c7238b0c63053325d31e09277edd88690ef5260854517f82d9042d6173fb5d24ebfe36e1d7363673dd8801972638a6e69b6af8ad43debb6057515c73655236 forgejo-runner.logrotate
bb0c6fbe90109c77f9ef9cb0d35d20b8033be0e4b7a60839b596aa5528dfa24309ec894d8c04066bf8fb30143e63a5fd8cc6fc89aac364422b583e0f840e2da6 forgejo-runner.initd
e11eab27f88f1181112389befa7de3aa0bac7c26841861918707ede53335535425c805e6682e25704e9c8a6aecba3dc13e20900a99df1183762b012b62f26d5f forgejo-runner.confd
"

View file

@ -0,0 +1,17 @@
# Configuration for /etc/init.d/forgejo-runner
# Path to the config file (--config).
#cfgfile="/etc/forgejo-runner/config.yaml"
# Path to the working directory (--working-directory).
#datadir="/var/lib/forgejo-runner"
# Path to the log file where stdout/stderr will be redirected.
# Leave empty/commented out to use syslog instead.
#output_log="/var/log/forgejo-runner.log"
# You may change this to root, e.g. to run jobs in LXC
#command_user="forgejo-runner"
# Comment out to run without process supervisor.
supervisor=supervise-daemon

View file

@ -0,0 +1,38 @@
#!/sbin/openrc-run
description="Forgejo CI Runner"
name="Forgejo Runner"
: ${cfgfile:="/etc/forgejo-runner/config.yaml"}
: ${datadir:="/var/lib/forgejo-runner"}
: ${command_user:="forgejo-runner"}
command="/usr/bin/forgejo-runner"
command_args="daemon --config $cfgfile"
command_background="yes"
directory="$datadir"
pidfile="/run/$RC_SVCNAME.pid"
depend() {
need net
use dns logger
}
start_pre() {
checkpath -d -o "$command_user" /etc/forgejo-runner
checkpath -d -o "$command_user" "$datadir"
if ! [ -e "$cfgfile" ]; then
eerror "Config file $cfgfile doesn't exist."
eerror "You can generate it with: forgejo-runner generate-config,"
eerror "or use the auto-generated one in /etc/forgejo-runner/config.example.yaml"
return 1
fi
if [ "$error_log" ]; then
output_log="$error_log"
else
output_logger="logger -t '${RC_SVCNAME}' -p daemon.info"
error_logger="logger -t '${RC_SVCNAME}' -p daemon.error"
fi
}

View file

@ -0,0 +1,5 @@
/var/log/forgejo-runner.log {
copytruncate
missingok
notifempty
}

View file

@ -0,0 +1,14 @@
#!/bin/sh
addgroup -S forgejo-runner 2>/dev/null
adduser -S -D -H -h /var/lib/forgejo-runner -s /sbin/nologin -G forgejo-runner -g forgejo-runner forgejo-runner 2>/dev/null
cat >&2 <<EOF
* In order to setup the runner, create a config file
* in /etc/forgejo-runner/config.yaml (either from .example.yaml,
* or generating your own with 'forgejo-runner generate-config'),
* then register it with 'doas -u forgejo-runner forgejo-runner register'
* run in the /var/lib/forgejo-runner directory.
EOF
exit 0
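In practice the setup described by this message boils down to a few commands (a sketch; registration is interactive and asks for the instance URL and a registration token):

```sh
cp /etc/forgejo-runner/config.example.yaml /etc/forgejo-runner/config.yaml
cd /var/lib/forgejo-runner
doas -u forgejo-runner forgejo-runner register
rc-update add forgejo-runner
rc-service forgejo-runner start
```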

View file

@ -0,0 +1 @@
forgejo-runner.pre-install

View file

@ -1,7 +1,7 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=authentik
pkgver=2024.10.5
pkgver=2024.4.4
pkgrel=0
pkgdesc="An open-source Identity Provider focused on flexibility and versatility"
url="https://github.com/goauthentik/authentik"
@ -10,112 +10,151 @@ url="https://github.com/goauthentik/authentik"
# ppc64le: not supported by Rollup build
arch="aarch64 x86_64"
license="MIT"
# following depends aren't direct dependencies, but are needed:
# py3-asn1crypto, py3-cbor2, py3-email-validator, py3-websockets
# py3-openssl, py3-uvloop, py3-httptools
depends="
bash
libcap-setcap
nginx
postgresql
procps
pwgen
py3-aiohttp
py3-aiosignal
py3-amqp
py3-anyio
py3-asgiref
py3-asn1
py3-asn1crypto
py3-async-timeout
py3-attrs
py3-autobahn
py3-automat
py3-bcrypt
py3-billiard
py3-cachetools
py3-cbor2
py3-celery
py3-certifi
py3-cffi
py3-channels
py3-channels_redis
py3-charset-normalizer
py3-click
py3-click-didyoumean
py3-click-plugins
py3-click-repl
py3-codespell
py3-colorama
py3-constantly
py3-cparser
py3-cryptography
py3-dacite
py3-daphne
py3-dateutil
py3-deepmerge
py3-defusedxml
py3-docker-py
py3-deprecated
py3-dnspython
py3-django
py3-django-countries
py3-django-cte
py3-django-filter
py3-django-guardian
py3-django-model-utils
py3-django-otp
py3-django-prometheus
py3-django-pglock
py3-django-redis
py3-django-rest-framework~3.14.0
py3-django-rest-framework~=3.14.0
py3-django-rest-framework-guardian
py3-django-storages
py3-django-tenants
py3-docker-py
py3-dotenv
py3-dumb-init
py3-duo-client
py3-drf-orjson-renderer
py3-duo_client
py3-drf-spectacular
py3-email-validator
py3-facebook-sdk
py3-fido2
py3-flower
py3-frozenlist
py3-geoip2
py3-google-api-python-client
py3-google-auth
py3-gunicorn
py3-h11
py3-httptools
py3-humanize
py3-hyperlink
py3-idna
py3-incremental
py3-inflection
py3-jsonschema
py3-jsonpatch
py3-jwt
py3-jwcrypto
py3-kadmin
py3-kombu
py3-kubernetes
py3-ldap3
py3-lxml
py3-maxminddb
py3-msgpack
py3-msgraph-sdk
py3-multidict
py3-oauthlib
py3-opencontainers
py3-openssl
py3-packaging
py3-paramiko
py3-parsing
py3-prometheus-client
py3-prompt_toolkit
py3-psycopg
py3-psycopg-c
py3-pydantic
py3-pydantic-scim
py3-pyrad
py3-python-gssapi
py3-pynacl
py3-pyrsistent
py3-python-jwt
py3-redis
py3-requests
py3-requests-oauthlib
py3-rsa
py3-scim2-filter-parser
py3-setproctitle
py3-sentry-sdk
py3-service_identity
py3-setuptools
py3-six
py3-sniffio
py3-sqlparse
py3-structlog
py3-swagger-spec-validator
py3-tornado
py3-twilio
py3-twisted
py3-txaio
py3-tenant-schemas-celery
py3-typing-extensions
py3-tz
py3-ua-parser
py3-unidecode
py3-uritemplate
py3-urllib3-secure-extra
py3-uvloop
py3-vine
py3-watchdog
py3-watchfiles
py3-wcwidth
py3-webauthn
py3-websocket-client
py3-websockets
py3-wrapt
py3-wsproto
py3-xmlsec
py3-yaml
py3-yarl
py3-zope-interface
py3-zxcvbn
valkey
redis
uvicorn
"
makedepends="go npm py3-packaging"
checkdepends="
py3-pip
py3-coverage
py3-codespell
py3-colorama
py3-pytest
py3-pytest-django
py3-pytest-randomly
py3-pytest-timeout
py3-freezegun
py3-boto3
py3-requests-mock
py3-k5test
"
makedepends="go npm"
# checkdepends scooped up by poetry due to number
checkdepends="poetry py3-coverage"
# tests disabled for now
options="!check"
install="$pkgname.post-install $pkgname.post-upgrade $pkgname.pre-install"
source="
$pkgname-$pkgver.tar.gz::https://github.com/goauthentik/authentik/archive/refs/tags/version/$pkgver.tar.gz
@ -126,10 +165,9 @@ source="
authentik-manage.sh
fix-ak-bash.patch
root-settings-csrf_trusted_origins.patch
go-downgrade-1.22.patch
"
builddir="$srcdir/"authentik-version-$pkgver
subpackages="$pkgname-openrc $pkgname-doc $pkgname-pyc"
subpackages="$pkgname-openrc $pkgname-doc"
pkgusers="authentik"
pkggroups="authentik"
@ -159,131 +197,57 @@ build() {
npm run build
}
# test failure neutralized due to:
# relation authentik_core_user_pb_groups_id_seq does not exist
check() {
msg "Setting up test environments"
export POSTGRES_DB=authentik
export POSTGRES_USER=authentik
export POSTGRES_PASSWORD="EK-5jnKfjrGRm<77"
export AUTHENTIK_POSTGRESQL__TEST__NAME=authentik
rm -Rf "$srcdir"/tmp
initdb -D "$srcdir"/tmp
postgres -D "$srcdir"/tmp --unix-socket-directories="$srcdir" > "$srcdir"/tmp/psql.log 2>&1 &
valkey-server > "$srcdir"/tmp/valkey.log 2>&1 &
trap "pkill valkey-server; pkill postgres" EXIT
sleep 5
psql -h "$srcdir" -d postgres -c "CREATE ROLE $POSTGRES_USER PASSWORD '$POSTGRES_PASSWORD' INHERIT LOGIN;"
psql -h "$srcdir" -d postgres -c "CREATE DATABASE $POSTGRES_DB OWNER $POSTGRES_USER ENCODING 'UTF-8';"
psql -h "$srcdir" -d postgres -c "CREATE DATABASE test_$POSTGRES_DB OWNER $POSTGRES_USER ENCODING 'UTF-8';"
# .github/actions/setup/action.yml: Generate config + csrf
python3 -c "
from authentik.lib.generators import generate_id
from yaml import safe_dump
with open(\"local.env.yml\", \"w\") as _config:
safe_dump(
{
\"log_level\": \"debug\",
\"secret_key\": generate_id(),
\"csrf\": { \"trusted_origins\": ['https://*']},
},
_config,
default_flow_style=False,
)
"
python -m lifecycle.migrate
# no selenium package
pip install selenium drf_jsonschema_serializer pdoc --break-system-packages
msg "Starting tests"
make test || true
# TODO: Fix go-tests
# make go-test
pkill valkey-server
pkill postgres
}
package() {
msg "Packaging $pkgname"
local prefix="/usr/share/webapps"
local destdir="$pkgdir"$prefix/authentik
mkdir -p "$pkgdir"/usr/share/webapps/authentik/web
mkdir -p "$pkgdir"/usr/share/webapps/authentik/website
mkdir -p "$pkgdir"/var/lib/authentik
mkdir -p "$pkgdir"/usr/share/doc
mkdir -p "$pkgdir"/usr/bin
cp -dr "$builddir"/authentik "$pkgdir"/usr/share/webapps/authentik
cp -dr "$builddir"/web/dist "$pkgdir"/usr/share/webapps/authentik/web/dist
cp -dr "$builddir"/web/authentik "$pkgdir"/usr/share/webapps/authentik/web/authentik
cp -dr "$builddir"/website/build "$pkgdir"/usr/share/doc/authentik
cp -dr "$builddir"/tests "$pkgdir"/usr/share/webapps/authentik/tests
cp -dr "$builddir"/lifecycle "$pkgdir"/usr/share/webapps/authentik/lifecycle
cp -dr "$builddir"/locale "$pkgdir"/usr/share/webapps/authentik/locale
cp -dr "$builddir"/blueprints "$pkgdir"/var/lib/authentik/blueprints
install -Dm755 "$builddir"/manage.py "$pkgdir"/usr/share/webapps/authentik/manage.py
install -Dm755 "$builddir"/server "$pkgdir"/usr/share/webapps/authentik/server
ln -s "/etc/authentik/config.yml" "$pkgdir"/usr/share/webapps/authentik/local.env.yml
# authentik install
install -d -m755 \
"$destdir" \
"$destdir"/web \
"$pkgdir"/usr/bin \
"$pkgdir"/usr/share/doc \
"$pkgdir"/var/lib/authentik
install -Dm755 "$builddir"/proxy "$pkgdir"/usr/bin/authentik-proxy
install -Dm755 "$builddir"/ldap "$pkgdir"/usr/bin/authentik-ldap
install -Dm755 "$builddir"/radius "$pkgdir"/usr/bin/authentik-radius
cp -rl authentik lifecycle locale tests \
"$destdir"/
cp -rl blueprints \
"$pkgdir"/var/lib/authentik/
cp -rl web/dist web/authentik \
"$destdir"/web/
install -m755 -t "$destdir" \
"$builddir"/server \
"$builddir"/ldap \
"$builddir"/radius \
"$builddir"/proxy \
"$builddir"/manage.py
cp -rl website/build/ "$pkgdir"/usr/share/doc/authentik/
# symbolic bin links to usr/bin
for i in server proxy ldap radius; do
ln -s $prefix/authentik/$i "$pkgdir"/usr/bin/authentik-$i
done
# openrc install
for i in $pkgname $pkgname-worker $pkgname-ldap; do
install -Dm755 "$srcdir"/$i.openrc "$pkgdir"/etc/init.d/$i
done
# config file setup
install -Dm755 "$srcdir"/$pkgname.openrc \
"$pkgdir"/etc/init.d/$pkgname
install -Dm755 "$srcdir"/$pkgname-worker.openrc \
"$pkgdir"/etc/init.d/$pkgname-worker
install -Dm755 "$srcdir"/$pkgname-ldap.openrc \
"$pkgdir"/etc/init.d/$pkgname-ldap
install -Dm640 "$srcdir"/$pkgname-ldap.conf \
"$pkgdir"/etc/conf.d/$pkgname-ldap
install -Dm640 "$builddir"/authentik/lib/default.yml \
"$pkgdir"/etc/authentik/config.yml
ln -s "/etc/authentik/config.yml" "$pkgdir"/usr/share/webapps/authentik/local.env.yml
chown root:www-data "$pkgdir"/etc/authentik/config.yml
mv "$pkgdir"/usr/share/webapps/authentik/web/dist/custom.css "$pkgdir"/etc/authentik/custom.css
ln -s "/etc/authentik/custom.css" "$pkgdir"/usr/share/webapps/authentik/web/dist/custom.css
chown root:www-data "$pkgdir"/etc/authentik/custom.css
sed -i 's|cert_discovery_dir.*|cert_discovery_dir: /var/lib/authentik/certs|' "$pkgdir"/etc/authentik/config.yml
sed -i 's|blueprints_dir.*|blueprints_dir: /var/lib/authentik/blueprints|' "$pkgdir"/etc/authentik/config.yml
sed -i 's|template_dir.*|template_dir: /var/lib/authentik/templates|' "$pkgdir"/etc/authentik/config.yml
printf "\ncsrf:\n trusted_origins: ['auth.example.com']" >> "$pkgdir"/etc/authentik/config.yml
printf "\nsecret_key: '@@SECRET_KEY@@'" >> "$pkgdir"/etc/authentik/config.yml
# custom css location change
mv "$pkgdir"/usr/share/webapps/authentik/web/dist/custom.css "$pkgdir"/etc/authentik/custom.css
ln -s "/etc/authentik/custom.css" "$pkgdir"/usr/share/webapps/authentik/web/dist/custom.css
chown root:www-data "$pkgdir"/etc/authentik/custom.css
# Install wrapper script to /usr/bin.
install -m755 -D "$srcdir"/authentik-manage.sh "$pkgdir"/usr/bin/authentik-manage
}
pyc() {
default_pyc
cd "$pkgdir"
# shellcheck disable=SC3003
local IFS=$'\n'
# shellcheck disable=SC2046
amove $(find usr/share/webapps/authentik -type d -name __pycache__)
}
sha512sums="
f6e04ac1d1ac3a46e6d0f89548c0c2748f2214c551157e65f9071721dfdccac53c98b1664ecd1bc70650b4fceec47c5a5ab805da34e82ccc86d6a64087441702 authentik-2024.10.5.tar.gz
22c8ff16b93b9fcb84478b6476dd4f6413719037affc7756f20ba1dc3afff1fbaae2f1fc89d7b3a9c4372fcc856009d8a4ef5eb7854855e4528523fb456a2491 authentik-2024.4.4.tar.gz
4defb4fe3a4230f4aa517fbecd5e5b8bcef2a64e1b40615660ae9eec33597310a09df5e126f4d39ce7764bd1716c0a7040637699135c103cbc1879593c6c06f1 authentik.openrc
6cb03b9b69df39bb4539fe05c966536314d766b2e9307a92d87070ba5f5b7e7ab70f1b5ee1ab3c0c50c23454f9c5a4caec29e63fdf411bbb7a124ad687569b89 authentik-worker.openrc
351e6920d987861f8bf0d7ab2f942db716a8dbdad1f690ac662a6ef29ac0fd46cf817cf557de08f1c024703503d36bc8b46f0d9eb1ecaeb399dce4c3bb527d17 authentik-ldap.openrc
@ -291,5 +255,4 @@ f6e04ac1d1ac3a46e6d0f89548c0c2748f2214c551157e65f9071721dfdccac53c98b1664ecd1bc7
f1a3cb215b6210fa7d857a452a9f2bc4dc0520e49b9fa7027547cff093d740a7e2548f1bf1f8831f7d5ccb80c8e523ee0c8bafcc4dc42d2788725f2137d21bee authentik-manage.sh
3e47db684a3f353dcecdb7bab8836b9d5198766735d77f676a51d952141a0cf9903fcb92e6306c48d2522d7a1f3028b37247fdc1dc74d4d6e043da7eb4f36d49 fix-ak-bash.patch
5c60e54b6a7829d611af66f5cb8184a002b5ae927efbd024c054a7c176fcb9efcfbe5685279ffcf0390b0f0abb3bb03e02782c6867c2b38d1ad2d508aae83fa0 root-settings-csrf_trusted_origins.patch
badff70b19aad79cf16046bd46cb62db25c2a8b85b2673ce7c44c42eb60d42f6fcb1b9a7a7236c00f24803b25d3c66a4d64423f7ce14a59763b8415db292a5b9 go-downgrade-1.22.patch
"

View file

@ -1,38 +0,0 @@
diff --git a/go.mod.orig b/go.mod
index 65490a2..13a611e 100644
--- a/go.mod.orig
+++ b/go.mod
@@ -1,8 +1,6 @@
module goauthentik.io
-go 1.23
-
-toolchain go1.23.0
+go 1.22.2
require (
beryju.io/ldap v0.1.0
@@ -16,7 +14,7 @@ require (
github.com/gorilla/handlers v1.5.2
github.com/gorilla/mux v1.8.1
github.com/gorilla/securecookie v1.1.2
- github.com/gorilla/sessions v1.4.0
+ github.com/gorilla/sessions v1.3.0
github.com/gorilla/websocket v1.5.3
github.com/jellydator/ttlcache/v3 v3.2.1
github.com/mitchellh/mapstructure v1.5.0
diff --git a/go.sum.orig b/go.sum
index 94edf9c..856c2ee 100644
--- a/go.sum.orig
+++ b/go.sum
@@ -175,8 +175,8 @@ github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+
github.com/gorilla/securecookie v1.1.2 h1:YCIWL56dvtr73r6715mJs5ZvhtnY73hBvEF8kXD8ePA=
github.com/gorilla/securecookie v1.1.2/go.mod h1:NfCASbcHqRSY+3a8tlWJwsQap2VX5pwzwo4h3eOamfo=
github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
-github.com/gorilla/sessions v1.4.0 h1:kpIYOp/oi6MG/p5PgxApU8srsSw9tuFbt46Lt7auzqQ=
-github.com/gorilla/sessions v1.4.0/go.mod h1:FLWm50oby91+hl7p/wRxDth9bWSuk0qVL2emc7lT5ik=
+github.com/gorilla/sessions v1.3.0 h1:XYlkq7KcpOB2ZhHBPv5WpjMIxrQosiZanfoy1HLZFzg=
+github.com/gorilla/sessions v1.3.0/go.mod h1:ePLdVu+jbEgHH+KWw8I1z2wqd0BAdAQh/8LRvBeoNcQ=
github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=

View file

@ -1,40 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=certbot-dns-gandi
pkgdesc="gandi DNS authenticator plugin for certbot"
pkgver=1.5.0
pkgrel=0
arch="noarch"
url="https://github.com/obynio/certbot-plugin-gandi"
license="MIT"
depends="certbot"
makedepends="
py3-setuptools
py3-gpep517
py3-wheel
"
subpackages="$pkgname-pyc"
options="!check" # No test suite
source="
$pkgname-$pkgver.tar.gz::https://github.com/obynio/certbot-plugin-gandi/archive/refs/tags/$pkgver.tar.gz
gandi.ini
"
builddir="$srcdir"/certbot-plugin-gandi-$pkgver
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
mkdir -p "$pkgdir"/etc/letsencrypt/gandi
install -m 0600 "$srcdir"/gandi.ini "$pkgdir"/etc/letsencrypt/gandi/example.ini
}
sha512sums="
0688baec8e6de429eed12f9d85b28f47384a5bd8cd01615d94e55e38fdaf35c01707ee1ef1ec3e9196c1de06df7087798f3f5a19f07bd446f1d3fd2442b2d702 certbot-dns-gandi-1.5.0.tar.gz
7bdfd769c8a7256a8c2d171f1c8fa4c16bea7c1abcd3442603face90834efb5f9c0d9aec54f57fc83421588c0349acbc3554d4987cb7498a7e833481b01dd712 gandi.ini
"

View file

@ -1,6 +0,0 @@
# Uncomment following line as needed:
# Live DNS v5 api key
#dns_gandi_api_key=APIKEY
# Optional organization id, remove it if not used
#dns_gandi_sharing_id=SHARINGID

View file

@ -1,7 +1,7 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=codeberg-pages-server
pkgver=6.2
pkgver=5.1
pkgrel=0
pkgdesc="The Codeberg Pages Server with custom domain support, per-repo pages using the "pages" branch, caching and more."
url="https://codeberg.org/Codeberg/pages-server"
@ -14,6 +14,7 @@ options="!check"
source="
$pkgname-$pkgver.tar.gz::https://codeberg.org/Codeberg/pages-server/archive/v$pkgver.tar.gz
codeberg-pages-server.openrc
upgrade-go-sqlite3-to-1.14.19.patch
"
builddir="$srcdir/"pages-server
subpackages="$pkgname-openrc"
@ -37,6 +38,7 @@ package() {
}
sha512sums="
d48e10262e94eb2e36696646e3431da066d2f820e037ab713f4446dd72c2e3895c9bf153fcbf702e05b21ec5750aa15ed9b71e2fb383f9357aeeef61073a721a codeberg-pages-server-6.2.tar.gz
55a1dd5ed0f1cb2aaad1066eca8bfbd1d537169ed3712c748163ebff64edc45d05ac1f6f062433e232e2638a790232438282f96dd7410eb4cbaff7208f5f2427 codeberg-pages-server-5.1.tar.gz
4defb4fe3a4230f4aa517fbecd5e5b8bcef2a64e1b40615660ae9eec33597310a09df5e126f4d39ce7764bd1716c0a7040637699135c103cbc1879593c6c06f1 codeberg-pages-server.openrc
895f1c8d22fcf1d5491a6fe0ce5d93201f83b6dd5fc81b24016b609988fb6c66fdde75bb3830f385a5c83d96366ca3a5f4f9524f52058b6c5dfd8b80d14bac5b upgrade-go-sqlite3-to-1.14.19.patch
"

View file

@ -0,0 +1,26 @@
diff --git a/go.mod.orig b/go.mod
index eba292e..00310e5 100644
--- a/go.mod.orig
+++ b/go.mod
@@ -11,7 +11,7 @@ require (
github.com/go-sql-driver/mysql v1.6.0
github.com/joho/godotenv v1.4.0
github.com/lib/pq v1.10.7
- github.com/mattn/go-sqlite3 v1.14.16
+ github.com/mattn/go-sqlite3 v1.14.19
github.com/microcosm-cc/bluemonday v1.0.26
github.com/reugn/equalizer v0.0.0-20210216135016-a959c509d7ad
github.com/rs/zerolog v1.27.0
diff --git a/go.sum.orig b/go.sum
index 7ea8b78..19145ea 100644
--- a/go.sum.orig
+++ b/go.sum
@@ -479,6 +479,8 @@ github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m
github.com/mattn/go-sqlite3 v1.14.9/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/mattn/go-sqlite3 v1.14.16 h1:yOQRA0RpS5PFz/oikGwBEqvAWhWg5ufRz4ETLjwpU1Y=
github.com/mattn/go-sqlite3 v1.14.16/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
+github.com/mattn/go-sqlite3 v1.14.19 h1:fhGleo2h1p8tVChob4I9HpmVFIAkKGpiukdrgQbWfGI=
+github.com/mattn/go-sqlite3 v1.14.19/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
github.com/mattn/go-tty v0.0.0-20180219170247-931426f7535a/go.mod h1:XPvLUNfbS4fJH25nqRHfWLMa1ONC8Amw+mIA639KxkE=
github.com/mattn/go-tty v0.0.3/go.mod h1:ihxohKRERHTVzN+aSVRwACLCeqIoZAWpoICkkvrWyR0=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=

View file

@ -4,14 +4,14 @@
# Contributor: Patrycja Rosa <alpine@ptrcnull.me>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=forgejo-aneksajo
pkgver=9.0.3_git0
_gittag=v${pkgver/_git/-git-annex}
pkgver=8.0.1
_gittag=v$pkgver-git-annex0
pkgrel=0
pkgdesc="Self-hosted Git service written in Go with git-annex support"
url="https://forgejo.org"
# riscv64: builds fail https://codeberg.org/forgejo/forgejo/issues/3025
arch="all !riscv64"
license="GPL-3.0-or-later"
license="MIT"
depends="git git-lfs gnupg"
makedepends="go nodejs npm"
checkdepends="bash openssh openssh-keygen sqlite tzdata"
@ -55,7 +55,7 @@ build() {
# XXX: LARGEFILE64
export CGO_CFLAGS="$CFLAGS -O2 -D_LARGEFILE64_SOURCE"
export TAGS="bindata sqlite sqlite_unlock_notify"
export GITEA_VERSION="${pkgver/_git/-git-annex}"
export GITEA_VERSION="$pkgver"
export EXTRA_GOFLAGS="$GOFLAGS"
export CGO_LDFLAGS="$LDFLAGS"
unset LDFLAGS
@ -106,7 +106,7 @@ package() {
}
sha512sums="
2c2493c0011e83994c12c11859c2153d855a2265d234a671d2ce855e4f45b8e1b7d7f257e9c7ffa6284b844e0068a6184ef39b88800a1d79f399ce11c7cb23b7 forgejo-aneksajo-v9.0.3-git-annex0.tar.gz
d8e273d369c934eec7ff84795cd0d896cda53bc1a2d17f610dd8476ff92dc50c4a24c4598366ef8aac3be52ddef6630489043183085334376c30bc5d4d5f15c2 forgejo-aneksajo-v8.0.1-git-annex0.tar.gz
eb93a9f6c8f204de5c813f58727015f53f9feaab546589e016c60743131559f04fc1518f487b6d2a0e7fa8fab6d4a67cd0cd9713a7ccd9dec767a8c1ddebe129 forgejo-aneksajo.initd
b537b41b6b3a945274a6028800f39787b48c318425a37cf5d40ace0d1b305444fd07f17b4acafcd31a629bedd7d008b0bb3e30f82ffeb3d7e7e947bdbe0ff4f3 forgejo-aneksajo.ini
"

View file

@ -1,15 +1,15 @@
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=freescout
pkgver=1.8.160
pkgrel=0
pkgver=1.8.139
pkgrel=1
pkgdesc="Free self-hosted help desk & shared mailbox"
arch="noarch"
url="freescout.net"
license="AGPL-3.0"
_php=php83
_php_mods="-fpm -mbstring -xml -imap -zip -gd -curl -intl -tokenizer -pdo_pgsql -openssl -session -iconv -fileinfo -dom -pcntl"
depends="$_php ${_php_mods//-/$_php-} nginx postgresql pwgen bash"
depends="$_php ${_php_mods//-/$_php-} nginx postgresql pwgen"
makedepends="composer pcre"
install="$pkgname.post-install $pkgname.post-upgrade $pkgname.pre-install"
source="
@ -17,7 +17,6 @@ source="
freescout.nginx
freescout-manage.sh
rename-client-to-membre-fr-en.patch
fix-laravel-log-viewer.patch
"
pkgusers="freescout"
pkggroups="freescout"
@ -76,9 +75,8 @@ package() {
install -m755 -D "$srcdir"/freescout-manage.sh "$pkgdir"/usr/bin/freescout-manage
}
sha512sums="
8441385a36d9ee5b542936f34e7700e86e1595d9a16b07afeac42bf48409ba0ecd1c542bc82b48afb0bb9201c7219bd146fe9455491ba40116dc66953b994488 freescout-1.8.160.tar.gz
11d81fa670bd67a7db9f5bff3a067a1d1cf3c812a34c805a3fc83edc978ded3accc8334581eca1e73cf0ad95f8e289278add57de096528728e2989135b3057a3 freescout-1.8.139.tar.gz
e4af6c85dc12f694bef2a02e4664e31ed50b2c109914d7ffad5001c2bbd764ef25b17ecaa59ff55ef41bccf17169bf910d1a08888364bdedd0ecc54d310e661f freescout.nginx
7ce9b3ee3a979db44f5e6d7daa69431e04a5281f364ae7be23e5a0a0547f96abc858d2a8010346be2fb99bd2355fb529e7030ed20d54f310249e61ed5db4d0ba freescout-manage.sh
0cba00b7d945ce84f72a2812d40028a073a5278856f610e46dbfe0ac78deff6bf5eba7643635fa4bc64d070c4d49eb47d24ea0a05ba1e6ea76690bfd77906366 rename-client-to-membre-fr-en.patch
2c651db6adac6d53597ba36965d0c65e005293f9b030e6be167853e4089384920524737aa947c5066877ee8caefb46741ccba797f653e7c2678556063540d261 fix-laravel-log-viewer.patch
3416da98d71aea5a7093913ea34e783e21ff05dca90bdc5ff3d00c548db5889f6d0ec98441cd65ab9f590be5cd59fdd0d7f1c98b5deef7bb3adbc8db435ec9bf rename-client-to-membre-fr-en.patch
"

View file

@ -1,13 +0,0 @@
diff --git a/vendor/composer/installed.json.orig b/vendor/composer/installed.json
index 0b826f5..9d14ec8 100644
--- a/vendor/composer/installed.json.orig
+++ b/vendor/composer/installed.json
@@ -4494,7 +4494,7 @@
"installation-source": "dist",
"autoload": {
"classmap": [
- "src/controllers"
+ "src/"
],
"psr-0": {
"Rap2hpoutre\\LaravelLogViewer\\": "src/"

View file

@ -38,7 +38,7 @@ index 00000000..82d26052
+}
\ No newline at end of file
diff --git a/resources/lang/fr.json.orig b/resources/lang/fr.json
index 6264973..8a7037e 100644
index ff8d9d4..98d158f 100644
--- a/resources/lang/fr.json.orig
+++ b/resources/lang/fr.json
@@ -26,8 +26,8 @@
@ -201,8 +201,8 @@ index 6264973..8a7037e 100644
- "This number is not visible to customers. It is only used to track conversations within :app_name": "Ce numéro n'est pas visible pour les clients. Il est uniquement utilisé pour suivre les conversations dans :app_name",
+ "This number is not visible to customers. It is only used to track conversations within :app_name": "Ce numéro n'est pas visible pour les membres. Il est uniquement utilisé pour suivre les conversations dans :app_name",
"This password is incorrect.": "Ce mot de passe est incorrect.",
- "This reply will go to the customer. :%switch_start%Switch to a note:%switch_end% if you are replying to :user_name.": "Cette réponse ira au client. :%switch_start%Passez à une note:%switch_end% si vous répondez à :user_name.",
+ "This reply will go to the customer. :%switch_start%Switch to a note:%switch_end% if you are replying to :user_name.": "Cette réponse ira au membre. :%switch_start%Passez à une note:%switch_end% si vous répondez à :user_name.",
- "This reply will go to the customer. :%switch_start%Switch to a note:switch_end if you are replying to :user_name.": "Cette réponse ira au client. :%switch_start%Passez à une note:switch_end si vous répondez à :user_name.",
+ "This reply will go to the customer. :%switch_start%Switch to a note:switch_end if you are replying to :user_name.": "Cette réponse ira au membre. :%switch_start%Passez à une note:switch_end si vous répondez à :user_name.",
"This setting gives you control over what page loads after you perform an action (send a reply, add a note, change conversation status or assignee).": "Ce paramètre vous permet de contrôler la page qui se charge après avoir effectué une action (envoyer une réponse, ajouter une note, etc.).",
- "This text will be added to the beginning of each email reply sent to a customer.": "Ce texte sera ajouté au début de chaque réponse par e-mail envoyée à un client.",
+ "This text will be added to the beginning of each email reply sent to a customer.": "Ce texte sera ajouté au début de chaque réponse par e-mail envoyée à un membre.",

View file

@ -1,8 +1,8 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=listmonk
pkgver=4.1.0
pkgrel=0
pkgver=3.0.0
pkgrel=1
pkgdesc='Self-hosted newsletter and mailing list manager with a modern dashboard'
arch="all"
url=https://listmonk.app
@ -10,7 +10,6 @@ license="AGPL3"
depends="
libcap-setcap
postgresql
postgresql-contrib
procps
"
makedepends="go npm nodejs yarn"
@ -53,7 +52,6 @@ package() {
install -Dm644 -t "$pkgdir"/usr/share/webapps/listmonk/ \
schema.sql \
queries.sql \
permissions.json \
config.toml.sample
install -Dm755 listmonk "$pkgdir"/usr/share/webapps/listmonk/
install -Dm644 -t "$pkgdir"/usr/share/webapps/listmonk/frontend/dist/ \
@ -67,7 +65,7 @@ package() {
ln -s /etc/listmonk/config.toml "$pkgdir"/usr/share/webapps/listmonk/config.toml
}
sha512sums="
936b33d6de1d69ee4e7f768810116ac997c516754aace0371089bc8106bebee944197864afc11b7bc5725afa9a4f195d6629957bfcdd37c847e3780aa34558ec listmonk-4.1.0.tar.gz
afd0ea1d4d2b2753c3043526590cf09c45a541a2d818f5d1581644ffd10818326fd553a3b04bca59494860a7bb6e96364b08afd33d337a9fc5c71bedd1a5ee6c listmonk-3.0.0.tar.gz
939450af4b23708e3d23a5a88fad4c24b957090bdd21351a6dd520959e52e45e5fcac117a3eafa280d9506616dae39ad3943589571f008cac5abe1ffd8062424 listmonk.sh
8e9c0b1f335c295fb741418246eb17c7566e5e4200a284c6483433e8ddbf5250aa692435211cf062ad1dfcdce3fae9148def28f03f2492d33fe5e66cbeebd4bd listmonk.openrc
"

View file

@ -10,12 +10,6 @@ if [ "${0##*.}" = 'post-upgrade' ]; then
*
* listmonk --upgrade
*
* If upgrading from v3.0.0, please first set the following env variables:
*
* export LISTMONK_ADMIN_USER=your-admin-user
* export LISTMONK_ADMIN_PASSWORD=your-admin-password
* listmonk --upgrade
*
EOF
else
cat >&2 <<-EOF

View file

@ -7,7 +7,7 @@ _gittag=v$pkgver
pkgrel=1
pkgdesc="A collaborative decision making tool"
url="https://github.com/loomio/loomio"
# failing build
# build failure
#arch="x86_64"
license="MIT"
depends="

View file

@ -4,8 +4,8 @@ pkgname=peertube
pkgver=6.0.2
pkgrel=1
pkgdesc="ActivityPub-federated video streaming platform using P2P directly in your web browser"
# failing build
# arch="x86_64"
# build failure
#arch="x86_64"
url="https://joinpeertube.org/"
license="AGPL"
depends="

View file

@ -1,39 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-azure-core
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=azure-core
pkgver=1.32.0
pkgrel=0
pkgdesc="Microsoft Azure Core Library for Python"
url="https://pypi.python.org/project/microsoft-kiota-authentication-azure"
arch="noarch"
license="MIT"
depends="py3-aiohttp py3-requests"
checkdepends="py3-pytest-asyncio py3-trio"
makedepends="py3-setuptools py3-gpep517 py3-wheel py3-flit"
options="!check" #todo
source="$pkgname-$pkgver.tar.gz::https://github.com/Azure/azure-sdk-for-python/archive/refs/tags/azure-core_$pkgver.tar.gz"
builddir="$srcdir"/azure-sdk-for-python-azure-core_$pkgver/sdk/core/azure-core
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
d258a2ca3bc2c9514dec91bf2dbb19c0ee4c0c0bec73a4301b47fb43be768be836f32621b70a8cdb0e39f1491a522191a82a00f318ee7c901e8861a62439e934 py3-azure-core-1.32.0.tar.gz
"

View file

@ -1,44 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-azure-identity
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=azure-identity
pkgver=1.19.0
pkgrel=0
pkgdesc="Microsoft Azure Identity Library for Python"
url="https://pypi.org/project/azure-identity/"
arch="noarch"
license="MIT"
depends="
py3-azure-core
py3-cryptography
py3-msal-extensions
py3-typing-extensions
"
checkdepends="py3-pytest"
makedepends="py3-setuptools py3-gpep517 py3-wheel py3-flit"
options="!check" #todo
source="$pkgname-$pkgver.tar.gz::https://github.com/Azure/azure-sdk-for-python/archive/refs/tags/azure-identity_$pkgver.tar.gz"
builddir="$srcdir"/azure-sdk-for-python-azure-identity_$pkgver/sdk/identity/azure-identity
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
090aed812a7a72c649ded2574dc0a05dd7d9db41675e3d86921ab0555f8af7c83999cb879a2f2e0984880874b3b6dfead6b8de0563d8a99d81775715640a9e01 py3-azure-identity-1.19.0.tar.gz
"

View file

@ -1,40 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-django-countries
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=django-countries
pkgver=7.6.1
pkgrel=0
pkgdesc="Provides a country field for Django models."
url="https://pypi.python.org/project/django-countries"
arch="noarch"
license="MIT"
depends="py3-django py3-asgiref py3-typing-extensions"
# missing py3-graphene
checkdepends="py3-pytest-django py3-pytest-cov py3-django-rest-framework"
makedepends="py3-setuptools py3-gpep517 py3-wheel"
source="$pkgname-$pkgver.tar.gz::https://github.com/SmileyChris/django-countries/archive/refs/tags/v$pkgver.tar.gz"
options="!check" # TODO
builddir="$srcdir/$_pkgreal-$pkgver"
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
53c7db02244aad196c141d1d04db5087c802d69d12de25e86fe0b2abdfb4ce9ed6ec84b6344c423dc6e7d2e57c2bb14a5324739c7cead54ec7d261e7e3fe6112 py3-django-countries-7.6.1.tar.gz
"

View file

@ -4,7 +4,7 @@
pkgname=py3-django-rest-framework
_pkgname=django-rest-framework
pkgver=3.14.0
pkgrel=1
pkgrel=2
pkgdesc="Web APIs for Django"
url="https://github.com/encode/django-rest-framework"
arch="noarch"

View file

@ -1,48 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-django-tenant-schemas
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=django-tenant-schemas
pkgver=1.12.0
pkgrel=0
pkgdesc="Tenant support for Django using PostgreSQL schemas."
url="https://pypi.python.org/project/django-tenant-schemas"
arch="noarch"
license="MIT"
depends="
py3-django
py3-ordered-set
py3-six
py3-psycopg2
"
checkdepends="py3-pytest"
makedepends="py3-setuptools py3-setuptools_scm py3-gpep517 py3-wheel"
source="
$pkgname-$pkgver.tar.gz::https://github.com/bernardopires/django-tenant-schemas/archive/refs/tags/v$pkgver.tar.gz
"
options="!check" # requires pg
builddir="$srcdir/$_pkgreal-$pkgver"
subpackages="$pkgname-pyc"
build() {
export SETUPTOOLS_SCM_PRETEND_VERSION=$pkgver
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
cd tenant_schemas
DJANGO_SETTINGS_MODULE=tests.settings ../.testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
758f68dc834d4c0074097b166d742a7d63c86b6426ad67d3ce2f56983d417666bf05ae9c46b3ee89a04dee2d888892463651355d26eda7c265ebee8971992319 py3-django-tenant-schemas-1.12.0.tar.gz
"

File diff suppressed because it is too large

View file

@ -0,0 +1,43 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-django-tenants
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=django-tenants
pkgver=3.6.1
pkgrel=1
pkgdesc="Tenant support for Django using PostgreSQL schemas."
url="https://pypi.python.org/project/django-tenants"
arch="noarch"
license="KIT"
depends="py3-django py3-psycopg py3-gunicorn py3-coverage"
checkdepends="python3-dev py3-pytest"
makedepends="py3-setuptools py3-gpep517 py3-wheel"
source="
$pkgname-$pkgver.tar.gz::https://codeload.github.com/django-tenants/django-tenants/tar.gz/refs/tags/v$pkgver
997_update-from-pgclone-schema.patch
"
builddir="$srcdir/$_pkgreal-$pkgver"
options="!check" # Requires setting up test database
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
DJANGO_SETTINGS_MODULE=tests.settings .testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
b18afce81ccc89e49fcc4ebe85d90be602415ca898c1660a4e71e2bef6a3ed2e8c724e94b61d8c6f48f3fb19eb2a87d6a6f5bbf449b3e2f661f87e4b5638eafb py3-django-tenants-3.6.1.tar.gz
f2424bb188db2e3c7d13c15e5bdf0959c6f794e68dbc677c8b876d4faa321f78aded5565539f1bfd97583c6df0fcc19ec05abe203b08407e4446dd7194756825 997_update-from-pgclone-schema.patch
"

View file

@ -1,40 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-kadmin
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=kadmin
pkgver=0.2.0
pkgrel=0
pkgdesc="Python module for kerberos admin (kadm5)"
url="https://github.com/authentik-community/python-kadmin"
arch="all"
license="MIT"
checkdepends="py3-pytest py3-k5test"
makedepends="py3-setuptools py3-gpep517 py3-wheel poetry python3-dev"
source="
$pkgname-$pkgver.tar.gz::https://github.com/authentik-community/python-kadmin/archive/refs/tags/v$pkgver.tar.gz
fix-int-conversion-error.patch"
builddir="$srcdir"/python-kadmin-$pkgver
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 test/tests.py
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
b405e914cb296f2bfe4f78d2791329804a0db02816182517b59ed1452a21d51dafe303609fddafbbeea57128bba4bcdfcd9b363f193ae0402cc52cf1b3b9020e py3-kadmin-0.2.0.tar.gz
e17223f8597d51ea099f5d4483dd72545b7d64ad76895553a6b7112416536aae93a59a2fd7aea044420495ab8146db7290abd826b268b2d6e518442c3c85c506 fix-int-conversion-error.patch
"

View file

@ -1,13 +0,0 @@
diff --git a/src/PyKAdminPolicyObject.c.orig b/src/PyKAdminPolicyObject.c
index 0bf3ee8..68387c4 100644
--- a/src/PyKAdminPolicyObject.c.orig
+++ b/src/PyKAdminPolicyObject.c
@@ -120,7 +120,7 @@ PyTypeObject PyKAdminPolicyObject_Type = {
sizeof(PyKAdminPolicyObject), /*tp_basicsize*/
0, /*tp_itemsize*/
(destructor)PyKAdminPolicyObject_dealloc, /*tp_dealloc*/
- KAdminPolicyObject_print, /*tp_print*/
+ 0, /*tp_print*/
0, /*tp_getattr*/
0, /*tp_setattr*/
0, /*tp_compare*/

View file

@ -1,44 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-microsoft-kiota-abstractions
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=microsoft-kiota-abstractions
pkgver=1.6.8
pkgrel=0
pkgdesc="Abstractions library for Kiota generated Python clients"
url="https://pypi.python.org/project/microsoft-kiota-abstractions"
arch="noarch"
license="MIT"
depends="
py3-std-uritemplate<2.0.0
py3-opentelemetry-sdk
py3-importlib-metadata
"
checkdepends="py3-pytest py3-pytest-asyncio"
makedepends="poetry py3-gpep517 py3-wheel py3-flit"
source="
$pkgname-$pkgver.tar.gz::https://github.com/microsoft/kiota-python/archive/refs/tags/microsoft-kiota-abstractions-v$pkgver.tar.gz
"
builddir="$srcdir/kiota-python-microsoft-kiota-abstractions-v$pkgver/packages/abstractions"
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
55341b1ff3fb1a516ceb84817db991d6e6aa83b01326f64cf21690dee1ab84e9c9c4f7162f9f71ec1261b4e0380b73b13284128bd786b80da29faf968720b355 py3-microsoft-kiota-abstractions-1.6.8.tar.gz
"

View file

@ -1,45 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-microsoft-kiota-authentication-azure
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=microsoft-kiota-authentication-azure
pkgver=1.6.8
pkgrel=0
pkgdesc="Authentication provider for Kiota using Azure Identity"
url="https://pypi.python.org/project/microsoft-kiota-authentication-azure"
arch="noarch"
license="MIT"
depends="
py3-azure-core
py3-microsoft-kiota-abstractions
py3-importlib-metadata
"
checkdepends="py3-pytest"
makedepends="poetry py3-gpep517 py3-wheel py3-flit"
source="
$pkgname-$pkgver.tar.gz::https://github.com/microsoft/kiota-python/archive/refs/tags/microsoft-kiota-authentication-azure-v$pkgver.tar.gz
"
options="!check" # TODO
builddir="$srcdir/kiota-python-microsoft-kiota-authentication-azure-v$pkgver/packages/authentication/azure"
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
d661d379f036b45bf356e349e28d3478f4a10b351dfde2d1b11a429c0f2160cde9696990cc18d72a224cfd3cc4c90bdc2e6f07d9e4763bd126cd9f66a09b9bec py3-microsoft-kiota-authentication-azure-1.6.8.tar.gz
"

View file

@ -1,44 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-microsoft-kiota-http
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=microsoft-kiota-http
pkgver=1.6.8
pkgrel=0
pkgdesc="Kiota http request adapter implementation for httpx library"
url="https://pypi.python.org/project/microsoft-kiota-http"
arch="noarch"
license="MIT"
depends="
py3-microsoft-kiota-abstractions
py3-httpx
"
checkdepends="py3-pytest"
makedepends="poetry py3-gpep517 py3-wheel py3-flit"
source="
$pkgname-$pkgver.tar.gz::https://github.com/microsoft/kiota-python/archive/refs/tags/microsoft-kiota-http-v$pkgver.tar.gz
"
options="!check" # TODO
builddir="$srcdir/kiota-python-microsoft-kiota-http-v$pkgver/packages/http/httpx"
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
c453c89d31cc062f2d8be4a28bda0666dbde6b5a8e42855892cda72e5d104e6bb5516db01d9feb7f619b8fa77237c9e3badd24b29326f627f95b69210835321d py3-microsoft-kiota-http-1.6.8.tar.gz
"

View file

@@ -1,43 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-microsoft-kiota-serialization-form
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=microsoft-kiota-serialization-form
pkgver=1.6.8
pkgrel=0
pkgdesc="Kiota Form encoded serialization implementation for Python"
url="https://pypi.python.org/project/microsoft-kiota-serialization-form"
arch="noarch"
license="MIT"
depends="
py3-microsoft-kiota-abstractions
py3-pendulum
"
checkdepends="py3-pytest"
makedepends="poetry py3-gpep517 py3-wheel py3-flit"
source="
$pkgname-$pkgver.tar.gz::https://github.com/microsoft/kiota-python/archive/refs/tags/microsoft-kiota-serialization-form-v$pkgver.tar.gz
"
builddir="$srcdir/kiota-python-microsoft-kiota-serialization-form-v$pkgver/packages/serialization/form"
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
0e4fabe18980612ca3f55fd7350148d2393da3f35dc79cd4fe56b01f50bc2af147bde5e294580d83b97b4a549d77e6581ece8ddb19ea09ee92fd6cbfead0d3db py3-microsoft-kiota-serialization-form-1.6.8.tar.gz
"

View file

@@ -1,44 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-microsoft-kiota-serialization-json
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=microsoft-kiota-serialization-json
pkgver=1.6.8
pkgrel=0
pkgdesc="JSON serialization implementation for Kiota clients in Python"
url="https://pypi.python.org/project/microsoft-kiota-serialization-json"
arch="noarch"
license="MIT"
depends="
py3-microsoft-kiota-abstractions
py3-pendulum
"
checkdepends="py3-pytest"
makedepends="poetry py3-gpep517 py3-wheel py3-flit"
source="
$pkgname-$pkgver.tar.gz::https://github.com/microsoft/kiota-python/archive/refs/tags/microsoft-kiota-serialization-json-v$pkgver.tar.gz
"
options="!check" # TODO
builddir="$srcdir/kiota-python-microsoft-kiota-serialization-json-v$pkgver/packages/serialization/json"
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
42b8e1d2bfb175e52876314a598647de7b70acb8140cefbfb20d0f8de241bbb03a1cfe6c7108a56047f2a8e3f8f781a23fe54d5612d68a5966340279ff0eb8bc py3-microsoft-kiota-serialization-json-1.6.8.tar.gz
"

View file

@@ -1,40 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-microsoft-kiota-serialization-multipart
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=microsoft-kiota-serialization-multipart
pkgver=1.6.8
pkgrel=0
pkgdesc="Multipart serialization implementation for python based kiota clients"
url="https://pypi.python.org/project/microsoft-kiota-serialization-multipart"
arch="noarch"
license="MIT"
depends="py3-microsoft-kiota-abstractions py3-microsoft-kiota-serialization-json"
checkdepends="py3-pytest"
makedepends="poetry py3-gpep517 py3-wheel py3-flit"
source="
$pkgname-$pkgver.tar.gz::https://github.com/microsoft/kiota-python/archive/refs/tags/microsoft-kiota-serialization-multipart-v$pkgver.tar.gz
"
builddir="$srcdir/kiota-python-microsoft-kiota-serialization-multipart-v$pkgver/packages/serialization/multipart"
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
d6d6d36fe55f4aa595d380e43f93f3de7674633edba676aec16fc26254a12e4f700427fedf1bedfddde30a7f708c93ccbbe586bb0e6950748a2debe609bf44c1 py3-microsoft-kiota-serialization-multipart-1.6.8.tar.gz
"

View file

@@ -1,43 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-microsoft-kiota-serialization-text
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=microsoft-kiota-serialization-text
pkgver=1.6.8
pkgrel=0
pkgdesc="Text serialization implementation for Kiota generated clients in Python"
url="https://pypi.python.org/project/microsoft-kiota-abstractions"
arch="noarch"
license="MIT"
depends="
py3-microsoft-kiota-abstractions
py3-dateutil
"
checkdepends="py3-pytest"
makedepends="poetry py3-gpep517 py3-wheel py3-flit"
source="
$pkgname-$pkgver.tar.gz::https://github.com/microsoft/kiota-python/archive/refs/tags/microsoft-kiota-serialization-text-v$pkgver.tar.gz
"
builddir="$srcdir/kiota-python-microsoft-kiota-serialization-text-v$pkgver/packages/serialization/text"
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
55dbc87253819f496e2f25de2bf24b170761f335117da414bb35c6db9008e9ca8c6fd13d5e429914c322a850a57858d9abdee7dc209ad55e469182995290d568 py3-microsoft-kiota-serialization-text-1.6.8.tar.gz
"

View file

@@ -1,42 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-msal-extensions
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=msal-extensions
pkgver=1.2.0
pkgrel=0
pkgdesc="Microsoft Authentication Library extensions (MSAL EX) provides a persistence API "
url="https://pypi.org/project/msal-extensions"
arch="noarch"
license="MIT"
depends="
py3-msal
py3-portalocker
"
checkdepends="py3-pytest"
makedepends="py3-setuptools py3-gpep517 py3-wheel"
options="!check" #todo
source="$pkgname-$pkgver.tar.gz::https://github.com/AzureAD/microsoft-authentication-extensions-for-python/archive/refs/tags/$pkgver.tar.gz"
builddir="$srcdir"/microsoft-authentication-extensions-for-python-$pkgver
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
847a87e2f7a7b71d47fb758bd3445666b2a9f1f2034c575f8a78ba687e1c5faa682b89ea78906d4afa1350bca608cd9452c7ad244c7ec456145c15c49ad46fb2 py3-msal-extensions-1.2.0.tar.gz
"

View file

@@ -1,43 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-msal
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=msal
pkgver=1.31.1
pkgrel=0
pkgdesc="Microsoft Authentication Library (MSAL) for Python"
url="https://pypi.org/project/msal"
arch="noarch"
license="MIT"
depends="
py3-requests
py3-cryptography
py3-jwt
"
checkdepends="py3-pytest"
makedepends="py3-setuptools py3-gpep517 py3-wheel"
options="!check" #todo
source="$pkgname-$pkgver.tar.gz::https://github.com/AzureAD/microsoft-authentication-library-for-python/archive/refs/tags/$pkgver.tar.gz"
builddir="$srcdir"/microsoft-authentication-library-for-python-$pkgver
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
f75541337f09ba29d4de13206346ad7793b3f2bdbdbf8fcb050ee7976b397ca666d61aee21121a4efdd7c150c9d2f87f75812e7b8aa96a5f8ac5219e7a946af2 py3-msal-1.31.1.tar.gz
"

View file

@@ -1,43 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-msgraph-core
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=msgraph-core
pkgver=1.1.8
pkgrel=0
pkgdesc="The Microsoft Graph Python SDK"
url="https://pypi.python.org/project/msgraph-core"
arch="noarch"
license="MIT"
depends="
py3-azure-identity
py3-microsoft-kiota-authentication-azure
py3-microsoft-kiota-http
"
checkdepends="py3-pytest"
makedepends="py3-setuptools py3-gpep517 py3-wheel py3-flit"
source="$pkgname-$pkgver.tar.gz::https://github.com/microsoftgraph/msgraph-sdk-python-core/archive/refs/tags/v$pkgver.tar.gz"
options="!check" # TODO
builddir="$srcdir/msgraph-sdk-python-core-$pkgver"
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
0cae6f76cb1373d1ef76448e47b9951e5076a144140c19edc14186f7bfd92930e50c9f6c459170e3362ef267903cdf261d1897566983a7302beab205f9d61389 py3-msgraph-core-1.1.8.tar.gz
"

View file

@@ -1,44 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-msgraph-sdk
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=msgraph-sdk
pkgver=1.16.0
pkgrel=0
pkgdesc="The Microsoft Graph Python SDK"
url="https://pypi.python.org/project/msgraph-sdk"
arch="noarch"
license="MIT"
depends="
py3-microsoft-kiota-serialization-text
py3-microsoft-kiota-serialization-form
py3-microsoft-kiota-serialization-multipart
py3-msgraph-core
"
checkdepends="py3-pytest"
makedepends="py3-setuptools py3-gpep517 py3-wheel py3-flit"
source="$pkgname-$pkgver.tar.gz::https://github.com/microsoftgraph/msgraph-sdk-python/archive/refs/tags/v$pkgver.tar.gz"
options="!check" # TODO
builddir="$srcdir/$_pkgreal-python-$pkgver"
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
af930e5e470f6ac78724650885f70cf447482a53f90043d326b3e00dc7572fd0d476658ebb1677118010e38b54f1e4e609dcfb5fcef5664f05b25062786d11af py3-msgraph-sdk-1.16.0.tar.gz
"

View file

@@ -1,75 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-opentelemetry-sdk
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=opentelemetry-sdk
pkgver=1.29.0
pkgrel=0
pkgdesc="OpenTelemetry Python SDK"
url="https://github.com/open-telemetry/opentelemetry-python/tree/main"
arch="noarch"
license="Apache-2.0"
depends="py3-opentelemetry-semantic-conventions py3-typing-extensions"
checkdepends="py3-pytest"
makedepends="py3-setuptools py3-gpep517 py3-wheel py3-hatchling"
source="$pkgname-$pkgver.tar.gz::https://github.com/open-telemetry/opentelemetry-python/archive/refs/tags/v$pkgver.tar.gz"
builddir="$srcdir/opentelemetry-python-$pkgver"
options="!check" # TODO
# need to figure out -pyc
subpackages="
$pkgname-pyc
py3-opentelemetry-api
py3-opentelemetry-semantic-conventions
py3-opentelemetry-proto
"
build() {
for i in api semantic-conventions sdk proto; do
cd "$builddir"/opentelemetry-$i
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
done
}
check() {
for i in api semantic-conventions sdk proto; do
python3 -m venv --clear --without-pip --system-site-packages "$builddir"/.testenv
"$builddir"/.testenv/bin/python3 -m installer .dist/*.whl
"$builddir"/.testenv/bin/python3 -m pytest -v
done
}
package() {
cd "$builddir"/opentelemetry-sdk
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
api() {
depends="py3-deprecated"
pkgdesc="OpenTelemetry Python API"
cd "$builddir"/opentelemetry-api
python3 -m installer -d "$subpkgdir" \
.dist/*.whl
}
conventions() {
pkgdesc="OpenTelemetry Semantic Conventions"
depends="py3-opentelemetry-api py3-deprecated"
cd "$builddir"/opentelemetry-semantic-conventions
python3 -m installer -d "$subpkgdir" \
.dist/*.whl
}
proto() {
pkgdesc="OpenTelemetry Python Proto"
depends="py3-protobuf"
cd "$builddir"/opentelemetry-proto
python3 -m installer -d "$subpkgdir" \
.dist/*.whl
}
sha512sums="
92c90e6a684d8cfab3bba4d72612ccf53ae54cdd9784e3434b25adc3730fe114f21fd7aa21da80edf6e0e7c80b39c64ee31fb16f68b04809289bbf5d49d4ca2e py3-opentelemetry-sdk-1.29.0.tar.gz
"

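Note on the opentelemetry-sdk aport above: it builds four wheels (api, semantic-conventions, sdk, proto) from a single source tree and ships them as split subpackages. As a rough sketch of how such an aport is typically exercised locally, assuming an Alpine build environment with abuild and a configured signing key (this is the standard abuild workflow, not something specific to this repository):

    # regenerate sha512sums after changing pkgver or source
    abuild checksum
    # fetch sources, run build()/check()/package(), and emit the -pyc and
    # split subpackages declared in subpackages=
    abuild -r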
View file

@@ -0,0 +1,38 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-scim2-filter-parser
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=scim2-filter-parser
pkgver=0.5.0
pkgrel=1
pkgdesc="A customizable parser/transpiler for SCIM2.0 filters"
url="https://pypi.python.org/project/scim2-filter-parser"
arch="noarch"
license="MIT"
depends="py3-django py3-sly"
checkdepends="py3-pytest"
makedepends="py3-setuptools py3-gpep517 py3-wheel poetry"
source="$pkgname-$pkgver.tar.gz::https://github.com/15five/scim2-filter-parser/archive/refs/tags/$pkgver.tar.gz"
builddir="$srcdir/$_pkgreal-$pkgver"
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
5347852af6b82a764a32bc491a7e0f05f06b4f4d93dfa375668b5ca1a15ee58f488702536e350100fe5c96a5c94c492ea8cbd0e1952c5920d5a10e1453357f8c py3-scim2-filter-parser-0.5.0.tar.gz
"

View file

@@ -1,41 +0,0 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-std-uritemplate
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=std-uritemplate
pkgver=2.0.1
pkgrel=0
pkgdesc="A complete and maintained cross-language implementation of the Uri Template specification RFC 6570 Level 4"
url="https://pypi.python.org/project/std-uritemplate"
arch="noarch"
license="Apache-2.0"
depends="python3"
checkdepends="py3-pytest"
makedepends="py3-setuptools py3-gpep517 py3-wheel poetry"
source="$pkgname-$pkgver.tar.gz::https://github.com/std-uritemplate/std-uritemplate/archive/refs/tags/$pkgver.tar.gz"
options="!check" # TODO
builddir="$srcdir"/$_pkgreal-$pkgver/python
subpackages="$pkgname-pyc"
prepare() {
default_prepare
ln -s ../Readme.md Readme.md
}
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
poetry run python test.py
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
e073a1204d65bb639cc93480b0f68e1edfe5ac3cff607b72c8da8916b7660eea2b2b246b5db02979cd5c856087958c84dc3bc5e9d76a9540f2ac2a7da8cd18df py3-std-uritemplate-2.0.1.tar.gz
"

View file

@@ -0,0 +1,41 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=py3-tenant-schemas-celery
#_pkgreal is used by apkbuild-pypi to find modules at PyPI
_pkgreal=tenant-schemas-celery
pkgver=2.2.0
pkgrel=1
pkgdesc="Celery integration for django-tenant-schemas and django-tenants"
url="https://pypi.python.org/project/tenant-schemas-celery"
arch="noarch"
license="MIT"
depends="py3-django-tenants py3-celery"
checkdepends="python3-dev py3-pytest"
makedepends="py3-setuptools py3-gpep517 py3-wheel"
source="
$pkgname-$pkgver.tar.gz::https://codeload.github.com/maciej-gol/tenant-schemas-celery/tar.gz/refs/tags/$pkgver
"
options="!check" # Test suite wants docker
builddir="$srcdir/$_pkgreal-$pkgver"
subpackages="$pkgname-pyc"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
DJANGO_SETTINGS_MODULE=tests.settings .testenv/bin/python3 -m pytest -v
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/*.whl
}
sha512sums="
dad71011306936dc84d966797b113008780750e9e973513092bec892be0d1468e0a0e7e8e2fcca9765309a27767e1c72bdaad7c8aca16353ae1eef783c239148 py3-tenant-schemas-celery-2.2.0.tar.gz
"

View file

@@ -1,8 +1,8 @@
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=uptime-kuma
-pkgver=1.23.16
-pkgrel=0
+pkgver=1.23.13
+pkgrel=1
pkgdesc='A fancy self-hosted monitoring tool'
arch="all"
url="https://github.com/louislam/uptime-kuma"
@@ -43,7 +43,7 @@ package() {
mv "$pkgdir"/usr/share/webapps/uptime-kuma/LICENSE "$pkgdir"/usr/share/licenses/uptime-kuma/.
}
sha512sums="
-a132d1cd796fbd868782627edfd45d2a6bd3d2fadece23e0bbf000e6a30482659062a43c4590c98e390cac9b8c1926efd8ff01c5b358b7ccea4438259b86f24e uptime-kuma-1.23.16.tar.gz
+9045cdc69d46ce34011f7866844a8d1866eee21850be6eede3226e77b9c0d3ecc0190481671f04f25da40345b29cc2d13de07bcc27e7baeff7901b4bd9c8b93f uptime-kuma-1.23.13.tar.gz
0ceddb98a6f318029b8bd8b5a49b55c883e77a5f8fffe2b9b271c9abf0ac52dc7a6ea4dbb4a881124a7857f1e43040f18755c1c2a034479e6a94d2b65a73d847 uptime-kuma.openrc
1dbae536b23e3624e139155abbff383bba3209ff2219983da2616b4376b1a5041df812d1e5164716fc6e967a8446d94baae3b96ee575d400813cc6fdc2cc274e uptime-kuma.conf
"

View file

@@ -1,618 +0,0 @@
diff --git a/docs/deployment.md b/docs/deployment.md
index d69fcf8..99dfbf3 100644
--- a/docs/deployment.md
+++ b/docs/deployment.md
@@ -60,7 +60,7 @@ Options:
--loop [auto|asyncio|uvloop] Event loop implementation. [default: auto]
--http [auto|h11|httptools] HTTP protocol implementation. [default:
auto]
- --ws [auto|none|websockets|wsproto]
+ --ws [auto|none|websockets|websockets-sansio|wsproto]
WebSocket protocol implementation.
[default: auto]
--ws-max-size INTEGER WebSocket max size message in bytes
diff --git a/docs/index.md b/docs/index.md
index bb6fc32..50e2ab9 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -130,7 +130,7 @@ Options:
--loop [auto|asyncio|uvloop] Event loop implementation. [default: auto]
--http [auto|h11|httptools] HTTP protocol implementation. [default:
auto]
- --ws [auto|none|websockets|wsproto]
+ --ws [auto|none|websockets|websockets-sansio|wsproto]
WebSocket protocol implementation.
[default: auto]
--ws-max-size INTEGER WebSocket max size message in bytes
diff --git a/pyproject.toml b/pyproject.toml
index 0a89966..8771bfb 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -92,6 +92,10 @@ filterwarnings = [
"ignore:Uvicorn's native WSGI implementation is deprecated.*:DeprecationWarning",
"ignore: 'cgi' is deprecated and slated for removal in Python 3.13:DeprecationWarning",
"ignore: remove second argument of ws_handler:DeprecationWarning:websockets",
+ "ignore: websockets.legacy is deprecated.*:DeprecationWarning",
+ "ignore: websockets.server.WebSocketServerProtocol is deprecated.*:DeprecationWarning",
+ "ignore: websockets.client.connect is deprecated.*:DeprecationWarning",
+ "ignore: websockets.exceptions.InvalidStatusCode is deprecated",
]
[tool.coverage.run]
diff --git a/tests/conftest.py b/tests/conftest.py
index 1b0c0e8..7061a14 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -233,9 +233,9 @@ def unused_tcp_port() -> int:
marks=pytest.mark.skipif(not importlib.util.find_spec("wsproto"), reason="wsproto not installed."),
id="wsproto",
),
+ pytest.param("uvicorn.protocols.websockets.websockets_impl:WebSocketProtocol", id="websockets"),
pytest.param(
- "uvicorn.protocols.websockets.websockets_impl:WebSocketProtocol",
- id="websockets",
+ "uvicorn.protocols.websockets.websockets_sansio_impl:WebSocketsSansIOProtocol", id="websockets-sansio"
),
]
)
diff --git a/tests/middleware/test_logging.py b/tests/middleware/test_logging.py
index f27633a..63d7daf 100644
--- a/tests/middleware/test_logging.py
+++ b/tests/middleware/test_logging.py
@@ -49,7 +49,9 @@ async def app(scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable
await send({"type": "http.response.body", "body": b"", "more_body": False})
-async def test_trace_logging(caplog: pytest.LogCaptureFixture, logging_config, unused_tcp_port: int):
+async def test_trace_logging(
+ caplog: pytest.LogCaptureFixture, logging_config: dict[str, typing.Any], unused_tcp_port: int
+):
config = Config(
app=app,
log_level="trace",
@@ -91,8 +93,8 @@ async def test_trace_logging_on_http_protocol(http_protocol_cls, caplog, logging
async def test_trace_logging_on_ws_protocol(
ws_protocol_cls: WSProtocol,
- caplog,
- logging_config,
+ caplog: pytest.LogCaptureFixture,
+ logging_config: dict[str, typing.Any],
unused_tcp_port: int,
):
async def websocket_app(scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable):
@@ -104,7 +106,7 @@ async def test_trace_logging_on_ws_protocol(
elif message["type"] == "websocket.disconnect":
break
- async def open_connection(url):
+ async def open_connection(url: str):
async with websockets.client.connect(url) as websocket:
return websocket.open
diff --git a/tests/middleware/test_proxy_headers.py b/tests/middleware/test_proxy_headers.py
index 0ade974..d300c45 100644
--- a/tests/middleware/test_proxy_headers.py
+++ b/tests/middleware/test_proxy_headers.py
@@ -465,6 +465,7 @@ async def test_proxy_headers_websocket_x_forwarded_proto(
host, port = scope["client"]
await send({"type": "websocket.accept"})
await send({"type": "websocket.send", "text": f"{scheme}://{host}:{port}"})
+ await send({"type": "websocket.close"})
app_with_middleware = ProxyHeadersMiddleware(websocket_app, trusted_hosts="*")
config = Config(
diff --git a/tests/protocols/test_websocket.py b/tests/protocols/test_websocket.py
index 15ccfdd..e728544 100644
--- a/tests/protocols/test_websocket.py
+++ b/tests/protocols/test_websocket.py
@@ -7,6 +7,8 @@ from copy import deepcopy
import httpx
import pytest
import websockets
+import websockets.asyncio
+import websockets.asyncio.client
import websockets.client
import websockets.exceptions
from typing_extensions import TypedDict
@@ -601,12 +603,9 @@ async def test_connection_lost_before_handshake_complete(
await send_accept_task.wait()
disconnect_message = await receive() # type: ignore
- response: httpx.Response | None = None
-
async def websocket_session(uri: str):
- nonlocal response
async with httpx.AsyncClient() as client:
- response = await client.get(
+ await client.get(
f"http://127.0.0.1:{unused_tcp_port}",
headers={
"upgrade": "websocket",
@@ -623,9 +622,6 @@ async def test_connection_lost_before_handshake_complete(
send_accept_task.set()
await asyncio.sleep(0.1)
- assert response is not None
- assert response.status_code == 500, response.text
- assert response.text == "Internal Server Error"
assert disconnect_message == {"type": "websocket.disconnect", "code": 1006}
await task
@@ -920,6 +916,9 @@ async def test_server_reject_connection_with_body_nolength(
async def test_server_reject_connection_with_invalid_msg(
ws_protocol_cls: WSProtocol, http_protocol_cls: HTTPProtocol, unused_tcp_port: int
):
+ if ws_protocol_cls.__name__ == "WebSocketsSansIOProtocol":
+ pytest.skip("WebSocketsSansIOProtocol sends both start and body messages in one message.")
+
async def app(scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable):
assert scope["type"] == "websocket"
assert "extensions" in scope and "websocket.http.response" in scope["extensions"]
@@ -951,6 +950,9 @@ async def test_server_reject_connection_with_invalid_msg(
async def test_server_reject_connection_with_missing_body(
ws_protocol_cls: WSProtocol, http_protocol_cls: HTTPProtocol, unused_tcp_port: int
):
+ if ws_protocol_cls.__name__ == "WebSocketsSansIOProtocol":
+ pytest.skip("WebSocketsSansIOProtocol sends both start and body messages in one message.")
+
async def app(scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable):
assert scope["type"] == "websocket"
assert "extensions" in scope and "websocket.http.response" in scope["extensions"]
@@ -986,6 +988,8 @@ async def test_server_multiple_websocket_http_response_start_events(
The server should raise an exception if it sends multiple
websocket.http.response.start events.
"""
+ if ws_protocol_cls.__name__ == "WebSocketsSansIOProtocol":
+ pytest.skip("WebSocketsSansIOProtocol sends both start and body messages in one message.")
exception_message: str | None = None
async def app(scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable):
diff --git a/uvicorn/config.py b/uvicorn/config.py
index 664d191..cbfeea6 100644
--- a/uvicorn/config.py
+++ b/uvicorn/config.py
@@ -25,7 +25,7 @@ from uvicorn.middleware.proxy_headers import ProxyHeadersMiddleware
from uvicorn.middleware.wsgi import WSGIMiddleware
HTTPProtocolType = Literal["auto", "h11", "httptools"]
-WSProtocolType = Literal["auto", "none", "websockets", "wsproto"]
+WSProtocolType = Literal["auto", "none", "websockets", "websockets-sansio", "wsproto"]
LifespanType = Literal["auto", "on", "off"]
LoopSetupType = Literal["none", "auto", "asyncio", "uvloop"]
InterfaceType = Literal["auto", "asgi3", "asgi2", "wsgi"]
@@ -47,6 +47,7 @@ WS_PROTOCOLS: dict[WSProtocolType, str | None] = {
"auto": "uvicorn.protocols.websockets.auto:AutoWebSocketsProtocol",
"none": None,
"websockets": "uvicorn.protocols.websockets.websockets_impl:WebSocketProtocol",
+ "websockets-sansio": "uvicorn.protocols.websockets.websockets_sansio_impl:WebSocketsSansIOProtocol",
"wsproto": "uvicorn.protocols.websockets.wsproto_impl:WSProtocol",
}
LIFESPAN: dict[LifespanType, str] = {
diff --git a/uvicorn/protocols/websockets/websockets_sansio_impl.py b/uvicorn/protocols/websockets/websockets_sansio_impl.py
new file mode 100644
index 0000000..994af07
--- /dev/null
+++ b/uvicorn/protocols/websockets/websockets_sansio_impl.py
@@ -0,0 +1,405 @@
+from __future__ import annotations
+
+import asyncio
+import logging
+from asyncio.transports import BaseTransport, Transport
+from http import HTTPStatus
+from typing import Any, Literal, cast
+from urllib.parse import unquote
+
+from websockets import InvalidState
+from websockets.extensions.permessage_deflate import ServerPerMessageDeflateFactory
+from websockets.frames import Frame, Opcode
+from websockets.http11 import Request
+from websockets.server import ServerProtocol
+
+from uvicorn._types import (
+ ASGIReceiveEvent,
+ ASGISendEvent,
+ WebSocketAcceptEvent,
+ WebSocketCloseEvent,
+ WebSocketDisconnectEvent,
+ WebSocketReceiveEvent,
+ WebSocketResponseBodyEvent,
+ WebSocketResponseStartEvent,
+ WebSocketScope,
+ WebSocketSendEvent,
+)
+from uvicorn.config import Config
+from uvicorn.logging import TRACE_LOG_LEVEL
+from uvicorn.protocols.utils import (
+ ClientDisconnected,
+ get_local_addr,
+ get_path_with_query_string,
+ get_remote_addr,
+ is_ssl,
+)
+from uvicorn.server import ServerState
+
+
+class WebSocketsSansIOProtocol(asyncio.Protocol):
+ def __init__(
+ self,
+ config: Config,
+ server_state: ServerState,
+ app_state: dict[str, Any],
+ _loop: asyncio.AbstractEventLoop | None = None,
+ ) -> None:
+ if not config.loaded:
+ config.load() # pragma: no cover
+
+ self.config = config
+ self.app = config.loaded_app
+ self.loop = _loop or asyncio.get_event_loop()
+ self.logger = logging.getLogger("uvicorn.error")
+ self.root_path = config.root_path
+ self.app_state = app_state
+
+ # Shared server state
+ self.connections = server_state.connections
+ self.tasks = server_state.tasks
+ self.default_headers = server_state.default_headers
+
+ # Connection state
+ self.transport: asyncio.Transport = None # type: ignore[assignment]
+ self.server: tuple[str, int] | None = None
+ self.client: tuple[str, int] | None = None
+ self.scheme: Literal["wss", "ws"] = None # type: ignore[assignment]
+
+ # WebSocket state
+ self.queue: asyncio.Queue[ASGIReceiveEvent] = asyncio.Queue()
+ self.handshake_initiated = False
+ self.handshake_complete = False
+ self.close_sent = False
+ self.initial_response: tuple[int, list[tuple[str, str]], bytes] | None = None
+
+ extensions = []
+ if self.config.ws_per_message_deflate:
+ extensions = [ServerPerMessageDeflateFactory()]
+ self.conn = ServerProtocol(
+ extensions=extensions,
+ max_size=self.config.ws_max_size,
+ logger=logging.getLogger("uvicorn.error"),
+ )
+
+ self.read_paused = False
+ self.writable = asyncio.Event()
+ self.writable.set()
+
+ # Buffers
+ self.bytes = b""
+
+ def connection_made(self, transport: BaseTransport) -> None:
+ """Called when a connection is made."""
+ transport = cast(Transport, transport)
+ self.connections.add(self)
+ self.transport = transport
+ self.server = get_local_addr(transport)
+ self.client = get_remote_addr(transport)
+ self.scheme = "wss" if is_ssl(transport) else "ws"
+
+ if self.logger.level <= TRACE_LOG_LEVEL:
+ prefix = "%s:%d - " % self.client if self.client else ""
+ self.logger.log(TRACE_LOG_LEVEL, "%sWebSocket connection made", prefix)
+
+ def connection_lost(self, exc: Exception | None) -> None:
+ code = 1005 if self.handshake_complete else 1006
+ self.queue.put_nowait({"type": "websocket.disconnect", "code": code})
+ self.connections.remove(self)
+
+ if self.logger.level <= TRACE_LOG_LEVEL:
+ prefix = "%s:%d - " % self.client if self.client else ""
+ self.logger.log(TRACE_LOG_LEVEL, "%sWebSocket connection lost", prefix)
+
+ self.handshake_complete = True
+ if exc is None:
+ self.transport.close()
+
+ def eof_received(self) -> None:
+ pass
+
+ def shutdown(self) -> None:
+ if self.handshake_complete:
+ self.queue.put_nowait({"type": "websocket.disconnect", "code": 1012})
+ self.conn.send_close(1012)
+ output = self.conn.data_to_send()
+ self.transport.write(b"".join(output))
+ else:
+ self.send_500_response()
+ self.transport.close()
+
+ def data_received(self, data: bytes) -> None:
+ self.conn.receive_data(data)
+ parser_exc = self.conn.parser_exc
+ if parser_exc is not None:
+ self.handle_parser_exception()
+ return
+ self.handle_events()
+
+ def handle_events(self) -> None:
+ for event in self.conn.events_received():
+ if isinstance(event, Request):
+ self.handle_connect(event)
+ if isinstance(event, Frame):
+ if event.opcode == Opcode.CONT:
+ self.handle_cont(event)
+ elif event.opcode == Opcode.TEXT:
+ self.handle_text(event)
+ elif event.opcode == Opcode.BINARY:
+ self.handle_bytes(event)
+ elif event.opcode == Opcode.PING:
+ self.handle_ping(event)
+ elif event.opcode == Opcode.CLOSE:
+ self.handle_close(event)
+
+ # Event handlers
+
+ def handle_connect(self, event: Request) -> None:
+ self.request = event
+ self.response = self.conn.accept(event)
+ self.handshake_initiated = True
+ if self.response.status_code != 101:
+ self.handshake_complete = True
+ self.close_sent = True
+ self.conn.send_response(self.response)
+ output = self.conn.data_to_send()
+ self.transport.write(b"".join(output))
+ self.transport.close()
+ return
+
+ headers = [
+ (key.encode("ascii"), value.encode("ascii", errors="surrogateescape"))
+ for key, value in event.headers.raw_items()
+ ]
+ raw_path, _, query_string = event.path.partition("?")
+ self.scope: WebSocketScope = {
+ "type": "websocket",
+ "asgi": {"version": self.config.asgi_version, "spec_version": "2.3"},
+ "http_version": "1.1",
+ "scheme": self.scheme,
+ "server": self.server,
+ "client": self.client,
+ "root_path": self.root_path,
+ "path": unquote(raw_path),
+ "raw_path": raw_path.encode("ascii"),
+ "query_string": query_string.encode("ascii"),
+ "headers": headers,
+ "subprotocols": event.headers.get_all("Sec-WebSocket-Protocol"),
+ "state": self.app_state.copy(),
+ "extensions": {"websocket.http.response": {}},
+ }
+ self.queue.put_nowait({"type": "websocket.connect"})
+ task = self.loop.create_task(self.run_asgi())
+ task.add_done_callback(self.on_task_complete)
+ self.tasks.add(task)
+
+ def handle_cont(self, event: Frame) -> None:
+ self.bytes += event.data
+ if event.fin:
+ self.send_receive_event_to_app()
+
+ def handle_text(self, event: Frame) -> None:
+ self.bytes = event.data
+ self.curr_msg_data_type: Literal["text", "bytes"] = "text"
+ if event.fin:
+ self.send_receive_event_to_app()
+
+ def handle_bytes(self, event: Frame) -> None:
+ self.bytes = event.data
+ self.curr_msg_data_type = "bytes"
+ if event.fin:
+ self.send_receive_event_to_app()
+
+ def send_receive_event_to_app(self) -> None:
+ data_type = self.curr_msg_data_type
+ msg: WebSocketReceiveEvent
+ if data_type == "text":
+ msg = {"type": "websocket.receive", data_type: self.bytes.decode()}
+ else:
+ msg = {"type": "websocket.receive", data_type: self.bytes}
+ self.queue.put_nowait(msg)
+ if not self.read_paused:
+ self.read_paused = True
+ self.transport.pause_reading()
+
+ def handle_ping(self, event: Frame) -> None:
+ output = self.conn.data_to_send()
+ self.transport.write(b"".join(output))
+
+ def handle_close(self, event: Frame) -> None:
+ if not self.close_sent and not self.transport.is_closing():
+ disconnect_event: WebSocketDisconnectEvent = {
+ "type": "websocket.disconnect",
+ "code": self.conn.close_rcvd.code, # type: ignore[union-attr]
+ "reason": self.conn.close_rcvd.reason, # type: ignore[union-attr]
+ }
+ self.queue.put_nowait(disconnect_event)
+ output = self.conn.data_to_send()
+ self.transport.write(b"".join(output))
+ self.transport.close()
+
+ def handle_parser_exception(self) -> None:
+ disconnect_event: WebSocketDisconnectEvent = {
+ "type": "websocket.disconnect",
+ "code": self.conn.close_sent.code, # type: ignore[union-attr]
+ "reason": self.conn.close_sent.reason, # type: ignore[union-attr]
+ }
+ self.queue.put_nowait(disconnect_event)
+ output = self.conn.data_to_send()
+ self.transport.write(b"".join(output))
+ self.close_sent = True
+ self.transport.close()
+
+ def on_task_complete(self, task: asyncio.Task[None]) -> None:
+ self.tasks.discard(task)
+
+ async def run_asgi(self) -> None:
+ try:
+ result = await self.app(self.scope, self.receive, self.send)
+ except ClientDisconnected:
+ self.transport.close()
+ except BaseException:
+ self.logger.exception("Exception in ASGI application\n")
+ self.send_500_response()
+ self.transport.close()
+ else:
+ if not self.handshake_complete:
+ msg = "ASGI callable returned without completing handshake."
+ self.logger.error(msg)
+ self.send_500_response()
+ self.transport.close()
+ elif result is not None:
+ msg = "ASGI callable should return None, but returned '%s'."
+ self.logger.error(msg, result)
+ self.transport.close()
+
+ def send_500_response(self) -> None:
+ if self.initial_response or self.handshake_complete:
+ return
+ response = self.conn.reject(500, "Internal Server Error")
+ self.conn.send_response(response)
+ output = self.conn.data_to_send()
+ self.transport.write(b"".join(output))
+
+ async def send(self, message: ASGISendEvent) -> None:
+ await self.writable.wait()
+
+ message_type = message["type"]
+
+ if not self.handshake_complete and self.initial_response is None:
+ if message_type == "websocket.accept":
+ message = cast(WebSocketAcceptEvent, message)
+ self.logger.info(
+ '%s - "WebSocket %s" [accepted]',
+ self.scope["client"],
+ get_path_with_query_string(self.scope),
+ )
+ headers = [
+ (name.decode("latin-1").lower(), value.decode("latin-1").lower())
+ for name, value in (self.default_headers + list(message.get("headers", [])))
+ ]
+ accepted_subprotocol = message.get("subprotocol")
+ if accepted_subprotocol:
+ headers.append(("Sec-WebSocket-Protocol", accepted_subprotocol))
+ self.response.headers.update(headers)
+
+ if not self.transport.is_closing():
+ self.handshake_complete = True
+ self.conn.send_response(self.response)
+ output = self.conn.data_to_send()
+ self.transport.write(b"".join(output))
+
+ elif message_type == "websocket.close":
+ message = cast(WebSocketCloseEvent, message)
+ self.queue.put_nowait({"type": "websocket.disconnect", "code": 1006})
+ self.logger.info(
+ '%s - "WebSocket %s" 403',
+ self.scope["client"],
+ get_path_with_query_string(self.scope),
+ )
+ response = self.conn.reject(HTTPStatus.FORBIDDEN, "")
+ self.conn.send_response(response)
+ output = self.conn.data_to_send()
+ self.close_sent = True
+ self.handshake_complete = True
+ self.transport.write(b"".join(output))
+ self.transport.close()
+ elif message_type == "websocket.http.response.start" and self.initial_response is None:
+ message = cast(WebSocketResponseStartEvent, message)
+ if not (100 <= message["status"] < 600):
+ raise RuntimeError("Invalid HTTP status code '%d' in response." % message["status"])
+ self.logger.info(
+ '%s - "WebSocket %s" %d',
+ self.scope["client"],
+ get_path_with_query_string(self.scope),
+ message["status"],
+ )
+ headers = [
+ (name.decode("latin-1"), value.decode("latin-1"))
+ for name, value in list(message.get("headers", []))
+ ]
+ self.initial_response = (message["status"], headers, b"")
+ else:
+ msg = (
+ "Expected ASGI message 'websocket.accept', 'websocket.close' "
+ "or 'websocket.http.response.start' "
+ "but got '%s'."
+ )
+ raise RuntimeError(msg % message_type)
+
+ elif not self.close_sent and self.initial_response is None:
+ try:
+ if message_type == "websocket.send":
+ message = cast(WebSocketSendEvent, message)
+ bytes_data = message.get("bytes")
+ text_data = message.get("text")
+ if text_data:
+ self.conn.send_text(text_data.encode())
+ elif bytes_data:
+ self.conn.send_binary(bytes_data)
+ output = self.conn.data_to_send()
+ self.transport.write(b"".join(output))
+
+ elif message_type == "websocket.close" and not self.transport.is_closing():
+ message = cast(WebSocketCloseEvent, message)
+ code = message.get("code", 1000)
+ reason = message.get("reason", "") or ""
+ self.queue.put_nowait({"type": "websocket.disconnect", "code": code})
+ self.conn.send_close(code, reason)
+ output = self.conn.data_to_send()
+ self.transport.write(b"".join(output))
+ self.close_sent = True
+ self.transport.close()
+ else:
+ msg = "Expected ASGI message 'websocket.send' or 'websocket.close'," " but got '%s'."
+ raise RuntimeError(msg % message_type)
+ except InvalidState:
+ raise ClientDisconnected()
+ elif self.initial_response is not None:
+ if message_type == "websocket.http.response.body":
+ message = cast(WebSocketResponseBodyEvent, message)
+ body = self.initial_response[2] + message["body"]
+ self.initial_response = self.initial_response[:2] + (body,)
+ if not message.get("more_body", False):
+ response = self.conn.reject(self.initial_response[0], body.decode())
+ response.headers.update(self.initial_response[1])
+ self.queue.put_nowait({"type": "websocket.disconnect", "code": 1006})
+ self.conn.send_response(response)
+ output = self.conn.data_to_send()
+ self.close_sent = True
+ self.transport.write(b"".join(output))
+ self.transport.close()
+ else:
+ msg = "Expected ASGI message 'websocket.http.response.body' " "but got '%s'."
+ raise RuntimeError(msg % message_type)
+
+ else:
+ msg = "Unexpected ASGI message '%s', after sending 'websocket.close'."
+ raise RuntimeError(msg % message_type)
+
+ async def receive(self) -> ASGIReceiveEvent:
+ message = await self.queue.get()
+ if self.read_paused and self.queue.empty():
+ self.read_paused = False
+ self.transport.resume_reading()
+ return message
diff --git a/uvicorn/server.py b/uvicorn/server.py
index cca2e85..50c5ed2 100644
--- a/uvicorn/server.py
+++ b/uvicorn/server.py
@@ -23,9 +23,10 @@ if TYPE_CHECKING:
from uvicorn.protocols.http.h11_impl import H11Protocol
from uvicorn.protocols.http.httptools_impl import HttpToolsProtocol
from uvicorn.protocols.websockets.websockets_impl import WebSocketProtocol
+ from uvicorn.protocols.websockets.websockets_sansio_impl import WebSocketsSansIOProtocol
from uvicorn.protocols.websockets.wsproto_impl import WSProtocol
- Protocols = Union[H11Protocol, HttpToolsProtocol, WSProtocol, WebSocketProtocol]
+ Protocols = Union[H11Protocol, HttpToolsProtocol, WSProtocol, WebSocketProtocol, WebSocketsSansIOProtocol]
HANDLED_SIGNALS = (
signal.SIGINT, # Unix signal 2. Sent by Ctrl+C.

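The patch above registers the new WebSocketsSansIOProtocol in uvicorn's WS_PROTOCOLS table under the name "websockets-sansio" and extends the --ws CLI choices to match. A minimal way to opt into it, assuming a uvicorn build carrying this patch and an ASGI application exposed as app in app.py (both hypothetical names, for illustration only):

    # select the Sans-I/O websockets implementation explicitly
    uvicorn app:app --ws websockets-sansio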
View file

@@ -1,567 +0,0 @@
diff --git a/requirements.txt b/requirements.txt
index e26e6b3..b16569f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -7,7 +7,7 @@ h11 @ git+https://github.com/python-hyper/h11.git@master
# Explicit optionals
a2wsgi==1.10.7
wsproto==1.2.0
-websockets==13.1
+websockets==14.1
# Packaging
build==1.2.2.post1
diff --git a/tests/middleware/test_logging.py b/tests/middleware/test_logging.py
index 63d7daf..5aef174 100644
--- a/tests/middleware/test_logging.py
+++ b/tests/middleware/test_logging.py
@@ -8,8 +8,7 @@ import typing
import httpx
import pytest
-import websockets
-import websockets.client
+from websockets.asyncio.client import connect
from tests.utils import run_server
from uvicorn import Config
@@ -107,8 +106,8 @@ async def test_trace_logging_on_ws_protocol(
break
async def open_connection(url: str):
- async with websockets.client.connect(url) as websocket:
- return websocket.open
+ async with connect(url):
+ return True
config = Config(
app=websocket_app,
diff --git a/tests/middleware/test_proxy_headers.py b/tests/middleware/test_proxy_headers.py
index d300c45..4b5f195 100644
--- a/tests/middleware/test_proxy_headers.py
+++ b/tests/middleware/test_proxy_headers.py
@@ -5,7 +5,7 @@ from typing import TYPE_CHECKING
import httpx
import httpx._transports.asgi
import pytest
-import websockets.client
+from websockets.asyncio.client import connect
from tests.response import Response
from tests.utils import run_server
@@ -479,7 +479,7 @@ async def test_proxy_headers_websocket_x_forwarded_proto(
async with run_server(config):
url = f"ws://127.0.0.1:{unused_tcp_port}"
headers = {X_FORWARDED_FOR: "1.2.3.4", X_FORWARDED_PROTO: forwarded_proto}
- async with websockets.client.connect(url, extra_headers=headers) as websocket:
+ async with connect(url, additional_headers=headers) as websocket:
data = await websocket.recv()
assert data == expected
diff --git a/tests/protocols/test_websocket.py b/tests/protocols/test_websocket.py
index e728544..b9035ec 100644
--- a/tests/protocols/test_websocket.py
+++ b/tests/protocols/test_websocket.py
@@ -12,6 +12,8 @@ import websockets.asyncio.client
import websockets.client
import websockets.exceptions
from typing_extensions import TypedDict
+from websockets.asyncio.client import ClientConnection, connect
+from websockets.exceptions import ConnectionClosed, ConnectionClosedError, InvalidHandshake, InvalidStatus
from websockets.extensions.permessage_deflate import ClientPerMessageDeflateFactory
from websockets.typing import Subprotocol
@@ -130,8 +132,8 @@ async def test_accept_connection(ws_protocol_cls: WSProtocol, http_protocol_cls:
await self.send({"type": "websocket.accept"})
async def open_connection(url: str):
- async with websockets.client.connect(url) as websocket:
- return websocket.open
+ async with connect(url):
+ return True
config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
@@ -146,7 +148,7 @@ async def test_shutdown(ws_protocol_cls: WSProtocol, http_protocol_cls: HTTPProt
config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config) as server:
- async with websockets.client.connect(f"ws://127.0.0.1:{unused_tcp_port}"):
+ async with connect(f"ws://127.0.0.1:{unused_tcp_port}"):
# Attempt shutdown while connection is still open
await server.shutdown()
@@ -160,8 +162,8 @@ async def test_supports_permessage_deflate_extension(
async def open_connection(url: str):
extension_factories = [ClientPerMessageDeflateFactory()]
- async with websockets.client.connect(url, extensions=extension_factories) as websocket:
- return [extension.name for extension in websocket.extensions]
+ async with connect(url, extensions=extension_factories) as websocket:
+ return [extension.name for extension in websocket.protocol.extensions]
config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
@@ -180,8 +182,8 @@ async def test_can_disable_permessage_deflate_extension(
# enable per-message deflate on the client, so that we can check the server
# won't support it when it's disabled.
extension_factories = [ClientPerMessageDeflateFactory()]
- async with websockets.client.connect(url, extensions=extension_factories) as websocket:
- return [extension.name for extension in websocket.extensions]
+ async with connect(url, extensions=extension_factories) as websocket:
+ return [extension.name for extension in websocket.protocol.extensions]
config = Config(
app=App,
@@ -203,8 +205,8 @@ async def test_close_connection(ws_protocol_cls: WSProtocol, http_protocol_cls:
async def open_connection(url: str):
try:
- await websockets.client.connect(url)
- except websockets.exceptions.InvalidHandshake:
+ await connect(url)
+ except InvalidHandshake:
return False
return True # pragma: no cover
@@ -224,8 +226,8 @@ async def test_headers(ws_protocol_cls: WSProtocol, http_protocol_cls: HTTPProto
await self.send({"type": "websocket.accept"})
async def open_connection(url: str):
- async with websockets.client.connect(url, extra_headers=[("username", "abraão")]) as websocket:
- return websocket.open
+ async with connect(url, additional_headers=[("username", "abraão")]):
+ return True
config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
@@ -239,8 +241,9 @@ async def test_extra_headers(ws_protocol_cls: WSProtocol, http_protocol_cls: HTT
await self.send({"type": "websocket.accept", "headers": [(b"extra", b"header")]})
async def open_connection(url: str):
- async with websockets.client.connect(url) as websocket:
- return websocket.response_headers
+ async with connect(url) as websocket:
+ assert websocket.response
+ return websocket.response.headers
config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
@@ -258,8 +261,8 @@ async def test_path_and_raw_path(ws_protocol_cls: WSProtocol, http_protocol_cls:
await self.send({"type": "websocket.accept"})
async def open_connection(url: str):
- async with websockets.client.connect(url) as websocket:
- return websocket.open
+ async with connect(url):
+ return True
config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
@@ -276,7 +279,7 @@ async def test_send_text_data_to_client(
await self.send({"type": "websocket.send", "text": "123"})
async def get_data(url: str):
- async with websockets.client.connect(url) as websocket:
+ async with connect(url) as websocket:
return await websocket.recv()
config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
@@ -294,7 +297,7 @@ async def test_send_binary_data_to_client(
await self.send({"type": "websocket.send", "bytes": b"123"})
async def get_data(url: str):
- async with websockets.client.connect(url) as websocket:
+ async with connect(url) as websocket:
return await websocket.recv()
config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
@@ -313,7 +316,7 @@ async def test_send_and_close_connection(
await self.send({"type": "websocket.close"})
async def get_data(url: str):
- async with websockets.client.connect(url) as websocket:
+ async with connect(url) as websocket:
data = await websocket.recv()
is_open = True
try:
@@ -342,7 +345,7 @@ async def test_send_text_data_to_server(
await self.send({"type": "websocket.send", "text": _text})
async def send_text(url: str):
- async with websockets.client.connect(url) as websocket:
+ async with connect(url) as websocket:
await websocket.send("abc")
return await websocket.recv()
@@ -365,7 +368,7 @@ async def test_send_binary_data_to_server(
await self.send({"type": "websocket.send", "bytes": _bytes})
async def send_text(url: str):
- async with websockets.client.connect(url) as websocket:
+ async with connect(url) as websocket:
await websocket.send(b"abc")
return await websocket.recv()
@@ -387,7 +390,7 @@ async def test_send_after_protocol_close(
await self.send({"type": "websocket.send", "text": "123"})
async def get_data(url: str):
- async with websockets.client.connect(url) as websocket:
+ async with connect(url) as websocket:
data = await websocket.recv()
is_open = True
try:
@@ -407,14 +410,14 @@ async def test_missing_handshake(ws_protocol_cls: WSProtocol, http_protocol_cls:
async def app(scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable):
pass
- async def connect(url: str):
- await websockets.client.connect(url)
+ async def open_connection(url: str):
+ await connect(url)
config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
- with pytest.raises(websockets.exceptions.InvalidStatusCode) as exc_info:
- await connect(f"ws://127.0.0.1:{unused_tcp_port}")
- assert exc_info.value.status_code == 500
+ with pytest.raises(InvalidStatus) as exc_info:
+ await open_connection(f"ws://127.0.0.1:{unused_tcp_port}")
+ assert exc_info.value.response.status_code == 500
async def test_send_before_handshake(
@@ -423,14 +426,14 @@ async def test_send_before_handshake(
async def app(scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable):
await send({"type": "websocket.send", "text": "123"})
- async def connect(url: str):
- await websockets.client.connect(url)
+ async def open_connection(url: str):
+ await connect(url)
config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
- with pytest.raises(websockets.exceptions.InvalidStatusCode) as exc_info:
- await connect(f"ws://127.0.0.1:{unused_tcp_port}")
- assert exc_info.value.status_code == 500
+ with pytest.raises(InvalidStatus) as exc_info:
+ await open_connection(f"ws://127.0.0.1:{unused_tcp_port}")
+ assert exc_info.value.response.status_code == 500
async def test_duplicate_handshake(ws_protocol_cls: WSProtocol, http_protocol_cls: HTTPProtocol, unused_tcp_port: int):
@@ -440,10 +443,10 @@ async def test_duplicate_handshake(ws_protocol_cls: WSProtocol, http_protocol_cl
config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
- async with websockets.client.connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket:
- with pytest.raises(websockets.exceptions.ConnectionClosed):
+ async with connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket:
+ with pytest.raises(ConnectionClosed):
_ = await websocket.recv()
- assert websocket.close_code == 1006
+ assert websocket.protocol.close_code == 1006
async def test_asgi_return_value(ws_protocol_cls: WSProtocol, http_protocol_cls: HTTPProtocol, unused_tcp_port: int):
@@ -458,10 +461,10 @@ async def test_asgi_return_value(ws_protocol_cls: WSProtocol, http_protocol_cls:
config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
- async with websockets.client.connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket:
- with pytest.raises(websockets.exceptions.ConnectionClosed):
+ async with connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket:
+ with pytest.raises(ConnectionClosed):
_ = await websocket.recv()
- assert websocket.close_code == 1006
+ assert websocket.protocol.close_code == 1006
@pytest.mark.parametrize("code", [None, 1000, 1001])
@@ -493,13 +496,13 @@ async def test_app_close(
config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
- async with websockets.client.connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket:
+ async with connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket:
await websocket.ping()
await websocket.send("abc")
- with pytest.raises(websockets.exceptions.ConnectionClosed):
+ with pytest.raises(ConnectionClosed):
await websocket.recv()
- assert websocket.close_code == (code or 1000)
- assert websocket.close_reason == (reason or "")
+ assert websocket.protocol.close_code == (code or 1000)
+ assert websocket.protocol.close_reason == (reason or "")
async def test_client_close(ws_protocol_cls: WSProtocol, http_protocol_cls: HTTPProtocol, unused_tcp_port: int):
@@ -518,7 +521,7 @@ async def test_client_close(ws_protocol_cls: WSProtocol, http_protocol_cls: HTTP
break
async def websocket_session(url: str):
- async with websockets.client.connect(url) as websocket:
+ async with connect(url) as websocket:
await websocket.ping()
await websocket.send("abc")
await websocket.close(code=1001, reason="custom reason")
@@ -555,7 +558,7 @@ async def test_client_connection_lost(
port=unused_tcp_port,
)
async with run_server(config):
- async with websockets.client.connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket:
+ async with connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket:
websocket.transport.close()
await asyncio.sleep(0.1)
got_disconnect_event_before_shutdown = got_disconnect_event
@@ -583,7 +586,7 @@ async def test_client_connection_lost_on_send(
config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
url = f"ws://127.0.0.1:{unused_tcp_port}"
- async with websockets.client.connect(url):
+ async with connect(url):
await asyncio.sleep(0.1)
disconnect.set()
@@ -642,11 +645,11 @@ async def test_send_close_on_server_shutdown(
disconnect_message = message
break
- websocket: websockets.client.WebSocketClientProtocol | None = None
+ websocket: ClientConnection | None = None
async def websocket_session(uri: str):
nonlocal websocket
- async with websockets.client.connect(uri) as ws_connection:
+ async with connect(uri) as ws_connection:
websocket = ws_connection
await server_shutdown_event.wait()
@@ -676,9 +679,7 @@ async def test_subprotocols(
await self.send({"type": "websocket.accept", "subprotocol": subprotocol})
async def get_subprotocol(url: str):
- async with websockets.client.connect(
- url, subprotocols=[Subprotocol("proto1"), Subprotocol("proto2")]
- ) as websocket:
+ async with connect(url, subprotocols=[Subprotocol("proto1"), Subprotocol("proto2")]) as websocket:
return websocket.subprotocol
config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
@@ -688,7 +689,7 @@ async def test_subprotocols(
MAX_WS_BYTES = 1024 * 1024 * 16
-MAX_WS_BYTES_PLUS1 = MAX_WS_BYTES + 1
+MAX_WS_BYTES_PLUS1 = MAX_WS_BYTES + 10
@pytest.mark.parametrize(
@@ -731,15 +732,15 @@ async def test_send_binary_data_to_server_bigger_than_default_on_websockets(
port=unused_tcp_port,
)
async with run_server(config):
- async with websockets.client.connect(f"ws://127.0.0.1:{unused_tcp_port}", max_size=client_size_sent) as ws:
+ async with connect(f"ws://127.0.0.1:{unused_tcp_port}", max_size=client_size_sent) as ws:
await ws.send(b"\x01" * client_size_sent)
if expected_result == 0:
data = await ws.recv()
assert data == b"\x01" * client_size_sent
else:
- with pytest.raises(websockets.exceptions.ConnectionClosedError):
+ with pytest.raises(ConnectionClosedError):
await ws.recv()
- assert ws.close_code == expected_result
+ assert ws.protocol.close_code == expected_result
async def test_server_reject_connection(
@@ -764,10 +765,10 @@ async def test_server_reject_connection(
disconnected_message = await receive()
async def websocket_session(url: str):
- with pytest.raises(websockets.exceptions.InvalidStatusCode) as exc_info:
- async with websockets.client.connect(url):
+ with pytest.raises(InvalidStatus) as exc_info:
+ async with connect(url):
pass # pragma: no cover
- assert exc_info.value.status_code == 403
+ assert exc_info.value.response.status_code == 403
config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
@@ -937,10 +938,10 @@ async def test_server_reject_connection_with_invalid_msg(
await send(message)
async def websocket_session(url: str):
- with pytest.raises(websockets.exceptions.InvalidStatusCode) as exc_info:
- async with websockets.client.connect(url):
+ with pytest.raises(InvalidStatus) as exc_info:
+ async with connect(url):
pass # pragma: no cover
- assert exc_info.value.status_code == 404
+ assert exc_info.value.response.status_code == 404
config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
@@ -971,10 +972,10 @@ async def test_server_reject_connection_with_missing_body(
# no further message
async def websocket_session(url: str):
- with pytest.raises(websockets.exceptions.InvalidStatusCode) as exc_info:
- async with websockets.client.connect(url):
+ with pytest.raises(InvalidStatus) as exc_info:
+ async with connect(url):
pass # pragma: no cover
- assert exc_info.value.status_code == 404
+ assert exc_info.value.response.status_code == 404
config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
@@ -1014,17 +1015,17 @@ async def test_server_multiple_websocket_http_response_start_events(
exception_message = str(exc)
async def websocket_session(url: str):
- with pytest.raises(websockets.exceptions.InvalidStatusCode) as exc_info:
- async with websockets.client.connect(url):
+ with pytest.raises(InvalidStatus) as exc_info:
+ async with connect(url):
pass # pragma: no cover
- assert exc_info.value.status_code == 404
+ assert exc_info.value.response.status_code == 404
config = Config(app=app, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
await websocket_session(f"ws://127.0.0.1:{unused_tcp_port}")
assert exception_message == (
- "Expected ASGI message 'websocket.http.response.body' but got " "'websocket.http.response.start'."
+ "Expected ASGI message 'websocket.http.response.body' but got 'websocket.http.response.start'."
)
@@ -1053,7 +1054,7 @@ async def test_server_can_read_messages_in_buffer_after_close(
config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
- async with websockets.client.connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket:
+ async with connect(f"ws://127.0.0.1:{unused_tcp_port}") as websocket:
await websocket.send(b"abc")
await websocket.send(b"abc")
await websocket.send(b"abc")
@@ -1070,8 +1071,9 @@ async def test_default_server_headers(
await self.send({"type": "websocket.accept"})
async def open_connection(url: str):
- async with websockets.client.connect(url) as websocket:
- return websocket.response_headers
+ async with connect(url) as websocket:
+ assert websocket.response
+ return websocket.response.headers
config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
@@ -1085,8 +1087,9 @@ async def test_no_server_headers(ws_protocol_cls: WSProtocol, http_protocol_cls:
await self.send({"type": "websocket.accept"})
async def open_connection(url: str):
- async with websockets.client.connect(url) as websocket:
- return websocket.response_headers
+ async with connect(url) as websocket:
+ assert websocket.response
+ return websocket.response.headers
config = Config(
app=App,
@@ -1108,8 +1111,9 @@ async def test_no_date_header_on_wsproto(http_protocol_cls: HTTPProtocol, unused
await self.send({"type": "websocket.accept"})
async def open_connection(url: str):
- async with websockets.client.connect(url) as websocket:
- return websocket.response_headers
+ async with connect(url) as websocket:
+ assert websocket.response
+ return websocket.response.headers
config = Config(
app=App,
@@ -1140,8 +1144,9 @@ async def test_multiple_server_header(
)
async def open_connection(url: str):
- async with websockets.client.connect(url) as websocket:
- return websocket.response_headers
+ async with connect(url) as websocket:
+ assert websocket.response
+ return websocket.response.headers
config = Config(app=App, ws=ws_protocol_cls, http=http_protocol_cls, lifespan="off", port=unused_tcp_port)
async with run_server(config):
@@ -1176,8 +1181,8 @@ async def test_lifespan_state(ws_protocol_cls: WSProtocol, http_protocol_cls: HT
await self.send({"type": "websocket.accept"})
async def open_connection(url: str):
- async with websockets.client.connect(url) as websocket:
- return websocket.open
+ async with connect(url):
+ return True
async def app_wrapper(scope: Scope, receive: ASGIReceiveCallable, send: ASGISendCallable):
if scope["type"] == "lifespan":
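
The test hunks above all follow the same migration: the legacy websockets.client API is replaced by the newer asyncio client (connect / ClientConnection), rejected handshakes raise InvalidStatus instead of InvalidStatusCode, and close codes move onto the Sans-I/O protocol object. As a rough, hedged sketch only — the import hunk is not shown here, so the module paths below are an assumption based on websockets >= 13, and the URL is purely illustrative:

import asyncio

from websockets.asyncio.client import ClientConnection, connect
from websockets.exceptions import ConnectionClosedError, InvalidStatus


async def probe(url: str) -> None:
    try:
        async with connect(url) as ws:
            assert isinstance(ws, ClientConnection)  # replaces WebSocketClientProtocol
            await ws.send("ping")
            try:
                await ws.recv()
            except ConnectionClosedError:
                # close code/reason now live on the Sans-I/O protocol object
                print(ws.protocol.close_code, ws.protocol.close_reason)
            # the handshake response object replaces response_headers
            if ws.response is not None:
                print(ws.response.headers)
    except InvalidStatus as exc:
        # rejected handshakes: the HTTP status is on the attached response
        print(exc.response.status_code)


if __name__ == "__main__":
    asyncio.run(probe("ws://127.0.0.1:8000"))
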
diff --git a/uvicorn/protocols/websockets/websockets_impl.py b/uvicorn/protocols/websockets/websockets_impl.py
index cd6c54f..685d6b6 100644
--- a/uvicorn/protocols/websockets/websockets_impl.py
+++ b/uvicorn/protocols/websockets/websockets_impl.py
@@ -13,8 +13,7 @@ from websockets.datastructures import Headers
from websockets.exceptions import ConnectionClosed
from websockets.extensions.base import ServerExtensionFactory
from websockets.extensions.permessage_deflate import ServerPerMessageDeflateFactory
-from websockets.legacy.server import HTTPResponse
-from websockets.server import WebSocketServerProtocol
+from websockets.legacy.server import HTTPResponse, WebSocketServerProtocol
from websockets.typing import Subprotocol
from uvicorn._types import (
diff --git a/uvicorn/protocols/websockets/wsproto_impl.py b/uvicorn/protocols/websockets/wsproto_impl.py
index 828afe5..5d84bff 100644
--- a/uvicorn/protocols/websockets/wsproto_impl.py
+++ b/uvicorn/protocols/websockets/wsproto_impl.py
@@ -149,12 +149,13 @@ class WSProtocol(asyncio.Protocol):
self.writable.set() # pragma: full coverage
def shutdown(self) -> None:
- if self.handshake_complete:
- self.queue.put_nowait({"type": "websocket.disconnect", "code": 1012})
- output = self.conn.send(wsproto.events.CloseConnection(code=1012))
- self.transport.write(output)
- else:
- self.send_500_response()
+ if not self.response_started:
+ if self.handshake_complete:
+ self.queue.put_nowait({"type": "websocket.disconnect", "code": 1012})
+ output = self.conn.send(wsproto.events.CloseConnection(code=1012))
+ self.transport.write(output)
+ else:
+ self.send_500_response()
self.transport.close()
def on_task_complete(self, task: asyncio.Task[None]) -> None:
@@ -221,13 +222,15 @@ class WSProtocol(asyncio.Protocol):
def send_500_response(self) -> None:
if self.response_started or self.handshake_complete:
return # we cannot send responses anymore
+ reject_data = b"Internal Server Error"
headers: list[tuple[bytes, bytes]] = [
(b"content-type", b"text/plain; charset=utf-8"),
+ (b"content-length", str(len(reject_data)).encode()),
(b"connection", b"close"),
(b"content-length", b"21"),
]
output = self.conn.send(wsproto.events.RejectConnection(status_code=500, headers=headers, has_body=True))
- output += self.conn.send(wsproto.events.RejectData(data=b"Internal Server Error"))
+ output += self.conn.send(wsproto.events.RejectData(data=reject_data))
self.transport.write(output)
async def run_asgi(self) -> None:


@@ -1,59 +0,0 @@
maintainer="Michał Polański <michal@polanski.me>"
pkgname=uvicorn
pkgver=0.34.0
pkgrel=0
pkgdesc="Lightning-fast ASGI server"
url="https://www.uvicorn.org/"
license="BSD-3-Clause"
# disable due to lack of support for websockets 14
# https://gitlab.alpinelinux.org/alpine/aports/-/issues/16646
arch="noarch"
depends="py3-click py3-h11"
makedepends="py3-gpep517 py3-hatchling"
checkdepends="
py3-a2wsgi
py3-dotenv
py3-httptools
py3-httpx
py3-pytest
py3-pytest-mock
py3-trustme
py3-typing-extensions
py3-watchfiles
py3-websockets
py3-wsproto
py3-yaml
"
subpackages="$pkgname-pyc"
source="https://github.com/encode/uvicorn/archive/$pkgver/uvicorn-$pkgver.tar.gz
test_multiprocess.patch
2540_add-websocketssansioprotocol.patch
2541_bump-wesockets-on-requirements.patch
fix-test-wsgi.patch
"
build() {
gpep517 build-wheel \
--wheel-dir .dist \
--output-fd 3 3>&1 >&2
}
check() {
python3 -m venv --clear --without-pip --system-site-packages .testenv
.testenv/bin/python3 -m installer .dist/*.whl
.testenv/bin/python3 -m pytest \
-k "not test_close_connection_with_multiple_requests" # a known issue
}
package() {
python3 -m installer -d "$pkgdir" \
.dist/uvicorn-$pkgver-py3-none-any.whl
}
sha512sums="
260782e385a2934049da8c474750958826afe1bfe23b38fe2f6420f355af7a537563f8fe6ac3830814c7469203703d10f4f9f3d6e53e79113bfd2fd34f7a7c72 uvicorn-0.34.0.tar.gz
cfad91dd84f8974362f52d754d7a29f09d07927a46acaa0eb490b6115a5729d84d6df94fead10ccd4cce7f5ea376f1348b0f59daede661dd8373a3851c313c46 test_multiprocess.patch
858e9a7baaf1c12e076aecd81aaaf622b35a59dcaabea4ee1bfc4cda704c9fe271b1cc616a5910d845393717e4989cecb3b04be249cb5d0df1001ec5224c293f 2540_add-websocketssansioprotocol.patch
f8a8c190981b9070232ea985880685bc801947cc7f673d59abf73d3e68bc2e13515ad200232a1de2af0808bc85da48a341f57d47caf87bcc190bfdc3c45718e0 2541_bump-wesockets-on-requirements.patch
379963f9ccbda013e4a0bc3441eee70a581c91f60206aedc15df6a8737950824b7cb8d867774fc415763449bb3e0bba66601e8551101bfc1741098acd035f0cc fix-test-wsgi.patch
"


@@ -1,13 +0,0 @@
diff --git a/tests/middleware/test_wsgi.py.orig b/tests/middleware/test_wsgi.py
index 6003f27..2750487 100644
--- a/tests/middleware/test_wsgi.py.orig
+++ b/tests/middleware/test_wsgi.py
@@ -73,7 +73,7 @@ async def test_wsgi_post(wsgi_middleware: Callable) -> None:
async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
response = await client.post("/", json={"example": 123})
assert response.status_code == 200
- assert response.text == '{"example":123}'
+ assert response.text == '{"example": 123}'
@pytest.mark.anyio
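
For context (not part of the patch): the expected body here only tracks how the JSON payload happens to be serialized — json.dumps adds a space after ':' by default, while compact separators drop it. Which side of the test produces the body, and with which separators, is an assumption here; the snippet below just shows the two spellings the old and new expectations correspond to, using the payload value from the test above.

import json

payload = {"example": 123}
print(json.dumps(payload))                           # {"example": 123}
print(json.dumps(payload, separators=(",", ":")))    # {"example":123}
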


@@ -1,14 +0,0 @@
Wait a bit longer, otherwise the workers might
not have time to finish restarting.
--- a/tests/supervisors/test_multiprocess.py
+++ b/tests/supervisors/test_multiprocess.py
@@ -132,7 +132,7 @@ def test_multiprocess_sighup() -> None:
time.sleep(1)
pids = [p.pid for p in supervisor.processes]
supervisor.signal_queue.append(signal.SIGHUP)
- time.sleep(1)
+ time.sleep(3)
assert pids != [p.pid for p in supervisor.processes]
supervisor.signal_queue.append(signal.SIGINT)
supervisor.join_all()
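
The note at the top of this patch explains the intent: after SIGHUP the workers need time to restart before their PIDs are compared, and the fixed sleep is simply lengthened. As a hedged alternative sketch only (not part of the patch; supervisor.processes is assumed to behave as in the test above), the wait could instead poll with a deadline:

import time


def wait_for_restart(supervisor, old_pids, timeout=10.0, interval=0.1):
    # Poll until the supervisor's worker PIDs differ from the snapshot taken
    # before SIGHUP was queued, or give up after `timeout` seconds.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if [p.pid for p in supervisor.processes] != old_pids:
            return True
        time.sleep(interval)
    return False
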


@@ -1,8 +1,8 @@
# Maintainer: Antoine Martin (ayakael) <dev@ayakael.net>
# Contributor: Antoine Martin (ayakael) <dev@ayakael.net>
pkgname=wikijs
-pkgver=2.5.305
-pkgrel=0
+pkgver=2.5.303
+pkgrel=1
pkgdesc="Wiki.js | A modern, lightweight and powerful wiki app built on Node.js"
license="AGPL-3.0"
arch="!armv7 x86_64"
@@ -49,14 +49,11 @@ package() {
install -Dm644 "$builddir"/package.json -t "$pkgdir"/usr/lib/bundles/wikijs
cp -aR "$builddir"/assets "$builddir"/server "$builddir"/node_modules "$pkgdir"/usr/lib/bundles/wikijs
# remove prebuilts
rm -Rf "$pkgdir"/usr/lib/bundles/wikijs/node_modules/*/prebuilds
mkdir -p "$pkgdir"/var/lib/wikijs
chown 5494:5494 "$pkgdir"/var/lib/wikijs
}
sha512sums="
-e715e2d93fd176dc93676b3dd97d8dd745589552a7d67971fce0c1097f607fa44a3147534709a82b3ad13dda95d7c5833bc30ec37538c6cdef54ac309e6b44d1  wikijs-2.5.305.tar.gz
+a463d79ad0d8ff15dbe568b839094d697c6de0b2e991b77a4944e2a82f9789de6840e504a4673e4e0900d61596e880ca276008de86dac4f05f5823dc0427d2fc  wikijs-2.5.303.tar.gz
355131ee5617348b82681cb8543c784eea59689990a268ecd3b77d44fe9abcca9c86fb8b047f0a8faeba079c650faa7790c5dd65418d313cd7561f38bb590c03 wikijs.initd
07b536c20e370d2a926038165f0e953283259c213a80a8648419565f5359ab05f528ac310e81606914013da212270df6feddb22e514cbcb2464c8274c956e4af config.sample.yml.patch
"