Compare commits

..

3 Commits

Author SHA1 Message Date
Dewi Roberts
301ae720ce Merge branch 'main' into website/integrations--update-jellyfin 2026-02-13 12:40:58 +00:00
Dewi Roberts
9acc45df94 Update website/integrations/media/jellyfin/index.mdx
Co-authored-by: Dominic R <dominic@sdko.org>
Signed-off-by: Dewi Roberts <dewi@goauthentik.io>
2026-01-21 15:31:31 +00:00
dewi-tik
46b7c8f068 Copy old PR 2026-01-16 11:03:28 +00:00
603 changed files with 12901 additions and 23195 deletions

View File

@@ -22,7 +22,7 @@ runs:
sudo rm -rf /usr/local/lib/android
- name: Install uv
if: ${{ contains(inputs.dependencies, 'python') }}
uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # v5
uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v5
with:
enable-cache: true
- name: Setup python
@@ -36,7 +36,7 @@ runs:
run: uv sync --all-extras --dev --frozen
- name: Setup node
if: ${{ contains(inputs.dependencies, 'node') }}
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v4
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v4
with:
node-version-file: web/package.json
cache: "npm"
@@ -44,7 +44,7 @@ runs:
registry-url: 'https://registry.npmjs.org'
- name: Setup go
if: ${{ contains(inputs.dependencies, 'go') }}
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v5
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v5
with:
go-version-file: "go.mod"
- name: Setup docker cache
@@ -58,7 +58,7 @@ runs:
run: |
export PSQL_TAG=${{ inputs.postgresql_version }}
docker compose -f .github/actions/setup/compose.yml up -d
cd web && npm ci
cd web && npm i
- name: Generate config
if: ${{ contains(inputs.dependencies, 'python') }}
shell: uv run python {0}

View File

@@ -38,21 +38,6 @@ updates:
#endregion
#region Rust
- package-ecosystem: rust-toolchain
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies
#endregion
#region Web
- package-ecosystem: npm
@@ -249,7 +234,7 @@ updates:
- package-ecosystem: docker
directories:
- /lifecycle/container
- /
- /website
schedule:
interval: daily

View File

@@ -67,12 +67,12 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
with:
go-version-file: "go.mod"
- name: Generate API Clients
@@ -80,7 +80,7 @@ jobs:
make gen-client-ts
make gen-client-go
- name: Build Docker Image
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6
uses: docker/build-push-action@601a80b39c9405e50806ae38af30926f9d957c47 # v6
id: push
with:
context: .
@@ -95,7 +95,7 @@ jobs:
platforms: linux/${{ inputs.image_arch }}
cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
cache-to: ${{ steps.ev.outputs.cacheTo }}
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:

View File

@@ -90,14 +90,14 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: int128/docker-manifest-create-action@8aac06098a12365ccdf99372dcfb453ccce8a0b0 # v2
- uses: int128/docker-manifest-create-action@1a059c021f1d5e9f2bd39de745d5dd3a0ef6df90 # v2
id: build
with:
tags: ${{ matrix.tag }}
sources: |
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-amd64.outputs.image-digest }}
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-arm64.outputs.image-digest }}
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}

View File

@@ -25,7 +25,7 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
token: ${{ steps.generate_token.outputs.token }}
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
registry-url: "https://registry.npmjs.org"

View File

@@ -33,7 +33,7 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: website/package.json
cache: "npm"
@@ -55,7 +55,7 @@ jobs:
env:
NODE_ENV: production
run: npm run build -w api
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v4
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v4
with:
name: api-docs
path: website/api/build
@@ -67,11 +67,11 @@ jobs:
- build
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v5
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v5
with:
name: api-docs
path: website/api/build
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: website/package.json
cache: "npm"

View File

@@ -24,7 +24,7 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: lifecycle/aws/package.json
cache: "npm"

View File

@@ -36,7 +36,7 @@ jobs:
NODE_ENV: production
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: website/package.json
cache: "npm"
@@ -53,7 +53,7 @@ jobs:
NODE_ENV: production
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: website/package.json
cache: "npm"
@@ -96,7 +96,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
id: push
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6
uses: docker/build-push-action@601a80b39c9405e50806ae38af30926f9d957c47 # v6
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: website/Dockerfile
@@ -105,7 +105,7 @@ jobs:
context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }}
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:

View File

@@ -6,10 +6,6 @@ on:
schedule:
# Every night at 3am
- cron: "0 3 * * *"
pull_request:
paths:
# Needs to refer to itself
- .github/workflows/ci-main-daily.yml
jobs:
test-container:
@@ -19,14 +15,14 @@ jobs:
matrix:
version:
- docs
- version-2025-12
- version-2025-10
- version-2025-4
- version-2025-2
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- run: |
current="$(pwd)"
dir="/tmp/authentik/${{ matrix.version }}"
mkdir -p "${dir}/lifecycle/container"
cd "${dir}"
wget "https://${{ matrix.version }}.goauthentik.io/docker-compose.yml" -O "${dir}/lifecycle/container/compose.yml"
"${current}/scripts/test_docker.sh"
mkdir -p $dir
cd $dir
wget https://${{ matrix.version }}.goauthentik.io/compose.yml
${current}/scripts/test_docker.sh

View File

@@ -170,7 +170,7 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Create k8s Kind Cluster
uses: helm/kind-action@ef37e7f390d99f746eb8b610417061a60e82a6cc # v1.14.0
uses: helm/kind-action@92086f6be054225fa813e0a4b13787fc9088faab # v1.13.0
- name: run integration
run: |
uv run coverage run manage.py test tests/integration
@@ -279,7 +279,7 @@ jobs:
with:
flags: conformance
- if: ${{ !cancelled() }}
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
with:
name: conformance-certification-${{ matrix.job.name }}
path: tests/openid_conformance/exports/

View File

@@ -22,7 +22,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
with:
go-version-file: "go.mod"
- name: Prepare and generate API
@@ -43,7 +43,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
with:
go-version-file: "go.mod"
- name: Setup authentik env
@@ -111,7 +111,7 @@ jobs:
run: make gen-client-go
- name: Build Docker Image
id: push
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6
uses: docker/build-push-action@601a80b39c9405e50806ae38af30926f9d957c47 # v6
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: lifecycle/container/${{ matrix.type }}.Dockerfile
@@ -122,7 +122,7 @@ jobs:
context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }}
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:
@@ -148,10 +148,10 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
cache: "npm"

View File

@@ -32,7 +32,7 @@ jobs:
project: web
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: ${{ matrix.project }}/package.json
cache: "npm"
@@ -49,7 +49,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
cache: "npm"
@@ -77,7 +77,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
cache: "npm"

View File

@@ -29,19 +29,18 @@ jobs:
- packages/eslint-config
- packages/prettier-config
- packages/docusaurus-config
- packages/logger-js
- packages/esbuild-plugin-live-reload
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
fetch-depth: 2
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: ${{ matrix.package }}/package.json
registry-url: "https://registry.npmjs.org"
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # 24d32ffd492484c1d75e0c0b894501ddb9d30d62
uses: tj-actions/changed-files@8cba46e29c11878d930bca7870bb54394d3e8b21 # 24d32ffd492484c1d75e0c0b894501ddb9d30d62
with:
files: |
${{ matrix.package }}/package.json

View File

@@ -51,14 +51,14 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
id: push
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6
uses: docker/build-push-action@601a80b39c9405e50806ae38af30926f9d957c47 # v6
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: website/Dockerfile
push: true
platforms: linux/amd64,linux/arm64
context: .
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
if: true
with:
@@ -84,10 +84,10 @@ jobs:
- rac
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
cache: "npm"
@@ -119,7 +119,7 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6
uses: docker/build-push-action@601a80b39c9405e50806ae38af30926f9d957c47 # v6
id: push
with:
push: true
@@ -129,7 +129,7 @@ jobs:
file: lifecycle/container/${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
context: .
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
@@ -152,10 +152,10 @@ jobs:
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
cache: "npm"
@@ -180,7 +180,7 @@ jobs:
export CGO_ENABLED=0
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- name: Upload binaries to release
uses: svenstaro/upload-release-action@b98a3b12e86552593f3e4e577ca8a62aa2f3f22b # v2
uses: svenstaro/upload-release-action@6b7fa9f267e90b50a19fef07b3596790bb941741 # v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}

View File

@@ -91,7 +91,6 @@ jobs:
# ID from https://api.github.com/users/authentik-automation[bot]
git config --global user.name '${{ steps.app-token.outputs.app-slug }}[bot]'
git config --global user.email '${{ steps.get-user-id.outputs.user-id }}+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com'
git pull
git commit -a -m "release: ${{ inputs.version }}" --allow-empty
git tag "version/${{ inputs.version }}" HEAD -m "version/${{ inputs.version }}"
git push --follow-tags
@@ -175,25 +174,21 @@ jobs:
if: "${{ inputs.release_reason == 'feature' }}"
run: |
changelog_url="https://docs.goauthentik.io/docs/releases/${{ needs.check-inputs.outputs.major_version }}"
reason="${{ inputs.release_reason }}"
jq \
--arg version "${{ inputs.version }}" \
--arg changelog "See ${changelog_url}" \
--arg changelog_url "${changelog_url}" \
--arg reason "${reason}" \
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url | .stable.reason = $reason' version.json > version.new.json
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url' version.json > version.new.json
mv version.new.json version.json
- name: Bump version
if: "${{ inputs.release_reason != 'feature' }}"
run: |
changelog_url="https://docs.goauthentik.io/docs/releases/${{ needs.check-inputs.outputs.major_version }}#fixed-in-$(echo -n ${{ inputs.version}} | sed 's/\.//g')"
reason="${{ inputs.release_reason }}"
jq \
--arg version "${{ inputs.version }}" \
--arg changelog "See ${changelog_url}" \
--arg changelog_url "${changelog_url}" \
--arg reason "${reason}" \
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url | .stable.reason = $reason' version.json > version.new.json
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url' version.json > version.new.json
mv version.new.json version.json
- name: Create pull request
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7

View File

@@ -19,7 +19,7 @@ jobs:
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f # v10
- uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10
with:
repo-token: ${{ steps.generate_token.outputs.token }}
days-before-stale: 60

View File

@@ -34,7 +34,6 @@ packages/docusaurus-config @goauthentik/frontend
packages/esbuild-plugin-live-reload @goauthentik/frontend
packages/eslint-config @goauthentik/frontend
packages/prettier-config @goauthentik/frontend
packages/logger-js @goauthentik/frontend
packages/tsconfig @goauthentik/frontend
# Web
web/ @goauthentik/frontend

View File

@@ -148,11 +148,11 @@ bump: ## Bump authentik version. Usage: make bump version=20xx.xx.xx
ifndef version
$(error Usage: make bump version=20xx.xx.xx )
endif
$(eval current_version := $(shell cat ${PWD}/internal/constants/VERSION))
$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' ${PWD}/pyproject.toml
$(SED_INPLACE) 's/^VERSION = ".*"/VERSION = "$(version)"/' ${PWD}/authentik/__init__.py
$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' pyproject.toml
$(SED_INPLACE) 's/^VERSION = ".*"/VERSION = "$(version)"/' authentik/__init__.py
$(MAKE) gen-build gen-compose aws-cfn
$(SED_INPLACE) "s/\"${current_version}\"/\"$(version)\"/" ${PWD}/package.json ${PWD}/package-lock.json ${PWD}/web/package.json ${PWD}/web/package-lock.json
npm version --no-git-tag-version --allow-same-version $(version)
cd ${PWD}/web && npm version --no-git-tag-version --allow-same-version $(version)
echo -n $(version) > ${PWD}/internal/constants/VERSION
#########################
@@ -168,22 +168,12 @@ gen-build: ## Extract the schema from the database
gen-compose:
$(UV) run scripts/generate_compose.py
gen-changelog: ## (Release) generate the changelog based from the commits since the last version
# These are best-effort guesses based on commit messages
$(eval last_version := $(shell git tag --list 'version/*' --sort 'version:refname' | grep -vE 'rc\d+$$' | tail -1))
$(eval current_commit := $(shell git rev-parse HEAD))
git log --pretty=format:"- %s" $(shell git merge-base ${last_version} ${current_commit})...${current_commit} > merged_to_current
git log --pretty=format:"- %s" $(shell git merge-base ${last_version} ${current_commit})...${last_version} > merged_to_last
grep -Eo 'cherry-pick (#\d+)' merged_to_last | cut -d ' ' -f 2 | sed 's/.*/(&)$$/' > cherry_picked_to_last
grep -vf cherry_picked_to_last merged_to_current | sort > changelog.md
rm merged_to_current
rm merged_to_last
rm cherry_picked_to_last
gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
npx prettier --write changelog.md
gen-diff: ## (Release) generate the changelog diff between the current schema and the last version
$(eval last_version := $(shell git tag --list 'version/*' --sort 'version:refname' | grep -vE 'rc\d+$$' | tail -1))
git show ${last_version}:schema.yml > schema-old.yml
gen-diff: ## (Release) generate the changelog diff between the current schema and the last tag
git show $(shell git describe --tags $(shell git rev-list --tags --max-count=1)):schema.yml > schema-old.yml
docker compose -f scripts/api/compose.yml run --rm --user "${UID}:${GID}" diff \
--markdown \
/local/diff.md \

View File

@@ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
| Version | Supported |
| ---------- | ---------- |
| 2025.10.x | ✅ |
| 2025.12.x | ✅ |
| 2026.2.x | ✅ |
## Reporting a Vulnerability

View File

@@ -100,25 +100,13 @@ class S3Backend(ManageableBackend):
f"storage.{self.usage.value}.{self.name}.addressing_style",
CONFIG.get(f"storage.{self.name}.addressing_style", "auto"),
)
signature_version = CONFIG.get(
f"storage.{self.usage.value}.{self.name}.signature_version",
CONFIG.get(f"storage.{self.name}.signature_version", "s3v4"),
)
# Keep signature_version pass-through and let boto3/botocore handle it.
# In boto3's S3 configuration docs, `s3v4` (default) and deprecated `s3`
# are the documented values:
# https://github.com/boto/boto3/blob/791a3e8f36d83664a47b4281a0586b3546cef3ec/docs/source/guide/configuration.rst?plain=1#L398-L407
# Botocore also supports additional signer names, so we intentionally do
# not enforce a restricted allowlist here.
return self.session.client(
"s3",
endpoint_url=endpoint_url,
use_ssl=use_ssl,
region_name=region_name,
config=Config(
signature_version=signature_version, s3={"addressing_style": addressing_style}
),
config=Config(signature_version="s3v4", s3={"addressing_style": addressing_style}),
)
@property

View File

@@ -1,6 +1,5 @@
from unittest import skipUnless
from botocore.exceptions import UnsupportedSignatureVersionError
from django.test import TestCase
from authentik.admin.files.tests.utils import FileTestS3BackendMixin, s3_test_server_available
@@ -82,27 +81,6 @@ class TestS3Backend(FileTestS3BackendMixin, TestCase):
self.assertIn("X-Amz-Signature=", url)
self.assertIn("test.png", url)
def test_client_signature_version_default_v4(self):
"""Test S3 client defaults to v4 signature when not configured."""
self.assertEqual(self.media_s3_backend.client.meta.config.signature_version, "s3v4")
@CONFIG.patch("storage.s3.signature_version", "s3")
def test_client_signature_version_global_override(self):
"""Test S3 client respects globally configured signature version."""
self.assertEqual(self.media_s3_backend.client.meta.config.signature_version, "s3")
@CONFIG.patch("storage.s3.signature_version", "s3v4")
@CONFIG.patch("storage.media.s3.signature_version", "s3")
def test_client_signature_version_media_override(self):
"""Test usage-specific signature version takes precedence over global."""
self.assertEqual(self.media_s3_backend.client.meta.config.signature_version, "s3")
@CONFIG.patch("storage.media.s3.signature_version", "not-a-real-signature")
def test_client_signature_version_unsupported(self):
"""Test unsupported signature version raises botocore error."""
with self.assertRaises(UnsupportedSignatureVersionError):
self.media_s3_backend.file_url("test.png", use_cache=False)
@CONFIG.patch("storage.s3.bucket_name", "test-bucket")
def test_file_exists_true(self):
"""Test file_exists returns True for existing file"""

View File

@@ -1,7 +1,5 @@
"""authentik core models"""
import re
import traceback
from datetime import datetime, timedelta
from enum import StrEnum
from hashlib import sha256
@@ -17,6 +15,7 @@ from django.contrib.sessions.base_session import AbstractBaseSession
from django.core.validators import validate_slug
from django.db import models
from django.db.models import Q, QuerySet, options
from django.db.models.constants import LOOKUP_SEP
from django.http import HttpRequest
from django.utils.functional import cached_property
from django.utils.timezone import now
@@ -44,7 +43,6 @@ from authentik.lib.models import (
DomainlessFormattedURLValidator,
SerializerModel,
)
from authentik.lib.utils.inheritance import get_deepest_child
from authentik.lib.utils.time import timedelta_from_string
from authentik.policies.models import PolicyBindingModel
from authentik.rbac.models import Role
@@ -530,35 +528,23 @@ class User(SerializerModel, AttributesMixin, AbstractUser):
"default: in 30 days). See authentik logs for every will invocation of this "
"deprecation."
)
stacktrace = traceback.format_stack()
# The last line is this function, the next-to-last line is its caller
cause = stacktrace[-2] if len(stacktrace) > 1 else "Unknown, see stacktrace in logs"
if search := re.search(r'"(.*?)"', cause):
cause = f"Property mapping or Expression policy named {search.group(1)}"
LOGGER.warning(
"deprecation used",
message=message_logger,
deprecation=deprecation,
replacement=replacement,
cause=cause,
stacktrace=stacktrace,
)
if not Event.filter_not_expired(
action=EventAction.CONFIGURATION_WARNING,
context__deprecation=deprecation,
context__cause=cause,
action=EventAction.CONFIGURATION_WARNING, context__deprecation=deprecation
).exists():
event = Event.new(
EventAction.CONFIGURATION_WARNING,
deprecation=deprecation,
replacement=replacement,
message=message_event,
cause=cause,
)
event.expires = datetime.now() + timedelta(days=30)
event.save()
return self.groups
def set_password(self, raw_password, signal=True, sender=None, request=None):
@@ -803,7 +789,25 @@ class Application(SerializerModel, PolicyBindingModel):
"""Get casted provider instance. Needs Application queryset with_provider"""
if not self.provider:
return None
return get_deepest_child(self.provider)
candidates = []
base_class = Provider
for subclass in base_class.objects.get_queryset()._get_subclasses_recurse(base_class):
parent = self.provider
for level in subclass.split(LOOKUP_SEP):
try:
parent = getattr(parent, level)
except AttributeError:
break
if parent in candidates:
continue
idx = subclass.count(LOOKUP_SEP)
if type(parent) is not base_class:
idx += 1
candidates.insert(idx, parent)
if not candidates:
return None
return candidates[-1]
def backchannel_provider_for[T: Provider](self, provider_type: type[T], **kwargs) -> T | None:
"""Get Backchannel provider for a specific type"""

View File

@@ -44,24 +44,19 @@
{% endblock %}
</div>
</main>
<footer
name="site-footer"
aria-label="{% trans 'Site footer' %}"
class="pf-c-login__footer pf-m-dark">
<div name="flow-links" aria-label="{% trans 'Flow links' %}">
<ul class="pf-c-list pf-m-inline" part="list">
{% for link in footer_links %}
<li part="list-item">
<a part="list-item-link" href="{{ link.href }}">{{ link.name }}</a>
</li>
{% endfor %}
<li part="list-item">
<span>
{% trans 'Powered by authentik' %}
</span>
</li>
</ul>
</div>
<footer aria-label="Site footer" class="pf-c-login__footer pf-m-dark">
<ul class="pf-c-list pf-m-inline">
{% for link in footer_links %}
<li>
<a href="{{ link.href }}">{{ link.name }}</a>
</li>
{% endfor %}
<li>
<span>
{% trans 'Powered by authentik' %}
</span>
</li>
</ul>
</footer>
</div>
</div>

View File

@@ -78,7 +78,7 @@ def generate_key_id_legacy(key_data: str) -> str:
"""Generate Key ID using MD5 (legacy format for backwards compatibility)."""
if not key_data:
return ""
return md5(key_data.encode("utf-8"), usedforsecurity=False).hexdigest() # nosec
return md5(key_data.encode("utf-8")).hexdigest() # nosec
class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel):

View File

@@ -1,6 +1,5 @@
from hashlib import sha256
from json import loads
from unittest.mock import PropertyMock, patch
from django.urls import reverse
from jwt import encode
@@ -233,43 +232,3 @@ class TestEndpointStage(FlowTestCase):
plan = plan()
self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
self.assertEqual(plan.context[PLAN_CONTEXT_DEVICE], self.device)
def test_endpoint_stage_connector_no_stage_optional(self):
flow = create_test_flow()
stage = EndpointStage.objects.create(connector=self.connector, mode=StageMode.OPTIONAL)
FlowStageBinding.objects.create(stage=stage, target=flow, order=0)
with patch(
"authentik.endpoints.connectors.agent.models.AgentConnector.stage",
PropertyMock(return_value=None),
):
with self.assertFlowFinishes() as plan:
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
)
self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
plan = plan()
self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
self.assertNotIn(PLAN_CONTEXT_DEVICE, plan.context)
def test_endpoint_stage_connector_no_stage_required(self):
flow = create_test_flow()
stage = EndpointStage.objects.create(connector=self.connector, mode=StageMode.REQUIRED)
FlowStageBinding.objects.create(stage=stage, target=flow, order=0)
with patch(
"authentik.endpoints.connectors.agent.models.AgentConnector.stage",
PropertyMock(return_value=None),
):
with self.assertFlowFinishes() as plan:
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
)
self.assertStageResponse(
res,
component="ak-stage-access-denied",
error_message="Invalid stage configuration",
)
plan = plan()
self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
self.assertNotIn(PLAN_CONTEXT_DEVICE, plan.context)

View File

@@ -1,4 +1,4 @@
from authentik.endpoints.models import EndpointStage, StageMode
from authentik.endpoints.models import EndpointStage
from authentik.flows.stage import StageView
PLAN_CONTEXT_ENDPOINT_CONNECTOR = "endpoint_connector"
@@ -6,24 +6,15 @@ PLAN_CONTEXT_ENDPOINT_CONNECTOR = "endpoint_connector"
class EndpointStageView(StageView):
def _get_inner(self) -> StageView | None:
def _get_inner(self):
stage: EndpointStage = self.executor.current_stage
inner_stage: type[StageView] | None = stage.connector.stage
if not inner_stage:
return None
return self.executor.stage_ok()
return inner_stage(self.executor, request=self.request)
def dispatch(self, request, *args, **kwargs):
inner = self._get_inner()
if inner is None:
stage: EndpointStage = self.executor.current_stage
if stage.mode == StageMode.OPTIONAL:
return self.executor.stage_ok()
else:
return self.executor.stage_invalid("Invalid stage configuration")
return inner.dispatch(request, *args, **kwargs)
return self._get_inner().dispatch(request, *args, **kwargs)
def cleanup(self):
inner = self._get_inner()
if inner is not None:
return inner.cleanup()
return self._get_inner().cleanup()

View File

@@ -15,7 +15,6 @@ from django.core.cache import cache
from django.db.models.query import QuerySet
from django.utils.timezone import now
from jwt import PyJWTError, decode, get_unverified_header
from jwt.algorithms import ECAlgorithm
from rest_framework.exceptions import ValidationError
from rest_framework.fields import (
ChoiceField,
@@ -110,20 +109,13 @@ class LicenseKey:
intermediate.verify_directly_issued_by(get_licensing_key())
except InvalidSignature, TypeError, ValueError, Error:
raise ValidationError("Unable to verify license") from None
_validate_curve_original = ECAlgorithm._validate_curve
try:
# authentik's license are generated with `algorithm="ES512"` and signed with
# a key of curve `secp384r1`. Starting with version 2.11.0, pyjwt enforces the spec, see
# https://github.com/jpadilla/pyjwt/commit/5b8622773358e56d3d3c0a9acf404809ff34433a
# authentik will change its license generation to `algorithm="ES384"` in 2026.
# TODO: remove this when the last incompatible license runs out.
ECAlgorithm._validate_curve = lambda *_: True
body = from_dict(
LicenseKey,
decode(
jwt,
our_cert.public_key(),
algorithms=["ES384", "ES512"],
algorithms=["ES512"],
audience=get_license_aud(),
options={"verify_exp": check_expiry, "verify_signature": check_expiry},
),
@@ -133,8 +125,6 @@ class LicenseKey:
if unverified["aud"] != get_license_aud():
raise ValidationError("Invalid Install ID in license") from None
raise ValidationError("Unable to verify license") from None
finally:
ECAlgorithm._validate_curve = _validate_curve_original
return body
@staticmethod

View File

@@ -1,11 +1,11 @@
from datetime import datetime
from datetime import date
from django.db.models import BooleanField as ModelBooleanField
from django.db.models import Case, Q, Value, When
from django_filters.rest_framework import BooleanFilter, FilterSet
from drf_spectacular.utils import extend_schema
from drf_spectacular.utils import extend_schema, extend_schema_field
from rest_framework.decorators import action
from rest_framework.fields import IntegerField, SerializerMethodField
from rest_framework.fields import DateField, IntegerField, SerializerMethodField
from rest_framework.mixins import CreateModelMixin
from rest_framework.request import Request
from rest_framework.response import Response
@@ -21,7 +21,6 @@ from authentik.enterprise.lifecycle.utils import (
ReviewerUserSerializer,
admin_link_for_model,
parse_content_type,
start_of_day,
)
from authentik.lib.utils.time import timedelta_from_string
@@ -68,13 +67,13 @@ class LifecycleIterationSerializer(EnterpriseRequiredMixin, ModelSerializer):
def get_object_admin_url(self, iteration: LifecycleIteration) -> str:
return admin_link_for_model(iteration.object)
def get_grace_period_end(self, iteration: LifecycleIteration) -> datetime:
return start_of_day(
iteration.opened_on + timedelta_from_string(iteration.rule.grace_period)
)
@extend_schema_field(DateField())
def get_grace_period_end(self, iteration: LifecycleIteration) -> date:
return iteration.opened_on + timedelta_from_string(iteration.rule.grace_period)
def get_next_review_date(self, iteration: LifecycleIteration) -> datetime:
return start_of_day(iteration.opened_on + timedelta_from_string(iteration.rule.interval))
@extend_schema_field(DateField())
def get_next_review_date(self, iteration: LifecycleIteration):
return iteration.opened_on + timedelta_from_string(iteration.rule.interval)
def get_user_can_review(self, iteration: LifecycleIteration) -> bool:
return iteration.user_can_review(self.context["request"].user)

View File

@@ -1,18 +0,0 @@
# Generated by Django 5.2.11 on 2026-02-13 09:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_lifecycle", "0001_initial"),
]
operations = [
migrations.AlterField(
model_name="lifecycleiteration",
name="opened_on",
field=models.DateTimeField(auto_now_add=True),
),
]

View File

@@ -1,4 +1,3 @@
from datetime import timedelta
from uuid import uuid4
from django.contrib.contenttypes.fields import GenericForeignKey
@@ -14,7 +13,7 @@ from rest_framework.serializers import BaseSerializer
from authentik.blueprints.models import ManagedModel
from authentik.core.models import Group, User
from authentik.enterprise.lifecycle.utils import link_for_model, start_of_day
from authentik.enterprise.lifecycle.utils import link_for_model
from authentik.events.models import Event, EventAction, NotificationSeverity, NotificationTransport
from authentik.lib.models import SerializerModel
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
@@ -99,9 +98,7 @@ class LifecycleRule(SerializerModel):
def _get_newly_overdue_iterations(self) -> QuerySet[LifecycleIteration]:
return self.lifecycleiteration_set.filter(
opened_on__lt=start_of_day(
timezone.now() + timedelta(days=1) - timedelta_from_string(self.grace_period)
),
opened_on__lte=timezone.now() - timedelta_from_string(self.grace_period),
state=ReviewState.PENDING,
)
@@ -109,9 +106,7 @@ class LifecycleRule(SerializerModel):
recent_iteration_ids = LifecycleIteration.objects.filter(
content_type=self.content_type,
object_id__isnull=False,
opened_on__gte=start_of_day(
timezone.now() + timedelta(days=1) - timedelta_from_string(self.interval)
),
opened_on__gte=timezone.now() - timedelta_from_string(self.interval),
).values_list(Cast("object_id", output_field=self._get_pk_field()), flat=True)
return self.get_objects().exclude(pk__in=recent_iteration_ids)
@@ -191,7 +186,7 @@ class LifecycleIteration(SerializerModel, ManagedModel):
rule = models.ForeignKey(LifecycleRule, null=True, on_delete=models.SET_NULL)
state = models.CharField(max_length=10, choices=ReviewState, default=ReviewState.PENDING)
opened_on = models.DateTimeField(auto_now_add=True)
opened_on = models.DateField(auto_now_add=True)
class Meta:
indexes = [models.Index(fields=["content_type", "opened_on"])]

View File

@@ -1,4 +1,3 @@
import datetime as dt
from datetime import timedelta
from unittest.mock import patch
@@ -320,7 +319,7 @@ class TestLifecycleModels(TestCase):
content_type=content_type, object_id=str(app_one.pk), rule=rule_overdue
)
LifecycleIteration.objects.filter(pk=iteration.pk).update(
opened_on=(timezone.now() - timedelta(days=20))
opened_on=(timezone.now().date() - timedelta(days=20))
)
# Apply again to trigger overdue logic
@@ -384,7 +383,7 @@ class TestLifecycleModels(TestCase):
content_type=content_type, object_id=str(app_overdue.pk), rule=rule_overdue
)
LifecycleIteration.objects.filter(pk=overdue_iteration.pk).update(
opened_on=(timezone.now() - timedelta(days=20))
opened_on=(timezone.now().date() - timedelta(days=20))
)
# Apply overdue rule to mark iteration as overdue
@@ -668,178 +667,3 @@ class TestLifecycleModels(TestCase):
reviewers = list(rule.get_reviewers())
self.assertIn(explicit_reviewer, reviewers)
self.assertIn(group_member, reviewers)
class TestLifecycleDateBoundaries(TestCase):
"""Verify that start_of_day normalization ensures correct overdue/due
detection regardless of exact task execution time within a day.
The daily task may run at any point during the day. The start_of_day
normalization in _get_newly_overdue_iterations and _get_newly_due_objects
ensures that the boundary is always at midnight, so millisecond variations
in task execution time do not affect results."""
def _create_rule_and_iteration(self, grace_period="days=1", interval="days=365"):
app = Application.objects.create(name=generate_id(), slug=generate_id())
content_type = ContentType.objects.get_for_model(Application)
rule = LifecycleRule.objects.create(
name=generate_id(),
content_type=content_type,
object_id=str(app.pk),
interval=interval,
grace_period=grace_period,
)
iteration = LifecycleIteration.objects.get(
content_type=content_type, object_id=str(app.pk), rule=rule
)
return app, rule, iteration
def test_overdue_iteration_opened_yesterday(self):
"""grace_period=1 day: iteration opened yesterday at any time is overdue today."""
_, rule, iteration = self._create_rule_and_iteration(grace_period="days=1")
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
for opened_on in [
dt.datetime(2025, 6, 14, 0, 0, 0, tzinfo=dt.UTC),
dt.datetime(2025, 6, 14, 12, 0, 0, tzinfo=dt.UTC),
dt.datetime(2025, 6, 14, 23, 59, 59, 999999, tzinfo=dt.UTC),
]:
with self.subTest(opened_on=opened_on):
LifecycleIteration.objects.filter(pk=iteration.pk).update(
opened_on=opened_on, state=ReviewState.PENDING
)
with patch("django.utils.timezone.now", return_value=fixed_now):
self.assertIn(iteration, list(rule._get_newly_overdue_iterations()))
def test_not_overdue_iteration_opened_today(self):
"""grace_period=1 day: iteration opened today at any time is NOT overdue."""
_, rule, iteration = self._create_rule_and_iteration(grace_period="days=1")
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
for opened_on in [
dt.datetime(2025, 6, 15, 0, 0, 0, tzinfo=dt.UTC),
dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC),
dt.datetime(2025, 6, 15, 23, 59, 59, 999999, tzinfo=dt.UTC),
]:
with self.subTest(opened_on=opened_on):
LifecycleIteration.objects.filter(pk=iteration.pk).update(
opened_on=opened_on, state=ReviewState.PENDING
)
with patch("django.utils.timezone.now", return_value=fixed_now):
self.assertNotIn(iteration, list(rule._get_newly_overdue_iterations()))
def test_overdue_independent_of_task_execution_time(self):
"""Overdue detection gives the same result whether the task runs at 00:00:01 or 23:59:59."""
_, rule, iteration = self._create_rule_and_iteration(grace_period="days=1")
opened_on = dt.datetime(2025, 6, 14, 18, 0, 0, tzinfo=dt.UTC)
LifecycleIteration.objects.filter(pk=iteration.pk).update(
opened_on=opened_on, state=ReviewState.PENDING
)
for task_time in [
dt.datetime(2025, 6, 15, 0, 0, 1, tzinfo=dt.UTC),
dt.datetime(2025, 6, 15, 12, 0, 0, tzinfo=dt.UTC),
dt.datetime(2025, 6, 15, 23, 59, 59, tzinfo=dt.UTC),
]:
with self.subTest(task_time=task_time):
with patch("django.utils.timezone.now", return_value=task_time):
self.assertIn(iteration, list(rule._get_newly_overdue_iterations()))
def test_overdue_boundary_multi_day_grace_period(self):
"""grace_period=30 days: overdue after 30 full days, not after 29."""
_, rule, iteration = self._create_rule_and_iteration(grace_period="days=30")
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
# Opened 30 days ago (May 16), should go overdue
LifecycleIteration.objects.filter(pk=iteration.pk).update(
opened_on=dt.datetime(2025, 5, 16, 12, 0, 0, tzinfo=dt.UTC),
state=ReviewState.PENDING,
)
with patch("django.utils.timezone.now", return_value=fixed_now):
self.assertIn(iteration, list(rule._get_newly_overdue_iterations()))
# Opened 29 days ago (May 17), should NOT go overdue
LifecycleIteration.objects.filter(pk=iteration.pk).update(
opened_on=dt.datetime(2025, 5, 17, 12, 0, 0, tzinfo=dt.UTC),
state=ReviewState.PENDING,
)
with patch("django.utils.timezone.now", return_value=fixed_now):
self.assertNotIn(iteration, list(rule._get_newly_overdue_iterations()))
def test_due_object_iteration_opened_yesterday(self):
"""interval=1 day: object with iteration opened yesterday is due for a new review."""
app, rule, iteration = self._create_rule_and_iteration(interval="days=1")
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
for opened_on in [
dt.datetime(2025, 6, 14, 0, 0, 0, tzinfo=dt.UTC),
dt.datetime(2025, 6, 14, 12, 0, 0, tzinfo=dt.UTC),
dt.datetime(2025, 6, 14, 23, 59, 59, 999999, tzinfo=dt.UTC),
]:
with self.subTest(opened_on=opened_on):
LifecycleIteration.objects.filter(pk=iteration.pk).update(opened_on=opened_on)
with patch("django.utils.timezone.now", return_value=fixed_now):
self.assertIn(app, list(rule._get_newly_due_objects()))
def test_not_due_object_iteration_opened_today(self):
"""interval=1 day: object with iteration opened today is NOT due."""
app, rule, iteration = self._create_rule_and_iteration(interval="days=1")
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
for opened_on in [
dt.datetime(2025, 6, 15, 0, 0, 0, tzinfo=dt.UTC),
dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC),
dt.datetime(2025, 6, 15, 23, 59, 59, 999999, tzinfo=dt.UTC),
]:
with self.subTest(opened_on=opened_on):
LifecycleIteration.objects.filter(pk=iteration.pk).update(opened_on=opened_on)
with patch("django.utils.timezone.now", return_value=fixed_now):
self.assertNotIn(app, list(rule._get_newly_due_objects()))
def test_due_independent_of_task_execution_time(self):
"""Due detection gives the same result whether the task runs at 00:00:01 or 23:59:59."""
app, rule, iteration = self._create_rule_and_iteration(interval="days=1")
opened_on = dt.datetime(2025, 6, 14, 18, 0, 0, tzinfo=dt.UTC)
LifecycleIteration.objects.filter(pk=iteration.pk).update(opened_on=opened_on)
for task_time in [
dt.datetime(2025, 6, 15, 0, 0, 1, tzinfo=dt.UTC),
dt.datetime(2025, 6, 15, 12, 0, 0, tzinfo=dt.UTC),
dt.datetime(2025, 6, 15, 23, 59, 59, tzinfo=dt.UTC),
]:
with self.subTest(task_time=task_time):
with patch("django.utils.timezone.now", return_value=task_time):
self.assertIn(app, list(rule._get_newly_due_objects()))
def test_due_boundary_multi_day_interval(self):
"""interval=30 days: due after 30 full days, not after 29."""
app, rule, iteration = self._create_rule_and_iteration(interval="days=30")
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
# Previous review opened 30 days ago (May 16), review is due for the object
LifecycleIteration.objects.filter(pk=iteration.pk).update(
opened_on=dt.datetime(2025, 5, 16, 12, 0, 0, tzinfo=dt.UTC)
)
with patch("django.utils.timezone.now", return_value=fixed_now):
self.assertIn(app, list(rule._get_newly_due_objects()))
# Previous review opened 29 days ago (May 17), new review is NOT due
LifecycleIteration.objects.filter(pk=iteration.pk).update(
opened_on=dt.datetime(2025, 5, 17, 12, 0, 0, tzinfo=dt.UTC)
)
with patch("django.utils.timezone.now", return_value=fixed_now):
self.assertNotIn(app, list(rule._get_newly_due_objects()))
def test_apply_overdue_at_boundary(self):
"""apply() marks iteration overdue when grace period just expired,
regardless of what time the daily task runs."""
_, rule, iteration = self._create_rule_and_iteration(
grace_period="days=1", interval="days=365"
)
opened_on = dt.datetime(2025, 6, 14, 20, 0, 0, tzinfo=dt.UTC)
for task_time in [
dt.datetime(2025, 6, 15, 0, 0, 1, tzinfo=dt.UTC),
dt.datetime(2025, 6, 15, 23, 59, 59, tzinfo=dt.UTC),
]:
with self.subTest(task_time=task_time):
LifecycleIteration.objects.filter(pk=iteration.pk).update(
opened_on=opened_on, state=ReviewState.PENDING
)
with patch("django.utils.timezone.now", return_value=task_time):
rule.apply()
iteration.refresh_from_db()
self.assertEqual(iteration.state, ReviewState.OVERDUE)

View File

@@ -1,4 +1,3 @@
from datetime import datetime
from urllib import parse
from django.contrib.contenttypes.models import ContentType
@@ -40,10 +39,6 @@ def link_for_model(model: Model) -> str:
return f"{reverse("authentik_core:if-admin")}#{admin_link_for_model(model)}"
def start_of_day(dt: datetime) -> datetime:
return dt.replace(hour=0, minute=0, second=0, microsecond=0)
class ContentTypeField(ChoiceField):
def __init__(self, **kwargs):
super().__init__(choices=model_choices(), **kwargs)

View File

@@ -78,8 +78,7 @@ class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProv
def create(self, user: User):
"""Create user from scratch and create a connection object"""
microsoft_user = self.to_schema(user, None)
if microsoft_user.user_principal_name:
self.check_email_valid(microsoft_user.user_principal_name)
self.check_email_valid(microsoft_user.user_principal_name)
with transaction.atomic():
try:
response = self._request(self.client.users.post(microsoft_user))
@@ -119,8 +118,7 @@ class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProv
def update(self, user: User, connection: MicrosoftEntraProviderUser):
"""Update existing user"""
microsoft_user = self.to_schema(user, connection)
if microsoft_user.user_principal_name:
self.check_email_valid(microsoft_user.user_principal_name)
self.check_email_valid(microsoft_user.user_principal_name)
response = self._request(
self.client.users.by_user_id(connection.microsoft_id).patch(microsoft_user)
)

View File

@@ -5,7 +5,6 @@ from django.urls import reverse
from rest_framework.fields import CharField, SerializerMethodField, URLField
from authentik.core.api.providers import ProviderSerializer
from authentik.core.models import Provider
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.providers.ws_federation.models import WSFederationProvider
from authentik.enterprise.providers.ws_federation.processors.metadata import MetadataProcessor
@@ -19,29 +18,6 @@ class WSFederationProviderSerializer(EnterpriseRequiredMixin, SAMLProviderSerial
wtrealm = CharField(source="audience")
url_wsfed = SerializerMethodField()
def get_url_download_metadata(self, instance: WSFederationProvider) -> str:
"""Get metadata download URL"""
if "request" not in self._context:
return ""
request: HttpRequest = self._context["request"]._request
try:
return request.build_absolute_uri(
reverse(
"authentik_providers_ws_federation:metadata-download",
kwargs={"application_slug": instance.application.slug},
)
)
except Provider.application.RelatedObjectDoesNotExist:
return request.build_absolute_uri(
reverse(
"authentik_api:wsfederationprovider-metadata",
kwargs={
"pk": instance.pk,
},
)
+ "?download"
)
def get_url_wsfed(self, instance: WSFederationProvider) -> str:
"""Get WS-Fed url"""
if "request" not in self._context:

View File

@@ -81,8 +81,6 @@ class SignInProcessor:
self.sign_in_request = sign_in_request
self.saml_processor = AssertionProcessor(self.provider, self.request, AuthNRequest())
self.saml_processor.provider.audience = self.sign_in_request.wtrealm
if self.provider.signing_kp:
self.saml_processor.provider.sign_assertion = True
def create_response_token(self):
root = Element(f"{{{NS_WS_FED_TRUST}}}RequestSecurityTokenResponse", nsmap=NS_MAP)
@@ -150,8 +148,7 @@ class SignInProcessor:
def response(self) -> dict[str, str]:
root = self.create_response_token()
assertion = root.xpath("//saml:Assertion", namespaces=NS_MAP)[0]
if self.provider.signing_kp:
self.saml_processor._sign(assertion)
self.saml_processor._sign(assertion)
str_token = etree.tostring(root).decode("utf-8") # nosec
return delete_none_values(
{

View File

@@ -3,7 +3,7 @@
from django.urls import path
from authentik.enterprise.providers.ws_federation.api.providers import WSFederationProviderViewSet
from authentik.enterprise.providers.ws_federation.views import MetadataDownload, WSFedEntryView
from authentik.enterprise.providers.ws_federation.views import WSFedEntryView
urlpatterns = [
path(
@@ -11,12 +11,6 @@ urlpatterns = [
WSFedEntryView.as_view(),
name="wsfed",
),
# Metadata
path(
"<slug:application_slug>/metadata/",
MetadataDownload.as_view(),
name="metadata-download",
),
]
api_urlpatterns = [

View File

@@ -1,8 +1,6 @@
from django.http import Http404, HttpRequest, HttpResponse
from django.shortcuts import get_object_or_404, redirect
from django.urls import reverse
from django.shortcuts import get_object_or_404
from django.utils.translation import gettext as _
from django.views import View
from structlog.stdlib import get_logger
from authentik.core.models import Application, AuthenticatedSession
@@ -162,24 +160,3 @@ class WSFedFlowFinalView(ChallengeStageView):
"attrs": response,
},
)
class MetadataDownload(View):
"""Redirect to metadata download"""
def dispatch(self, request: HttpRequest, application_slug: str) -> HttpResponse:
app = Application.objects.filter(slug=application_slug).with_provider().first()
if not app:
raise Http404
provider = app.get_provider()
if not provider:
raise Http404
return redirect(
reverse(
"authentik_api:wsfederationprovider-metadata",
kwargs={
"pk": provider.pk,
},
)
+ "?download"
)

View File

@@ -29,12 +29,6 @@ class RefreshOtherFlowsAfterAuthentication(Flag[bool], key="flows_refresh_others
visibility = "public"
class ContinuousLogin(Flag[bool], key="flows_continuous_login"):
default = False
visibility = "public"
class AuthentikFlowsConfig(ManagedAppConfig):
"""authentik flows app config"""

View File

@@ -9,15 +9,7 @@
{{ block.super }}
<link rel="prefetch" href="{{ flow_background_url }}" />
{% if flow.compatibility_mode and not inspector %}
{% comment %}
@see {@link web/types/webcomponents.d.ts} for type definitions.
{% endcomment %}
<script data-id="shady-dom">
"use strict";
window.ShadyDOM = window.ShadyDOM || {}
window.ShadyDOM.force = true
</script>
<script data-id="shady-dom">ShadyDOM = { force: true };</script>
{% endif %}
{% include "base/header_js.html" %}
<script data-id="flow-config">
@@ -53,11 +45,16 @@
slug="{{ flow.slug }}"
class="pf-c-login"
data-layout="{{ flow.layout|default:'stacked' }}"
loading
>
{% include "base/placeholder.html" %}
<ak-brand-links name="flow-links" slot="footer"></ak-brand-links>
<ak-brand-links
slot="footer"
exportparts="list:brand-links-list, list-item:brand-links-list-item"
role="contentinfo"
aria-label="{% trans 'Site footer' %}"
class="pf-c-login__footer {% if flow.layout == 'stacked' %}pf-m-dark{% endif %}"
></ak-brand-links>
</ak-flow-executor>
</div>
</div>

View File

@@ -141,10 +141,6 @@ web:
# workers: 2
threads: 4
path: /
timeout_http_read_header: 5s
timeout_http_read: 30s
timeout_http_write: 60s
timeout_http_idle: 120s
worker:
processes: 1
@@ -166,7 +162,6 @@ storage:
# region: "us-east-1"
# use_ssl: True
# endpoint: "https://s3.us-east-1.amazonaws.com"
# signature_version: "s3v4"
# access_key: ""
# secret_key: ""
# bucket_name: "authentik-data"

View File

@@ -1,119 +0,0 @@
"""Tests for inheritance helpers."""
from contextlib import contextmanager
from django.db import connection, models
from django.test import TransactionTestCase
from django.test.utils import isolate_apps
from authentik.lib.utils.inheritance import get_deepest_child
@contextmanager
def temporary_inheritance_models():
"""Create a temporary multi-table inheritance graph for testing."""
with isolate_apps("authentik.lib.tests"):
class GrandParent(models.Model):
class Meta:
app_label = "tests"
def __str__(self) -> str:
return f"GrandParent({self.pk})"
class Parent(GrandParent):
class Meta:
app_label = "tests"
def __str__(self) -> str:
return f"Parent({self.pk})"
class Child(Parent):
class Meta:
app_label = "tests"
def __str__(self) -> str:
return f"Child({self.pk})"
class GrandChild(Child):
class Meta:
app_label = "tests"
def __str__(self) -> str:
return f"GrandChild({self.pk})"
with connection.schema_editor() as schema_editor:
schema_editor.create_model(GrandParent)
schema_editor.create_model(Parent)
schema_editor.create_model(Child)
schema_editor.create_model(GrandChild)
try:
yield GrandParent, Parent, Child, GrandChild
finally:
with connection.schema_editor() as schema_editor:
schema_editor.delete_model(GrandChild)
schema_editor.delete_model(Child)
schema_editor.delete_model(Parent)
schema_editor.delete_model(GrandParent)
class TestInheritanceUtils(TransactionTestCase):
"""Tests for helper functions in authentik.lib.utils.inheritance."""
def test_get_deepest_child_grandparent_to_parent(self):
"""GrandParent -> Parent."""
with temporary_inheritance_models() as (GrandParent, Parent, _Child, _GrandChild):
parent = Parent.objects.create()
grandparent = GrandParent.objects.get(pk=parent.pk)
resolved = get_deepest_child(grandparent)
self.assertIsInstance(resolved, Parent)
self.assertEqual(resolved.pk, parent.pk)
def test_get_deepest_child_grandparent_to_child(self):
"""GrandParent -> Child."""
with temporary_inheritance_models() as (GrandParent, _Parent, Child, _GrandChild):
child = Child.objects.create()
grandparent = GrandParent.objects.get(pk=child.pk)
resolved = get_deepest_child(grandparent)
self.assertIsInstance(resolved, Child)
self.assertEqual(resolved.pk, child.pk)
def test_get_deepest_child_grandparent_to_grandchild(self):
"""GrandParent -> GrandChild."""
with temporary_inheritance_models() as (GrandParent, _Parent, _Child, GrandChild):
grandchild = GrandChild.objects.create()
grandparent = GrandParent.objects.get(pk=grandchild.pk)
resolved = get_deepest_child(grandparent)
self.assertIsInstance(resolved, GrandChild)
self.assertEqual(resolved.pk, grandchild.pk)
def test_get_deepest_child_parent_to_child(self):
"""Parent -> Child (start from non-root)."""
with temporary_inheritance_models() as (_GrandParent, Parent, Child, _GrandChild):
child = Child.objects.create()
parent = Parent.objects.get(pk=child.pk)
resolved = get_deepest_child(parent)
self.assertIsInstance(resolved, Child)
self.assertEqual(resolved.pk, child.pk)
def test_get_deepest_child_no_queries_with_preloaded_relations(self):
"""No extra queries when the inheritance chain is fully select_related."""
with temporary_inheritance_models() as (GrandParent, _Parent, _Child, GrandChild):
grandchild = GrandChild.objects.create()
grandparent = GrandParent.objects.select_related("parent__child__grandchild").get(
pk=grandchild.pk
)
with self.assertNumQueries(0):
resolved = get_deepest_child(grandparent)
self.assertIsInstance(resolved, GrandChild)

View File

@@ -1,41 +0,0 @@
from django.db.models import Model, OneToOneField, OneToOneRel
def get_deepest_child(parent: Model) -> Model:
"""
In multiple table inheritance, given any ancestor object, get the deepest child object.
See https://docs.djangoproject.com/en/dev/topics/db/models/#multi-table-inheritance
This function does not query the database if `select_related` has been performed on all
subclasses of `parent`'s model.
"""
# Almost verbatim copy from django-model-utils, see
# https://github.com/jazzband/django-model-utils/blob/5.0.0/model_utils/managers.py#L132
one_to_one_rels = [
field for field in parent._meta.get_fields() if isinstance(field, OneToOneRel)
]
submodel_fields = [
rel
for rel in one_to_one_rels
if isinstance(rel.field, OneToOneField)
and issubclass(rel.field.model, parent._meta.model)
and parent._meta.model is not rel.field.model
and rel.parent_link
]
submodel_accessors = [submodel_field.get_accessor_name() for submodel_field in submodel_fields]
# End Copy
child = None
for submodel in submodel_accessors:
try:
child = getattr(parent, submodel)
break
except AttributeError:
continue
if not child:
return parent
return get_deepest_child(child)

View File

@@ -132,14 +132,9 @@ class PolicyEngine:
# If we didn't find any static bindings, do nothing
return
self.logger.debug("P_ENG: Found static bindings", **matched_bindings)
if self.mode == PolicyEngineMode.MODE_ANY:
if matched_bindings.get("passing", 0) > 0:
# Any passing static binding -> passing
passing = True
elif self.mode == PolicyEngineMode.MODE_ALL:
if matched_bindings.get("passing", 0) == matched_bindings["total"]:
# All static bindings are passing -> passing
passing = True
if matched_bindings.get("passing", 0) > 0:
# Any passing static binding -> passing
passing = True
elif matched_bindings["total"] > 0 and matched_bindings.get("passing", 0) < 1:
# No matching static bindings but at least one is configured -> not passing
passing = False
@@ -190,16 +185,6 @@ class PolicyEngine:
# Only call .recv() if no result is saved, otherwise we just deadlock here
if not proc_info.result:
proc_info.result = proc_info.connection.recv()
if proc_info.result and proc_info.result._exec_time:
HIST_POLICIES_EXECUTION_TIME.labels(
binding_order=proc_info.binding.order,
binding_target_type=proc_info.binding.target_type,
binding_target_name=proc_info.binding.target_name,
object_type=(
class_to_path(self.request.obj.__class__) if self.request.obj else ""
),
mode="execute_process",
).observe(proc_info.result._exec_time)
return self
@property

View File

@@ -2,7 +2,6 @@
from multiprocessing import get_context
from multiprocessing.connection import Connection
from time import perf_counter
from django.core.cache import cache
from sentry_sdk import start_span
@@ -12,6 +11,8 @@ from structlog.stdlib import get_logger
from authentik.events.models import Event, EventAction
from authentik.lib.config import CONFIG
from authentik.lib.utils.errors import exception_to_dict
from authentik.lib.utils.reflection import class_to_path
from authentik.policies.apps import HIST_POLICIES_EXECUTION_TIME
from authentik.policies.exceptions import PolicyException
from authentik.policies.models import PolicyBinding
from authentik.policies.types import CACHE_PREFIX, PolicyRequest, PolicyResult
@@ -122,9 +123,18 @@ class PolicyProcess(PROCESS_CLASS):
def profiling_wrapper(self):
"""Run with profiling enabled"""
with start_span(
op="authentik.policy.process.execute",
) as span:
with (
start_span(
op="authentik.policy.process.execute",
) as span,
HIST_POLICIES_EXECUTION_TIME.labels(
binding_order=self.binding.order,
binding_target_type=self.binding.target_type,
binding_target_name=self.binding.target_name,
object_type=class_to_path(self.request.obj.__class__) if self.request.obj else "",
mode="execute_process",
).time(),
):
span: Span
span.set_data("policy", self.binding.policy)
span.set_data("request", self.request)
@@ -132,14 +142,8 @@ class PolicyProcess(PROCESS_CLASS):
def run(self): # pragma: no cover
"""Task wrapper to run policy checking"""
result = None
try:
start = perf_counter()
result = self.profiling_wrapper()
end = perf_counter()
result._exec_time = max((end - start), 0)
self.connection.send(self.profiling_wrapper())
except Exception as exc: # noqa
LOGGER.warning("Policy failed to run", exc=exc)
result = PolicyResult(False, str(exc))
finally:
self.connection.send(result)
self.connection.send(PolicyResult(False, str(exc)))

View File

@@ -33,9 +33,6 @@ class TestPolicyEngine(TestCase):
self.policy_raises = ExpressionPolicy.objects.create(
name=generate_id(), expression="{{ 0/0 }}"
)
self.group_member = Group.objects.create(name=generate_id())
self.user.groups.add(self.group_member)
self.group_non_member = Group.objects.create(name=generate_id())
def test_engine_empty(self):
"""Ensure empty policy list passes"""
@@ -54,7 +51,7 @@ class TestPolicyEngine(TestCase):
self.assertEqual(result.passing, True)
self.assertEqual(result.messages, ("dummy",))
def test_engine_mode_all_dyn(self):
def test_engine_mode_all(self):
"""Ensure all policies passes with AND mode (false and true -> false)"""
pbm = PolicyBindingModel.objects.create(policy_engine_mode=PolicyEngineMode.MODE_ALL)
PolicyBinding.objects.create(target=pbm, policy=self.policy_false, order=0)
@@ -70,7 +67,7 @@ class TestPolicyEngine(TestCase):
),
)
def test_engine_mode_any_dyn(self):
def test_engine_mode_any(self):
"""Ensure all policies passes with OR mode (false and true -> true)"""
pbm = PolicyBindingModel.objects.create(policy_engine_mode=PolicyEngineMode.MODE_ANY)
PolicyBinding.objects.create(target=pbm, policy=self.policy_false, order=0)
@@ -86,26 +83,6 @@ class TestPolicyEngine(TestCase):
),
)
def test_engine_mode_all_static(self):
"""Ensure all policies passes with OR mode (false and true -> true)"""
pbm = PolicyBindingModel.objects.create(policy_engine_mode=PolicyEngineMode.MODE_ALL)
PolicyBinding.objects.create(target=pbm, group=self.group_member, order=0)
PolicyBinding.objects.create(target=pbm, group=self.group_non_member, order=1)
engine = PolicyEngine(pbm, self.user)
result = engine.build().result
self.assertEqual(result.passing, False)
self.assertEqual(result.messages, ())
def test_engine_mode_any_static(self):
"""Ensure all policies passes with OR mode (false and true -> true)"""
pbm = PolicyBindingModel.objects.create(policy_engine_mode=PolicyEngineMode.MODE_ANY)
PolicyBinding.objects.create(target=pbm, group=self.group_member, order=0)
PolicyBinding.objects.create(target=pbm, group=self.group_non_member, order=1)
engine = PolicyEngine(pbm, self.user)
result = engine.build().result
self.assertEqual(result.passing, True)
self.assertEqual(result.messages, ())
def test_engine_negate(self):
"""Test negate flag"""
pbm = PolicyBindingModel.objects.create()

View File

@@ -77,8 +77,6 @@ class PolicyResult:
log_messages: list[LogEvent] | None
_exec_time: int | None
def __init__(self, passing: bool, *messages: str):
self.passing = passing
self.messages = messages
@@ -86,7 +84,6 @@ class PolicyResult:
self.source_binding = None
self.source_results = []
self.log_messages = []
self._exec_time = None
def __repr__(self):
return self.__str__()

View File

@@ -68,8 +68,6 @@ class IDToken:
at_hash: str | None = None
# Session ID, https://openid.net/specs/openid-connect-frontchannel-1_0.html#ClaimsContents
sid: str | None = None
# JWT ID, https://www.rfc-editor.org/rfc/rfc7519.html#section-4.1.7
jti: str | None = None
claims: dict[str, Any] = field(default_factory=dict)
@@ -83,7 +81,6 @@ class IDToken:
(token.expires if token.expires is not None else default_token_duration()).timestamp()
)
id_token.iss = provider.get_issuer(request)
id_token.jti = generate_id()
id_token.aud = provider.client_id
id_token.claims = {}

View File

@@ -5,7 +5,6 @@ from urllib.parse import parse_qs, urlparse
from django.test import RequestFactory
from django.urls import reverse
from django.utils import translation
from django.utils.timezone import now
from authentik.blueprints.tests import apply_blueprint
@@ -691,21 +690,18 @@ class TestAuthorize(OAuthTestCase):
Application.objects.create(name="app", slug="app", provider=provider)
state = generate_id()
self.client.logout()
try:
response = self.client.get(
reverse("authentik_providers_oauth2:authorize"),
data={
"response_type": "code",
"client_id": "test",
"state": state,
"redirect_uri": "foo://localhost",
"ui_locales": "invalid fr",
},
)
parsed = parse_qs(urlparse(response.url).query)
self.assertEqual(parsed["locale"], ["fr"])
finally:
translation.deactivate()
response = self.client.get(
reverse("authentik_providers_oauth2:authorize"),
data={
"response_type": "code",
"client_id": "test",
"state": state,
"redirect_uri": "foo://localhost",
"ui_locales": "invalid fr",
},
)
parsed = parse_qs(urlparse(response.url).query)
self.assertEqual(parsed["locale"], ["fr"])
@apply_blueprint("default/flow-default-authentication-flow.yaml")
def test_ui_locales_invalid(self):

View File

@@ -1,6 +1,5 @@
"""Device backchannel tests"""
from base64 import b64encode
from json import loads
from django.urls import reverse
@@ -27,7 +26,7 @@ class TesOAuth2DeviceBackchannel(OAuthTestCase):
provider=self.provider,
)
def test_backchannel_invalid_client_id_via_post_body(self):
def test_backchannel_invalid(self):
"""Test backchannel"""
res = self.client.post(
reverse("authentik_providers_oauth2:device"),
@@ -51,7 +50,7 @@ class TesOAuth2DeviceBackchannel(OAuthTestCase):
)
self.assertEqual(res.status_code, 400)
def test_backchannel_client_id_via_post_body(self):
def test_backchannel(self):
"""Test backchannel"""
res = self.client.post(
reverse("authentik_providers_oauth2:device"),
@@ -62,37 +61,3 @@ class TesOAuth2DeviceBackchannel(OAuthTestCase):
self.assertEqual(res.status_code, 200)
body = loads(res.content.decode())
self.assertEqual(body["expires_in"], 60)
def test_backchannel_invalid_client_id_via_auth_header(self):
"""Test backchannel"""
creds = b64encode(b"foo:").decode()
res = self.client.post(
reverse("authentik_providers_oauth2:device"),
HTTP_AUTHORIZATION=f"Basic {creds}",
)
self.assertEqual(res.status_code, 400)
res = self.client.post(
reverse("authentik_providers_oauth2:device"),
)
self.assertEqual(res.status_code, 400)
# test without application
self.application.provider = None
self.application.save()
res = self.client.post(
reverse("authentik_providers_oauth2:device"),
data={
"client_id": "test",
},
)
self.assertEqual(res.status_code, 400)
def test_backchannel_client_id_via_auth_header(self):
"""Test backchannel"""
creds = b64encode(f"{self.provider.client_id}:".encode()).decode()
res = self.client.post(
reverse("authentik_providers_oauth2:device"),
HTTP_AUTHORIZATION=f"Basic {creds}",
)
self.assertEqual(res.status_code, 200)
body = loads(res.content.decode())
self.assertEqual(body["expires_in"], 60)

View File

@@ -16,7 +16,7 @@ from authentik.lib.config import CONFIG
from authentik.lib.utils.time import timedelta_from_string
from authentik.providers.oauth2.errors import DeviceCodeError
from authentik.providers.oauth2.models import DeviceToken, OAuth2Provider
from authentik.providers.oauth2.utils import TokenResponse, extract_client_auth
from authentik.providers.oauth2.utils import TokenResponse
from authentik.providers.oauth2.views.device_init import QS_KEY_CODE
LOGGER = get_logger()
@@ -32,7 +32,7 @@ class DeviceView(View):
def parse_request(self):
"""Parse incoming request"""
client_id, _ = extract_client_auth(self.request)
client_id = self.request.POST.get("client_id", None)
if not client_id:
raise DeviceCodeError("invalid_client")
provider = OAuth2Provider.objects.filter(client_id=client_id).first()

View File

@@ -52,10 +52,10 @@ class SCIMApplicationPoliciesTests(TestCase):
email=f"{uid}@goauthentik.io",
)
self.users[1].groups.add(self.group1)
self.users[2].groups.add(self.group2)
self.users[4].groups.add(self.group1)
self.users[4].groups.add(self.group2)
self.users[1].ak_groups.add(self.group1)
self.users[2].ak_groups.add(self.group2)
self.users[4].ak_groups.add(self.group1)
self.users[4].ak_groups.add(self.group2)
def test_no_group_policy(self):
"""Test with no group policy set"""

View File

@@ -89,7 +89,7 @@ class PytestTestRunner(DiscoverRunner): # pragma: no cover
sentry_init()
self.logger.debug("Test environment configured")
self.task_broker = use_test_broker()
use_test_broker()
# Send startup signals
pre_startup.send(sender=self, mode="test")
@@ -185,9 +185,7 @@ class PytestTestRunner(DiscoverRunner): # pragma: no cover
self.logger.info("Running tests", test_files=self.args)
with patch("guardian.shortcuts._get_ct_cached", patched__get_ct_cached):
try:
ret = pytest.main(self.args)
self.task_broker.close()
return ret
except Exception as exc: # noqa
self.logger.error("Error running tests", exc=exc, test_files=self.args)
return pytest.main(self.args)
except Exception as e: # noqa
self.logger.error("Error running tests", error=str(e), test_files=self.args)
return 1

View File

@@ -14,7 +14,6 @@ from django.utils.translation import gettext_lazy as _
from ldap3 import ALL, NONE, RANDOM, Connection, Server, ServerPool, Tls
from ldap3.core.exceptions import LDAPException, LDAPInsufficientAccessRightsResult, LDAPSchemaError
from rest_framework.serializers import Serializer
from structlog.stdlib import get_logger
from authentik.core.models import (
Group,
@@ -32,7 +31,6 @@ from authentik.tasks.schedules.common import ScheduleSpec
LDAP_TIMEOUT = 15
LDAP_UNIQUENESS = "ldap_uniq"
LDAP_DISTINGUISHED_NAME = "distinguishedName"
LOGGER = get_logger()
def flatten(value: Any) -> Any:
@@ -270,7 +268,6 @@ class LDAPSource(IncomingSyncSource):
)
if self.start_tls:
LOGGER.debug("Connection StartTLS", source=self)
conn.start_tls(read_server_info=False)
try:
successful = conn.bind()
@@ -281,9 +278,7 @@ class LDAPSource(IncomingSyncSource):
# See https://github.com/goauthentik/authentik/issues/4590
# See also https://github.com/goauthentik/authentik/issues/3399
if server_kwargs.get("get_info", ALL) == NONE:
LOGGER.warning("Failed to connect after schema downgrade", source=self, exc=exc)
raise exc
LOGGER.warning("Downgrading connection to no schema info", source=self, exc=exc)
server_kwargs["get_info"] = NONE
return self.connection(server, server_kwargs, connection_kwargs)
finally:

View File

@@ -14,24 +14,6 @@ class SAMLException(SentryIgnoredException):
return self.default_message
class InvalidEncryption(SAMLException):
"""Encryption of XML Object is either missing or invalid."""
default_message = "The encryption of the SAML object is either missing or invalid."
class InvalidSignature(SAMLException):
"""Signature of XML Object is either missing or invalid."""
default_message = "The signature of the SAML object is either missing or invalid."
class MismatchedRequestID(SAMLException):
"""Exception raised when the returned request ID doesn't match the saved ID."""
default_message = "The SAML Response ID does not match the original request ID."
class MissingSAMLResponse(SAMLException):
"""Exception raised when request does not contain SAML Response."""
@@ -42,3 +24,21 @@ class UnsupportedNameIDFormat(SAMLException):
"""Exception raised when SAML Response contains NameID Format not supported."""
default_message = "The NameID Format in the SAML Response is not supported."
class MismatchedRequestID(SAMLException):
"""Exception raised when the returned request ID doesn't match the saved ID."""
default_message = "The SAML Response ID does not match the original request ID."
class InvalidEncryption(SAMLException):
"""Encryption of XML Object is either missing or invalid."""
default_message = "The encryption of the SAML object is either missing or invalid."
class InvalidSignature(SAMLException):
"""Signature of XML Object is either missing or invalid."""
default_message = "The signature of the SAML object is either missing or invalid."

View File

@@ -4,7 +4,6 @@ from urllib.parse import parse_qsl, urlparse, urlunparse
from django.contrib.auth import logout
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.exceptions import SuspiciousOperation
from django.http import Http404, HttpRequest, HttpResponse
from django.http.response import HttpResponseBadRequest
from django.shortcuts import get_object_or_404, redirect
@@ -38,9 +37,7 @@ from authentik.flows.views.executor import NEXT_ARG_NAME, SESSION_KEY_GET, SESSI
from authentik.lib.views import bad_request_message
from authentik.providers.saml.utils.encoding import nice64
from authentik.sources.saml.exceptions import (
InvalidEncryption,
InvalidSignature,
MismatchedRequestID,
MissingSAMLResponse,
UnsupportedNameIDFormat,
)
@@ -159,15 +156,7 @@ class ACSView(View):
processor = ResponseProcessor(source, request)
try:
processor.parse()
except (
InvalidEncryption,
InvalidSignature,
MismatchedRequestID,
MissingSAMLResponse,
SuspiciousOperation,
VerificationError,
ValueError,
) as exc:
except (InvalidSignature, MissingSAMLResponse, VerificationError, ValueError) as exc:
return bad_request_message(request, str(exc))
try:

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -40,10 +40,6 @@ class EmailTemplates(models.TextChoices):
"email/event_notification.html",
_("Event Notification"),
)
INVITATION = (
"email/invitation.html",
_("Invitation"),
)
def get_template_choices():

View File

@@ -1,55 +0,0 @@
{% extends "email/base.html" %}
{% load i18n %}
{% load humanize %}
{% block content %}
<tr>
<td align="center">
<h1>
{% blocktrans %}
You're Invited!
{% endblocktrans %}
</h1>
</td>
</tr>
<tr>
<td align="center">
<table border="0">
<tr>
<td align="center" style="max-width: 300px; padding: 20px 0; color: #212124;">
{% blocktrans %}
You have been invited to join {{ host }}. Click the button below to get started.
{% endblocktrans %}
</td>
</tr>
{% if expires %}
<tr>
<td align="center" style="max-width: 300px; padding: 10px 0; color: #212124; font-size: 12px;">
{% blocktrans with expires=expires|naturaltime %}
This invitation expires {{ expires }}.
{% endblocktrans %}
</td>
</tr>
{% endif %}
<tr>
<td align="center" class="btn btn-primary">
<a id="confirm" href="{{ url }}" rel="noopener noreferrer" target="_blank">{% trans 'Accept Invitation' %}</a>
</td>
</tr>
</table>
</td>
</tr>
{% endblock %}
{% block sub_content %}
<tr>
<td style="padding: 20px; font-size: 12px; color: #212124;" align="center">
{% blocktrans %}
If you cannot click the button above, please copy and paste the following URL into your browser:
{% endblocktrans %}
<br>
<a href="{{ url }}" rel="noopener noreferrer" target="_blank">{{ url }}</a>
</td>
</tr>
{% endblock %}

View File

@@ -1,16 +0,0 @@
{% load i18n %}
{% load humanize %}
{% blocktrans %}You're Invited!{% endblocktrans %}
{% blocktrans %}You have been invited to join {{ host }}. Use the link below to get started.{% endblocktrans %}
{% trans 'Accept Invitation' %}: {{ url }}
{% if expires %}
{% blocktrans with expires=expires|naturaltime %}This invitation expires {{ expires }}.{% endblocktrans %}
{% endif %}
{% blocktrans %}If you cannot click the link above, please copy and paste the following URL into your browser:{% endblocktrans %}
{{ url }}

View File

@@ -54,7 +54,7 @@ class TestEmailStageTemplates(FlowTestCase):
chmod(file2, 0o000) # Remove all permissions so we can't read the file
choices = get_template_choices()
self.assertEqual(choices[-1][0], Path(file).name)
self.assertEqual(len(choices), 6)
self.assertEqual(len(choices), 5)
unlink(file)
unlink(file2)

View File

@@ -99,7 +99,6 @@ class IdentificationChallenge(Challenge):
password_fields = BooleanField()
allow_show_password = BooleanField(default=False)
application_pre = CharField(required=False)
application_pre_launch = CharField(required=False)
flow_designation = ChoiceField(FlowDesignation.choices)
captcha_stage = CaptchaChallenge(required=False, allow_null=True)
@@ -349,12 +348,9 @@ class IdentificationStageView(ChallengeStageView):
# If the user has been redirected to us whilst trying to access an
# application, PLAN_CONTEXT_APPLICATION is set in the flow plan
if PLAN_CONTEXT_APPLICATION in self.executor.plan.context:
app: Application = self.executor.plan.context.get(
challenge.initial_data["application_pre"] = self.executor.plan.context.get(
PLAN_CONTEXT_APPLICATION, Application()
)
challenge.initial_data["application_pre"] = app.name
if launch_url := app.get_launch_url():
challenge.initial_data["application_pre_launch"] = launch_url
).name
if (
PLAN_CONTEXT_DEVICE in self.executor.plan.context
and PLAN_CONTEXT_DEVICE_AUTH_TOKEN in self.executor.plan.context

View File

@@ -1,21 +1,10 @@
"""Invitation Stage API Views"""
from django.http import HttpRequest
from django_filters.filters import BooleanFilter
from django_filters.filterset import FilterSet
from drf_spectacular.utils import extend_schema
from guardian.shortcuts import get_anonymous_user
from rest_framework.decorators import action
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import (
CharField,
ListField,
PrimaryKeyRelatedField,
Serializer,
)
from rest_framework.serializers import PrimaryKeyRelatedField
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.core.api.groups import PartialUserSerializer
@@ -24,11 +13,8 @@ from authentik.core.api.utils import JSONDictField, ModelSerializer
from authentik.core.models import User
from authentik.flows.api.flows import FlowSerializer
from authentik.flows.api.stages import StageSerializer
from authentik.lib.expression.evaluator import BaseEvaluator
from authentik.stages.invitation.models import Invitation, InvitationStage
LOGGER = get_logger()
class InvitationStageSerializer(StageSerializer):
"""InvitationStage Serializer"""
@@ -91,15 +77,6 @@ class InvitationSerializer(ModelSerializer):
]
class InvitationSendEmailSerializer(Serializer):
"""Serializer for sending invitation emails"""
email_addresses = ListField(required=True)
cc_addresses = ListField(required=False)
bcc_addresses = ListField(required=False)
template = CharField(required=False, default="invitation")
class InvitationViewSet(UsedByMixin, ModelViewSet):
"""Invitation Viewset"""
@@ -114,61 +91,3 @@ class InvitationViewSet(UsedByMixin, ModelViewSet):
if SERIALIZER_CONTEXT_BLUEPRINT not in serializer.context:
kwargs["created_by"] = self.request.user
serializer.save(**kwargs)
@extend_schema(
request=InvitationSendEmailSerializer,
responses={204: None},
)
@action(
detail=True,
methods=["post"],
serializer_class=InvitationSendEmailSerializer,
)
def send_email(self, request: Request, pk: str) -> Response:
"""Send invitation link via email to one or more addresses"""
invitation = self.get_object()
email_addresses = request.data.get("email_addresses", [])
cc_addresses = request.data.get("cc_addresses", [])
bcc_addresses = request.data.get("bcc_addresses", [])
template = request.data.get("template", "email/invitation.html")
if not email_addresses:
return Response({"error": "No email addresses provided"}, status=400)
# Build the invitation link
http_request: HttpRequest = request._request
protocol = "https" if http_request.is_secure() else "http"
host = http_request.get_host()
# Determine the flow slug
flow_slug = invitation.flow.slug if invitation.flow else None
if not flow_slug:
return Response({"error": "Invitation has no associated flow"}, status=400)
invitation_link = f"{protocol}://{host}/if/flow/{flow_slug}/?itoken={invitation.pk}"
# Prepare template context
context = {
"url": invitation_link,
"expires": invitation.expires,
"host": host,
}
# Prepare email content
subject = f"You have been invited to {host}"
# Queue emails for sending via async ak_send_email
evaluator = BaseEvaluator()
for email in email_addresses:
evaluator.expr_send_email(
address=email,
subject=subject,
template=template,
context=context,
stage=None,
cc=cc_addresses if cc_addresses else None,
bcc=bcc_addresses if bcc_addresses else None,
)
return Response(status=204)

View File

@@ -217,105 +217,3 @@ class TestInvitationsAPI(APITestCase):
self.assertEqual(invitation.created_by, get_anonymous_user())
self.assertEqual(invitation.name, "test-blueprint-invitation")
self.assertEqual(invitation.fixed_data, {"email": "test@example.com"})
def test_send_email_no_addresses(self):
"""Test send_email endpoint with no email addresses"""
flow = create_test_flow(FlowDesignation.ENROLLMENT)
invite = Invitation.objects.create(
name="test-invite",
created_by=self.user,
flow=flow,
)
response = self.client.post(
reverse("authentik_api:invitation-send-email", kwargs={"pk": invite.pk}),
{"email_addresses": []},
format="json",
)
self.assertEqual(response.status_code, 400)
self.assertIn("error", response.data)
def test_send_email_no_flow(self):
"""Test send_email endpoint with invitation without flow"""
invite = Invitation.objects.create(
name="test-invite-no-flow",
created_by=self.user,
flow=None,
)
response = self.client.post(
reverse("authentik_api:invitation-send-email", kwargs={"pk": invite.pk}),
{"email_addresses": ["test@example.com"]},
format="json",
)
self.assertEqual(response.status_code, 400)
self.assertIn("error", response.data)
@patch("authentik.stages.invitation.api.BaseEvaluator.expr_send_email")
def test_send_email_success(self, mock_send_email: MagicMock):
"""Test send_email endpoint successfully queues emails"""
flow = create_test_flow(FlowDesignation.ENROLLMENT)
invite = Invitation.objects.create(
name="test-invite",
created_by=self.user,
flow=flow,
)
response = self.client.post(
reverse("authentik_api:invitation-send-email", kwargs={"pk": invite.pk}),
{
"email_addresses": ["user1@example.com", "user2@example.com"],
"template": "email/invitation.html",
},
format="json",
)
self.assertEqual(response.status_code, 204)
self.assertEqual(mock_send_email.call_count, 2)
@patch("authentik.stages.invitation.api.BaseEvaluator.expr_send_email")
def test_send_email_with_cc_bcc(self, mock_send_email: MagicMock):
"""Test send_email endpoint with CC and BCC addresses"""
flow = create_test_flow(FlowDesignation.ENROLLMENT)
invite = Invitation.objects.create(
name="test-invite",
created_by=self.user,
flow=flow,
)
response = self.client.post(
reverse("authentik_api:invitation-send-email", kwargs={"pk": invite.pk}),
{
"email_addresses": ["user@example.com"],
"cc_addresses": ["cc@example.com"],
"bcc_addresses": ["bcc@example.com"],
"template": "email/invitation.html",
},
format="json",
)
self.assertEqual(response.status_code, 204)
mock_send_email.assert_called_once()
call_kwargs = mock_send_email.call_args.kwargs
self.assertEqual(call_kwargs["cc"], ["cc@example.com"])
self.assertEqual(call_kwargs["bcc"], ["bcc@example.com"])
@patch("authentik.stages.invitation.api.BaseEvaluator.expr_send_email")
def test_send_email_context(self, mock_send_email: MagicMock):
"""Test send_email endpoint passes correct context to email"""
flow = create_test_flow(FlowDesignation.ENROLLMENT)
invite = Invitation.objects.create(
name="test-invite",
created_by=self.user,
flow=flow,
)
response = self.client.post(
reverse("authentik_api:invitation-send-email", kwargs={"pk": invite.pk}),
{"email_addresses": ["user@example.com"]},
format="json",
)
self.assertEqual(response.status_code, 204)
mock_send_email.assert_called_once()
call_kwargs = mock_send_email.call_args.kwargs
self.assertIn("url", call_kwargs["context"])
self.assertIn(str(invite.pk), call_kwargs["context"]["url"])
self.assertIn(flow.slug, call_kwargs["context"]["url"])

View File

@@ -6,7 +6,6 @@ from django.contrib.auth.views import redirect_to_login
from django.http.request import HttpRequest
from structlog.stdlib import get_logger
from authentik.core.middleware import get_user
from authentik.core.models import Session
from authentik.events.context_processors.asn import ASN_CONTEXT_PROCESSOR
from authentik.events.context_processors.geoip import GEOIP_CONTEXT_PROCESSOR
@@ -55,13 +54,11 @@ class SessionBindingBroken(SentryIgnoredException):
def logout_extra(request: HttpRequest, exc: SessionBindingBroken):
"""Similar to django's logout method, but able to carry more info to the signal"""
# Since this middleware runs before the AuthenticationMiddleware, we can't use `request.user`
# as it hasn't been populated yet.
user = get_user(request)
if not getattr(user, "is_authenticated", True):
user = None
# Dispatch the signal before the user is logged out so the receivers have a
# chance to find out *who* logged out.
user = getattr(request, "user", None)
if not getattr(user, "is_authenticated", True):
user = None
user_logged_out.send(
sender=user.__class__, request=request, user=user, event_extra=exc.to_event()
)

View File

@@ -10,8 +10,6 @@ from django.utils.timezone import now
from authentik.blueprints.tests import apply_blueprint
from authentik.core.models import AuthenticatedSession, Session
from authentik.core.tests.utils import create_test_flow, create_test_user
from authentik.events.models import Event, EventAction
from authentik.events.utils import get_user
from authentik.flows.markers import StageMarker
from authentik.flows.models import FlowDesignation, FlowStageBinding
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlan
@@ -272,7 +270,6 @@ class TestUserLoginStage(FlowTestCase):
def test_session_binding_broken(self):
"""Test session binding"""
Event.objects.all().delete()
self.client.force_login(self.user)
session = self.client.session
session[Session.Keys.LAST_IP] = "192.0.2.1"
@@ -288,5 +285,3 @@ class TestUserLoginStage(FlowTestCase):
)
+ f"?{NEXT_ARG_NAME}={reverse("authentik_api:user-me")}",
)
event = Event.objects.filter(action=EventAction.LOGOUT).first()
self.assertEqual(event.user, get_user(self.user))

44
authentik/tasks/forks.py Normal file
View File

@@ -0,0 +1,44 @@
from signal import pause
from structlog.stdlib import get_logger
from authentik.lib.config import CONFIG
LOGGER = get_logger()
def worker_healthcheck():
    """Entry point for the worker healthcheck fork: start the HTTP
    healthcheck server, then block this process until a signal arrives."""
    # Imported inside the function so app setup runs in the forked process.
    import authentik.tasks.setup  # noqa
    from authentik.tasks.middleware import WorkerHealthcheckMiddleware
    # "listen.http" is "host:port"; rpartition splits on the LAST colon so a
    # host containing colons (e.g. an IPv6 literal) stays intact.
    host, _, port = CONFIG.get("listen.http").rpartition(":")
    try:
        port = int(port)
    except ValueError:
        # NOTE(review): on failure `port` is still the unparsed string and is
        # passed to run() below anyway — confirm this should not abort instead.
        LOGGER.error(f"Invalid port entered: {port}")
    WorkerHealthcheckMiddleware.run(host, port)
    # pause() blocks until a signal is delivered, keeping the fork alive.
    pause()
def worker_status():
    """Entry point for the worker status fork.

    WorkerStatusMiddleware.run() contains its own keep-alive loop, so this
    call does not return under normal operation.
    """
    # Imported inside the function so app setup runs in the forked process.
    import authentik.tasks.setup  # noqa
    from authentik.tasks.middleware import WorkerStatusMiddleware
    WorkerStatusMiddleware.run()
def worker_metrics():
    """Entry point for the metrics fork: start the metrics HTTP server,
    then block this process until a signal arrives."""
    # Imported inside the function so app setup runs in the forked process.
    import authentik.tasks.setup  # noqa
    from authentik.tasks.middleware import MetricsMiddleware
    # "listen.metrics" is "addr:port"; rpartition splits on the LAST colon so
    # an address containing colons stays intact.
    addr, _, port = CONFIG.get("listen.metrics").rpartition(":")
    try:
        port = int(port)
    except ValueError:
        # NOTE(review): on failure `port` is still the unparsed string and is
        # passed to run() below anyway — confirm this should not abort instead.
        LOGGER.error(f"Invalid port entered: {port}")
    MetricsMiddleware.run(addr, port)
    # pause() blocks until a signal is delivered, keeping the fork alive.
    pause()

View File

@@ -1,37 +1,29 @@
import socket
from collections.abc import Callable
from http.server import BaseHTTPRequestHandler
from threading import Event as TEvent
from threading import Thread, current_thread
from time import sleep
from typing import Any, cast
import pglock
from django.db import OperationalError, connections, transaction
from django.db import OperationalError, connections
from django.utils.timezone import now
from django_dramatiq_postgres.middleware import (
CurrentTask as BaseCurrentTask,
)
from django_dramatiq_postgres.middleware import (
HTTPServer,
HTTPServerThread,
)
from django_dramatiq_postgres.middleware import HTTPServer
from django_dramatiq_postgres.middleware import (
MetricsMiddleware as BaseMetricsMiddleware,
)
from django_dramatiq_postgres.middleware import (
_MetricsHandler as BaseMetricsHandler,
)
from dramatiq import Worker
from dramatiq.broker import Broker
from dramatiq.message import Message
from dramatiq.middleware import Middleware
from psycopg.errors import Error
from setproctitle import setthreadtitle
from structlog.stdlib import get_logger
from authentik import authentik_full_version
from authentik.events.models import Event, EventAction
from authentik.lib.config import CONFIG
from authentik.lib.sentry import should_ignore_exception
from authentik.lib.utils.reflection import class_to_path
from authentik.root.monitoring import monitoring_set
@@ -221,39 +213,17 @@ class _healthcheck_handler(BaseHTTPRequestHandler):
class WorkerHealthcheckMiddleware(Middleware):
thread: HTTPServerThread | None
@property
def forks(self):
from authentik.tasks.forks import worker_healthcheck
def __init__(self):
host, _, port = CONFIG.get("listen.http").rpartition(":")
try:
port = int(port)
except ValueError:
LOGGER.error(f"Invalid port entered: {port}")
self.host, self.port = host, port
def after_worker_boot(self, broker: Broker, worker: Worker):
self.thread = HTTPServerThread(
target=WorkerHealthcheckMiddleware.run, args=(self.host, self.port)
)
self.thread.start()
def before_worker_shutdown(self, broker: Broker, worker: Worker):
server = self.thread.server
if server:
server.shutdown()
LOGGER.debug("Stopping WorkerHealthcheckMiddleware")
self.thread.join()
return [worker_healthcheck]
@staticmethod
def run(addr: str, port: int):
setthreadtitle("authentik Worker Healthcheck server")
try:
server = HTTPServer((addr, port), _healthcheck_handler)
thread = cast(HTTPServerThread, current_thread())
thread.server = server
server.serve_forever()
httpd = HTTPServer((addr, port), _healthcheck_handler)
httpd.serve_forever()
except OSError as exc:
get_logger(__name__, type(WorkerHealthcheckMiddleware)).warning(
"Port is already in use, not starting healthcheck server",
@@ -262,50 +232,36 @@ class WorkerHealthcheckMiddleware(Middleware):
class WorkerStatusMiddleware(Middleware):
thread: Thread | None
thread_event: TEvent | None
@property
def forks(self):
from authentik.tasks.forks import worker_status
def after_worker_boot(self, broker: Broker, worker: Worker):
self.thread_event = TEvent()
self.thread = Thread(target=WorkerStatusMiddleware.run, args=(self.thread_event,))
self.thread.start()
def before_worker_shutdown(self, broker: Broker, worker: Worker):
self.thread_event.set()
LOGGER.debug("Stopping WorkerStatusMiddleware")
self.thread.join()
return [worker_status]
@staticmethod
def run(event: TEvent):
setthreadtitle("authentik Worker status")
with transaction.atomic():
hostname = socket.gethostname()
WorkerStatus.objects.filter(hostname=hostname).delete()
status, _ = WorkerStatus.objects.update_or_create(
hostname=hostname,
version=authentik_full_version(),
)
while not event.is_set():
def run():
status = WorkerStatus.objects.create(
hostname=socket.gethostname(),
version=authentik_full_version(),
)
while True:
try:
WorkerStatusMiddleware.keep(event, status)
WorkerStatusMiddleware.keep(status)
except DB_ERRORS: # pragma: no cover
event.wait(10)
sleep(10)
try:
connections.close_all()
except DB_ERRORS:
pass
@staticmethod
def keep(event: TEvent, status: WorkerStatus):
def keep(status: WorkerStatus):
lock_id = f"goauthentik.io/worker/status/{status.pk}"
with pglock.advisory(lock_id, side_effect=pglock.Raise):
while not event.is_set():
status.refresh_from_db()
old_last_seen = status.last_seen
while True:
status.last_seen = now()
if old_last_seen != status.last_seen:
status.save(update_fields=("last_seen",))
event.wait(30)
status.save(update_fields=("last_seen",))
sleep(30)
class _MetricsHandler(BaseMetricsHandler):
@@ -315,26 +271,10 @@ class _MetricsHandler(BaseMetricsHandler):
class MetricsMiddleware(BaseMetricsMiddleware):
thread: HTTPServerThread | None
handler_class = _MetricsHandler
@property
def forks(self) -> list[Callable[[], None]]:
return []
def forks(self):
from authentik.tasks.forks import worker_metrics
def after_worker_boot(self, broker: Broker, worker: Worker):
addr, _, port = CONFIG.get("listen.metrics").rpartition(":")
try:
port = int(port)
except ValueError:
LOGGER.error(f"Invalid port entered: {port}")
self.thread = HTTPServerThread(target=MetricsMiddleware.run, args=(addr, port))
self.thread.start()
def before_worker_shutdown(self, broker: Broker, worker: Worker):
server = self.thread.server
if server:
server.shutdown()
LOGGER.debug("Stopping MetricsMiddleware")
self.thread.join()
return [worker_metrics]

View File

@@ -10,26 +10,24 @@ from dramatiq.results.middleware import Results
from dramatiq.worker import Worker, _ConsumerThread, _WorkerThread
from authentik.tasks.broker import PostgresBroker
from authentik.tasks.middleware import WorkerHealthcheckMiddleware
TESTING_QUEUE = "testing"
from authentik.tasks.middleware import MetricsMiddleware
class TestWorker(Worker):
def __init__(self, broker: Broker):
def __init__(self, queue_name: str, broker: Broker):
super().__init__(broker=broker)
self.work_queue = PriorityQueue()
self.consumers = {
TESTING_QUEUE: _ConsumerThread(
queue_name: _ConsumerThread(
broker=self.broker,
queue_name=TESTING_QUEUE,
queue_name=queue_name,
prefetch=2,
work_queue=self.work_queue,
worker_timeout=1,
),
}
self.consumers[TESTING_QUEUE].consumer = self.broker.consume(
queue_name=TESTING_QUEUE,
self.consumers[queue_name].consumer = self.broker.consume(
queue_name=queue_name,
prefetch=2,
timeout=1,
)
@@ -42,29 +40,18 @@ class TestWorker(Worker):
self.broker.emit_before("worker_boot", self)
self.broker.emit_after("worker_boot", self)
self.broker.emit_after("process_boot")
def process_message(self, message: MessageProxy):
self.work_queue.put((0, message))
self.consumers[TESTING_QUEUE].consumer.in_processing.add(message.message_id)
self.work_queue.put(message)
self.consumers[message.queue_name].consumer.in_processing.add(message.message_id)
self._worker.process_message(message)
class TestBroker(PostgresBroker):
worker: TestWorker | None = None
def start(self):
self.worker = TestWorker(broker=self)
def close(self):
self.emit_before("worker_shutdown", self)
return super().close()
def enqueue(self, *args, **kwargs):
message = super().enqueue(*args, **kwargs).copy(queue_name=TESTING_QUEUE)
if not self.worker:
return message
self.worker.process_message(MessageProxy(message))
message = super().enqueue(*args, **kwargs)
worker = TestWorker(message.queue_name, broker=self)
worker.process_message(MessageProxy(message))
return message
@@ -82,8 +69,8 @@ def use_test_broker():
middleware: Middleware = import_string(middleware_class)(
**middleware_kwargs,
)
if isinstance(middleware, WorkerHealthcheckMiddleware):
middleware.port = 9102
if isinstance(middleware, MetricsMiddleware):
continue
if isinstance(middleware, Retries):
middleware.max_retries = 0
if isinstance(middleware, Results):
@@ -93,6 +80,4 @@ def use_test_broker():
)
broker.add_middleware(middleware)
broker.start()
set_broker(broker)
return broker

View File

@@ -1,7 +1,10 @@
from json import loads
from django.test import TestCase
from django.urls import reverse
from authentik.core.tests.utils import create_test_admin_user
from authentik.core.models import Group, User
from authentik.lib.generators import generate_id
class TestAdminAPI(TestCase):
@@ -9,13 +12,15 @@ class TestAdminAPI(TestCase):
def setUp(self) -> None:
super().setUp()
self.user = create_test_admin_user()
self.user = User.objects.create(username=generate_id())
self.group = Group.objects.create(name=generate_id(), is_superuser=True)
self.group.users.add(self.user)
self.group.save()
self.client.force_login(self.user)
def test_workers(self):
"""Test Workers API"""
response = self.client.get(reverse("authentik_api:tasks_workers"))
self.assertEqual(response.status_code, 200)
# Disabled for flakiness
# body = loads(response.content)
# self.assertEqual(len(body), 1)
body = loads(response.content)
self.assertEqual(len(body), 0)

View File

@@ -1,52 +0,0 @@
from django.test import TestCase
from dramatiq import actor, get_broker
from authentik.tasks.middleware import CurrentTask
from authentik.tasks.models import Task, TaskLog
class TestWorkerMiddleware(TestCase):
def test_task_log(self):
@actor
def test_task():
self = CurrentTask.get_task()
self.info("foo")
test_task.send()
task = Task.objects.filter(actor_name=test_task.actor_name).first()
logs = list(
TaskLog.objects.filter(task=task).order_by("timestamp").values_list("event", flat=True)
)
self.assertEqual(
logs,
[
"Task has been queued",
"Task is being processed",
"foo",
"Task finished processing without errors",
],
)
broker = get_broker()
del broker.actors[test_task.actor_name]
def test_task_exceptions(self):
@actor
def test_task():
raise ValueError("foo")
test_task.send()
task = Task.objects.filter(actor_name=test_task.actor_name).first()
logs = list(
TaskLog.objects.filter(task=task).order_by("timestamp").values_list("event", flat=True)
)
self.assertEqual(
logs,
[
"Task has been queued",
"Task is being processed",
"foo",
],
)
broker = get_broker()
del broker.actors[test_task.actor_name]

View File

@@ -9175,6 +9175,7 @@
"CO",
"KM",
"CG",
"CD",
"CK",
"CR",
"CI",
@@ -9183,7 +9184,6 @@
"CW",
"CY",
"CZ",
"CD",
"DK",
"DJ",
"DM",
@@ -9222,6 +9222,7 @@
"GY",
"HT",
"HM",
"VA",
"HN",
"HK",
"HU",
@@ -9363,7 +9364,6 @@
"UY",
"UZ",
"VU",
"VA",
"VE",
"VN",
"VG",

View File

@@ -29,7 +29,7 @@ entries:
password=request.user.password
)
# ...otherwise we set an immutable ID based on the user's UID
user["on_premises_immutable_id"] = request.user.uid
user["on_premises_immutable_id"] = request.user.uid,
return user
- identifiers:
managed: goauthentik.io/providers/microsoft_entra/group

8
go.mod
View File

@@ -1,13 +1,13 @@
module goauthentik.io
go 1.26.0
go 1.25.5
require (
beryju.io/ldap v0.1.0
beryju.io/radius-eap v0.1.0
github.com/avast/retry-go/v4 v4.7.0
github.com/coreos/go-oidc/v3 v3.17.0
github.com/getsentry/sentry-go v0.43.0
github.com/getsentry/sentry-go v0.42.0
github.com/go-http-utils/etag v0.0.0-20161124023236-513ea8f21eb1
github.com/go-ldap/ldap/v3 v3.4.12
github.com/go-openapi/runtime v0.29.2
@@ -23,14 +23,14 @@ require (
github.com/jellydator/ttlcache/v3 v3.4.0
github.com/mitchellh/mapstructure v1.5.0
github.com/nmcclain/asn1-ber v0.0.0-20170104154839-2661553a0484
github.com/pires/go-proxyproto v0.11.0
github.com/pires/go-proxyproto v0.10.0
github.com/prometheus/client_golang v1.23.2
github.com/sethvargo/go-envconfig v1.3.0
github.com/sirupsen/logrus v1.9.4
github.com/spf13/cobra v1.10.2
github.com/stretchr/testify v1.11.1
github.com/wwt/guac v1.3.2
goauthentik.io/api/v3 v3.2026020.17-0.20260304104333-840924fe52c4
goauthentik.io/api/v3 v3.2026020.17-0.20260211204352-035cbbe57393
golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab
golang.org/x/oauth2 v0.35.0
golang.org/x/sync v0.19.0

22
go.sum
View File

@@ -20,8 +20,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk=
github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/getsentry/sentry-go v0.43.0 h1:XbXLpFicpo8HmBDaInk7dum18G9KSLcjZiyUKS+hLW4=
github.com/getsentry/sentry-go v0.43.0/go.mod h1:XDotiNZbgf5U8bPDUAfvcFmOnMQQceESxyKaObSssW0=
github.com/getsentry/sentry-go v0.42.0 h1:eeFMACuZTbUQf90RE8dE4tXeSe4CZyfvR1MBL7RLEt8=
github.com/getsentry/sentry-go v0.42.0/go.mod h1:eRXCoh3uvmjQLY6qu63BjUZnaBu5L5WhMV1RwYO8W5s=
github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 h1:BP4M0CvQ4S3TGls2FvczZtj5Re/2ZzkV9VwqPHH/3Bo=
github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0=
github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA=
@@ -158,8 +158,8 @@ github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4=
github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8=
github.com/pires/go-proxyproto v0.11.0 h1:gUQpS85X/VJMdUsYyEgyn59uLJvGqPhJV5YvG68wXH4=
github.com/pires/go-proxyproto v0.11.0/go.mod h1:ZKAAyp3cgy5Y5Mo4n9AlScrkCZwUy0g3Jf+slqQVcuU=
github.com/pires/go-proxyproto v0.10.0 h1:08wrdt9NQYTjLWeag3EBIS7ZNi6Vwl3rGsEjVLaAhvU=
github.com/pires/go-proxyproto v0.10.0/go.mod h1:ZKAAyp3cgy5Y5Mo4n9AlScrkCZwUy0g3Jf+slqQVcuU=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
@@ -214,12 +214,14 @@ go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
goauthentik.io/api/v3 v3.2026020.17-0.20260217173516-3a500f6eed7d h1:Gb26L41O+Q7l+57wkXI1BaG+lCWRteZ9tlaabjMkb3U=
goauthentik.io/api/v3 v3.2026020.17-0.20260217173516-3a500f6eed7d/go.mod h1:uYa+yGMglhJy8ymyUQ8KQiJjOb3UZTuPQ24Ot2s9BCo=
goauthentik.io/api/v3 v3.2026020.17-0.20260223141659-4c1444ee54d9 h1:tuvgm4e1nV0ZPZy24wOeJcuAbMnhbJA09BuI2fzBHRk=
goauthentik.io/api/v3 v3.2026020.17-0.20260223141659-4c1444ee54d9/go.mod h1:uYa+yGMglhJy8ymyUQ8KQiJjOb3UZTuPQ24Ot2s9BCo=
goauthentik.io/api/v3 v3.2026020.17-0.20260304104333-840924fe52c4 h1:zjmi1QNVQPABt0Yx5hws1lXR3tuTI23Ae7MwXffbP/s=
goauthentik.io/api/v3 v3.2026020.17-0.20260304104333-840924fe52c4/go.mod h1:uYa+yGMglhJy8ymyUQ8KQiJjOb3UZTuPQ24Ot2s9BCo=
goauthentik.io/api/v3 v3.2026020.17-0.20260205232234-280022b0a8de h1:X1ELA34R1N+S+EWR8mcZRTwyZTze3bVKJh4cmeppxIY=
goauthentik.io/api/v3 v3.2026020.17-0.20260205232234-280022b0a8de/go.mod h1:uYa+yGMglhJy8ymyUQ8KQiJjOb3UZTuPQ24Ot2s9BCo=
goauthentik.io/api/v3 v3.2026020.17-0.20260210174940-ae049de99535 h1:DPk8z6SGesp0gbmaD2zTAKVSd/NQ++Nu+lu3UrCkNvE=
goauthentik.io/api/v3 v3.2026020.17-0.20260210174940-ae049de99535/go.mod h1:uYa+yGMglhJy8ymyUQ8KQiJjOb3UZTuPQ24Ot2s9BCo=
goauthentik.io/api/v3 v3.2026020.17-0.20260211005401-cdd71ec2f62f h1:KK5lBHSvZSlMbUViB7KStlkP9kC1t9JeiMawa7wyI6Q=
goauthentik.io/api/v3 v3.2026020.17-0.20260211005401-cdd71ec2f62f/go.mod h1:uYa+yGMglhJy8ymyUQ8KQiJjOb3UZTuPQ24Ot2s9BCo=
goauthentik.io/api/v3 v3.2026020.17-0.20260211204352-035cbbe57393 h1:eLRd2GC+pxvwd3m2msJRNB9upH7pcIZH5V4L9/WhRcw=
goauthentik.io/api/v3 v3.2026020.17-0.20260211204352-035cbbe57393/go.mod h1:uYa+yGMglhJy8ymyUQ8KQiJjOb3UZTuPQ24Ot2s9BCo=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=

View File

@@ -128,9 +128,9 @@ func (c *Config) fromEnv() error {
return nil
}
func (c *Config) walkScheme(v any) {
func (c *Config) walkScheme(v interface{}) {
rv := reflect.ValueOf(v)
if rv.Kind() != reflect.Pointer || rv.IsNil() {
if rv.Kind() != reflect.Ptr || rv.IsNil() {
return
}

View File

@@ -104,11 +104,7 @@ type OutpostConfig struct {
}
type WebConfig struct {
Path string `yaml:"path" env:"PATH, overwrite"`
TimeoutHttpReadHeader string `yaml:"timeout_http_read_header" env:"TIMEOUT_HTTP_READ_HEADER, overwrite"`
TimeoutHttpRead string `yaml:"timeout_http_read" env:"TIMEOUT_HTTP_READ, overwrite"`
TimeoutHttpWrite string `yaml:"timeout_http_write" env:"TIMEOUT_HTTP_WRITE, overwrite"`
TimeoutHttpIdle string `yaml:"timeout_http_idle" env:"TIMEOUT_HTTP_IDLE, overwrite"`
Path string `yaml:"path" env:"PATH, overwrite"`
}
type LogConfig struct {

View File

@@ -204,8 +204,8 @@ func (a *APIController) OnRefresh() error {
return err
}
func (a *APIController) getEventPingArgs() map[string]any {
args := map[string]any{
func (a *APIController) getEventPingArgs() map[string]interface{} {
args := map[string]interface{}{
"version": constants.VERSION(),
"buildHash": constants.BUILD(""),
"uuid": a.instanceUUID.String(),

View File

@@ -186,7 +186,7 @@ func (ac *APIController) startEventHealth() {
time.Sleep(time.Second * 5)
continue
}
err := ac.SendEventHello(map[string]any{})
err := ac.SendEventHello(map[string]interface{}{})
if err != nil {
ac.logger.WithField("loop", "event-health").WithError(err).Warning("event write error")
go ac.recentEvents()
@@ -240,9 +240,11 @@ func (a *APIController) AddEventHandler(handler EventHandler) {
a.eventHandlers = append(a.eventHandlers, handler)
}
func (a *APIController) SendEventHello(args map[string]any) error {
func (a *APIController) SendEventHello(args map[string]interface{}) error {
allArgs := a.getEventPingArgs()
maps.Copy(allArgs, args)
for key, value := range args {
allArgs[key] = value
}
aliveMsg := Event{
Instruction: EventKindHello,
Args: allArgs,

View File

@@ -24,11 +24,11 @@ const (
type EventHandler func(ctx context.Context, msg Event) error
type Event struct {
Instruction EventKind `json:"instruction"`
Args any `json:"args"`
Instruction EventKind `json:"instruction"`
Args interface{} `json:"args"`
}
func (wm Event) ArgsAs(out any) error {
func (wm Event) ArgsAs(out interface{}) error {
return mapstructure.Decode(wm.Args, out)
}

View File

@@ -38,7 +38,7 @@ func Paginator[Tobj any, Treq any, Tres PaginatorResponse[Tobj]](
if opts.Logger == nil {
opts.Logger = log.NewEntry(log.StandardLogger())
}
var bfreq, cfreq any
var bfreq, cfreq interface{}
fetchOffset := func(page int32) (Tres, error) {
bfreq = req.Page(page)
cfreq = bfreq.(PaginatorRequest[Treq, Tres]).PageSize(int32(opts.PageSize))

View File

@@ -27,10 +27,10 @@ func (pi *ProviderInstance) UserEntry(u api.User) *ldap.Entry {
})
if u.IsActive == nil {
u.IsActive = new(false)
u.IsActive = api.PtrBool(false)
}
if u.Email == nil {
u.Email = new("")
u.Email = api.PtrString("")
}
attrs = utils.EnsureAttributes(attrs, map[string][]string{
"ak-active": {strings.ToUpper(strconv.FormatBool(*u.IsActive))},

View File

@@ -20,7 +20,7 @@ type LDAPGroup struct {
MemberOf []string
IsSuperuser bool
IsVirtualGroup bool
Attributes map[string]any
Attributes map[string]interface{}
}
func (lg *LDAPGroup) Entry() *ldap.Entry {

View File

@@ -1,3 +1,3 @@
package handler
type Handler any
type Handler interface{}

View File

@@ -83,8 +83,8 @@ func normalizeAttributes(attributes []string) []string {
for _, attr := range attributes {
if strings.Contains(attr, ",") {
// Split comma-separated attributes and add them individually
parts := strings.SplitSeq(attr, ",")
for part := range parts {
parts := strings.Split(attr, ",")
for _, part := range parts {
part = strings.TrimSpace(part)
if part != "" {
result = append(result, part)

View File

@@ -17,7 +17,7 @@ func AttributeKeySanitize(key string) string {
)
}
func stringify(in any) *string {
func stringify(in interface{}) *string {
switch t := in.(type) {
case string:
return &t
@@ -45,7 +45,7 @@ func stringify(in any) *string {
}
func AttributesToLDAP(
attrs map[string]any,
attrs map[string]interface{},
keyFormatter func(key string) string,
valueFormatter func(value []string) []string,
) []*ldap.EntryAttribute {
@@ -60,7 +60,7 @@ func AttributesToLDAP(
entry.Values = valueFormatter(t)
case *[]string:
entry.Values = valueFormatter(*t)
case []any:
case []interface{}:
vv := make([]string, 0)
for _, v := range t {
v := stringify(v)

View File

@@ -16,7 +16,7 @@ func TestAKAttrsToLDAP_String(t *testing.T) {
u := api.User{}
// normal string
u.Attributes = map[string]any{
u.Attributes = map[string]interface{}{
"foo": "bar",
}
mapped := AttributesToLDAP(u.Attributes, func(key string) string {
@@ -28,8 +28,8 @@ func TestAKAttrsToLDAP_String(t *testing.T) {
assert.Equal(t, "foo", mapped[0].Name)
assert.Equal(t, []string{"bar"}, mapped[0].Values)
// pointer string
u.Attributes = map[string]any{
"foo": new("bar"),
u.Attributes = map[string]interface{}{
"foo": api.PtrString("bar"),
}
mapped = AttributesToLDAP(u.Attributes, func(key string) string {
return AttributeKeySanitize(key)
@@ -44,7 +44,7 @@ func TestAKAttrsToLDAP_String(t *testing.T) {
func TestAKAttrsToLDAP_String_List(t *testing.T) {
u := api.User{}
// string list
u.Attributes = map[string]any{
u.Attributes = map[string]interface{}{
"foo": []string{"bar"},
}
mapped := AttributesToLDAP(u.Attributes, func(key string) string {
@@ -56,7 +56,7 @@ func TestAKAttrsToLDAP_String_List(t *testing.T) {
assert.Equal(t, "foo", mapped[0].Name)
assert.Equal(t, []string{"bar"}, mapped[0].Values)
// pointer string list
u.Attributes = map[string]any{
u.Attributes = map[string]interface{}{
"foo": &[]string{"bar"},
}
mapped = AttributesToLDAP(u.Attributes, func(key string) string {
@@ -71,7 +71,7 @@ func TestAKAttrsToLDAP_String_List(t *testing.T) {
func TestAKAttrsToLDAP_Dict(t *testing.T) {
// dict
d := map[string]any{
d := map[string]interface{}{
"foo": map[string]string{
"foo": "bar",
},
@@ -88,8 +88,8 @@ func TestAKAttrsToLDAP_Dict(t *testing.T) {
func TestAKAttrsToLDAP_Mixed(t *testing.T) {
// dict
d := map[string]any{
"foo": []any{
d := map[string]interface{}{
"foo": []interface{}{
"foo",
6,
},

View File

@@ -26,6 +26,7 @@ import (
"goauthentik.io/api/v3"
"goauthentik.io/internal/config"
"goauthentik.io/internal/outpost/ak"
"goauthentik.io/internal/outpost/proxyv2/constants"
"goauthentik.io/internal/outpost/proxyv2/hs256"
"goauthentik.io/internal/outpost/proxyv2/metrics"
"goauthentik.io/internal/outpost/proxyv2/templates"
@@ -249,7 +250,7 @@ func NewApplication(p api.ProxyOutpostConfig, c *http.Client, server Server, old
if *p.SkipPathRegex != "" {
a.UnauthenticatedRegex = make([]*regexp.Regexp, 0)
for regex := range strings.SplitSeq(*p.SkipPathRegex, "\n") {
for _, regex := range strings.Split(*p.SkipPathRegex, "\n") {
re, err := regexp.Compile(regex)
if err != nil {
// TODO: maybe create event for this?
@@ -293,16 +294,22 @@ func (a *Application) Stop() {
func (a *Application) handleSignOut(rw http.ResponseWriter, r *http.Request) {
redirect := a.endpoint.EndSessionEndpoint
cc := a.getClaimsFromSession(rw, r)
if cc == nil {
s, err := a.sessions.Get(r, a.SessionName())
if err != nil {
a.redirectToStart(rw, r)
return
}
c, exists := s.Values[constants.SessionClaims]
if c == nil && !exists {
a.redirectToStart(rw, r)
return
}
cc := c.(types.Claims)
uv := url.Values{
"id_token_hint": []string{cc.RawToken},
}
redirect += "?" + uv.Encode()
err := a.Logout(r.Context(), func(c types.Claims) bool {
err = a.Logout(r.Context(), func(c types.Claims) bool {
return c.Sub == cc.Sub
})
if err != nil {

View File

@@ -62,7 +62,7 @@ func (a *Application) getHeaders(c *types.Claims) map[string]string {
if additionalHeaders == nil {
return headers
}
for key, value := range additionalHeaders.(map[string]any) {
for key, value := range additionalHeaders.(map[string]interface{}) {
headers[key] = toString(value)
}
}
@@ -134,13 +134,13 @@ func (a *Application) getNginxForwardUrl(r *http.Request) (*url.URL, error) {
return u, nil
}
func (a *Application) ReportMisconfiguration(r *http.Request, msg string, fields map[string]any) {
func (a *Application) ReportMisconfiguration(r *http.Request, msg string, fields map[string]interface{}) {
fields["message"] = msg
a.log.WithFields(fields).Error("Reporting configuration error")
req := api.EventRequest{
Action: api.EVENTACTIONS_CONFIGURATION_ERROR,
App: "authentik.providers.proxy", // must match python apps.py name
ClientIp: *api.NewNullableString(new(r.RemoteAddr)),
ClientIp: *api.NewNullableString(api.PtrString(r.RemoteAddr)),
Context: fields,
}
_, _, err := a.ak.Client.EventsAPI.EventsEventsCreate(context.Background()).EventRequest(req).Execute()

View File

@@ -82,9 +82,9 @@ func TestAdHeaders_Standard(t *testing.T) {
func TestAdHeaders_BasicAuth(t *testing.T) {
a := newTestApplication()
a.proxyConfig.BasicAuthEnabled = new(true)
a.proxyConfig.BasicAuthUserAttribute = new("user")
a.proxyConfig.BasicAuthPasswordAttribute = new("pass")
a.proxyConfig.BasicAuthEnabled = api.PtrBool(true)
a.proxyConfig.BasicAuthUserAttribute = api.PtrString("user")
a.proxyConfig.BasicAuthPasswordAttribute = api.PtrString("pass")
h := http.Header{}
a.addHeaders(h, &types.Claims{
PreferredUsername: "foo",

View File

@@ -28,7 +28,7 @@ func (a *Application) forwardHandleTraefik(rw http.ResponseWriter, r *http.Reque
// First check if we've got everything we need
fwd, err := a.getTraefikForwardUrl(r)
if err != nil {
a.ReportMisconfiguration(r, fmt.Sprintf("Outpost %s (Provider %s) failed to detect a forward URL from Traefik", a.outpostName, a.proxyConfig.Name), map[string]any{
a.ReportMisconfiguration(r, fmt.Sprintf("Outpost %s (Provider %s) failed to detect a forward URL from Traefik", a.outpostName, a.proxyConfig.Name), map[string]interface{}{
"provider": a.proxyConfig.Name,
"outpost": a.outpostName,
"url": r.URL.String(),
@@ -71,7 +71,7 @@ func (a *Application) forwardHandleCaddy(rw http.ResponseWriter, r *http.Request
// First check if we've got everything we need
fwd, err := a.getTraefikForwardUrl(r)
if err != nil {
a.ReportMisconfiguration(r, fmt.Sprintf("Outpost %s (Provider %s) failed to detect a forward URL from Caddy", a.outpostName, a.proxyConfig.Name), map[string]any{
a.ReportMisconfiguration(r, fmt.Sprintf("Outpost %s (Provider %s) failed to detect a forward URL from Caddy", a.outpostName, a.proxyConfig.Name), map[string]interface{}{
"provider": a.proxyConfig.Name,
"outpost": a.outpostName,
"url": r.URL.String(),
@@ -113,7 +113,7 @@ func (a *Application) forwardHandleNginx(rw http.ResponseWriter, r *http.Request
a.log.WithField("header", r.Header).Trace("tracing headers for debug")
fwd, err := a.getNginxForwardUrl(r)
if err != nil {
a.ReportMisconfiguration(r, fmt.Sprintf("Outpost %s (Provider %s) failed to detect a forward URL from nginx", a.outpostName, a.proxyConfig.Name), map[string]any{
a.ReportMisconfiguration(r, fmt.Sprintf("Outpost %s (Provider %s) failed to detect a forward URL from nginx", a.outpostName, a.proxyConfig.Name), map[string]interface{}{
"provider": a.proxyConfig.Name,
"outpost": a.outpostName,
"url": r.URL.String(),

View File

@@ -74,10 +74,10 @@ func TestForwardHandleCaddy_Single_Claims(t *testing.T) {
s.Values[constants.SessionClaims] = types.Claims{
Sub: "foo",
Proxy: &types.ProxyClaims{
UserAttributes: map[string]any{
UserAttributes: map[string]interface{}{
"username": "foo",
"password": "bar",
"additionalHeaders": map[string]any{
"additionalHeaders": map[string]interface{}{
"foo": "bar",
},
},
@@ -110,7 +110,7 @@ func TestForwardHandleCaddy_Single_Claims(t *testing.T) {
func TestForwardHandleCaddy_Domain_Blank(t *testing.T) {
a := newTestApplication()
a.proxyConfig.Mode = api.PROXYMODE_FORWARD_DOMAIN.Ptr()
a.proxyConfig.CookieDomain = new("foo")
a.proxyConfig.CookieDomain = api.PtrString("foo")
req, _ := http.NewRequest("GET", "/outpost.goauthentik.io/auth/caddy", nil)
rr := httptest.NewRecorder()
@@ -122,7 +122,7 @@ func TestForwardHandleCaddy_Domain_Blank(t *testing.T) {
func TestForwardHandleCaddy_Domain_Header(t *testing.T) {
a := newTestApplication()
a.proxyConfig.Mode = api.PROXYMODE_FORWARD_DOMAIN.Ptr()
a.proxyConfig.CookieDomain = new("foo")
a.proxyConfig.CookieDomain = api.PtrString("foo")
a.proxyConfig.ExternalHost = "http://auth.test.goauthentik.io"
req, _ := http.NewRequest("GET", "/outpost.goauthentik.io/auth/caddy", nil)
req.Header.Set("X-Forwarded-Proto", "http")

View File

@@ -56,10 +56,10 @@ func TestForwardHandleEnvoy_Single_Claims(t *testing.T) {
s.Values[constants.SessionClaims] = types.Claims{
Sub: "foo",
Proxy: &types.ProxyClaims{
UserAttributes: map[string]any{
UserAttributes: map[string]interface{}{
"username": "foo",
"password": "bar",
"additionalHeaders": map[string]any{
"additionalHeaders": map[string]interface{}{
"foo": "bar",
},
},
@@ -92,7 +92,7 @@ func TestForwardHandleEnvoy_Single_Claims(t *testing.T) {
func TestForwardHandleEnvoy_Domain_Header(t *testing.T) {
a := newTestApplication()
a.proxyConfig.Mode = api.PROXYMODE_FORWARD_DOMAIN.Ptr()
a.proxyConfig.CookieDomain = new("foo")
a.proxyConfig.CookieDomain = api.PtrString("foo")
a.proxyConfig.ExternalHost = "http://auth.test.goauthentik.io"
req, _ := http.NewRequest("GET", "http:///app", nil)
req.Host = "test.goauthentik.io"

View File

@@ -75,10 +75,10 @@ func TestForwardHandleNginx_Single_Claims(t *testing.T) {
s.Values[constants.SessionClaims] = types.Claims{
Sub: "foo",
Proxy: &types.ProxyClaims{
UserAttributes: map[string]any{
UserAttributes: map[string]interface{}{
"username": "foo",
"password": "bar",
"additionalHeaders": map[string]any{
"additionalHeaders": map[string]interface{}{
"foo": "bar",
},
},
@@ -111,7 +111,7 @@ func TestForwardHandleNginx_Single_Claims(t *testing.T) {
func TestForwardHandleNginx_Domain_Blank(t *testing.T) {
a := newTestApplication()
a.proxyConfig.Mode = api.PROXYMODE_FORWARD_DOMAIN.Ptr()
a.proxyConfig.CookieDomain = new("foo")
a.proxyConfig.CookieDomain = api.PtrString("foo")
req, _ := http.NewRequest("GET", "/outpost.goauthentik.io/auth/nginx", nil)
rr := httptest.NewRecorder()
@@ -123,7 +123,7 @@ func TestForwardHandleNginx_Domain_Blank(t *testing.T) {
func TestForwardHandleNginx_Domain_Header(t *testing.T) {
a := newTestApplication()
a.proxyConfig.Mode = api.PROXYMODE_FORWARD_DOMAIN.Ptr()
a.proxyConfig.CookieDomain = new("foo")
a.proxyConfig.CookieDomain = api.PtrString("foo")
a.proxyConfig.ExternalHost = "http://auth.test.goauthentik.io"
req, _ := http.NewRequest("GET", "/outpost.goauthentik.io/auth/nginx", nil)
req.Header.Set("X-Original-URL", "http://test.goauthentik.io/app")

View File

@@ -74,10 +74,10 @@ func TestForwardHandleTraefik_Single_Claims(t *testing.T) {
s.Values[constants.SessionClaims] = types.Claims{
Sub: "foo",
Proxy: &types.ProxyClaims{
UserAttributes: map[string]any{
UserAttributes: map[string]interface{}{
"username": "foo",
"password": "bar",
"additionalHeaders": map[string]any{
"additionalHeaders": map[string]interface{}{
"foo": "bar",
},
},
@@ -110,7 +110,7 @@ func TestForwardHandleTraefik_Single_Claims(t *testing.T) {
func TestForwardHandleTraefik_Domain_Blank(t *testing.T) {
a := newTestApplication()
a.proxyConfig.Mode = api.PROXYMODE_FORWARD_DOMAIN.Ptr()
a.proxyConfig.CookieDomain = new("foo")
a.proxyConfig.CookieDomain = api.PtrString("foo")
req, _ := http.NewRequest("GET", "/outpost.goauthentik.io/auth/traefik", nil)
rr := httptest.NewRecorder()
@@ -122,7 +122,7 @@ func TestForwardHandleTraefik_Domain_Blank(t *testing.T) {
func TestForwardHandleTraefik_Domain_Header(t *testing.T) {
a := newTestApplication()
a.proxyConfig.Mode = api.PROXYMODE_FORWARD_DOMAIN.Ptr()
a.proxyConfig.CookieDomain = new("foo")
a.proxyConfig.CookieDomain = api.PtrString("foo")
a.proxyConfig.ExternalHost = "http://auth.test.goauthentik.io"
req, _ := http.NewRequest("GET", "/outpost.goauthentik.io/auth/traefik", nil)
req.Header.Set("X-Forwarded-Proto", "http")

View File

@@ -76,7 +76,7 @@ func (a *Application) redirectToStart(rw http.ResponseWriter, r *http.Request) {
}
}
redirectUrl := urlJoin(a.proxyConfig.ExternalHost, r.URL.EscapedPath())
redirectUrl := urlJoin(a.proxyConfig.ExternalHost, r.URL.Path)
if a.Mode() == api.PROXYMODE_FORWARD_DOMAIN {
dom := strings.TrimPrefix(*a.proxyConfig.CookieDomain, ".")

View File

@@ -106,7 +106,7 @@ func (a *Application) createState(r *http.Request, w http.ResponseWriter, fwd st
func (a *Application) stateFromRequest(rw http.ResponseWriter, r *http.Request) *OAuthState {
stateJwt := r.URL.Query().Get("state")
token, err := jwt.Parse(stateJwt, func(token *jwt.Token) (any, error) {
token, err := jwt.Parse(stateJwt, func(token *jwt.Token) (interface{}, error) {
// Don't forget to validate the alg is what you expect:
if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok {
return nil, fmt.Errorf("unexpected signing method: %v", token.Header["alg"])

View File

@@ -55,7 +55,7 @@ func TestCheckRedirectParam_ValidPartial(t *testing.T) {
func TestCheckRedirectParam_Domain(t *testing.T) {
a := newTestApplication()
a.proxyConfig.Mode = api.PROXYMODE_FORWARD_DOMAIN.Ptr()
a.proxyConfig.CookieDomain = new("t.goauthentik.io")
a.proxyConfig.CookieDomain = api.PtrString("t.goauthentik.io")
req, _ := http.NewRequest("GET", "https://a.t.goauthentik.io/outpost.goauthentik.io/auth/start", nil)
rd, ok := a.checkRedirectParam(req)

View File

@@ -53,13 +53,13 @@ func TestPostgresStore_SessionLifecycle(t *testing.T) {
userID := uuid.New()
sessionKey := "test_session_" + uuid.New().String()
sessionData := map[string]any{
constants.SessionClaims: map[string]any{
sessionData := map[string]interface{}{
constants.SessionClaims: map[string]interface{}{
"sub": userID.String(),
"email": "test@example.com",
"preferred_username": "testuser",
"custom_claim": "custom_value",
"groups": []any{"admin", "user"},
"groups": []interface{}{"admin", "user"},
},
}
sessionDataJSON, err := json.Marshal(sessionData)
@@ -89,11 +89,11 @@ func TestPostgresStore_SessionLifecycle(t *testing.T) {
assert.Equal(t, userID, *retrievedSession.UserID)
// Parse session data
var parsedData map[string]any
var parsedData map[string]interface{}
err = json.Unmarshal([]byte(retrievedSession.SessionData), &parsedData)
require.NoError(t, err)
claims, ok := parsedData[constants.SessionClaims].(map[string]any)
claims, ok := parsedData[constants.SessionClaims].(map[string]interface{})
assert.True(t, ok)
assert.Equal(t, "test@example.com", claims["email"])
assert.Equal(t, "testuser", claims["preferred_username"])
@@ -109,8 +109,8 @@ func TestPostgresStore_LogoutSessions(t *testing.T) {
user2 := uuid.New()
createSessionData := func(userID uuid.UUID, email string) string {
sessionData := map[string]any{
constants.SessionClaims: map[string]any{
sessionData := map[string]interface{}{
constants.SessionClaims: map[string]interface{}{
"sub": userID.String(),
"email": email,
},
@@ -229,13 +229,13 @@ func TestPostgresStore_SessionClaims(t *testing.T) {
// Create session with complex claims
userID := uuid.New()
sessionData := map[string]any{
constants.SessionClaims: map[string]any{
sessionData := map[string]interface{}{
constants.SessionClaims: map[string]interface{}{
"sub": userID.String(),
"email": "test@example.com",
"preferred_username": "testuser",
"groups": []any{"admin", "user"},
"entitlements": []any{"read", "write"},
"groups": []interface{}{"admin", "user"},
"entitlements": []interface{}{"read", "write"},
"custom_field": "custom_value",
},
}
@@ -261,24 +261,24 @@ func TestPostgresStore_SessionClaims(t *testing.T) {
assert.Equal(t, userID, *retrieved.UserID)
// Parse and verify session data
var parsedData map[string]any
var parsedData map[string]interface{}
err = json.Unmarshal([]byte(retrieved.SessionData), &parsedData)
require.NoError(t, err)
claims, ok := parsedData[constants.SessionClaims].(map[string]any)
claims, ok := parsedData[constants.SessionClaims].(map[string]interface{})
assert.True(t, ok)
assert.Equal(t, "test@example.com", claims["email"])
assert.Equal(t, "testuser", claims["preferred_username"])
assert.Equal(t, "custom_value", claims["custom_field"])
// Verify groups array
groups, ok := claims["groups"].([]any)
groups, ok := claims["groups"].([]interface{})
assert.True(t, ok)
assert.Contains(t, groups, "admin")
assert.Contains(t, groups, "user")
// Verify entitlements array
entitlements, ok := claims["entitlements"].([]any)
entitlements, ok := claims["entitlements"].([]interface{})
assert.True(t, ok)
assert.Contains(t, entitlements, "read")
assert.Contains(t, entitlements, "write")

View File

@@ -19,7 +19,7 @@ func newTestServer() *testServer {
return &testServer{
api: ak.MockAK(
api.Outpost{
Config: map[string]any{
Config: map[string]interface{}{
"authentik_host": ak.TestSecret(),
},
},
@@ -50,18 +50,18 @@ func newTestApplication() *Application {
a, _ := NewApplication(
api.ProxyOutpostConfig{
Name: ak.TestSecret(),
ClientId: new(ak.TestSecret()),
ClientSecret: new(ak.TestSecret()),
CookieDomain: new(""),
CookieSecret: new(ak.TestSecret()),
ClientId: api.PtrString(ak.TestSecret()),
ClientSecret: api.PtrString(ak.TestSecret()),
CookieDomain: api.PtrString(""),
CookieSecret: api.PtrString(ak.TestSecret()),
ExternalHost: "https://ext.t.goauthentik.io",
InternalHost: new("http://backend"),
InternalHostSslValidation: new(true),
InternalHost: api.PtrString("http://backend"),
InternalHostSslValidation: api.PtrBool(true),
Mode: api.PROXYMODE_FORWARD_SINGLE.Ptr(),
SkipPathRegex: new("/skip.*"),
BasicAuthEnabled: new(true),
BasicAuthUserAttribute: new("username"),
BasicAuthPasswordAttribute: new("password"),
SkipPathRegex: api.PtrString("/skip.*"),
BasicAuthEnabled: api.PtrBool(true),
BasicAuthUserAttribute: api.PtrString("username"),
BasicAuthPasswordAttribute: api.PtrString("password"),
OidcConfiguration: api.OpenIDConnectConfiguration{
AuthorizationEndpoint: "http://fake-auth.t.goauthentik.io/auth",
TokenEndpoint: "http://fake-auth.t.goauthentik.io/token",

Some files were not shown because too many files have changed in this diff Show More