Compare commits

..

3 Commits

Author SHA1 Message Date
Marcelo Elizeche Landó
264ad31f50 refactor for bandit alerts 2026-04-30 02:39:29 -03:00
Marcelo Elizeche Landó
cced288ddb Add polling to shutdown.wait in lifecycle/worker_process.py 2026-04-30 01:35:06 -03:00
Marcelo Elizeche Landó
4aa323bc20 Add debug-attach to Makefile implementing Python 3.14 remote debugging interface 2026-04-30 00:12:01 -03:00
363 changed files with 5146 additions and 18268 deletions

View File

@@ -1,81 +0,0 @@
name: "Setup Node.js and NPM"
description: "Sets up Node.js with a specific NPM version via Corepack"
inputs:
working-directory:
description: "Path to the working directory containing the package.json file"
required: false
default: "."
dependencies:
required: false
description: "List of dependencies to setup"
default: "monorepo,working-directory"
node-version-file:
description: "Path to file containing the Node.js version"
required: false
default: "package.json"
cache-dependency-path:
description: "Path to dependency lock file for caching"
required: false
default: "package-lock.json"
cache:
description: "Package manager to cache"
default: "npm"
registry-url:
description: "npm registry URL"
default: "https://registry.npmjs.org"
runs:
using: "composite"
steps:
- name: Setup Node.js (Corepack bootstrap)
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v4
with:
node-version-file: ${{ inputs.node-version-file }}
registry-url: ${{ inputs.registry-url }}
# The setup-node action will attempt to create a cache using a version of
# npm that may not be compatible with the range specified in package.json.
# This can be enabled **after** corepack is installed and the correct npm version is available.
package-manager-cache: false
- name: Install Corepack
working-directory: ${{ github.workspace}}
shell: bash
run: | #shell
node ./scripts/node/lint-runtime.mjs
node ./scripts/node/setup-corepack.mjs --force
corepack enable
- name: Lint Node.js and NPM versions
shell: bash
run: node ./scripts/node/lint-runtime.mjs
- name: Setup Node.js (Monorepo Root)
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v4
with:
node-version-file: ${{ inputs.node-version-file }}
cache: ${{ inputs.cache }}
cache-dependency-path: ${{ inputs.cache-dependency-path }}
registry-url: ${{ inputs.registry-url }}
- name: Install monorepo dependencies
if: ${{ contains(inputs.dependencies, 'monorepo') }}
shell: bash
run: | #shell
node ./scripts/node/lint-lockfile.mjs
corepack npm ci
- name: Setup Node.js (Working Directory)
if: ${{ contains(inputs.dependencies, 'working-directory') }}
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v4
with:
node-version-file: ${{ inputs.working-directory }}/${{ inputs.node-version-file }}
cache: ${{ inputs.cache }}
cache-dependency-path: ${{ inputs.working-directory }}/${{ inputs.cache-dependency-path }}
registry-url: ${{ inputs.registry-url }}
- name: Install working directory dependencies
if: ${{ contains(inputs.dependencies, 'working-directory') }}
shell: bash
run: | # shell
corepack install
echo "node version: $(node --version)"
echo "npm version: $(corepack npm --version)"
node ./scripts/node/lint-lockfile.mjs ${{ inputs.working-directory }}
corepack npm ci --prefix ${{ inputs.working-directory }}

View File

@@ -18,24 +18,19 @@ runs:
using: "composite"
steps:
- name: Cleanup apt
if: ${{ contains(inputs.dependencies, 'system') || contains(inputs.dependencies,
'python') }}
if: ${{ contains(inputs.dependencies, 'system') || contains(inputs.dependencies, 'python') }}
shell: bash
run: sudo apt-get remove --purge man-db
- name: Install apt deps
if: ${{ contains(inputs.dependencies, 'system') || contains(inputs.dependencies,
'python') }}
if: ${{ contains(inputs.dependencies, 'system') || contains(inputs.dependencies, 'python') }}
uses: gerlero/apt-install@f4fa5265092af9e750549565d28c99aec7189639
with:
packages: libpq-dev openssl libxmlsec1-dev pkg-config gettext krb5-multidev
libkrb5-dev heimdal-multidev libclang-dev krb5-kdc krb5-user
krb5-admin-server
packages: libpq-dev openssl libxmlsec1-dev pkg-config gettext krb5-multidev libkrb5-dev heimdal-multidev libclang-dev krb5-kdc krb5-user krb5-admin-server
update: true
upgrade: false
install-recommends: false
- name: Make space on disk
if: ${{ contains(inputs.dependencies, 'system') || contains(inputs.dependencies,
'python') }}
if: ${{ contains(inputs.dependencies, 'system') || contains(inputs.dependencies, 'python') }}
shell: bash
run: |
sudo mkdir -p /tmp/empty/
@@ -54,10 +49,9 @@ runs:
if: ${{ contains(inputs.dependencies, 'python') }}
shell: bash
working-directory: ${{ inputs.working-directory }}
run: uv sync --all-extras --dev --locked
run: uv sync --all-extras --dev --frozen
- name: Setup rust (stable)
if: ${{ contains(inputs.dependencies, 'rust') && !contains(inputs.dependencies,
'rust-nightly') }}
if: ${{ contains(inputs.dependencies, 'rust') && !contains(inputs.dependencies, 'rust-nightly') }}
uses: actions-rust-lang/setup-rust-toolchain@2b1f5e9b395427c92ee4e3331786ca3c37afe2d7 # v1
with:
rustflags: ""
@@ -70,14 +64,30 @@ runs:
rustflags: ""
- name: Setup rust dependencies
if: ${{ contains(inputs.dependencies, 'rust') }}
uses: taiki-e/install-action@db5fb34fa772531a3ece57ca434f579eb334e0fb # v2
uses: taiki-e/install-action@cf525cb33f51aca27cd6fa02034117ab963ff9f1 # v2
with:
tool: cargo-deny cargo-machete cargo-llvm-cov nextest
- name: Setup node (root, web)
- name: Setup node (web)
if: ${{ contains(inputs.dependencies, 'node') }}
uses: ./.github/actions/setup-node
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v4
with:
working-directory: web
node-version-file: "${{ inputs.working-directory }}web/package.json"
cache: "npm"
cache-dependency-path: "${{ inputs.working-directory }}web/package-lock.json"
registry-url: "https://registry.npmjs.org"
- name: Setup node (root)
if: ${{ contains(inputs.dependencies, 'node') }}
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v4
with:
node-version-file: "${{ inputs.working-directory }}package.json"
cache: "npm"
cache-dependency-path: "${{ inputs.working-directory }}package-lock.json"
registry-url: "https://registry.npmjs.org"
- name: Install Node deps
if: ${{ contains(inputs.dependencies, 'node') }}
shell: bash
working-directory: ${{ inputs.working-directory }}
run: npm ci
- name: Setup go
if: ${{ contains(inputs.dependencies, 'go') }}
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v5
@@ -87,17 +97,15 @@ runs:
if: ${{ contains(inputs.dependencies, 'runtime') }}
uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7
with:
key: docker-images-${{ runner.os }}-${{
hashFiles('.github/actions/setup/compose.yml', 'Makefile') }}-${{
inputs.postgresql_version }}
key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
- name: Setup dependencies
if: ${{ contains(inputs.dependencies, 'runtime') }}
shell: bash
working-directory: ${{ inputs.working-directory }}
run: |
export PSQL_TAG=${{ inputs.postgresql_version }}
docker compose -f .github/actions/setup/compose.yml up -d --wait
corepack npm ci --prefix web
docker compose -f .github/actions/setup/compose.yml up -d
cd web && npm ci
- name: Generate config
if: ${{ contains(inputs.dependencies, 'python') }}
shell: uv run python {0}

View File

@@ -8,14 +8,8 @@ services:
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
POSTGRES_DB: authentik
PGDATA: /var/lib/postgresql/data/pgdata
ports:
- 5432:5432
healthcheck:
test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB} -h 127.0.0.1"]
interval: 1s
timeout: 5s
retries: 60
restart: always
s3:
container_name: s3

View File

@@ -67,16 +67,6 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: ./.github/actions/setup-node
with:
working-directory: web
dependencies: "monorepo"
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version-file: "go.mod"
- name: Generate API Clients
run: |
make gen-client-ts
- name: Build Docker Image
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
id: push
@@ -91,8 +81,7 @@ jobs:
${{ steps.ev.outputs.imageBuildArgs }}
tags: ${{ steps.ev.outputs.imageTags }}
platforms: linux/${{ inputs.image_arch }}
cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames
}}:buildcache-${{ inputs.image_arch }}
cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
cache-to: ${{ steps.ev.outputs.cacheTo }}
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
id: attest

View File

@@ -90,7 +90,7 @@ jobs:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: int128/docker-manifest-create-action@fa55f72001a6c74b0f4997dca65c70d334905180 # v2
- uses: int128/docker-manifest-create-action@7df7f9e221d927eaadf87db231ddf728047308a4 # v2
id: build
with:
tags: ${{ matrix.tag }}

View File

@@ -1,65 +0,0 @@
---
name: API - Publish Typescript client
on:
push:
branches: [main]
paths:
- "schema.yml"
workflow_dispatch:
permissions:
# Required for NPM OIDC trusted publisher
id-token: write
contents: read
jobs:
build:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
token: ${{ steps.generate_token.outputs.token }}
- uses: ./.github/actions/setup-node
with:
working-directory: web
- name: Generate API Client
run: make gen-client-ts
- name: Publish package
working-directory: gen-ts-api/
run: |
npm i
npm publish --tag generated
- name: Upgrade /web
working-directory: web
run: |
export VERSION=`node -e 'import mod from "./gen-ts-api/package.json" with { type: "json" };console.log(mod.version);'`
npm i @goauthentik/api@$VERSION
- name: Upgrade /web/packages/sfe
working-directory: web/packages/sfe
run: |
export VERSION=`node -e 'import mod from "./gen-ts-api/package.json" with { type: "json" };console.log(mod.version);'`
npm i @goauthentik/api@$VERSION
- uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}
branch: update-web-api-client
commit-message: "web: bump API Client version"
title: "web: bump API Client version"
body: "web: bump API Client version"
delete-branch: true
signoff: true
# ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
labels: dependencies
- uses: peter-evans/enable-pull-request-automerge@a660677d5469627102a1c1e11409dd063606628d # v3
with:
token: ${{ steps.generate_token.outputs.token }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash

View File

@@ -22,19 +22,25 @@ jobs:
- prettier-check
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: ./.github/actions/setup-node
with:
working-directory: website
- name: Install Dependencies
working-directory: website/
run: npm ci
- name: Lint
run: corepack npm run ${{ matrix.command }} --prefix website
working-directory: website/
run: npm run ${{ matrix.command }}
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: ./.github/actions/setup-node
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
with:
working-directory: website
node-version-file: website/package.json
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
name: Install Dependencies
run: npm ci
- uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v4
with:
path: |
@@ -48,7 +54,7 @@ jobs:
working-directory: website
env:
NODE_ENV: production
run: corepack npm run build -w api
run: npm run build -w api
- uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v4
with:
name: api-docs
@@ -65,9 +71,11 @@ jobs:
with:
name: api-docs
path: website/api/build
- uses: ./.github/actions/setup-node
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
with:
working-directory: website
node-version-file: website/package.json
cache: "npm"
cache-dependency-path: website/package-lock.json
- name: Deploy Netlify (Production)
working-directory: website/api
if: github.event_name == 'push' && github.ref == 'refs/heads/main'

View File

@@ -24,9 +24,14 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: ./.github/actions/setup-node
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
with:
working-directory: lifecycle/aws
node-version-file: lifecycle/aws/package.json
cache: "npm"
cache-dependency-path: lifecycle/aws/package-lock.json
- working-directory: lifecycle/aws/
run: |
npm ci
- name: Check changes have been applied
run: |
uv run make aws-cfn

View File

@@ -24,34 +24,46 @@ jobs:
- prettier-check
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: ./.github/actions/setup-node
with:
working-directory: website
- name: Install dependencies
working-directory: website/
run: npm ci
- name: Lint
run: corepack npm run ${{ matrix.command }} --prefix website
working-directory: website/
run: npm run ${{ matrix.command }}
build-docs:
runs-on: ubuntu-latest
env:
NODE_ENV: production
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: ./.github/actions/setup-node
name: Setup Node.js
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
with:
working-directory: website
node-version-file: website/package.json
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
name: Install Dependencies
run: npm ci
- name: Build Documentation via Docusaurus
run: corepack npm run build --prefix website
working-directory: website/
run: npm run build
build-integrations:
runs-on: ubuntu-latest
env:
NODE_ENV: production
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: ./.github/actions/setup-node
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
with:
working-directory: website
node-version-file: website/package.json
cache: "npm"
cache-dependency-path: website/package-lock.json
- working-directory: website/
name: Install Dependencies
run: npm ci
- name: Build Integrations via Docusaurus
run: corepack npm run build -w integrations --prefix website
working-directory: website/
run: npm run build -w integrations
build-container:
runs-on: ubuntu-latest
permissions:
@@ -92,9 +104,7 @@ jobs:
platforms: linux/amd64,linux/arm64
context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' &&
'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max'
|| '' }}
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }}
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}

View File

@@ -73,8 +73,7 @@ jobs:
- name: generate API clients
run: make gen-clients
- name: ensure schema is up-to-date
run: git diff --exit-code -- schema.yml blueprints/schema.json
packages/client-go packages/client-rust packages/client-ts
run: git diff --exit-code -- schema.yml blueprints/schema.json packages/client-go packages/client-rust packages/client-ts
test-migrations:
runs-on: ubuntu-latest
steps:
@@ -92,8 +91,7 @@ jobs:
outputs:
seed: ${{ steps.seed.outputs.seed }}
test-migrations-from-stable:
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{
matrix.run_id }}/5
name: test-migrations-from-stable - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
runs-on: ubuntu-latest
timeout-minutes: 30
needs: test-make-seed
@@ -103,7 +101,7 @@ jobs:
psql:
- 14-alpine
- 18-alpine
run_id: [ 1, 2, 3, 4, 5 ]
run_id: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
@@ -111,13 +109,8 @@ jobs:
- name: checkout stable
run: |
set -e -o pipefail
cp -R .github ..
cp -R scripts ..
mkdir -p ../packages
cp -R packages/logger-js ../packages/logger-js
# Previous stable tag
prev_stable=$(git tag --sort=version:refname | grep '^version/' | grep -vE -- '-rc[0-9]+$' | tail -n1)
# Current version family based on
@@ -125,13 +118,10 @@ jobs:
if [[ -n $current_version_family ]]; then
prev_stable="version/${current_version_family}"
fi
echo "::notice::Checking out ${prev_stable} as stable version..."
git checkout ${prev_stable}
rm -rf .github/ scripts/ packages/logger-js/
rm -rf .github/ scripts/
mv ../.github ../scripts .
mv ../packages/logger-js ./packages/
- name: Setup authentik env (stable)
uses: ./.github/actions/setup
with:
@@ -179,7 +169,7 @@ jobs:
psql:
- 14-alpine
- 18-alpine
run_id: [ 1, 2, 3, 4, 5 ]
run_id: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
@@ -262,22 +252,19 @@ jobs:
COMPOSE_PROFILES: ${{ matrix.job.profiles }}
run: |
docker compose -f tests/e2e/compose.yml up -d --quiet-pull
- uses: ./.github/actions/setup-node
- id: cache-web
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v4
if: contains(matrix.job.profiles, 'selenium')
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json',
'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true' && contains(matrix.job.profiles,
'selenium')
if: steps.cache-web.outputs.cache-hit != 'true' && contains(matrix.job.profiles, 'selenium')
working-directory: web
run: |
corepack npm ci
corepack npm run build
corepack npm run build:sfe
npm ci
npm run build
npm run build:sfe
- name: run e2e
run: |
uv run coverage run manage.py test ${{ matrix.job.glob }}
@@ -295,18 +282,10 @@ jobs:
fail-fast: false
matrix:
job:
- name: oidc_basic
glob: tests/openid_conformance/test_oidc_basic.py
- name: oidc_implicit
glob: tests/openid_conformance/test_oidc_implicit.py
- name: oidc_rp-initiated
glob: tests/openid_conformance/test_oidc_rp_initiated.py
- name: oidc_frontchannel
glob: tests/openid_conformance/test_oidc_frontchannel.py
- name: oidc_backchannel
glob: tests/openid_conformance/test_oidc_backchannel.py
- name: ssf_transmitter
glob: tests/openid_conformance/test_ssf_transmitter.py
- name: basic
glob: tests/openid_conformance/test_basic.py
- name: implicit
glob: tests/openid_conformance/test_implicit.py
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
@@ -323,14 +302,14 @@ jobs:
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v4
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**',
'web/packages/sfe/src/**') }}-b
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true'
working-directory: web
run: |
corepack npm ci --prefix web
corepack npm run build --prefix web
corepack npm run build:sfe --prefix web
npm ci
npm run build
npm run build:sfe
- name: run conformance
run: |
uv run coverage run manage.py test ${{ matrix.job.glob }}
@@ -396,9 +375,7 @@ jobs:
uses: ./.github/workflows/_reusable-docker-build.yml
secrets: inherit
with:
image_name: ${{ github.repository == 'goauthentik/authentik-internal' &&
'ghcr.io/goauthentik/internal-server' ||
'ghcr.io/goauthentik/dev-server' }}
image_name: ${{ github.repository == 'goauthentik/authentik-internal' && 'ghcr.io/goauthentik/internal-server' || 'ghcr.io/goauthentik/dev-server' }}
release: false
pr-comment:
needs:

View File

@@ -114,11 +114,8 @@ jobs:
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}
platforms: linux/amd64,linux/arm64
context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type
}}:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' &&
format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max',
matrix.type) || '' }}
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }}
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
@@ -139,8 +136,8 @@ jobs:
- ldap
- radius
- rac
goos: [ linux ]
goarch: [ amd64, arm64 ]
goos: [linux]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
@@ -148,11 +145,16 @@ jobs:
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version-file: "go.mod"
- uses: ./.github/actions/setup-node
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
with:
working-directory: web
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Build web
run: corepack npm run build-proxy --prefix web
working-directory: web/
run: |
npm ci
npm run build-proxy
- name: Build outpost
run: |
set -x

View File

@@ -15,30 +15,48 @@ on:
jobs:
lint:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
command:
- lint
- lint:lockfile
- tsc
- prettier-check
project:
- web
include:
- command: tsc
project: web
- command: lit-analyse
project: web
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: ./.github/actions/setup-node
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
with:
working-directory: web
node-version-file: ${{ matrix.project }}/package.json
cache: "npm"
cache-dependency-path: ${{ matrix.project }}/package-lock.json
- working-directory: ${{ matrix.project }}/
run: |
npm ci
- name: Lint
run: corepack npm run lint --prefix web
- name: Check types
run: corepack npm run tsc --prefix web
- name: Check formatting
run: corepack npm run prettier-check --prefix web
- name: Lit analyse
run: corepack npm run lit-analyse --prefix web
working-directory: ${{ matrix.project }}/
run: npm run ${{ matrix.command }}
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: ./.github/actions/setup-node
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
with:
working-directory: web
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: build
working-directory: web/
run: corepack npm run build
run: npm run build
ci-web-mark:
if: always()
needs:
@@ -55,9 +73,13 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: ./.github/actions/setup-node
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
with:
working-directory: web
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: test
working-directory: web/
run: corepack npm run test || exit 0
run: npm run test || exit 0

View File

@@ -3,7 +3,7 @@ name: Packages - Publish NPM packages
on:
push:
branches: [ main ]
branches: [main]
paths:
- packages/tsconfig/**
- packages/eslint-config/**
@@ -35,19 +35,22 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
fetch-depth: 2
- uses: ./.github/actions/setup-node
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
with:
working-directory: ${{ matrix.package }}
node-version-file: ${{ matrix.package }}/package.json
registry-url: "https://registry.npmjs.org"
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@9426d40962ed5378910ee2e21d5f8c6fcbf2dd96 # 24d32ffd492484c1d75e0c0b894501ddb9d30d62
with:
files: |
${{ matrix.package }}/package.json
- name: Install Dependencies
run: npm ci
- name: Publish package
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ${{ matrix.package }}
run: |
corepack npm ci
corepack npm run build
corepack npm publish
npm ci
npm run build
npm publish

View File

@@ -3,7 +3,7 @@ name: Release - On publish
on:
release:
types: [ published, created ]
types: [published, created]
jobs:
build-server:
@@ -87,9 +87,11 @@ jobs:
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version-file: "go.mod"
- uses: ./.github/actions/setup-node
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
with:
working-directory: web
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Set up QEMU
uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
- name: Set up Docker Buildx
@@ -142,16 +144,22 @@ jobs:
- proxy
- ldap
- radius
goos: [ linux, darwin ]
goarch: [ amd64, arm64 ]
goos: [linux, darwin]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version-file: "go.mod"
- uses: ./.github/actions/setup-node
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
with:
working-directory: web
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Install web dependencies
working-directory: web/
run: |
npm ci
- name: Build web
working-directory: web/
run: |
@@ -167,10 +175,8 @@ jobs:
uses: svenstaro/upload-release-action@29e53e917877a24fad85510ded594ab3c9ca12de # v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{
matrix.goarch }}
asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{
matrix.goarch }}
file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
asset_name: authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
tag: ${{ github.ref }}
upload-aws-cfn-template:
permissions:

7
.gitignore vendored
View File

@@ -14,8 +14,6 @@ media
# Node
node_modules
corepack.tgz
.corepack
.cspellcache
cspell-report.*
@@ -231,11 +229,6 @@ source_docs/
### Golang ###
/vendor/
server
proxy
ldap
rac
radius
### Docker ###
tests/openid_conformance/exports/*.zip

158
Cargo.lock generated
View File

@@ -17,6 +17,18 @@ version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
[[package]]
name = "ahash"
version = "0.8.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75"
dependencies = [
"cfg-if",
"once_cell",
"version_check",
"zerocopy",
]
[[package]]
name = "aho-corasick"
version = "1.1.4"
@@ -191,7 +203,6 @@ dependencies = [
"tokio",
"tracing",
"uuid",
"which",
]
[[package]]
@@ -1003,17 +1014,6 @@ dependencies = [
"pin-project-lite",
]
[[package]]
name = "evmap"
version = "11.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b8874945f036109c72242964c1174cf99434e30cfa45bf45fedc983f50046f8"
dependencies = [
"hashbag",
"left-right",
"smallvec",
]
[[package]]
name = "eyre"
version = "0.6.12"
@@ -1230,21 +1230,6 @@ dependencies = [
"slab",
]
[[package]]
name = "generator"
version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52f04ae4152da20c76fe800fa48659201d5cf627c5149ca0b707b69d7eef6cf9"
dependencies = [
"cc",
"cfg-if",
"libc",
"log",
"rustversion",
"windows-link",
"windows-result",
]
[[package]]
name = "generic-array"
version = "0.14.7"
@@ -1326,12 +1311,6 @@ dependencies = [
"tracing",
]
[[package]]
name = "hashbag"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7040a10f52cba493ddb09926e15d10a9d8a28043708a405931fe4c6f19fac064"
[[package]]
name = "hashbrown"
version = "0.15.5"
@@ -1889,17 +1868,6 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2"
[[package]]
name = "left-right"
version = "0.11.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f0c21e4c8ff95f487fb34e6f9182875f42c84cef966d29216bf115d9bba835a"
dependencies = [
"crossbeam-utils",
"loom",
"slab",
]
[[package]]
name = "libc"
version = "0.2.183"
@@ -1971,19 +1939,6 @@ version = "0.4.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
[[package]]
name = "loom"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca"
dependencies = [
"cfg-if",
"generator",
"scoped-tls",
"tracing",
"tracing-subscriber",
]
[[package]]
name = "lru-slab"
version = "0.1.2"
@@ -2023,22 +1978,21 @@ checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79"
[[package]]
name = "metrics"
version = "0.24.5"
version = "0.24.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff56c2e7dce6bd462e3b8919986a617027481b1dcc703175b58cf9dd98a2f071"
checksum = "5d5312e9ba3771cfa961b585728215e3d972c950a3eed9252aa093d6301277e8"
dependencies = [
"ahash",
"portable-atomic",
"rapidhash",
]
[[package]]
name = "metrics-exporter-prometheus"
version = "0.18.3"
version = "0.18.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1db0d8f1fc9e62caebd0319e11eaec5822b0186c171568f0480b46a0137f9108"
checksum = "3589659543c04c7dc5526ec858591015b87cd8746583b51b48ef4353f99dbcda"
dependencies = [
"base64 0.22.1",
"evmap",
"indexmap",
"metrics",
"metrics-util",
@@ -2057,7 +2011,7 @@ dependencies = [
"hashbrown 0.16.1",
"metrics",
"quanta",
"rand 0.9.4",
"rand 0.9.2",
"rand_xoshiro",
"sketches-ddsketch",
]
@@ -2744,7 +2698,7 @@ dependencies = [
"bytes",
"getrandom 0.3.4",
"lru-slab",
"rand 0.9.4",
"rand 0.9.2",
"ring",
"rustc-hash",
"rustls",
@@ -2804,9 +2758,9 @@ dependencies = [
[[package]]
name = "rand"
version = "0.9.4"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44c5af06bb1b7d3216d91932aed5265164bf384dc89cd6ba05cf59a35f5f76ea"
checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
dependencies = [
"rand_chacha 0.9.0",
"rand_core 0.9.5",
@@ -2859,15 +2813,6 @@ dependencies = [
"rand_core 0.9.5",
]
[[package]]
name = "rapidhash"
version = "4.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5e48930979c155e2f33aa36ab3119b5ee81332beb6482199a8ecd6029b80b59"
dependencies = [
"rustversion",
]
[[package]]
name = "raw-cpuid"
version = "11.6.0"
@@ -2926,9 +2871,9 @@ checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a"
[[package]]
name = "reqwest"
version = "0.13.3"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62e0021ea2c22aed41653bc7e1419abb2c97e038ff2c33d0e1309e49a97deec0"
checksum = "ab3f43e3283ab1488b624b44b0e988d0acea0b3214e694730a055cb6b2efa801"
dependencies = [
"base64 0.22.1",
"bytes",
@@ -3160,12 +3105,6 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "scoped-tls"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"
[[package]]
name = "scopeguard"
version = "1.2.0"
@@ -3203,9 +3142,9 @@ checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
[[package]]
name = "sentry"
version = "0.48.0"
version = "0.47.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8ac94aab850a23d7507307cc505332ed2bafd36c65930dfc5c43610f9e9b477"
checksum = "eb25f439f97d26fea01d717fa626167ceffcd981addaa670001e70505b72acbb"
dependencies = [
"cfg_aliases",
"httpdate",
@@ -3224,9 +3163,9 @@ dependencies = [
[[package]]
name = "sentry-backtrace"
version = "0.48.1"
version = "0.47.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc84c325ace9ca2388e510fe7d6672b5d60cd8b3bd0eb4bb4ee8314c323cd686"
checksum = "46a8c2c1bd5c1f735e84f28b48e7d72efcaafc362b7541bc8253e60e8fcdffc6"
dependencies = [
"backtrace",
"regex",
@@ -3235,9 +3174,9 @@ dependencies = [
[[package]]
name = "sentry-contexts"
version = "0.48.1"
version = "0.47.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "896c1ab62dbfe1746fb262bbf72e6feb2fb9dfb2c14709077bf71beb532e44b2"
checksum = "9b88a90baa654d7f0e1f4b667f6b434293d9f72c71bef16b197c76af5b7d5803"
dependencies = [
"hostname",
"libc",
@@ -3249,11 +3188,11 @@ dependencies = [
[[package]]
name = "sentry-core"
version = "0.48.1"
version = "0.47.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5f5abf20c42cb1593ec1638976e2647da55f79bccac956444c1707b6cce259a"
checksum = "0ac170a5bba8bec6e3339c90432569d89641fa7a3d3e4f44987d24f0762e6adf"
dependencies = [
"rand 0.9.4",
"rand 0.9.2",
"sentry-types",
"serde",
"serde_json",
@@ -3262,9 +3201,9 @@ dependencies = [
[[package]]
name = "sentry-debug-images"
version = "0.48.1"
version = "0.47.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b88bbe6a760d5724bb40689827e82e8db1e275947df2c59abe171bfc30bb671"
checksum = "dd9646a972b57896d4a92ed200cf76139f8e30b3cfd03b6662ae59926d26633c"
dependencies = [
"findshlibs",
"sentry-core",
@@ -3272,9 +3211,9 @@ dependencies = [
[[package]]
name = "sentry-panic"
version = "0.48.1"
version = "0.47.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0260dcb52562b6a79ae7702312a26dba94b79fb5baee7301087529e5ca4e872e"
checksum = "6127d3d304ba5ce0409401e85aae538e303a569f8dbb031bf64f9ba0f7174346"
dependencies = [
"sentry-backtrace",
"sentry-core",
@@ -3282,9 +3221,9 @@ dependencies = [
[[package]]
name = "sentry-tower"
version = "0.48.1"
version = "0.47.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d669616d5d5279b5712febfc80c343acc3695e499de0d101ed70fceacadf37f2"
checksum = "61c5253dc4ad89863a866b93aeaaac1c9d60f2f774663b5024afe2d57e0a101c"
dependencies = [
"sentry-core",
"tower-layer",
@@ -3293,9 +3232,9 @@ dependencies = [
[[package]]
name = "sentry-tracing"
version = "0.48.1"
version = "0.47.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1c035f3a0a8671ae1a231c5b457abb68b71acba2bf3054dab2a09a9d4ea487e"
checksum = "27701acc51e68db5281802b709010395bfcbcb128b1d0a4e5873680d3b47ff0c"
dependencies = [
"bitflags 2.11.0",
"sentry-backtrace",
@@ -3306,13 +3245,13 @@ dependencies = [
[[package]]
name = "sentry-types"
version = "0.48.1"
version = "0.47.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82d8e81058ec155992191f61c7b29bfa7b2cf12012131e7cdc0678020898a7c9"
checksum = "56780cb5597d676bf22e6c11d1f062eb4def46390ea3bfb047bcbcf7dfd19bdb"
dependencies = [
"debugid",
"hex",
"rand 0.9.4",
"rand 0.9.2",
"serde",
"serde_json",
"thiserror 2.0.18",
@@ -4214,7 +4153,7 @@ dependencies = [
"http",
"httparse",
"log",
"rand 0.9.4",
"rand 0.9.2",
"sha1",
"thiserror 2.0.18",
]
@@ -4576,15 +4515,6 @@ dependencies = [
"rustls-pki-types",
]
[[package]]
name = "which"
version = "8.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81995fafaaaf6ae47a7d0cc83c67caf92aeb7e5331650ae6ff856f7c0c60c459"
dependencies = [
"libc",
]
[[package]]
name = "whoami"
version = "1.6.1"

View File

@@ -43,15 +43,15 @@ hyper-unix-socket = "= 0.6.1"
hyper-util = "= 0.1.20"
ipnet = { version = "= 2.12.0", features = ["serde"] }
json-subscriber = "= 0.2.8"
metrics = "= 0.24.5"
metrics-exporter-prometheus = { version = "= 0.18.3", default-features = false }
metrics = "= 0.24.3"
metrics-exporter-prometheus = { version = "= 0.18.1", default-features = false }
nix = { version = "= 0.31.2", features = ["hostname", "signal"] }
notify = "= 8.2.0"
pin-project-lite = "= 0.2.17"
pyo3 = "= 0.28.3"
pyo3-build-config = "= 0.28.3"
regex = "= 1.12.3"
reqwest = { version = "= 0.13.3", features = [
reqwest = { version = "= 0.13.2", features = [
"form",
"json",
"multipart",
@@ -67,7 +67,7 @@ reqwest-middleware = { version = "= 0.5.1", features = [
"rustls",
] }
rustls = { version = "= 0.23.40", features = ["fips"] }
sentry = { version = "= 0.48.0", default-features = false, features = [
sentry = { version = "= 0.47.0", default-features = false, features = [
"backtrace",
"contexts",
"debug-images",
@@ -113,7 +113,6 @@ tracing-subscriber = { version = "= 0.3.23", features = [
] }
url = "= 2.5.8"
uuid = { version = "= 1.23.1", features = ["serde", "v4"] }
which = "= 8.0.2"
ak-axum = { package = "authentik-axum", version = "2026.5.0-rc1", path = "./packages/ak-axum" }
ak-client = { package = "authentik-client", version = "2026.5.0-rc1", path = "./packages/client-rust" }
@@ -283,7 +282,6 @@ sqlx = { workspace = true, optional = true }
tokio.workspace = true
tracing.workspace = true
uuid.workspace = true
which.workspace = true
[lints]
workspace = true

View File

@@ -106,15 +106,20 @@ migrate: ## Run the Authentik Django server's migrations
i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service
aws-cfn: node-install
corepack npm install --prefix lifecycle/aws
$(UV) run corepack npm run aws-cfn --prefix lifecycle/aws
aws-cfn:
cd lifecycle/aws && npm i && $(UV) run npm run aws-cfn
run: ## Run the main authentik server and worker processes
$(UV) run ak allinone
run-server: ## Run the main authentik server process
$(UV) run ak server
run-watch: ## Run the authentik server and worker, with auto reloading
watchexec --on-busy-update=restart --stop-signal=SIGINT --exts py,rs,go --no-meta --notify -- $(UV) run ak allinone
run-worker: ## Run the main authentik worker process
$(UV) run ak worker
run-worker-watch: ## Run the authentik worker, with auto reloading
watchexec --on-busy-update=restart --stop-signal=SIGINT --exts py,rs --no-meta --notify -- $(UV) run ak worker
debug-attach: ## Attach pdb to a running authentik Python worker (PEP 768). PID=<pid> to pick; SUDO=1 on macOS.
$(UV) run python scripts/debug_attach.py
core-i18n-extract:
$(UV) run ak makemessages \
@@ -126,7 +131,7 @@ core-i18n-extract:
--ignore website \
-l en
install: node-install web-install core-install ## Install all requires dependencies for `node`, `web` and `core`
install: node-install docs-install core-install ## Install all requires dependencies for `node`, `docs` and `core`
dev-drop-db:
$(eval pg_user := $(shell $(UV) run python -m authentik.lib.config postgresql.user 2>/dev/null))
@@ -230,46 +235,38 @@ gen-dev-config: ## Generate a local development config file
#########################
node-install: ## Install the necessary libraries to build Node.js packages
node ./scripts/node/setup-corepack.mjs
node ./scripts/node/lint-runtime.mjs
node ./scripts/node/lint-runtime.mjs
npm ci
npm ci --prefix web
#########################
## Web
#########################
web-install: ## Install the necessary libraries to build the Authentik UI
node ./scripts/node/lint-runtime.mjs web
corepack npm ci
corepack npm ci --prefix web
web-build: ## Build the Authentik UI
corepack npm run --prefix web build
web-build: node-install ## Build the Authentik UI
npm run --prefix web build
web: web-lint-fix web-lint web-check-compile ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it
web-test: ## Run tests for the Authentik UI
corepack npm run --prefix web test
npm run --prefix web test
web-watch: ## Build and watch the Authentik UI for changes, updating automatically
corepack npm run --prefix web watch
npm run --prefix web watch
web-storybook-watch: ## Build and run the storybook documentation server
corepack npm run --prefix web storybook
npm run --prefix web storybook
web-lint-fix:
corepack npm run --prefix web prettier
npm run --prefix web prettier
web-lint:
corepack npm run --prefix web lint
corepack npm run --prefix web lit-analyse
npm run --prefix web lint
npm run --prefix web lit-analyse
web-check-compile:
corepack npm run --prefix web tsc
npm run --prefix web tsc
web-i18n-extract:
corepack npm run --prefix web extract-locales
npm run --prefix web extract-locales
#########################
## Docs
@@ -277,40 +274,35 @@ web-i18n-extract:
docs: docs-lint-fix docs-build ## Automatically fix formatting issues in the Authentik docs source code, lint the code, and compile it
docs-install: node-install ## Install the necessary libraries to build the Authentik documentation
node ./scripts/node/lint-runtime.mjs
corepack npm ci
corepack npm ci --prefix website
docs-install:
npm ci --prefix website
docs-lint-fix: lint-spellcheck
corepack npm run --prefix website prettier
npm run --prefix website prettier
docs-build:
node ./scripts/node/lint-runtime.mjs website
corepack npm run --prefix website build
npm run --prefix website build
docs-watch: ## Build and watch the topics documentation
corepack npm run --prefix website start
npm run --prefix website start
integrations: docs-lint-fix integrations-build ## Fix formatting issues in the integrations source code, lint the code, and compile it
integrations-build:
corepack npm run --prefix website -w integrations build
npm run --prefix website -w integrations build
integrations-watch: ## Build and watch the Integrations documentation
corepack npm run --prefix website -w integrations start
npm run --prefix website -w integrations start
docs-api-build:
corepack npm run --prefix website -w api build
npm run --prefix website -w api build
docs-api-watch: ## Build and watch the API documentation
corepack npm run --prefix website -w api generate
corepack npm run --prefix website -w api start
npm run --prefix website -w api generate
npm run --prefix website -w api start
docs-api-clean: ## Clean generated API documentation
corepack npm run --prefix website -w api build:api:clean
npm run --prefix website -w api build:api:clean
#########################
## Docker

View File

@@ -1,36 +0,0 @@
from django.db.models import F, QuerySet
from rest_framework.filters import OrderingFilter
from rest_framework.request import Request
from rest_framework.views import APIView
class NullsAwareOrderingFilter(OrderingFilter):
"""OrderingFilter that sorts NULL values consistently.
For any nullable field, NULLs are treated as the smallest possible value:
- ascending → NULLs appear first (nulls_first=True)
- descending → NULLs appear last (nulls_last=True)
"""
def _nullable_field_names(self, queryset: QuerySet) -> set[str]:
return {f.name for f in queryset.model._meta.get_fields() if hasattr(f, "null") and f.null}
def filter_queryset(self, request: Request, queryset: QuerySet, view: APIView):
queryset = super().filter_queryset(request, queryset, view)
ordering = queryset.query.order_by
if not ordering:
return queryset
nullable = self._nullable_field_names(queryset)
new_ordering = []
changed = False
for term in ordering:
name = term.lstrip("-")
if name in nullable:
changed = True
if term.startswith("-"):
new_ordering.append(F(name).desc(nulls_last=True))
else:
new_ordering.append(F(name).asc(nulls_first=True))
else:
new_ordering.append(term)
return queryset.order_by(*new_ordering) if changed else queryset

View File

@@ -1,59 +0,0 @@
from django.db.models import OrderBy
from django.test import TestCase
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from authentik.api.ordering import NullsAwareOrderingFilter
from authentik.core.models import Token, User
class MockView:
ordering_fields = "__all__"
ordering = None
class TestNullsAwareOrderingFilter(TestCase):
def setUp(self):
self.filter = NullsAwareOrderingFilter()
self.view = MockView()
factory = APIRequestFactory()
self._req = lambda ordering: Request(factory.get("/", {"ordering": ordering}))
def _order_by(self, model, ordering):
qs = model.objects.all()
return self.filter.filter_queryset(self._req(ordering), qs, self.view).query.order_by
def test_nullable_asc_nulls_first(self):
"""Ascending sort on a nullable field rewrites to nulls_first=True."""
(expr,) = self._order_by(User, "last_login")
self.assertIsInstance(expr, OrderBy)
self.assertFalse(expr.descending)
self.assertTrue(expr.nulls_first)
def test_nullable_desc_nulls_last(self):
"""Descending sort on a nullable field rewrites to nulls_last=True."""
(expr,) = self._order_by(User, "-last_login")
self.assertIsInstance(expr, OrderBy)
self.assertTrue(expr.descending)
self.assertTrue(expr.nulls_last)
def test_non_nullable_passes_through(self):
"""Non-nullable fields are left as plain string terms."""
(expr,) = self._order_by(User, "username")
self.assertEqual(expr, "username")
def test_mixed_ordering(self):
"""Only nullable terms are rewritten; non-nullable terms pass through unchanged."""
first, second = self._order_by(User, "username,-last_login")
self.assertEqual(first, "username")
self.assertIsInstance(second, OrderBy)
self.assertTrue(second.descending)
self.assertTrue(second.nulls_last)
def test_expires_nullable(self):
"""expires on ExpiringModel is nullable and is rewritten correctly."""
(expr,) = self._order_by(Token, "-expires")
self.assertIsInstance(expr, OrderBy)
self.assertTrue(expr.descending)
self.assertTrue(expr.nulls_last)

View File

@@ -1,73 +1,31 @@
"""authentik API Modelviewset tests"""
from collections.abc import Callable
from urllib.parse import urlencode
from django.test import TestCase
from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet
from authentik.admin.api.version_history import VersionHistoryViewSet
from authentik.api.v3.urls import router
from authentik.core.tests.utils import RequestFactory, create_test_admin_user
from authentik.lib.generators import generate_id
from authentik.tenants.api.domains import DomainViewSet
from authentik.tenants.api.tenants import TenantViewSet
from authentik.tenants.utils import get_current_tenant
class TestModelViewSets(TestCase):
"""Test Viewset"""
def setUp(self):
self.user = create_test_admin_user()
self.factory = RequestFactory()
def viewset_tester_factory(test_viewset: type[ModelViewSet], full=True) -> dict[str, Callable]:
def viewset_tester_factory(test_viewset: type[ModelViewSet]) -> Callable:
"""Test Viewset"""
def test_attrs(self: TestModelViewSets) -> None:
"""Test attributes we require on all viewsets"""
self.assertIsNotNone(getattr(test_viewset, "ordering", None))
def tester(self: TestModelViewSets):
self.assertIsNotNone(getattr(test_viewset, "search_fields", None))
self.assertIsNotNone(getattr(test_viewset, "ordering", None))
filterset_class = getattr(test_viewset, "filterset_class", None)
if not filterset_class:
self.assertIsNotNone(getattr(test_viewset, "filterset_fields", None))
def test_ordering(self: TestModelViewSets) -> None:
"""Test that all ordering fields are correct"""
view = test_viewset.as_view({"get": "list"})
for ordering_field in test_viewset.ordering:
with self.subTest(ordering_field):
req = self.factory.get(
f"/?{urlencode({'ordering': ordering_field}, doseq=True)}", user=self.user
)
req.tenant = get_current_tenant()
res = view(req)
self.assertEqual(res.status_code, 200)
def test_search(self: TestModelViewSets) -> None:
"""Test that search fields are correct"""
view = test_viewset.as_view({"get": "list"})
req = self.factory.get(
f"/?{urlencode({'search': generate_id()}, doseq=True)}", user=self.user
)
req.tenant = get_current_tenant()
res = view(req)
self.assertEqual(res.status_code, 200)
cases = {
"attrs": test_attrs,
}
if full:
cases["ordering"] = test_ordering
cases["search"] = test_search
return cases
return tester
for _, viewset, _ in router.registry:
if not issubclass(viewset, ModelViewSet | ReadOnlyModelViewSet):
continue
full = viewset not in [VersionHistoryViewSet, DomainViewSet, TenantViewSet]
for test, case in viewset_tester_factory(viewset, full=full).items():
setattr(TestModelViewSets, f"test_viewset_{viewset.__name__}_{test}", case)
setattr(TestModelViewSets, f"test_viewset_{viewset.__name__}", viewset_tester_factory(viewset))

View File

@@ -1,6 +1,5 @@
"""Serializer mixin for managed models"""
from json import JSONDecodeError, loads
from typing import cast
from django.conf import settings
@@ -45,7 +44,6 @@ class BlueprintUploadSerializer(PassiveSerializer):
file = FileField(required=False)
path = CharField(required=False)
context = CharField(required=False, allow_blank=True)
def validate_path(self, path: str) -> str:
"""Ensure the path (if set) specified is retrievable"""
@@ -56,18 +54,6 @@ class BlueprintUploadSerializer(PassiveSerializer):
raise ValidationError(_("Blueprint file does not exist"))
return path
def validate_context(self, context: str) -> dict:
"""Parse context as a JSON object"""
if not context:
return {}
try:
parsed = loads(context)
except JSONDecodeError as exc:
raise ValidationError(_("Context must be valid JSON")) from exc
if not isinstance(parsed, dict):
raise ValidationError(_("Context must be a JSON object"))
return parsed
class ManagedSerializer:
"""Managed Serializer"""
@@ -140,7 +126,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
def check_blueprint_perms(blueprint: Blueprint, user: User, explicit_action: str | None = None):
"""Check for individual permissions for each model in a blueprint"""
for entry in blueprint.iter_entries():
for entry in blueprint.entries:
full_model = entry.get_model(blueprint)
app, __, model = full_model.partition(".")
perms = [
@@ -238,8 +224,7 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
).retrieve_file()
else:
raise ValidationError("Either path or file must be set")
context = body.validated_data.get("context") or {}
importer = Importer.from_string(string_contents, context)
importer = Importer.from_string(string_contents)
check_blueprint_perms(importer.blueprint, request.user)

View File

@@ -1,6 +1,6 @@
"""Test blueprints v1 api"""
from json import dumps, loads
from json import loads
from tempfile import NamedTemporaryFile, mkdtemp
from django.urls import reverse
@@ -8,11 +8,7 @@ from rest_framework.test import APITestCase
from yaml import dump
from authentik.core.tests.utils import create_test_admin_user
from authentik.flows.models import Flow
from authentik.lib.config import CONFIG
from authentik.lib.generators import generate_id
from authentik.stages.invitation.models import InvitationStage
from authentik.stages.user_write.models import UserWriteStage
TMP = mkdtemp("authentik-blueprints")
@@ -84,107 +80,3 @@ class TestBlueprintsV1API(APITestCase):
res.content.decode(),
{"content": ["Failed to validate blueprint", "- Invalid blueprint version"]},
)
def test_api_import_with_context(self):
"""Test that the import endpoint applies the supplied context to the real blueprint"""
slug = f"invitation-enrollment-{generate_id()}"
flow_name = f"Invitation Enrollment {generate_id()}"
stage_name = f"invitation-stage-{generate_id()}"
user_type = "internal"
continue_without_invitation = True
res = self.client.post(
reverse("authentik_api:blueprintinstance-import-"),
data={
"path": "example/flows-invitation-enrollment-minimal.yaml",
"context": dumps(
{
"flow_slug": slug,
"flow_name": flow_name,
"stage_name": stage_name,
"continue_flow_without_invitation": continue_without_invitation,
"user_type": user_type,
}
),
},
format="multipart",
)
self.assertEqual(res.status_code, 200)
self.assertTrue(res.json()["success"])
flow = Flow.objects.get(slug=slug)
self.assertEqual(flow.name, flow_name)
self.assertEqual(flow.title, flow_name)
invitation_stage = InvitationStage.objects.get(name=stage_name)
self.assertEqual(
invitation_stage.continue_flow_without_invitation,
continue_without_invitation,
)
user_write_stage = UserWriteStage.objects.get(
name=f"invitation-enrollment-user-write-{slug}"
)
self.assertEqual(user_write_stage.user_type, user_type)
self.assertEqual(user_write_stage.user_path_template, f"users/{user_type}")
def test_api_import_blank_path(self):
"""Validator returns empty path unchanged (covers api.py:53)."""
with NamedTemporaryFile(mode="w+", suffix=".yaml") as file:
file.write(dump({"version": 1, "entries": []}))
file.flush()
file.seek(0)
res = self.client.post(
reverse("authentik_api:blueprintinstance-import-"),
data={"path": "", "file": file},
format="multipart",
)
self.assertEqual(res.status_code, 200)
def test_api_import_unknown_path(self):
"""Path not in available blueprints is rejected (covers api.py:56)."""
res = self.client.post(
reverse("authentik_api:blueprintinstance-import-"),
data={"path": "does/not/exist.yaml"},
format="multipart",
)
self.assertEqual(res.status_code, 400)
self.assertIn("Blueprint file does not exist", res.content.decode())
def test_api_import_blank_context(self):
"""Blank context is normalized to empty dict (covers api.py:62)."""
res = self.client.post(
reverse("authentik_api:blueprintinstance-import-"),
data={
"path": "example/flows-invitation-enrollment-minimal.yaml",
"context": "",
},
format="multipart",
)
self.assertEqual(res.status_code, 200)
def test_api_import_invalid_json_context(self):
"""Malformed JSON context raises ValidationError (covers api.py:65-66)."""
res = self.client.post(
reverse("authentik_api:blueprintinstance-import-"),
data={
"path": "example/flows-invitation-enrollment-minimal.yaml",
"context": "{not json",
},
format="multipart",
)
self.assertEqual(res.status_code, 400)
self.assertIn("Context must be valid JSON", res.content.decode())
def test_api_import_non_object_context(self):
"""JSON context that isn't an object is rejected (covers api.py:68)."""
res = self.client.post(
reverse("authentik_api:blueprintinstance-import-"),
data={
"path": "example/flows-invitation-enrollment-minimal.yaml",
"context": "[1, 2, 3]",
},
format="multipart",
)
self.assertEqual(res.status_code, 400)
self.assertIn("Context must be a JSON object", res.content.decode())

View File

@@ -1,11 +1,8 @@
"""Test blueprints v1"""
from unittest.mock import patch
from django.test import TransactionTestCase
from authentik.blueprints.v1.importer import Importer
from authentik.enterprise.license import LicenseKey
from authentik.flows.models import Flow
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture
@@ -45,45 +42,3 @@ class TestBlueprintsV1Conditions(TransactionTestCase):
# Ensure objects do not exist
self.assertFalse(Flow.objects.filter(slug=flow_slug1))
self.assertFalse(Flow.objects.filter(slug=flow_slug2))
def test_enterprise_license_context_unlicensed(self):
"""Test enterprise license context defaults to a false boolean when unlicensed."""
license_key = LicenseKey("test", 0, "Test license", 0, 0)
with patch("authentik.enterprise.license.LicenseKey.get_total", return_value=license_key):
importer = Importer.from_string("""
version: 1
entries:
- identifiers:
name: enterprise-test
slug: enterprise-test
model: authentik_flows.flow
conditions:
- !Context goauthentik.io/enterprise/licensed
attrs:
designation: stage_configuration
title: foo
""")
self.assertIs(importer.blueprint.context["goauthentik.io/enterprise/licensed"], False)
def test_enterprise_license_context_licensed(self):
"""Test enterprise license context defaults to a true boolean when licensed."""
license_key = LicenseKey("test", 253402300799, "Test license", 1000, 1000)
with patch("authentik.enterprise.license.LicenseKey.get_total", return_value=license_key):
importer = Importer.from_string("""
version: 1
entries:
- identifiers:
name: enterprise-test
slug: enterprise-test
model: authentik_flows.flow
conditions:
- !Context goauthentik.io/enterprise/licensed
attrs:
designation: stage_configuration
title: foo
""")
self.assertIs(importer.blueprint.context["goauthentik.io/enterprise/licensed"], True)

View File

@@ -146,7 +146,9 @@ class Importer:
try:
from authentik.enterprise.license import LicenseKey
context["goauthentik.io/enterprise/licensed"] = LicenseKey.get_total().status().is_valid
context["goauthentik.io/enterprise/licensed"] = (
LicenseKey.get_total().status().is_valid,
)
except ModuleNotFoundError:
pass
return context

View File

@@ -64,7 +64,6 @@ class BrandSerializer(ModelSerializer):
"flow_unenrollment",
"flow_user_settings",
"flow_device_code",
"flow_lockdown",
"default_application",
"web_certificate",
"client_certificates",
@@ -118,7 +117,6 @@ class CurrentBrandSerializer(PassiveSerializer):
flow_unenrollment = CharField(source="flow_unenrollment.slug", required=False)
flow_user_settings = CharField(source="flow_user_settings.slug", required=False)
flow_device_code = CharField(source="flow_device_code.slug", required=False)
flow_lockdown = CharField(source="flow_lockdown.slug", required=False)
default_locale = CharField(read_only=True)
flags = SerializerMethodField()
@@ -156,7 +154,6 @@ class BrandViewSet(UsedByMixin, ModelViewSet):
"flow_unenrollment",
"flow_user_settings",
"flow_device_code",
"flow_lockdown",
"web_certificate",
"client_certificates",
]

View File

@@ -1,25 +0,0 @@
# Generated by Django 5.2.12 on 2026-03-14 02:58
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_brands", "0011_alter_brand_branding_default_flow_background_and_more"),
("authentik_flows", "0031_alter_flow_layout"),
]
operations = [
migrations.AddField(
model_name="brand",
name="flow_lockdown",
field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="brand_lockdown",
to="authentik_flows.flow",
),
),
]

View File

@@ -58,9 +58,6 @@ class Brand(SerializerModel):
flow_device_code = models.ForeignKey(
Flow, null=True, on_delete=models.SET_NULL, related_name="brand_device_code"
)
flow_lockdown = models.ForeignKey(
Flow, null=True, on_delete=models.SET_NULL, related_name="brand_lockdown"
)
default_application = models.ForeignKey(
"authentik_core.Application",

View File

@@ -20,16 +20,11 @@ class TestBrands(APITestCase):
def setUp(self):
super().setUp()
self.default_flags = {}
for flag in Flag.available(visibility="public"):
self.default_flags[flag().key] = flag.get()
Brand.objects.all().delete()
@property
def default_flags(self) -> dict[str, object]:
"""Get current public flags.
Some tests define temporary Flag subclasses, so this can't be cached in setUp.
"""
return {flag().key: flag.get() for flag in Flag.available(visibility="public")}
def test_current_brand(self):
"""Test Current brand API"""
brand = create_test_brand()

View File

@@ -47,8 +47,7 @@ class ApplicationEntitlementViewSet(UsedByMixin, ModelViewSet):
search_fields = [
"pbm_uuid",
"name",
"app__name",
"app__slug",
"app",
"attributes",
]
filterset_fields = [

View File

@@ -32,19 +32,19 @@ from authentik.rbac.decorators import permission_required
class UserAgentDeviceDict(TypedDict):
"""User agent device"""
brand: str | None = None
brand: str
family: str
model: str | None = None
model: str
class UserAgentOSDict(TypedDict):
"""User agent os"""
family: str
major: str | None = None
minor: str | None = None
patch: str | None = None
patch_minor: str | None = None
major: str
minor: str
patch: str
patch_minor: str
class UserAgentBrowserDict(TypedDict):

View File

@@ -563,9 +563,6 @@ class UsersFilter(FilterSet):
class UserViewSet(
ConditionalInheritance(
"authentik.enterprise.stages.account_lockdown.api.UserAccountLockdownMixin"
),
ConditionalInheritance("authentik.enterprise.reports.api.reports.ExportMixin"),
UsedByMixin,
ModelViewSet,

View File

@@ -1,5 +1,6 @@
"""authentik core signals"""
from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from django.contrib.auth.signals import user_logged_in
from django.core.cache import cache
@@ -58,7 +59,7 @@ def user_logged_in_session(sender, request: HttpRequest, user: User, **_):
layer = get_channel_layer()
device_cookie = request.COOKIES.get("authentik_device")
if device_cookie:
layer.group_send_blocking(
async_to_sync(layer.group_send)(
build_device_group(device_cookie),
{"type": "event.session.authenticated"},
)

View File

@@ -12,7 +12,7 @@
{% block head %}
<style data-id="static-styles">
:root {
--ak-global--background-image: url("{{ request.brand.branding_default_flow_background_url|iriencode|safe }}");
--ak-global--background-image: url("{{ request.brand.branding_default_flow_background_url }}");
}
</style>

View File

@@ -1,6 +1,7 @@
from datetime import datetime
from django.db.models import Exists, OuterRef, Q, Subquery
from django.db.models import BooleanField as ModelBooleanField
from django.db.models import Case, Q, Value, When
from django_filters.rest_framework import BooleanFilter, FilterSet
from drf_spectacular.utils import extend_schema
from rest_framework.decorators import action
@@ -13,7 +14,7 @@ from rest_framework.viewsets import GenericViewSet
from authentik.core.api.utils import ModelSerializer
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.lifecycle.api.reviews import ReviewSerializer
from authentik.enterprise.lifecycle.models import LifecycleIteration, LifecycleRule, ReviewState
from authentik.enterprise.lifecycle.models import LifecycleIteration, ReviewState
from authentik.enterprise.lifecycle.utils import (
ContentTypeField,
ReviewerGroupSerializer,
@@ -25,25 +26,20 @@ from authentik.enterprise.lifecycle.utils import (
from authentik.lib.utils.time import timedelta_from_string
class RelatedRuleSerializer(EnterpriseRequiredMixin, ModelSerializer):
reviewer_groups = ReviewerGroupSerializer(many=True, read_only=True)
min_reviewers = IntegerField(read_only=True)
reviewers = ReviewerUserSerializer(many=True, read_only=True)
class Meta:
model = LifecycleRule
fields = ["id", "name", "reviewer_groups", "min_reviewers", "reviewers"]
class LifecycleIterationSerializer(EnterpriseRequiredMixin, ModelSerializer):
content_type = ContentTypeField()
object_verbose = SerializerMethodField()
rule = RelatedRuleSerializer(read_only=True)
object_admin_url = SerializerMethodField(read_only=True)
grace_period_end = SerializerMethodField(read_only=True)
reviews = ReviewSerializer(many=True, read_only=True, source="review_set.all")
user_can_review = SerializerMethodField(read_only=True)
reviewer_groups = ReviewerGroupSerializer(
many=True, read_only=True, source="rule.reviewer_groups"
)
min_reviewers = IntegerField(read_only=True, source="rule.min_reviewers")
reviewers = ReviewerUserSerializer(many=True, read_only=True, source="rule.reviewers")
next_review_date = SerializerMethodField(read_only=True)
class Meta:
@@ -59,8 +55,10 @@ class LifecycleIterationSerializer(EnterpriseRequiredMixin, ModelSerializer):
"grace_period_end",
"next_review_date",
"reviews",
"rule",
"user_can_review",
"reviewer_groups",
"min_reviewers",
"reviewers",
]
read_only_fields = fields
@@ -90,55 +88,43 @@ class IterationViewSet(EnterpriseRequiredMixin, CreateModelMixin, GenericViewSet
queryset = LifecycleIteration.objects.all()
serializer_class = LifecycleIterationSerializer
ordering = ["-opened_on"]
ordering_fields = [
"state",
"content_type__model",
"rule__name",
"opened_on",
"grace_period_end",
]
ordering_fields = ["state", "content_type__model", "opened_on", "grace_period_end"]
filterset_class = LifecycleIterationFilterSet
def get_queryset(self):
user = self.request.user
return self.queryset.annotate(
user_is_reviewer=Exists(
LifecycleRule.objects.filter(
pk=OuterRef("rule_id"),
).filter(
Q(reviewers=user) | Q(reviewer_groups__in=user.groups.all().with_ancestors())
)
user_is_reviewer=Case(
When(
Q(rule__reviewers=user)
| Q(rule__reviewer_groups__in=user.groups.all().with_ancestors()),
then=Value(True),
),
default=Value(False),
output_field=ModelBooleanField(),
)
)
).distinct()
@extend_schema(
operation_id="lifecycle_iterations_list_latest",
responses={200: LifecycleIterationSerializer(many=True)},
)
@action(
detail=False,
pagination_class=None,
methods=["get"],
url_path=r"latest/(?P<content_type>[^/]+)/(?P<object_id>[^/]+)",
)
def latest_iterations(self, request: Request, content_type: str, object_id: str) -> Response:
def latest_iteration(self, request: Request, content_type: str, object_id: str) -> Response:
ct = parse_content_type(content_type)
latest_ids_subquery = (
LifecycleIteration.objects.filter(
rule=OuterRef("rule"),
content_type__app_label=ct["app_label"],
content_type__model=ct["model"],
object_id=object_id,
try:
obj = (
self.get_queryset()
.filter(
content_type__app_label=ct["app_label"],
content_type__model=ct["model"],
object_id=object_id,
)
.latest("opened_on")
)
.order_by("-opened_on")
.values("id")[:1]
)
latest_per_rule = LifecycleIteration.objects.filter(
content_type__app_label=ct["app_label"],
content_type__model=ct["model"],
object_id=object_id,
).filter(id=Subquery(latest_ids_subquery))
serializer = self.get_serializer(latest_per_rule, many=True)
except LifecycleIteration.DoesNotExist:
return Response(status=404)
serializer = self.get_serializer(obj)
return Response(serializer.data)
@extend_schema(

View File

@@ -84,6 +84,23 @@ class LifecycleRuleSerializer(EnterpriseRequiredMixin, ModelSerializer):
raise ValidationError(
{"grace_period": _("Grace period must be shorter than the interval.")}
)
if "content_type" in attrs or "object_id" in attrs:
content_type = attrs.get("content_type", getattr(self.instance, "content_type", None))
object_id = attrs.get("object_id", getattr(self.instance, "object_id", None))
if content_type is not None and object_id is None:
existing = LifecycleRule.objects.filter(
content_type=content_type, object_id__isnull=True
)
if self.instance:
existing = existing.exclude(pk=self.instance.pk)
if existing.exists():
raise ValidationError(
{
"content_type": _(
"Only one type-wide rule for each object type is allowed."
)
}
)
return attrs

View File

@@ -1,21 +0,0 @@
# Generated by Django 5.2.11 on 2026-03-05 11:27
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the uniqueness guarantees on LifecycleRule.

    Removes the conditional unique constraint (at most one type-wide rule,
    i.e. object_id IS NULL, per content type) and clears unique_together,
    so multiple rules may target the same content type / object.
    """

    dependencies = [
        ("authentik_lifecycle", "0002_alter_lifecycleiteration_opened_on"),
    ]

    operations = [
        # Remove the partial unique constraint on content_type for
        # rows where object_id is NULL.
        migrations.RemoveConstraint(
            model_name="lifecyclerule",
            name="uniq_lifecycle_rule_ct_null_object",
        ),
        # Clear the (content_type, object_id) unique_together entirely.
        migrations.AlterUniqueTogether(
            name="lifecyclerule",
            unique_together=set(),
        ),
    ]

View File

@@ -56,6 +56,14 @@ class LifecycleRule(SerializerModel):
class Meta:
indexes = [models.Index(fields=["content_type"])]
unique_together = [["content_type", "object_id"]]
constraints = [
models.UniqueConstraint(
fields=["content_type"],
condition=Q(object_id__isnull=True),
name="uniq_lifecycle_rule_ct_null_object",
)
]
@property
def serializer(self) -> type[BaseSerializer]:
@@ -74,6 +82,12 @@ class LifecycleRule(SerializerModel):
qs = self.content_type.get_all_objects_for_this_type()
if self.object_id:
qs = qs.filter(pk=self.object_id)
else:
qs = qs.exclude(
pk__in=LifecycleRule.objects.filter(
content_type=self.content_type, object_id__isnull=False
).values_list(Cast("object_id", output_field=self._get_pk_field()), flat=True)
)
return qs
def _get_stale_iterations(self) -> QuerySet[LifecycleIteration]:
@@ -93,7 +107,8 @@ class LifecycleRule(SerializerModel):
def _get_newly_due_objects(self) -> QuerySet:
recent_iteration_ids = LifecycleIteration.objects.filter(
rule=self,
content_type=self.content_type,
object_id__isnull=False,
opened_on__gte=start_of_day(
timezone.now() + timedelta(days=1) - timedelta_from_string(self.interval)
),
@@ -199,15 +214,9 @@ class LifecycleIteration(SerializerModel, ManagedModel):
}
def initialize(self):
if (self.content_type.app_label, self.content_type.model) == ("authentik_core", "group"):
object_label = self.object.name
elif (self.content_type.app_label, self.content_type.model) == ("authentik_rbac", "role"):
object_label = self.object.name
else:
object_label = str(self.object)
event = Event.new(
EventAction.REVIEW_INITIATED,
message=_(f"Access review is due for {self.content_type.name.lower()} {object_label}"),
message=_(f"Access review is due for {self.content_type.name} {str(self.object)}"),
**self._get_event_args(),
)
event.save()

View File

@@ -3,7 +3,6 @@ from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from authentik.enterprise.lifecycle.models import LifecycleRule, ReviewState
from authentik.tasks.schedules.models import Schedule
@receiver(post_save, sender=LifecycleRule)
@@ -12,9 +11,7 @@ def post_rule_save(sender, instance: LifecycleRule, created: bool, **_):
apply_lifecycle_rule.send_with_options(
args=(instance.id,),
rel_obj=Schedule.objects.get(
actor_name="authentik.enterprise.lifecycle.tasks.apply_lifecycle_rules"
),
rel_obj=instance,
)

View File

@@ -4,17 +4,14 @@ from dramatiq import actor
from authentik.core.models import User
from authentik.enterprise.lifecycle.models import LifecycleRule
from authentik.events.models import Event, Notification, NotificationTransport
from authentik.tasks.schedules.models import Schedule
@actor(description=_("Dispatch tasks to apply lifecycle rules."))
@actor(description=_("Dispatch tasks to validate lifecycle rules."))
def apply_lifecycle_rules():
for rule in LifecycleRule.objects.all():
apply_lifecycle_rule.send_with_options(
args=(rule.id,),
rel_obj=Schedule.objects.get(
actor_name="authentik.enterprise.lifecycle.tasks.apply_lifecycle_rules"
),
rel_obj=rule,
)

View File

@@ -1,4 +1,3 @@
from django.apps import apps
from django.contrib.contenttypes.models import ContentType
from django.urls import reverse
from rest_framework.test import APITestCase
@@ -20,11 +19,6 @@ class TestLifecycleRuleAPI(APITestCase):
self.content_type = ContentType.objects.get_for_model(Application)
self.reviewer_group = Group.objects.create(name=generate_id())
@classmethod
def setUpTestData(cls):
config = apps.get_app_config("authentik_tasks_schedules")
config._on_startup_callback(None)
def test_list_rules(self):
rule = LifecycleRule.objects.create(
name=generate_id(),
@@ -196,11 +190,6 @@ class TestIterationAPI(APITestCase):
self.reviewer_group = Group.objects.create(name=generate_id())
self.reviewer_group.users.add(self.user)
@classmethod
def setUpTestData(cls):
config = apps.get_app_config("authentik_tasks_schedules")
config._on_startup_callback(None)
def test_open_iterations(self):
rule = LifecycleRule.objects.create(
name=generate_id(),
@@ -242,7 +231,7 @@ class TestIterationAPI(APITestCase):
response = self.client.get(
reverse(
"authentik_api:lifecycleiteration-latest-iterations",
"authentik_api:lifecycleiteration-latest-iteration",
kwargs={
"content_type": f"{self.content_type.app_label}.{self.content_type.model}",
"object_id": str(self.app.pk),
@@ -250,20 +239,19 @@ class TestIterationAPI(APITestCase):
)
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data), 1)
self.assertEqual(response.data[0]["object_id"], str(self.app.pk))
self.assertEqual(response.data["object_id"], str(self.app.pk))
def test_latest_iteration_not_found(self):
response = self.client.get(
reverse(
"authentik_api:lifecycleiteration-latest-iterations",
"authentik_api:lifecycleiteration-latest-iteration",
kwargs={
"content_type": f"{self.content_type.app_label}.{self.content_type.model}",
"object_id": "00000000-0000-0000-0000-000000000000",
},
)
)
self.assertEqual(response.data, [])
self.assertEqual(response.status_code, 404)
def test_iteration_includes_user_can_review(self):
rule = LifecycleRule.objects.create(
@@ -291,11 +279,6 @@ class TestReviewAPI(APITestCase):
self.reviewer_group = Group.objects.create(name=generate_id())
self.reviewer_group.users.add(self.user)
@classmethod
def setUpTestData(cls):
config = apps.get_app_config("authentik_tasks_schedules")
config._on_startup_callback(None)
def test_create_review(self):
rule = LifecycleRule.objects.create(
name=generate_id(),

View File

@@ -2,7 +2,6 @@ import datetime as dt
from datetime import timedelta
from unittest.mock import patch
from django.apps import apps
from django.contrib.contenttypes.models import ContentType
from django.test import RequestFactory, TestCase
from django.utils import timezone
@@ -30,11 +29,6 @@ class TestLifecycleModels(TestCase):
def setUp(self):
self.factory = RequestFactory()
@classmethod
def setUpTestData(cls):
config = apps.get_app_config("authentik_tasks_schedules")
config._on_startup_callback(None)
def _get_request(self):
return self.factory.get("/")
@@ -444,6 +438,31 @@ class TestLifecycleModels(TestCase):
self.assertIn(app_one, objects)
self.assertIn(app_two, objects)
def test_rule_type_excludes_objects_with_specific_rules(self):
app_with_rule = Application.objects.create(name=generate_id(), slug=generate_id())
app_without_rule = Application.objects.create(name=generate_id(), slug=generate_id())
content_type = ContentType.objects.get_for_model(Application)
# Create a specific rule for app_with_rule
LifecycleRule.objects.create(
name=generate_id(),
content_type=content_type,
object_id=str(app_with_rule.pk),
interval="days=30",
)
# Create a type-level rule
type_rule = LifecycleRule.objects.create(
name=generate_id(),
content_type=content_type,
object_id=None,
interval="days=60",
)
objects = list(type_rule.get_objects())
self.assertNotIn(app_with_rule, objects)
self.assertIn(app_without_rule, objects)
def test_rule_type_apply_creates_iterations_for_all_objects(self):
app_one = Application.objects.create(name=generate_id(), slug=generate_id())
app_two = Application.objects.create(name=generate_id(), slug=generate_id())
@@ -650,73 +669,6 @@ class TestLifecycleModels(TestCase):
self.assertIn(explicit_reviewer, reviewers)
self.assertIn(group_member, reviewers)
def test_multiple_rules_same_object_create_separate_iterations(self):
"""Two rules targeting the same object each create their own iteration."""
obj = Application.objects.create(name=generate_id(), slug=generate_id())
content_type = ContentType.objects.get_for_model(obj)
rule_one = self._create_rule_for_object(obj, interval="days=30", grace_period="days=10")
rule_two = self._create_rule_for_object(obj, interval="days=60", grace_period="days=20")
iterations = LifecycleIteration.objects.filter(
content_type=content_type, object_id=str(obj.pk)
)
self.assertEqual(iterations.count(), 2)
iter_one = iterations.get(rule=rule_one)
iter_two = iterations.get(rule=rule_two)
self.assertEqual(iter_one.state, ReviewState.PENDING)
self.assertEqual(iter_two.state, ReviewState.PENDING)
self.assertNotEqual(iter_one.pk, iter_two.pk)
def test_multiple_rules_same_object_reviewed_independently(self):
"""Reviewing one rule's iteration does not affect the other rule's iteration."""
obj = Application.objects.create(name=generate_id(), slug=generate_id())
content_type = ContentType.objects.get_for_model(obj)
reviewer = create_test_user()
rule_one = self._create_rule_for_object(obj, min_reviewers=1)
rule_two = self._create_rule_for_object(obj, min_reviewers=1)
group = Group.objects.create(name=generate_id())
group.users.add(reviewer)
rule_one.reviewer_groups.add(group)
rule_two.reviewer_groups.add(group)
iter_one = LifecycleIteration.objects.get(
content_type=content_type, object_id=str(obj.pk), rule=rule_one
)
iter_two = LifecycleIteration.objects.get(
content_type=content_type, object_id=str(obj.pk), rule=rule_two
)
request = self._get_request()
# Review only rule_one's iteration
Review.objects.create(iteration=iter_one, reviewer=reviewer)
iter_one.on_review(request)
iter_one.refresh_from_db()
iter_two.refresh_from_db()
self.assertEqual(iter_one.state, ReviewState.REVIEWED)
self.assertEqual(iter_two.state, ReviewState.PENDING)
def test_type_rule_and_object_rule_both_create_iterations(self):
"""A type-level rule and an object-level rule both create iterations for the same object."""
obj = Application.objects.create(name=generate_id(), slug=generate_id())
content_type = ContentType.objects.get_for_model(obj)
object_rule = self._create_rule_for_object(obj, interval="days=30")
type_rule = self._create_rule_for_type(Application, interval="days=60")
iterations = LifecycleIteration.objects.filter(
content_type=content_type, object_id=str(obj.pk)
)
self.assertEqual(iterations.count(), 2)
self.assertTrue(iterations.filter(rule=object_rule).exists())
self.assertTrue(iterations.filter(rule=type_rule).exists())
class TestLifecycleDateBoundaries(TestCase):
"""Verify that start_of_day normalization ensures correct overdue/due
@@ -727,11 +679,6 @@ class TestLifecycleDateBoundaries(TestCase):
ensures that the boundary is always at midnight, so millisecond variations
in task execution time do not affect results."""
@classmethod
def setUpTestData(cls):
config = apps.get_app_config("authentik_tasks_schedules")
config._on_startup_callback(None)
def _create_rule_and_iteration(self, grace_period="days=1", interval="days=365"):
app = Application.objects.create(name=generate_id(), slug=generate_id())
content_type = ContentType.objects.get_for_model(Application)

View File

@@ -1,7 +1,6 @@
# Generated by Django 5.2.12 on 2026-04-04 16:58
from django.db import migrations, models
import django.contrib.postgres.fields
class Migration(migrations.Migration):
@@ -41,109 +40,4 @@ class Migration(migrations.Migration):
]
),
),
migrations.AlterField(
model_name="stream",
name="events_requested",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(
choices=[
(
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
"Caep Session Revoked",
),
(
"https://schemas.openid.net/secevent/caep/event-type/token-claims-change",
"Caep Token Claims Change",
),
(
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
"Caep Credential Change",
),
(
"https://schemas.openid.net/secevent/caep/event-type/assurance-level-change",
"Caep Assurance Level Change",
),
(
"https://schemas.openid.net/secevent/caep/event-type/device-compliance-change",
"Caep Device Compliance Change",
),
(
"https://schemas.openid.net/secevent/caep/event-type/session-established",
"Caep Session Established",
),
(
"https://schemas.openid.net/secevent/caep/event-type/session-presented",
"Caep Session Presented",
),
(
"https://schemas.openid.net/secevent/caep/event-type/risk-level-change",
"Caep Risk Level Change",
),
(
"https://schemas.openid.net/secevent/ssf/event-type/verification",
"Set Verification",
),
]
),
default=list,
size=None,
),
),
migrations.AlterField(
model_name="stream",
name="status",
field=models.TextField(
choices=[
("enabled", "Enabled"),
("paused", "Paused"),
("disabled", "Disabled"),
("disabled_deleted", "Disabled Deleted"),
],
default="enabled",
),
),
migrations.AlterField(
model_name="streamevent",
name="type",
field=models.TextField(
choices=[
(
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
"Caep Session Revoked",
),
(
"https://schemas.openid.net/secevent/caep/event-type/token-claims-change",
"Caep Token Claims Change",
),
(
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
"Caep Credential Change",
),
(
"https://schemas.openid.net/secevent/caep/event-type/assurance-level-change",
"Caep Assurance Level Change",
),
(
"https://schemas.openid.net/secevent/caep/event-type/device-compliance-change",
"Caep Device Compliance Change",
),
(
"https://schemas.openid.net/secevent/caep/event-type/session-established",
"Caep Session Established",
),
(
"https://schemas.openid.net/secevent/caep/event-type/session-presented",
"Caep Session Presented",
),
(
"https://schemas.openid.net/secevent/caep/event-type/risk-level-change",
"Caep Risk Level Change",
),
(
"https://schemas.openid.net/secevent/ssf/event-type/verification",
"Set Verification",
),
]
),
),
]

View File

@@ -24,31 +24,8 @@ class EventTypes(models.TextChoices):
"""SSF Event types supported by authentik"""
CAEP_SESSION_REVOKED = "https://schemas.openid.net/secevent/caep/event-type/session-revoked"
"""https://openid.net/specs/openid-caep-1_0-final.html#section-3.1"""
CAEP_TOKEN_CLAIMS_CHANGE = (
"https://schemas.openid.net/secevent/caep/event-type/token-claims-change"
)
"""https://openid.net/specs/openid-caep-1_0-final.html#section-3.2"""
CAEP_CREDENTIAL_CHANGE = "https://schemas.openid.net/secevent/caep/event-type/credential-change"
"""https://openid.net/specs/openid-caep-1_0-final.html#section-3.3"""
CAEP_ASSURANCE_LEVEL_CHANGE = (
"https://schemas.openid.net/secevent/caep/event-type/assurance-level-change"
)
"""https://openid.net/specs/openid-caep-1_0-final.html#section-3.4"""
CAEP_DEVICE_COMPLIANCE_CHANGE = (
"https://schemas.openid.net/secevent/caep/event-type/device-compliance-change"
)
"""https://openid.net/specs/openid-caep-1_0-final.html#section-3.5"""
CAEP_SESSION_ESTABLISHED = (
"https://schemas.openid.net/secevent/caep/event-type/session-established"
)
"""https://openid.net/specs/openid-caep-1_0-final.html#section-3.6"""
CAEP_SESSION_PRESENTED = "https://schemas.openid.net/secevent/caep/event-type/session-presented"
"""https://openid.net/specs/openid-caep-1_0-final.html#section-3.7"""
CAEP_RISK_LEVEL_CHANGE = "https://schemas.openid.net/secevent/caep/event-type/risk-level-change"
"""https://openid.net/specs/openid-caep-1_0-final.html#section-3.8"""
SET_VERIFICATION = "https://schemas.openid.net/secevent/ssf/event-type/verification"
"""https://openid.net/specs/openid-sharedsignals-framework-1_0.html#section-8.1.4.1"""
class DeliveryMethods(models.TextChoices):
@@ -69,12 +46,10 @@ class SSFEventStatus(models.TextChoices):
class StreamStatus(models.TextChoices):
"""SSF Stream status"""
ENABLED = "enabled"
PAUSED = "paused"
DISABLED = "disabled"
DISABLED_DELETED = "disabled_deleted"
class SSFProvider(TasksModel, BackchannelProvider):

View File

@@ -108,13 +108,13 @@ def send_ssf_event(stream_uuid: UUID, event_data: dict[str, Any]):
event.save()
self.info("Event successfully sent", status=response.status_code)
# Cleanup, if we were the last pending message for this stream and it has been deleted
# (status=StreamStatus.DISABLED_DELETED), then we can delete the stream
# (status=StreamStatus.DISABLED), then we can delete the stream
if (
not StreamEvent.objects.filter(
stream=stream,
status__in=[SSFEventStatus.PENDING_FAILED, SSFEventStatus.PENDING_NEW],
).exists()
and stream.status == StreamStatus.DISABLED_DELETED
and stream.status == StreamStatus.DISABLED
):
LOGGER.info(
"Deleting inactive stream as all pending messages were sent.", stream=stream

View File

@@ -62,7 +62,7 @@ class TestSSFAuth(APITestCase):
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
self.assertEqual(
event.payload["events"],
{"https://schemas.openid.net/secevent/ssf/event-type/verification": {}},
{"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
)
def test_stream_add_oidc(self):
@@ -115,7 +115,7 @@ class TestSSFAuth(APITestCase):
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
self.assertEqual(
event.payload["events"],
{"https://schemas.openid.net/secevent/ssf/event-type/verification": {}},
{"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
)
def test_token_invalid(self):

View File

@@ -54,7 +54,7 @@ class TestStream(APITestCase):
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
self.assertEqual(
event.payload["events"],
{"https://schemas.openid.net/secevent/ssf/event-type/verification": {}},
{"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
)
def test_stream_add_poll(self):
@@ -96,7 +96,7 @@ class TestStream(APITestCase):
)
self.assertEqual(res.status_code, 204)
stream.refresh_from_db()
self.assertEqual(stream.status, StreamStatus.DISABLED_DELETED)
self.assertEqual(stream.status, StreamStatus.DISABLED)
def test_stream_get(self):
"""get stream"""
@@ -225,26 +225,3 @@ class TestStream(APITestCase):
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
)
self.assertEqual(res.status_code, 404)
def test_stream_status_update(self):
stream = Stream.objects.create(provider=self.provider)
res = self.client.post(
reverse(
"authentik_providers_ssf:stream-status",
kwargs={"application_slug": self.application.slug},
),
data={
"stream_id": str(stream.pk),
"status": StreamStatus.DISABLED,
},
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
)
self.assertEqual(res.status_code, 200)
stream.refresh_from_db()
self.assertJSONEqual(
res.content,
{
"stream_id": str(stream.pk),
"status": str(stream.status),
},
)

View File

@@ -33,7 +33,7 @@ class TestTasks(APITestCase):
)
event_data = stream.prepare_event_payload(
EventTypes.SET_VERIFICATION,
{},
{"state": None},
sub_id={"format": "opaque", "id": str(stream.uuid)},
)
with Mocker() as mocker:
@@ -46,7 +46,7 @@ class TestTasks(APITestCase):
)
jwt = decode_complete(mocker.request_history[0].body, options={"verify_signature": False})
self.assertEqual(jwt["header"]["typ"], "secevent+jwt")
self.assertEqual(jwt["payload"]["events"][EventTypes.SET_VERIFICATION], {})
self.assertIsNone(jwt["payload"]["events"][EventTypes.SET_VERIFICATION]["state"])
def test_push_auth(self):
auth = generate_id()
@@ -58,7 +58,7 @@ class TestTasks(APITestCase):
)
event_data = stream.prepare_event_payload(
EventTypes.SET_VERIFICATION,
{},
{"state": None},
sub_id={"format": "opaque", "id": str(stream.uuid)},
)
with Mocker() as mocker:
@@ -72,7 +72,7 @@ class TestTasks(APITestCase):
)
jwt = decode_complete(mocker.request_history[0].body, options={"verify_signature": False})
self.assertEqual(jwt["header"]["typ"], "secevent+jwt")
self.assertEqual(jwt["payload"]["events"][EventTypes.SET_VERIFICATION], {})
self.assertIsNone(jwt["payload"]["events"][EventTypes.SET_VERIFICATION]["state"])
def test_push_stream_disable(self):
auth = generate_id()
@@ -81,11 +81,11 @@ class TestTasks(APITestCase):
delivery_method=DeliveryMethods.RFC_PUSH,
endpoint_url="http://localhost/ssf-push",
authorization_header=auth,
status=StreamStatus.DISABLED_DELETED,
status=StreamStatus.DISABLED,
)
event_data = stream.prepare_event_payload(
EventTypes.SET_VERIFICATION,
{},
{"state": None},
sub_id={"format": "opaque", "id": str(stream.uuid)},
)
with Mocker() as mocker:
@@ -95,7 +95,7 @@ class TestTasks(APITestCase):
).get_result(block=True, timeout=1)
jwt = decode_complete(mocker.request_history[0].body, options={"verify_signature": False})
self.assertEqual(jwt["header"]["typ"], "secevent+jwt")
self.assertEqual(jwt["payload"]["events"][EventTypes.SET_VERIFICATION], {})
self.assertIsNone(jwt["payload"]["events"][EventTypes.SET_VERIFICATION]["state"])
self.assertFalse(Stream.objects.filter(pk=stream.pk).exists())
def test_push_error(self):
@@ -106,7 +106,7 @@ class TestTasks(APITestCase):
)
event_data = stream.prepare_event_payload(
EventTypes.SET_VERIFICATION,
{},
{"state": None},
sub_id={"format": "opaque", "id": str(stream.uuid)},
)
with Mocker() as mocker:

View File

@@ -24,10 +24,10 @@ class SSFView(APIView):
class SSFStreamView(SSFView):
def get_object(self) -> Stream:
streams = Stream.objects.filter(provider=self.provider).exclude(
status=StreamStatus.DISABLED_DELETED
)
def get_object(self, any_status=False) -> Stream:
streams = Stream.objects.filter(provider=self.provider)
if not any_status:
streams = streams.filter(status__in=[StreamStatus.ENABLED, StreamStatus.PAUSED])
if "stream_id" in self.request.query_params:
streams = streams.filter(pk=self.request.query_params["stream_id"])
if "stream_id" in self.request.data:

View File

@@ -1,6 +1,6 @@
from uuid import uuid4
from django.http import Http404, HttpRequest
from django.http import HttpRequest
from django.urls import reverse
from rest_framework.exceptions import PermissionDenied, ValidationError
from rest_framework.fields import CharField, ChoiceField, ListField, SerializerMethodField
@@ -106,11 +106,7 @@ class StreamResponseSerializer(PassiveSerializer):
}
def get_events_supported(self, instance: Stream) -> list[str]:
return [
EventTypes.CAEP_SESSION_REVOKED,
EventTypes.CAEP_CREDENTIAL_CHANGE,
EventTypes.SET_VERIFICATION,
]
return [x.value for x in EventTypes]
class StreamView(SSFStreamView):
@@ -132,9 +128,10 @@ class StreamView(SSFStreamView):
LOGGER.info("Sending verification event", stream=instance)
send_ssf_events(
EventTypes.SET_VERIFICATION,
{},
{
"state": None,
},
stream_filter={"pk": instance.uuid},
request=request,
sub_id={"format": "opaque", "id": str(instance.uuid)},
)
response = StreamResponseSerializer(instance=instance, context={"request": request}).data
@@ -162,9 +159,7 @@ class StreamView(SSFStreamView):
def delete(self, request: Request, *args, **kwargs) -> Response:
stream = self.get_object()
if stream.status == StreamStatus.DISABLED_DELETED:
raise Http404
stream.status = StreamStatus.DISABLED_DELETED
stream.status = StreamStatus.DISABLED
stream.save()
return Response(status=204)
@@ -180,7 +175,6 @@ class StreamVerifyView(SSFStreamView):
"state": state,
},
stream_filter={"pk": stream.uuid},
request=request,
sub_id={"format": "opaque", "id": str(stream.uuid)},
)
return Response(status=204)
@@ -188,25 +182,8 @@ class StreamVerifyView(SSFStreamView):
class StreamStatusView(SSFStreamView):
class StreamStatusSerializer(PassiveSerializer):
stream_id = CharField()
status = ChoiceField(choices=StreamStatus.choices)
def get(self, request: Request, *args, **kwargs):
stream = self.get_object()
return Response(
{
"stream_id": str(stream.pk),
"status": str(stream.status),
}
)
def post(self, request: Request, *args, **kwargs):
stream = self.get_object()
serializer = self.StreamStatusSerializer(stream, data=request.data)
serializer.is_valid(raise_exception=True)
stream.status = serializer.validated_data["status"]
stream.save()
stream = self.get_object(any_status=True)
return Response(
{
"stream_id": str(stream.pk),

View File

@@ -14,7 +14,6 @@ TENANT_APPS = [
"authentik.enterprise.providers.ssf",
"authentik.enterprise.providers.ws_federation",
"authentik.enterprise.reports",
"authentik.enterprise.stages.account_lockdown",
"authentik.enterprise.stages.authenticator_endpoint_gdtc",
"authentik.enterprise.stages.mtls",
"authentik.enterprise.stages.source",

View File

@@ -1,141 +0,0 @@
"""Account Lockdown Stage API Views"""
from django.utils.translation import gettext as _
from drf_spectacular.utils import OpenApiExample, OpenApiResponse, extend_schema
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.permissions import IsAuthenticated
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import PrimaryKeyRelatedField
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger
from authentik.api.validation import validate
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import LinkSerializer, PassiveSerializer
from authentik.core.models import (
User,
)
from authentik.enterprise.api import EnterpriseRequiredMixin, enterprise_action
from authentik.enterprise.stages.account_lockdown.models import AccountLockdownStage
from authentik.enterprise.stages.account_lockdown.stage import (
can_lock_user,
get_lockdown_target_users,
)
from authentik.flows.api.stages import StageSerializer
from authentik.flows.exceptions import EmptyFlowException, FlowNonApplicableException
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner
LOGGER = get_logger()
class AccountLockdownStageSerializer(EnterpriseRequiredMixin, StageSerializer):
    """AccountLockdownStage Serializer.

    Extends the base StageSerializer with the lockdown-specific flags and
    the optional self-service completion flow; requires an enterprise
    license via EnterpriseRequiredMixin.
    """

    class Meta:
        model = AccountLockdownStage
        # Base stage fields plus the lockdown-action toggles defined on the model.
        fields = StageSerializer.Meta.fields + [
            "deactivate_user",
            "set_unusable_password",
            "delete_sessions",
            "revoke_tokens",
            "self_service_completion_flow",
        ]
class AccountLockdownStageViewSet(UsedByMixin, ModelViewSet):
    """AccountLockdownStage Viewset.

    Standard CRUD over AccountLockdownStage objects, with filtering on all
    model fields, name search, and default ordering by name.
    """

    queryset = AccountLockdownStage.objects.all()
    serializer_class = AccountLockdownStageSerializer
    filterset_fields = "__all__"
    ordering = ["name"]
    search_fields = ["name"]
class UserAccountLockdownSerializer(PassiveSerializer):
    """Choose the target account before starting the lockdown flow."""

    # Optional target user; the queryset limits valid choices to users
    # returned by get_lockdown_target_users(). A null/missing value means
    # self-service lockdown of the requesting user.
    user = PrimaryKeyRelatedField(
        queryset=get_lockdown_target_users(),
        required=False,
        allow_null=True,
        help_text=_("User to lock. If omitted, locks the current user (self-service)."),
    )
class UserAccountLockdownMixin:
    """Enterprise account-lockdown API actions for UserViewSet."""

    def _create_lockdown_flow_url(self, request: Request, user: User) -> str:
        """Create a flow URL for account lockdown.

        The request body selects the target before the flow starts. The API
        pre-plans the lockdown flow with the target as the pending user, so the
        account lockdown stage can use the normal flow context.

        Raises:
            ValidationError: when the brand has no lockdown flow configured,
                or when planning fails because the flow is empty or not
                applicable to this request.
        """
        flow = request._request.brand.flow_lockdown
        if flow is None:
            raise ValidationError({"non_field_errors": [_("No lockdown flow configured.")]})
        planner = FlowPlanner(flow)
        # Never reuse a cached plan: the pending user differs per request.
        planner.use_cache = False
        try:
            plan = planner.plan(request._request, {PLAN_CONTEXT_PENDING_USER: user})
        # BUGFIX: `except A, B:` is Python 2 syntax (a SyntaxError in
        # Python 3); multiple exception types must be a parenthesized tuple.
        except (EmptyFlowException, FlowNonApplicableException):
            raise ValidationError(
                {"non_field_errors": [_("Lockdown flow is not applicable.")]}
            ) from None
        return plan.to_redirect(request._request, flow).url

    @extend_schema(
        description=_("Choose the target account, then return a flow link."),
        request=UserAccountLockdownSerializer,
        responses={
            "200": OpenApiResponse(
                response=LinkSerializer,
                examples=[
                    OpenApiExample(
                        "Lockdown flow URL",
                        value={
                            "link": "https://example.invalid/if/flow/default-account-lockdown/",
                        },
                        response_only=True,
                        status_codes=["200"],
                    )
                ],
            ),
            "400": OpenApiResponse(
                description=_("No lockdown flow configured or the flow is not applicable")
            ),
            "403": OpenApiResponse(
                description=_("Permission denied (when targeting another user)")
            ),
        },
    )
    @action(
        detail=False,
        methods=["POST"],
        permission_classes=[IsAuthenticated],
        url_path="account_lockdown",
    )
    @validate(UserAccountLockdownSerializer)
    @enterprise_action
    def account_lockdown(self, request: Request, body: UserAccountLockdownSerializer) -> Response:
        """Trigger account lockdown for a user.

        If no user is specified, locks the current user (self-service).
        When targeting another user, admin permissions are required.

        Returns a flow link for the frontend to follow. The flow is pre-planned
        with the target user as pending user for the lockdown stage.
        """
        user = body.validated_data.get("user") or request.user
        # can_lock_user gates cross-user lockdown; deny before planning the flow.
        if not can_lock_user(request.user, user):
            LOGGER.debug("Permission denied for account lockdown", user=request.user)
            self.permission_denied(request)
        flow_url = self._create_lockdown_flow_url(request, user)
        LOGGER.debug("Returning lockdown flow URL", flow_url=flow_url, user=user.username)
        return Response({"link": flow_url})

View File

@@ -1,12 +0,0 @@
"""authentik account lockdown stage app config"""
from authentik.enterprise.apps import EnterpriseConfig
class AuthentikEnterpriseStageAccountLockdownConfig(EnterpriseConfig):
    """authentik account lockdown stage config"""

    # Dotted module path and short app label (the label is what migrations
    # and ContentType rows reference).
    name = "authentik.enterprise.stages.account_lockdown"
    label = "authentik_stages_account_lockdown"
    verbose_name = "authentik Enterprise.Stages.Account Lockdown"
    # Mark this AppConfig as the default for the package.
    default = True

View File

@@ -1,74 +0,0 @@
# Generated by Django 5.2.13 on 2026-04-19 21:56
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: create the AccountLockdownStage table.

    The model subclasses authentik_flows.Stage via multi-table inheritance
    (stage_ptr OneToOneField), adding boolean toggles for each lockdown
    action and an optional self-service completion flow.
    """

    initial = True

    dependencies = [
        ("authentik_flows", "0031_alter_flow_layout"),
    ]

    operations = [
        migrations.CreateModel(
            name="AccountLockdownStage",
            fields=[
                # Parent link implementing multi-table inheritance from Stage;
                # doubles as this table's primary key.
                (
                    "stage_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_flows.stage",
                    ),
                ),
                (
                    "deactivate_user",
                    models.BooleanField(
                        default=True,
                        help_text="Deactivate the user account (set is_active to False)",
                    ),
                ),
                (
                    "set_unusable_password",
                    models.BooleanField(
                        default=True, help_text="Set an unusable password for the user"
                    ),
                ),
                (
                    "delete_sessions",
                    models.BooleanField(
                        default=True, help_text="Delete all active sessions for the user"
                    ),
                ),
                (
                    "revoke_tokens",
                    models.BooleanField(
                        default=True,
                        help_text="Revoke all tokens for the user (API, app password, recovery, verification, OAuth)",
                    ),
                ),
                # Optional post-lockdown flow; SET_NULL so deleting the flow
                # does not cascade-delete configured stages.
                (
                    "self_service_completion_flow",
                    models.ForeignKey(
                        blank=True,
                        help_text="Flow to redirect users to after self-service lockdown. This flow should not require authentication since the user's session is deleted.",
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="account_lockdown_stages",
                        to="authentik_flows.flow",
                    ),
                ),
            ],
            options={
                "verbose_name": "Account Lockdown Stage",
                "verbose_name_plural": "Account Lockdown Stages",
            },
            bases=("authentik_flows.stage",),
        ),
    ]

View File

@@ -1,62 +0,0 @@
"""Account lockdown stage models"""
from django.db import models
from django.utils.translation import gettext_lazy as _
from django.views import View
from rest_framework.serializers import BaseSerializer
from authentik.flows.models import Stage
class AccountLockdownStage(Stage):
    """Lock down a target user account.

    Each boolean field toggles one lockdown action performed by the stage
    view; all actions are enabled by default.
    """

    deactivate_user = models.BooleanField(
        default=True,
        help_text=_("Deactivate the user account (set is_active to False)"),
    )
    set_unusable_password = models.BooleanField(
        default=True,
        help_text=_("Set an unusable password for the user"),
    )
    delete_sessions = models.BooleanField(
        default=True,
        help_text=_("Delete all active sessions for the user"),
    )
    revoke_tokens = models.BooleanField(
        default=True,
        help_text=_(
            "Revoke all tokens for the user (API, app password, recovery, verification, OAuth)"
        ),
    )
    # Optional flow users are sent to after locking their own account; kept
    # nullable (SET_NULL) so removing the flow does not remove the stage.
    self_service_completion_flow = models.ForeignKey(
        "authentik_flows.Flow",
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="account_lockdown_stages",
        help_text=_(
            "Flow to redirect users to after self-service lockdown. "
            "This flow should not require authentication since the user's session is deleted."
        ),
    )

    @property
    def serializer(self) -> type[BaseSerializer]:
        """REST serializer class for this stage (function-scope import)."""
        # NOTE(review): imported here rather than at module level — presumably
        # to avoid a circular import with the API module; confirm before moving.
        from authentik.enterprise.stages.account_lockdown.api import AccountLockdownStageSerializer

        return AccountLockdownStageSerializer

    @property
    def view(self) -> type[View]:
        """View class executed by the flow executor (function-scope import)."""
        from authentik.enterprise.stages.account_lockdown.stage import AccountLockdownStageView

        return AccountLockdownStageView

    @property
    def component(self) -> str:
        """Identifier of the frontend form component for this stage."""
        return "ak-stage-account-lockdown-form"

    class Meta:
        verbose_name = _("Account Lockdown Stage")
        verbose_name_plural = _("Account Lockdown Stages")

View File

@@ -1,345 +0,0 @@
"""Account lockdown stage logic"""
from django.apps import apps
from django.core.exceptions import FieldDoesNotExist
from django.db.models import Model, QuerySet
from django.db.models.query_utils import Q
from django.db.transaction import atomic
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from dramatiq.actor import Actor
from dramatiq.composition import group
from dramatiq.results.errors import ResultTimeout
from authentik.core.models import (
AuthenticatedSession,
ExpiringModel,
Session,
Token,
User,
UserTypes,
)
from authentik.enterprise.stages.account_lockdown.models import AccountLockdownStage
from authentik.events.models import Event, EventAction
from authentik.flows.stage import StageView
from authentik.lib.sync.outgoing.models import OutgoingSyncProvider
from authentik.lib.sync.outgoing.signals import sync_outgoing_inhibit_dispatch
from authentik.lib.utils.reflection import class_to_path
from authentik.lib.utils.time import timedelta_from_string
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
# Plan-context key carrying the (optional) reason for the lockdown.
PLAN_CONTEXT_LOCKDOWN_REASON = "lockdown_reason"
# Value stored in the emitted USER_WRITE event's context["action_id"].
LOCKDOWN_EVENT_ACTION_ID = "account_lockdown"
# User-facing error messages passed to executor.stage_invalid().
TARGET_REQUIRED_MESSAGE = _("No target user specified for account lockdown")
PERMISSION_DENIED_MESSAGE = _("You do not have permission to lock down this account.")
ACCOUNT_LOCKDOWN_FAILED_MESSAGE = _("Account lockdown failed for this account.")
SELF_SERVICE_COMPLETION_FLOW_REQUIRED_MESSAGE = _(
    "Self-service account lockdown requires a completion flow."
)
def get_lockdown_target_users() -> QuerySet[User]:
    """Queryset of users eligible as lockdown targets.

    Anonymous users and internal service accounts are never eligible.
    """
    candidates = User.objects.exclude_anonymous()
    return candidates.exclude(type=UserTypes.INTERNAL_SERVICE_ACCOUNT)
def _get_model_field(model: type[Model], field_name: str):
    """Look up ``field_name`` on ``model``; return ``None`` when absent."""
    try:
        field = model._meta.get_field(field_name)
    except FieldDoesNotExist:
        return None
    return field
def _has_user_field(model: type[Model]) -> bool:
    """Check if a model has a direct foreign key named ``user`` pointing at User."""
    field = _get_model_field(model, "user")
    if not field:
        return False
    remote = getattr(field, "remote_field", None)
    if not remote:
        return False
    return remote.model is User
def _has_authenticated_session_field(model: type[Model]) -> bool:
    """Check if a model's ``session`` field points at AuthenticatedSession."""
    field = _get_model_field(model, "session")
    if not field:
        return False
    remote = getattr(field, "remote_field", None)
    if not remote:
        return False
    return remote.model is AuthenticatedSession
def _has_provider_field(model: type[Model]) -> bool:
    """Check whether a model declares any field named ``provider``."""
    field = _get_model_field(model, "provider")
    return field is not None
def get_lockdown_token_models() -> tuple[type[Model], ...]:
    """Return token, grant, and provider session models removed by account lockdown.

    Scans every installed ExpiringModel subclass and keeps:
    the core Token model; user-linked models tied to a provider or an
    authenticated session; and session-linked models without a user FK.
    """
    selected: list[type[Model]] = []
    for candidate in apps.get_models():
        if candidate._meta.abstract or not issubclass(candidate, ExpiringModel):
            continue
        user_linked = _has_user_field(candidate)
        session_linked = _has_authenticated_session_field(candidate)
        if candidate is Token:
            selected.append(candidate)
        elif user_linked and (_has_provider_field(candidate) or session_linked):
            selected.append(candidate)
        elif session_linked:
            selected.append(candidate)
    return tuple(selected)
def get_lockdown_token_queryset(model: type[Model], user: User) -> QuerySet:
    """Return account lockdown artifacts for a model and user.

    Includes already-expired rows; filters by the direct ``user`` FK when the
    model has one, otherwise by the owning authenticated session's user.
    """
    objects = model.objects.including_expired()
    if not _has_user_field(model):
        return objects.filter(session__user=user)
    return objects.filter(user=user)
def can_lock_user(actor, user: User) -> bool:
    """Check whether the actor may lock the target user.

    Anonymous actors never may; users may always lock themselves; otherwise
    the ``authentik_core.change_user`` object permission is required.
    """
    if not actor.is_authenticated:
        return False
    is_self = user.pk == actor.pk
    return is_self or actor.has_perm("authentik_core.change_user", user)
def get_outgoing_sync_tasks() -> tuple[tuple[type[OutgoingSyncProvider], Actor], ...]:
    """Return outgoing sync provider types and their direct sync tasks."""
    # NOTE(review): provider modules are imported function-scope — presumably
    # to avoid import cycles / keep enterprise apps optional; confirm before
    # hoisting these to module level.
    from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
    from authentik.enterprise.providers.google_workspace.tasks import google_workspace_sync_direct
    from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider
    from authentik.enterprise.providers.microsoft_entra.tasks import microsoft_entra_sync_direct
    from authentik.providers.scim.models import SCIMProvider
    from authentik.providers.scim.tasks import scim_sync_direct

    # Pairs of (provider model, dramatiq actor performing a direct user sync).
    return (
        (SCIMProvider, scim_sync_direct),
        (GoogleWorkspaceProvider, google_workspace_sync_direct),
        (MicrosoftEntraProvider, microsoft_entra_sync_direct),
    )
class AccountLockdownStageView(StageView):
    """Execute account lockdown actions on the target user.

    dispatch() validates the target and the actor's permission, then applies
    the stage-configured actions (deactivation, unusable password, session
    deletion, token revocation) inside transactions, syncs the deactivation
    to outgoing providers, and emits an audit event.
    """

    def is_self_service(self, request: HttpRequest, user: User) -> bool:
        """Check whether the currently authenticated user is locking their own account."""
        return request.user.is_authenticated and user.pk == request.user.pk

    def get_reason(self) -> str:
        """Get the lockdown reason from the plan context.

        Priority:
        1. prompt_data[PLAN_CONTEXT_LOCKDOWN_REASON]
        2. PLAN_CONTEXT_LOCKDOWN_REASON (explicitly set)
        3. Empty string as fallback
        """
        prompt_data = self.executor.plan.context.get(PLAN_CONTEXT_PROMPT, {})
        if PLAN_CONTEXT_LOCKDOWN_REASON in prompt_data:
            return prompt_data[PLAN_CONTEXT_LOCKDOWN_REASON]
        return self.executor.plan.context.get(PLAN_CONTEXT_LOCKDOWN_REASON, "")

    def _apply_lockdown_actions(self, stage: AccountLockdownStage, user: User) -> None:
        """Apply the configured account changes to the target user."""
        if stage.deactivate_user:
            user.is_active = False
        if stage.set_unusable_password:
            user.set_unusable_password()
        if stage.deactivate_user:
            # When deactivating, suppress the automatic outgoing-sync dispatch
            # on save; the deactivation is synced explicitly (and waited on)
            # later via _sync_deactivated_user_to_outgoing_providers.
            with sync_outgoing_inhibit_dispatch():
                user.save()
            return
        user.save()

    def _sync_deactivated_user_to_outgoing_providers(self, user: User) -> None:
        """Synchronize a deactivated user to outgoing sync providers."""
        messages = []
        wait_timeout = 0
        model = class_to_path(User)
        # Only providers bound to an application (direct or backchannel).
        provider_filter = Q(backchannel_application__isnull=False) | Q(application__isnull=False)
        for provider_model, task_sync_direct in get_outgoing_sync_tasks():
            for provider in provider_model.objects.filter(provider_filter):
                # Per-task time limit in milliseconds, taken from the
                # provider's configured page timeout.
                time_limit = int(
                    timedelta_from_string(provider.sync_page_timeout).total_seconds() * 1000
                )
                messages.append(
                    task_sync_direct.message_with_options(
                        args=(model, user.pk, provider.pk),
                        rel_obj=provider,
                        time_limit=time_limit,
                        uid=f"{provider.name}:user:{user.pk}:direct",
                    )
                )
                # The overall wait budget is the sum of all task limits.
                wait_timeout += time_limit
        if not messages:
            return
        try:
            group(messages).run().wait(timeout=wait_timeout)
        except ResultTimeout:
            # Best effort: the tasks stay queued and will still run; the
            # lockdown itself is not failed by a slow provider.
            self.logger.warning(
                "Timed out waiting for outgoing sync tasks; tasks remain queued",
                user=user.username,
                timeout=wait_timeout,
            )

    def _get_lockdown_artifact_querysets(
        self, stage: AccountLockdownStage, user: User
    ) -> tuple[QuerySet, ...]:
        """Return the configured sessions and tokens targeted by lockdown."""
        querysets: list[QuerySet] = []
        if stage.delete_sessions:
            querysets.append(Session.objects.filter(authenticatedsession__user=user))
        if stage.revoke_tokens:
            querysets.extend(
                get_lockdown_token_queryset(model, user) for model in get_lockdown_token_models()
            )
        return tuple(querysets)

    def _delete_lockdown_artifacts(self, stage: AccountLockdownStage, user: User) -> None:
        """Delete sessions and tokens selected by the lockdown configuration."""
        for queryset in self._get_lockdown_artifact_querysets(stage, user):
            queryset.delete()

    def _has_lockdown_artifacts(self, stage: AccountLockdownStage, user: User) -> bool:
        """Check whether there are still sessions or tokens to remove."""
        return any(
            queryset.exists() for queryset in self._get_lockdown_artifact_querysets(stage, user)
        )

    def _emit_lockdown_event(self, request: HttpRequest, user: User, reason: str) -> None:
        """Emit the audit event for a completed lockdown."""
        # Emit the audit event after the transaction commits. If event creation
        # fails here, dispatch() would otherwise treat the whole lockdown as
        # failed even though the account changes have already been committed.
        try:
            Event.new(
                EventAction.USER_WRITE,
                action_id=LOCKDOWN_EVENT_ACTION_ID,
                reason=reason,
                affected_user=user.username,
            ).from_http(request)
        except Exception as exc:  # noqa: BLE001
            # Event emission should not make the lockdown itself fail.
            self.logger.warning(
                "Failed to emit account lockdown event",
                user=user.username,
                exc=exc,
            )

    def _lockdown_user(
        self,
        request: HttpRequest,
        stage: AccountLockdownStage,
        user: User,
        reason: str,
    ) -> None:
        """Execute lockdown actions on a single user.

        Raises: whatever the underlying ORM/sync operations raise; dispatch()
        converts those into a stage failure.
        """
        with atomic():
            # Re-fetch inside the transaction to operate on fresh state.
            user = User.objects.get(pk=user.pk)
            self._apply_lockdown_actions(stage, user)
            self._delete_lockdown_artifacts(stage, user)
        # These additional checks/deletes are done to prevent a timing attack that creates tokens
        # with a compromised token that is simultaneously being deleted.
        while self._has_lockdown_artifacts(stage, user):
            with atomic():
                self._delete_lockdown_artifacts(stage, user)
        if stage.deactivate_user:
            try:
                self._sync_deactivated_user_to_outgoing_providers(user)
            except Exception as exc:  # noqa: BLE001
                # Local lockdown has already committed. Provider sync failures
                # must not reopen access or mark the lockdown itself as failed.
                self.logger.warning(
                    "Failed to sync account lockdown deactivation to outgoing providers",
                    user=user.username,
                    exc=exc,
                )
        self._emit_lockdown_event(request, user, reason)

    def dispatch(self, request: HttpRequest) -> HttpResponse:
        """Execute account lockdown actions."""
        self.request = request
        stage: AccountLockdownStage = self.executor.current_stage
        pending_user = self.get_pending_user()
        if not pending_user.is_authenticated:
            self.logger.warning("No target user found for account lockdown")
            return self.executor.stage_invalid(TARGET_REQUIRED_MESSAGE)
        # Re-resolve the pending user against the eligible-target queryset so
        # service accounts / anonymous users can never be locked.
        user = get_lockdown_target_users().filter(pk=pending_user.pk).first()
        if user is None:
            self.logger.warning("Target user is not eligible for account lockdown")
            return self.executor.stage_invalid(TARGET_REQUIRED_MESSAGE)
        if not can_lock_user(request.user, user):
            self.logger.warning(
                "Permission denied for account lockdown",
                actor=getattr(request.user, "username", None),
                target=user.username,
            )
            return self.executor.stage_invalid(PERMISSION_DENIED_MESSAGE)
        reason = self.get_reason()
        self_service = self.is_self_service(request, user)
        # Validate the completion-flow requirement up front, before any
        # destructive action has been taken.
        if self_service and stage.delete_sessions and not stage.self_service_completion_flow:
            self.logger.warning("No completion flow configured for self-service account lockdown")
            return self.executor.stage_invalid(SELF_SERVICE_COMPLETION_FLOW_REQUIRED_MESSAGE)
        self.logger.info(
            "Executing account lockdown",
            user=user.username,
            reason=reason,
            self_service=self_service,
            deactivate_user=stage.deactivate_user,
            set_unusable_password=stage.set_unusable_password,
            delete_sessions=stage.delete_sessions,
            revoke_tokens=stage.revoke_tokens,
        )
        try:
            self._lockdown_user(request, stage, user, reason)
            self.logger.info("Account lockdown completed", user=user.username)
        except Exception as exc:  # noqa: BLE001
            # Convert unexpected lockdown errors to a flow-stage failure instead
            # of leaking an exception through the flow executor.
            self.logger.warning("Account lockdown failed", user=user.username, exc=exc)
            return self.executor.stage_invalid(ACCOUNT_LOCKDOWN_FAILED_MESSAGE)
        if self_service:
            # With sessions deleted the user cannot continue this flow; hand
            # them off to the unauthenticated completion flow instead.
            if stage.delete_sessions:
                return self._self_service_completion_response(request)
            return self.executor.stage_ok()
        return self.executor.stage_ok()

    def _self_service_completion_response(self, request: HttpRequest) -> HttpResponse:
        """Redirect to completion flow after self-service lockdown.

        Since all sessions are deleted, the user cannot continue in the flow.
        Redirect them to an unauthenticated completion flow that shows the
        lockdown message.

        We use a direct HTTP redirect instead of a challenge because the
        flow executor's challenge handling may try to access the session
        which we just deleted.
        """
        stage: AccountLockdownStage = self.executor.current_stage
        completion_flow = stage.self_service_completion_flow
        if completion_flow:
            # Flush the current request's session to prevent Django's session
            # middleware from trying to save a deleted session
            if hasattr(request, "session"):
                request.session.flush()
            redirect_to = reverse(
                "authentik_core:if-flow",
                kwargs={"flow_slug": completion_flow.slug},
            )
            return HttpResponseRedirect(redirect_to)
        return self.executor.stage_invalid(SELF_SERVICE_COMPLETION_FLOW_REQUIRED_MESSAGE)

View File

@@ -1,148 +0,0 @@
"""Test Users Account Lockdown API"""
from json import loads
from unittest.mock import MagicMock, patch
from urllib.parse import urlparse
from django.urls import reverse
from rest_framework.test import APITestCase
from authentik.core.tests.utils import (
create_test_brand,
create_test_flow,
create_test_user,
)
from authentik.enterprise.stages.account_lockdown.models import AccountLockdownStage
from authentik.flows.models import FlowDesignation, FlowStageBinding
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.lib.generators import generate_id
# Class decorator: treat the enterprise license as valid for the duration of
# these tests, so enterprise-only API endpoints are reachable.
patch_license = patch(
    "authentik.enterprise.models.LicenseUsageStatus.is_valid",
    MagicMock(return_value=True),
)
@patch_license
class AccountLockdownAPITestCase(APITestCase):
    """Shared fixtures and assertion helpers for account lockdown API tests."""

    def setUp(self) -> None:
        # A lockdown flow with a single AccountLockdownStage bound at order 0,
        # registered on the test brand.
        flow = create_test_flow(FlowDesignation.STAGE_CONFIGURATION)
        stage = AccountLockdownStage.objects.create(name=generate_id())
        FlowStageBinding.objects.create(target=flow, stage=stage, order=0)
        self.lockdown_flow = flow
        self.lockdown_stage = stage
        brand = create_test_brand()
        brand.flow_lockdown = flow
        brand.save()
        self.brand = brand

    def create_user_with_email(self):
        """Create a regular user with a unique email address."""
        created = create_test_user()
        created.email = f"{generate_id()}@test.com"
        created.save()
        return created

    def assert_redirect_targets(self, response, user):
        """Assert that ``response`` carries a pre-planned lockdown flow link for ``user``."""
        self.assertEqual(response.status_code, 200)
        link = loads(response.content)["link"]
        self.assertIn(self.lockdown_flow.slug, link)
        self.assertEqual(urlparse(link).query, "")
        plan = self.client.session[SESSION_KEY_PLAN]
        self.assertEqual(plan.context[PLAN_CONTEXT_PENDING_USER].pk, user.pk)

    def assert_no_flow_configured(self, response):
        """Assert that the API reports a missing lockdown flow."""
        self.assertEqual(response.status_code, 400)
        errors = loads(response.content)["non_field_errors"]
        self.assertIn("No lockdown flow configured", errors[0])
@patch_license
class TestUsersAccountLockdownAPI(AccountLockdownAPITestCase):
    """Test Users Account Lockdown API"""

    def setUp(self) -> None:
        super().setUp()
        self.actor = create_test_user()
        self.user = self.create_user_with_email()

    def _post_lockdown(self, data):
        """POST the given payload to the account-lockdown endpoint."""
        return self.client.post(
            reverse("authentik_api:user-account-lockdown"),
            data=data,
            format="json",
        )

    def test_account_lockdown_with_change_user_returns_redirect(self):
        """Test that account lockdown allows users with change_user permission."""
        self.actor.assign_perms_to_managed_role("authentik_core.change_user", self.user)
        self.client.force_login(self.actor)
        response = self._post_lockdown({"user": self.user.pk})
        self.assert_redirect_targets(response, self.user)

    def test_account_lockdown_no_flow_configured(self):
        """Test account lockdown when no flow is configured"""
        self.brand.flow_lockdown = None
        self.brand.save()
        self.actor.assign_perms_to_managed_role("authentik_core.change_user", self.user)
        self.client.force_login(self.actor)
        self.assert_no_flow_configured(self._post_lockdown({"user": self.user.pk}))

    def test_account_lockdown_unauthenticated(self):
        """Test account lockdown requires authentication"""
        response = self._post_lockdown({"user": self.user.pk})
        self.assertEqual(response.status_code, 403)

    def test_account_lockdown_without_change_user_denied(self):
        """Test account lockdown denies users without change_user permission."""
        self.client.force_login(self.actor)
        response = self._post_lockdown({"user": self.user.pk})
        self.assertEqual(response.status_code, 403)

    def test_account_lockdown_self_returns_redirect(self):
        """Test successful self-service account lockdown returns a direct redirect."""
        self.client.force_login(self.user)
        self.assert_redirect_targets(self._post_lockdown({}), self.user)

    def test_account_lockdown_self_target_without_change_user_returns_redirect(self):
        """Test self-service does not require change_user permission."""
        self.client.force_login(self.user)
        self.assert_redirect_targets(self._post_lockdown({"user": self.user.pk}), self.user)

View File

@@ -1,46 +0,0 @@
"""Tests for the packaged account-lockdown blueprint."""
from unittest.mock import patch
from django.test import TransactionTestCase
from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.tasks import blueprints_find, check_blueprint_v1_file
from authentik.enterprise.license import LicenseKey
from authentik.flows.models import Flow
# Path of the packaged account-lockdown example blueprint under test.
BLUEPRINT_PATH = "example/flow-default-account-lockdown.yaml"
class TestAccountLockdownBlueprint(TransactionTestCase):
    """Test the packaged account-lockdown blueprint behavior."""

    def test_blueprint_is_not_auto_instantiated(self):
        """Test the packaged blueprint is opt-in and skipped by discovery."""
        BlueprintInstance.objects.filter(path=BLUEPRINT_PATH).delete()
        discovered = next(found for found in blueprints_find() if found.path == BLUEPRINT_PATH)
        check_blueprint_v1_file(discovered)
        self.assertFalse(BlueprintInstance.objects.filter(path=BLUEPRINT_PATH).exists())

    def _import_blueprint(self, content, licensed):
        """Validate and apply the blueprint with the given licensed context flag."""
        importer = Importer.from_string(content, {"goauthentik.io/enterprise/licensed": licensed})
        valid, logs = importer.validate()
        self.assertTrue(valid, logs)
        self.assertTrue(importer.apply())

    def test_blueprint_requires_licensed_context(self):
        """Test manual import only creates flows when enterprise is licensed."""
        content = BlueprintInstance(path=BLUEPRINT_PATH).retrieve()
        license_key = LicenseKey("test", 253402300799, "Test license", 1000, 1000)
        with patch("authentik.enterprise.license.LicenseKey.get_total", return_value=license_key):
            # Unlicensed context: the blueprint applies but creates nothing.
            self._import_blueprint(content, False)
            self.assertFalse(Flow.objects.filter(slug="default-account-lockdown").exists())
            self.assertFalse(Flow.objects.filter(slug="default-account-lockdown-complete").exists())
            # Licensed context: both flows are created.
            self._import_blueprint(content, True)
            self.assertTrue(Flow.objects.filter(slug="default-account-lockdown").exists())
            self.assertTrue(Flow.objects.filter(slug="default-account-lockdown-complete").exists())

View File

@@ -1,627 +0,0 @@
"""Account lockdown stage tests"""
import json
from dataclasses import asdict
from threading import Event as ThreadEvent
from threading import Thread
from types import SimpleNamespace
from unittest.mock import MagicMock, patch
from django.db import connection
from django.http import HttpResponse
from django.test import TransactionTestCase
from django.urls import reverse
from django.utils import timezone
from dramatiq.results.errors import ResultTimeout
from authentik.core.models import AuthenticatedSession, Session, Token, TokenIntents
from authentik.core.tests.utils import (
RequestFactory,
create_test_admin_user,
create_test_cert,
create_test_flow,
create_test_user,
)
from authentik.enterprise.stages.account_lockdown.models import AccountLockdownStage
from authentik.enterprise.stages.account_lockdown.stage import (
LOCKDOWN_EVENT_ACTION_ID,
PLAN_CONTEXT_LOCKDOWN_REASON,
AccountLockdownStageView,
can_lock_user,
)
from authentik.events.models import Event, EventAction
from authentik.flows.markers import StageMarker
from authentik.flows.models import FlowDesignation, FlowStageBinding
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlan
from authentik.flows.tests import FlowTestCase
from authentik.lib.generators import generate_id
from authentik.lib.utils.reflection import class_to_path
from authentik.providers.oauth2.id_token import IDToken
from authentik.providers.oauth2.models import (
AccessToken,
AuthorizationCode,
DeviceToken,
OAuth2Provider,
RedirectURI,
RedirectURIMatchingMode,
RefreshToken,
)
from authentik.providers.saml.models import SAMLProvider, SAMLSession
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
# Force the enterprise enablement check to pass so the stage can run in tests.
patch_enterprise_enabled = patch(
    "authentik.enterprise.apps.AuthentikEnterpriseConfig.check_enabled",
    return_value=True,
)
class AccountLockdownStageTestMixin:
    """Shared setup helpers for account lockdown stage tests."""

    @classmethod
    def setUpClass(cls):
        # Start class-wide patches (enterprise gate + event trigger dispatch)
        # before super().setUpClass() so class-level fixtures see them.
        cls.patch_enterprise_enabled = patch_enterprise_enabled.start()
        cls.patch_event_dispatch = patch("authentik.events.tasks.event_trigger_dispatch.send")
        cls.patch_event_dispatch.start()
        super().setUpClass()

    @classmethod
    def tearDownClass(cls):
        # Stop patches in reverse order of setUpClass.
        cls.patch_event_dispatch.stop()
        patch_enterprise_enabled.stop()
        super().tearDownClass()

    def setUp(self):
        super().setUp()
        self.user = create_test_admin_user()
        self.target_user = create_test_admin_user()
        self.flow = create_test_flow(FlowDesignation.STAGE_CONFIGURATION)
        self.stage = AccountLockdownStage.objects.create(
            name="lockdown",
        )
        self.binding = FlowStageBinding.objects.create(target=self.flow, stage=self.stage, order=0)
        self.request_factory = RequestFactory()

    def make_stage_view(self, plan: FlowPlan):
        """Build an AccountLockdownStageView with a stand-in executor.

        stage_ok/stage_invalid are mapped to plain HTTP 204/400 responses so
        tests can assert directly on the dispatch result's status code.
        """
        def _stage_ok():
            return HttpResponse(status=204)

        def _stage_invalid(_error_message=None):
            return HttpResponse(status=400)

        return AccountLockdownStageView(
            SimpleNamespace(
                plan=plan,
                current_stage=self.stage,
                current_binding=self.binding,
                flow=self.flow,
                stage_ok=_stage_ok,
                stage_invalid=_stage_invalid,
            )
        )

    def make_request(self, *, user=None, query=None):
        """Build a POST request against the flow-executor endpoint for self.flow."""
        return self.request_factory.post(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
            query_params=query or {},
            user=user,
        )

    def get_lockdown_event(self):
        """Return the account-lockdown user-write event."""
        return Event.objects.filter(
            action=EventAction.USER_WRITE,
            context__action_id=LOCKDOWN_EVENT_ACTION_ID,
        ).first()
class TestAccountLockdownStage(AccountLockdownStageTestMixin, FlowTestCase):
"""Account lockdown stage tests"""
def test_lockdown_no_target(self):
"""Test lockdown stage with no pending user fails"""
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
view = self.make_stage_view(plan)
response = view.dispatch(self.make_request())
self.assertEqual(response.status_code, 400)
def test_lockdown_with_pending_user(self):
"""Test lockdown stage with a pending target user."""
self.target_user.is_active = True
self.target_user.save()
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
plan.context[PLAN_CONTEXT_LOCKDOWN_REASON] = "Security incident"
plan.context[PLAN_CONTEXT_PENDING_USER] = self.target_user
view = self.make_stage_view(plan)
request = self.make_request(user=self.user)
self.assertTrue(can_lock_user(request.user, self.target_user))
response = view.dispatch(request)
self.target_user.refresh_from_db()
self.assertFalse(self.target_user.is_active)
self.assertFalse(self.target_user.has_usable_password())
self.assertEqual(response.status_code, 204)
# Check event was created
event = self.get_lockdown_event()
self.assertIsNotNone(event)
self.assertEqual(event.context["action_id"], LOCKDOWN_EVENT_ACTION_ID)
self.assertEqual(event.context["reason"], "Security incident")
self.assertEqual(event.context["affected_user"], self.target_user.username)
def test_lockdown_with_pending_user_reason(self):
"""Test lockdown stage with a pending target and explicit reason."""
self.target_user.is_active = True
self.target_user.save()
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
plan.context[PLAN_CONTEXT_LOCKDOWN_REASON] = "Compromised account"
plan.context[PLAN_CONTEXT_PENDING_USER] = self.target_user
view = self.make_stage_view(plan)
request = self.make_request(user=self.user)
self.assertTrue(can_lock_user(request.user, self.target_user))
response = view.dispatch(request)
self.target_user.refresh_from_db()
self.assertFalse(self.target_user.is_active)
self.assertEqual(response.status_code, 204)
def test_lockdown_reason_from_prompt(self):
"""Test lockdown stage reads the reason from prompt data."""
self.target_user.is_active = True
self.target_user.save()
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
plan.context[PLAN_CONTEXT_PROMPT] = {
PLAN_CONTEXT_LOCKDOWN_REASON: "User requested lockdown",
}
view = self.make_stage_view(plan)
request = self.make_request(user=self.user)
view._lockdown_user(request, self.stage, self.target_user, view.get_reason())
event = self.get_lockdown_event()
self.assertIsNotNone(event)
self.assertEqual(event.context["reason"], "User requested lockdown")
    def test_lockdown_event_failure_does_not_fail_self_service(self):
        """Test lockdown still succeeds when event emission fails."""
        self.stage.delete_sessions = False
        self.stage.save()
        self.target_user.is_active = True
        self.target_user.save()
        plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
        plan.context[PLAN_CONTEXT_PENDING_USER] = self.target_user
        view = self.make_stage_view(plan)
        request = self.make_request(user=self.target_user)
        original_event_new = Event.new

        def _event_new_side_effect(action, *args, **kwargs):
            # Fail only for the lockdown audit event; pass all other events
            # through to the real Event.new.
            if (
                action == EventAction.USER_WRITE
                and kwargs.get("action_id") == LOCKDOWN_EVENT_ACTION_ID
            ):
                raise RuntimeError("simulated event failure")
            return original_event_new(action, *args, **kwargs)

        with patch(
            "authentik.enterprise.stages.account_lockdown.stage.Event.new",
            side_effect=_event_new_side_effect,
        ):
            view._lockdown_user(request, self.stage, self.target_user, view.get_reason())
        # The account change must still have been committed.
        self.target_user.refresh_from_db()
        self.assertFalse(self.target_user.is_active)
    def test_dispatch_records_success_when_event_emission_fails(self):
        """Test dispatch still completes if event emission fails."""
        self.stage.delete_sessions = False
        self.stage.save()
        self.target_user.is_active = True
        self.target_user.save()
        plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
        plan.context[PLAN_CONTEXT_PENDING_USER] = self.target_user
        view = self.make_stage_view(plan)
        request = self.make_request(
            user=self.target_user,
        )
        original_event_new = Event.new

        def _event_new_side_effect(action, *args, **kwargs):
            # Fail only for the lockdown audit event; pass all other events
            # through to the real Event.new.
            if (
                action == EventAction.USER_WRITE
                and kwargs.get("action_id") == LOCKDOWN_EVENT_ACTION_ID
            ):
                raise RuntimeError("simulated event failure")
            return original_event_new(action, *args, **kwargs)

        with patch(
            "authentik.enterprise.stages.account_lockdown.stage.Event.new",
            side_effect=_event_new_side_effect,
        ):
            response = view.dispatch(request)
        # Dispatch reports success (204 via the stubbed stage_ok) even though
        # the audit event could not be written.
        self.target_user.refresh_from_db()
        self.assertFalse(self.target_user.is_active)
        self.assertEqual(response.status_code, 204)
def test_lockdown_self_service_redirects_to_completion_flow(self):
"""Test self-service lockdown redirects to completion flow when sessions are deleted."""
completion_flow = create_test_flow(FlowDesignation.STAGE_CONFIGURATION)
self.stage.self_service_completion_flow = completion_flow
self.stage.save()
self.target_user.is_active = True
self.target_user.save()
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
view = self.make_stage_view(plan)
request = self.make_request(user=self.target_user)
view._lockdown_user(request, self.stage, self.target_user, view.get_reason())
response = view._self_service_completion_response(request)
self.assertEqual(response.status_code, 302)
self.assertEqual(
response.url,
reverse("authentik_core:if-flow", kwargs={"flow_slug": completion_flow.slug}),
)
def test_lockdown_self_service_requires_completion_flow(self):
"""Test self-service lockdown fails before deleting sessions without a completion flow."""
self.stage.self_service_completion_flow = None
self.stage.save()
self.target_user.is_active = True
self.target_user.save()
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
plan.context[PLAN_CONTEXT_PENDING_USER] = self.target_user
view = self.make_stage_view(plan)
request = self.make_request(user=self.target_user)
response = view.dispatch(request)
self.assertEqual(response.status_code, 400)
self.target_user.refresh_from_db()
self.assertTrue(self.target_user.is_active)
def test_lockdown_denies_other_user_without_permission(self):
"""Test lockdown stage rejects non-self requests without change_user permission."""
actor = create_test_user()
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
plan.context[PLAN_CONTEXT_PENDING_USER] = self.target_user
view = self.make_stage_view(plan)
request = self.make_request(user=actor)
self.assertFalse(can_lock_user(request.user, self.target_user))
response = view.dispatch(request)
self.assertEqual(response.status_code, 400)
def test_lockdown_revokes_tokens(self):
"""Test lockdown stage revokes tokens"""
Token.objects.create(
user=self.target_user,
identifier="test-token",
intent=TokenIntents.INTENT_API,
key=generate_id(),
expiring=False,
)
self.assertEqual(Token.objects.filter(user=self.target_user).count(), 1)
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
view = self.make_stage_view(plan)
view._lockdown_user(self.make_request(user=self.user), self.stage, self.target_user, "")
self.assertEqual(Token.objects.filter(user=self.target_user).count(), 0)
def test_lockdown_revokes_provider_tokens(self):
    """Test lockdown stage revokes provider tokens and sessions.

    Creates one OAuth2 and one SAML provider, attaches an authorization code,
    access token, refresh token, device token and SAML session to the target
    user's authenticated session, runs ``_lockdown_user``, then asserts every
    provider-side grant for the user is gone.
    """
    oauth_provider = OAuth2Provider.objects.create(
        name=generate_id(),
        authorization_flow=create_test_flow(),
        redirect_uris=[
            RedirectURI(RedirectURIMatchingMode.STRICT, "http://testserver/callback")
        ],
        signing_key=create_test_cert(),
    )
    saml_provider = SAMLProvider.objects.create(
        name=generate_id(),
        authorization_flow=create_test_flow(),
        acs_url="https://sp.example.com/acs",
        issuer_override="https://idp.example.com",
    )
    # Backing session that every provider artifact below is bound to.
    session = Session.objects.create(
        session_key=generate_id(),
        expires=timezone.now() + timezone.timedelta(hours=1),
        last_ip="127.0.0.1",
    )
    auth_session = AuthenticatedSession.objects.create(
        session=session,
        user=self.target_user,
    )
    # Shared kwargs for all OAuth2 grants; token objects additionally need
    # a serialized ID token.
    grant_kwargs = {
        "provider": oauth_provider,
        "user": self.target_user,
        "auth_time": timezone.now(),
        "_scope": "openid profile",
        "expiring": False,
    }
    token_kwargs = grant_kwargs | {"_id_token": json.dumps(asdict(IDToken("foo", "bar")))}
    AuthorizationCode.objects.create(
        code=generate_id(),
        session=auth_session,
        **grant_kwargs,
    )
    AccessToken.objects.create(
        token=generate_id(),
        session=auth_session,
        **token_kwargs,
    )
    RefreshToken.objects.create(
        token=generate_id(),
        session=auth_session,
        **token_kwargs,
    )
    DeviceToken.objects.create(
        provider=oauth_provider,
        user=self.target_user,
        session=auth_session,
        _scope="openid profile",
        expiring=False,
    )
    SAMLSession.objects.create(
        provider=saml_provider,
        user=self.target_user,
        session=auth_session,
        session_index=generate_id(),
        name_id=self.target_user.email,
        expires=timezone.now() + timezone.timedelta(hours=1),
        expiring=True,
    )
    plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
    view = self.make_stage_view(plan)
    view._lockdown_user(self.make_request(user=self.user), self.stage, self.target_user, "")
    # Every provider-issued credential/session for the user must be revoked.
    self.assertEqual(AuthorizationCode.objects.filter(user=self.target_user).count(), 0)
    self.assertEqual(AccessToken.objects.filter(user=self.target_user).count(), 0)
    self.assertEqual(RefreshToken.objects.filter(user=self.target_user).count(), 0)
    self.assertEqual(DeviceToken.objects.filter(user=self.target_user).count(), 0)
    self.assertEqual(SAMLSession.objects.filter(user=self.target_user).count(), 0)
def test_lockdown_selective_actions(self):
    """Only the enabled lockdown action (deactivation) is applied."""
    # Enable deactivation only; leave password, sessions and tokens untouched.
    self.stage.deactivate_user = True
    self.stage.set_unusable_password = False
    self.stage.delete_sessions = False
    self.stage.revoke_tokens = False
    self.stage.save()
    self.target_user.is_active = True
    self.target_user.set_password("testpassword")
    self.target_user.save()
    Token.objects.create(
        user=self.target_user,
        identifier="test-token",
        intent=TokenIntents.INTENT_API,
        key=generate_id(),
        expiring=False,
    )
    selective_plan = FlowPlan(
        flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
    )
    stage_view = self.make_stage_view(selective_plan)
    stage_view._lockdown_user(
        self.make_request(user=self.user), self.stage, self.target_user, ""
    )
    self.target_user.refresh_from_db()
    # Deactivation ran ...
    self.assertFalse(self.target_user.is_active)
    # ... but the password was left usable ...
    self.assertTrue(self.target_user.has_usable_password())
    # ... and the token survived.
    self.assertEqual(Token.objects.filter(user=self.target_user).count(), 1)
def test_lockdown_no_actions(self):
    """With every action disabled, lockdown changes nothing but still logs an event."""
    # Disable all four lockdown actions.
    self.stage.deactivate_user = False
    self.stage.set_unusable_password = False
    self.stage.delete_sessions = False
    self.stage.revoke_tokens = False
    self.stage.save()
    self.target_user.is_active = True
    self.target_user.set_password("testpassword")
    self.target_user.save()
    noop_plan = FlowPlan(
        flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
    )
    stage_view = self.make_stage_view(noop_plan)
    stage_view._lockdown_user(
        self.make_request(user=self.user), self.stage, self.target_user, ""
    )
    self.target_user.refresh_from_db()
    # Account state is untouched ...
    self.assertTrue(self.target_user.is_active)
    self.assertTrue(self.target_user.has_usable_password())
    # ... yet the lockdown event is still emitted.
    event = self.get_lockdown_event()
    self.assertIsNotNone(event)
def test_lockdown_deactivation_inhibits_signal_dispatch_until_after_commit(self):
    """Lockdown inhibits signal-driven syncs and queues an explicit outgoing sync."""
    inhibit_plan = FlowPlan(
        flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
    )
    stage_view = self.make_stage_view(inhibit_plan)
    inhibit_patch = patch(
        "authentik.enterprise.stages.account_lockdown.stage.sync_outgoing_inhibit_dispatch"
    )
    sync_patch = patch.object(stage_view, "_sync_deactivated_user_to_outgoing_providers")
    with inhibit_patch as inhibit_mock, sync_patch as sync_mock:
        stage_view._lockdown_user(
            self.make_request(user=self.user), self.stage, self.target_user, ""
        )
    inhibit_mock.assert_called_once()
    sync_mock.assert_called_once()
    # The user handed to the explicit sync is the (now deactivated) target.
    synced_user = sync_mock.call_args.args[0]
    self.assertEqual(synced_user.pk, self.target_user.pk)
    self.assertFalse(synced_user.is_active)
def test_lockdown_waits_for_direct_outgoing_provider_syncs(self):
    """Test direct outgoing sync tasks are enqueued and waited on.

    Stubs the outgoing-sync task registry with one fake provider and verifies
    the per-user direct sync message is built with the expected arguments,
    wrapped in a task group, and waited on with the provider's page timeout
    (``seconds=5`` -> 5000 ms).
    """
    plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
    view = self.make_stage_view(plan)
    # Lightweight stand-ins for a provider instance, its model manager, and
    # the direct-sync actor; no real outgoing provider is needed.
    provider = SimpleNamespace(name="outgoing", pk=1, sync_page_timeout="seconds=5")
    task_sync_direct = MagicMock()
    task_sync_direct.message_with_options.return_value = "direct-message"
    provider_model = SimpleNamespace(
        objects=SimpleNamespace(filter=MagicMock(return_value=[provider]))
    )
    task_group = MagicMock()
    with (
        patch(
            "authentik.enterprise.stages.account_lockdown.stage.get_outgoing_sync_tasks",
            return_value=((provider_model, task_sync_direct),),
        ),
        patch(
            "authentik.enterprise.stages.account_lockdown.stage.group",
            return_value=task_group,
        ) as task_group_cls,
    ):
        view._sync_deactivated_user_to_outgoing_providers(self.target_user)
    # The direct sync message targets exactly this user on this provider.
    task_sync_direct.message_with_options.assert_called_once_with(
        args=(class_to_path(type(self.target_user)), self.target_user.pk, provider.pk),
        rel_obj=provider,
        time_limit=5000,
        uid=f"{provider.name}:user:{self.target_user.pk}:direct",
    )
    task_group_cls.assert_called_once_with(["direct-message"])
    # The stage blocks on the group result using the same timeout.
    task_group.run.return_value.wait.assert_called_once_with(timeout=5000)
def test_lockdown_outgoing_provider_sync_timeout_leaves_tasks_running(self):
    """Test timeout while waiting for direct outgoing syncs does not fail lockdown.

    The fake task group's ``wait`` raises ``ResultTimeout``; the sync helper
    must swallow it (the enqueued tasks keep running) rather than propagate.
    """
    plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
    view = self.make_stage_view(plan)
    # Same provider/actor stand-ins as the happy-path test above.
    provider = SimpleNamespace(name="outgoing", pk=1, sync_page_timeout="seconds=5")
    task_sync_direct = MagicMock()
    task_sync_direct.message_with_options.return_value = "direct-message"
    provider_model = SimpleNamespace(
        objects=SimpleNamespace(filter=MagicMock(return_value=[provider]))
    )
    task_group = MagicMock()
    # Simulate the group result never arriving within the timeout.
    task_group.run.return_value.wait.side_effect = ResultTimeout("timed out")
    with (
        patch(
            "authentik.enterprise.stages.account_lockdown.stage.get_outgoing_sync_tasks",
            return_value=((provider_model, task_sync_direct),),
        ),
        patch(
            "authentik.enterprise.stages.account_lockdown.stage.group",
            return_value=task_group,
        ),
    ):
        # Must not raise despite the ResultTimeout.
        view._sync_deactivated_user_to_outgoing_providers(self.target_user)
    task_group.run.assert_called_once_with()
    task_group.run.return_value.wait.assert_called_once_with(timeout=5000)
def test_lockdown_outgoing_provider_sync_failure_does_not_fail_lockdown(self):
    """A failing outgoing sync still leaves local lockdown complete and audited."""
    failure_plan = FlowPlan(
        flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()]
    )
    stage_view = self.make_stage_view(failure_plan)
    failing_sync = patch.object(
        stage_view,
        "_sync_deactivated_user_to_outgoing_providers",
        side_effect=ValueError("sync failed"),
    )
    with failing_sync:
        stage_view._lockdown_user(
            self.make_request(user=self.user), self.stage, self.target_user, ""
        )
    # Local deactivation completed despite the sync failure ...
    self.target_user.refresh_from_db()
    self.assertFalse(self.target_user.is_active)
    # ... and the lockdown event was still recorded.
    event = self.get_lockdown_event()
    self.assertIsNotNone(event)
class TestAccountLockdownStageConcurrency(AccountLockdownStageTestMixin, TransactionTestCase):
    """Account lockdown concurrency tests.

    Uses ``TransactionTestCase`` so a second thread (with its own DB
    connection) can commit changes while the lockdown logic is mid-flight.
    """

    def test_lockdown_retries_when_another_transaction_recreates_a_token(self):
        """Lockdown should remove a token recreated before the retry check runs.

        A helper thread recreates a token for the target user between the
        initial cleanup and the ``_has_lockdown_artifacts`` retry check;
        lockdown must detect and remove it, ending with zero tokens.
        """
        Token.objects.create(
            user=self.target_user,
            identifier=f"initial-token-{generate_id()}",
            intent=TokenIntents.INTENT_API,
            key=generate_id(),
            expiring=False,
        )
        plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
        view = self.make_stage_view(plan)
        original_has_artifacts = view._has_lockdown_artifacts
        target_user = self.target_user
        # Thread choreography: ready -> (main allows) start_create -> done.
        thread_ready = ThreadEvent()
        start_create = ThreadEvent()
        thread_done = ThreadEvent()
        # Exceptions/messages from the worker thread, checked at the end.
        thread_errors = []

        class TokenCreatorThread(Thread):
            # Prevent pytest from collecting this helper as a test class.
            __test__ = False

            def run(self):
                try:
                    thread_ready.set()
                    if not start_create.wait(timeout=5):
                        thread_errors.append("timed out waiting to recreate token")
                        return
                    # Recreate a token concurrently with the lockdown.
                    Token.objects.create(
                        user=target_user,
                        identifier=f"concurrent-token-{generate_id()}",
                        intent=TokenIntents.INTENT_API,
                        key=generate_id(),
                        expiring=False,
                    )
                except Exception as exc:  # noqa: BLE001
                    thread_errors.append(exc)
                finally:
                    thread_done.set()
                    # Close this thread's DB connection to avoid leaking it.
                    connection.close()

        def has_artifacts_after_concurrent_create(stage, user):
            # On the first invocation, release the creator thread and block
            # until its token exists, so the retry check sees the new token.
            if not start_create.is_set():
                start_create.set()
                self.assertTrue(
                    thread_done.wait(timeout=30),
                    (
                        "Concurrent token creation did not complete "
                        f"before retry check: {thread_errors}"
                    ),
                )
            return original_has_artifacts(stage, user)

        creator = TokenCreatorThread()
        with patch.object(
            view, "_has_lockdown_artifacts", side_effect=has_artifacts_after_concurrent_create
        ):
            creator.start()
            self.assertTrue(
                thread_ready.wait(timeout=5),
                "Concurrent token creation thread did not start",
            )
            view._lockdown_user(self.make_request(user=self.user), self.stage, self.target_user, "")
        creator.join()
        # The worker thread must have succeeded, and lockdown must have
        # removed the concurrently created token as well.
        self.assertEqual(thread_errors, [])
        self.assertEqual(Token.objects.filter(user=self.target_user).count(), 0)

View File

@@ -1,5 +0,0 @@
"""API URLs"""
from authentik.enterprise.stages.account_lockdown.api import AccountLockdownStageViewSet
api_urlpatterns = [("stages/account_lockdown", AccountLockdownStageViewSet)]

View File

@@ -8,6 +8,7 @@ from inspect import currentframe
from typing import Any
from uuid import uuid4
from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from django.apps import apps
from django.db import models
@@ -409,7 +410,7 @@ class NotificationTransport(TasksModel, SerializerModel):
)
notification.save()
layer = get_channel_layer()
layer.group_send_blocking(
async_to_sync(layer.group_send)(
build_user_group(notification.user),
{
"type": "event.notification",

View File

@@ -29,7 +29,6 @@ class RefreshOtherFlowsAfterAuthentication(Flag[bool], key="flows_refresh_others
default = False
visibility = "public"
description = _("Refresh other tabs after successful authentication.")
deprecated = True
class ContinuousLogin(Flag[bool], key="flows_continuous_login"):

View File

@@ -23,7 +23,7 @@
height: 100%;
}
body {
background-image: url("{{ flow_background_url|iriencode|safe }}");
background-image: url("{{ flow_background_url }}");
background-repeat: no-repeat;
background-size: cover;
}

View File

@@ -39,7 +39,7 @@
<script src="{% versioned_script 'dist/flow/FlowInterface-%v.js' %}" type="module"></script>
<style data-id="flow-css">
:root {
--ak-global--background-image: url("{{ flow_background_url|iriencode|safe }}");
--ak-global--background-image: url("{{ flow_background_url }}");
}
</style>
{% endblock %}

View File

@@ -1,14 +1,12 @@
"""stage view tests"""
from collections.abc import Callable
from unittest.mock import patch
from django.test import RequestFactory, TestCase
from django.urls import reverse
from authentik.core.tests.utils import RequestFactory as AuthentikRequestFactory
from authentik.core.tests.utils import create_test_flow
from authentik.flows.models import Flow, FlowStageBinding
from authentik.flows.models import FlowStageBinding
from authentik.flows.stage import StageView
from authentik.flows.views.executor import FlowExecutorView
from authentik.lib.utils.reflection import all_subclasses
@@ -44,46 +42,6 @@ class TestViews(TestCase):
"/static/dist/assets/images/flow_background.jpg",
)
def test_flow_interface_css_background_preserves_presigned_url_query(self):
"""Test flow CSS keeps signed URL query separators intact."""
flow = create_test_flow()
background_url = (
"https://s3.ca-central-1.amazonaws.com/example/media/public/background.png"
"?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=credential"
"&X-Amz-Signature=signature"
)
with patch.object(Flow, "background_url", return_value=background_url):
response = self.client.get(
reverse("authentik_core:if-flow", kwargs={"flow_slug": flow.slug})
)
self.assertContains(
response,
f'--ak-global--background-image: url("{background_url}");',
html=False,
)
def test_flow_sfe_css_background_preserves_presigned_url_query(self):
"""Test SFE flow CSS keeps signed URL query separators intact."""
flow = create_test_flow()
background_url = (
"https://s3.ca-central-1.amazonaws.com/example/media/public/background.png"
"?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=credential"
"&X-Amz-Signature=signature"
)
with patch.object(Flow, "background_url", return_value=background_url):
response = self.client.get(
reverse("authentik_core:if-flow", kwargs={"flow_slug": flow.slug}) + "?sfe"
)
self.assertContains(
response,
f'background-image: url("{background_url}");',
html=False,
)
def view_tester_factory(view_class: type[StageView]) -> Callable:
"""Test a form"""

View File

@@ -53,16 +53,6 @@ class TestEndSessionView(OAuthTestCase):
self.brand.flow_invalidation = self.invalidation_flow
self.brand.save()
def _id_token_hint(self, host: str) -> str:
"""Issue a valid id_token_hint for the test provider under the given host."""
return self.provider.encode(
{
"iss": f"http://{host}/application/o/{self.app.slug}/",
"aud": self.provider.client_id,
"sub": str(self.user.pk),
}
)
def test_post_logout_redirect_uri_strict_match(self):
"""Test strict URI matching redirects to flow"""
self.client.force_login(self.user)
@@ -71,10 +61,7 @@ class TestEndSessionView(OAuthTestCase):
"authentik_providers_oauth2:end-session",
kwargs={"application_slug": self.app.slug},
),
{
"post_logout_redirect_uri": "http://testserver/logout",
"id_token_hint": self._id_token_hint(self.brand.domain),
},
{"post_logout_redirect_uri": "http://testserver/logout"},
HTTP_HOST=self.brand.domain,
)
# Should redirect to the invalidation flow
@@ -82,12 +69,7 @@ class TestEndSessionView(OAuthTestCase):
self.assertIn(self.invalidation_flow.slug, response.url)
def test_post_logout_redirect_uri_strict_no_match(self):
"""Test strict URI not matching returns an error and does not start logout flow.
Required by OIDC RP-Initiated Logout 1.0: on an unregistered
post_logout_redirect_uri, the OP MUST NOT redirect and MUST NOT proceed with
logout that targets the RP.
"""
"""Test strict URI not matching still proceeds with flow (no redirect URI in context)"""
self.client.force_login(self.user)
invalid_uri = "http://testserver/other"
response = self.client.get(
@@ -95,14 +77,12 @@ class TestEndSessionView(OAuthTestCase):
"authentik_providers_oauth2:end-session",
kwargs={"application_slug": self.app.slug},
),
{
"post_logout_redirect_uri": invalid_uri,
"id_token_hint": self._id_token_hint(self.brand.domain),
},
{"post_logout_redirect_uri": invalid_uri},
HTTP_HOST=self.brand.domain,
)
self.assertEqual(response.status_code, 400)
self.assertNotIn(invalid_uri, response.content.decode())
# Should still redirect to flow, but invalid URI should not be in response
self.assertEqual(response.status_code, 302)
self.assertNotIn(invalid_uri, response.url)
def test_post_logout_redirect_uri_regex_match(self):
"""Test regex URI matching redirects to flow"""
@@ -112,10 +92,7 @@ class TestEndSessionView(OAuthTestCase):
"authentik_providers_oauth2:end-session",
kwargs={"application_slug": self.app.slug},
),
{
"post_logout_redirect_uri": "https://app.example.com/logout",
"id_token_hint": self._id_token_hint(self.brand.domain),
},
{"post_logout_redirect_uri": "https://app.example.com/logout"},
HTTP_HOST=self.brand.domain,
)
# Should redirect to the invalidation flow
@@ -123,7 +100,7 @@ class TestEndSessionView(OAuthTestCase):
self.assertIn(self.invalidation_flow.slug, response.url)
def test_post_logout_redirect_uri_regex_no_match(self):
"""Test regex URI not matching returns an error and does not start logout flow."""
"""Test regex URI not matching"""
self.client.force_login(self.user)
invalid_uri = "https://malicious.com/logout"
response = self.client.get(
@@ -131,14 +108,12 @@ class TestEndSessionView(OAuthTestCase):
"authentik_providers_oauth2:end-session",
kwargs={"application_slug": self.app.slug},
),
{
"post_logout_redirect_uri": invalid_uri,
"id_token_hint": self._id_token_hint(self.brand.domain),
},
{"post_logout_redirect_uri": invalid_uri},
HTTP_HOST=self.brand.domain,
)
self.assertEqual(response.status_code, 400)
self.assertNotIn(invalid_uri, response.content.decode())
# Should still proceed to flow, but invalid URI should not be in response
self.assertEqual(response.status_code, 302)
self.assertNotIn(invalid_uri, response.url)
def test_state_parameter_appended_to_uri(self):
"""Test state parameter is appended to validated redirect URI"""
@@ -148,7 +123,6 @@ class TestEndSessionView(OAuthTestCase):
{
"post_logout_redirect_uri": "http://testserver/logout",
"state": "test-state-123",
"id_token_hint": self._id_token_hint("testserver"),
},
)
request.user = self.user
@@ -158,7 +132,6 @@ class TestEndSessionView(OAuthTestCase):
view.request = request
view.kwargs = {"application_slug": self.app.slug}
view.resolve_provider_application()
view.validate()
self.assertIn("state=test-state-123", view.post_logout_redirect_uri)
@@ -173,7 +146,6 @@ class TestEndSessionView(OAuthTestCase):
{
"post_logout_redirect_uri": "http://testserver/logout",
"state": "xyz789",
"id_token_hint": self._id_token_hint(self.brand.domain),
},
HTTP_HOST=self.brand.domain,
)

View File

@@ -5,8 +5,6 @@ from urllib.parse import quote, urlparse
from django.http import Http404, HttpRequest, HttpResponse
from django.shortcuts import get_object_or_404
from jwt import PyJWTError
from jwt import decode as jwt_decode
from authentik.common.oauth.constants import (
FORBIDDEN_URI_SCHEMES,
@@ -23,14 +21,11 @@ from authentik.flows.planner import (
from authentik.flows.stage import SessionEndStage
from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.lib.views import bad_request_message
from authentik.policies.views import PolicyAccessView
from authentik.policies.views import PolicyAccessView, RequestValidationError
from authentik.providers.iframe_logout import IframeLogoutStageView
from authentik.providers.oauth2.errors import TokenError
from authentik.providers.oauth2.models import (
AccessToken,
JWTAlgorithms,
OAuth2LogoutMethod,
OAuth2Provider,
RedirectURIMatchingMode,
)
from authentik.providers.oauth2.tasks import send_backchannel_logout_request
@@ -52,45 +47,21 @@ class EndSessionView(PolicyAccessView):
if not self.flow:
raise Http404
def validate(self):
# Parse end session parameters
query_dict = self.request.POST if self.request.method == "POST" else self.request.GET
state = query_dict.get("state")
request_redirect_uri = query_dict.get("post_logout_redirect_uri")
id_token_hint = query_dict.get("id_token_hint")
self.post_logout_redirect_uri = None
# OIDC Certification: Verify id_token_hint. If invalid or missing, throw an error
if id_token_hint:
# Load a fresh provider instance that's not part of the flow
# since it'll have the cryptography Certificate that can't be pickled
provider = OAuth2Provider.objects.get(pk=self.provider.pk)
key, alg = provider.jwt_key
if alg != JWTAlgorithms.HS256:
key = provider.signing_key.public_key
try:
jwt_decode(
id_token_hint,
key,
algorithms=[alg],
audience=provider.client_id,
issuer=provider.get_issuer(self.request),
# ID Tokens are short-lived; a logout request arriving
# after expiry is still legitimate and must succeed.
options={"verify_exp": False},
)
except PyJWTError:
raise TokenError("invalid_request").with_cause(
"id_token_hint_decode_failed"
) from None
# Validate post_logout_redirect_uri against registered URIs
if request_redirect_uri:
# OIDC Certification: id_token_hint required with post_logout_redirect_uri
if not id_token_hint:
raise TokenError("invalid_request").with_cause("id_token_hint_missing")
if urlparse(request_redirect_uri).scheme in FORBIDDEN_URI_SCHEMES:
raise TokenError("invalid_request").with_cause("post_logout_redirect_uri")
raise RequestValidationError(
bad_request_message(
self.request,
"Forbidden URI scheme in post_logout_redirect_uri",
)
)
for allowed in self.provider.post_logout_redirect_uris:
if allowed.matching_mode == RedirectURIMatchingMode.STRICT:
if request_redirect_uri == allowed.url:
@@ -100,10 +71,6 @@ class EndSessionView(PolicyAccessView):
if fullmatch(allowed.url, request_redirect_uri):
self.post_logout_redirect_uri = request_redirect_uri
break
# OIDC Certification: OP MUST NOT perform post-logout redirection
# if the supplied URI does not exactly match a registered one
if self.post_logout_redirect_uri is None:
raise TokenError("invalid_request").with_cause("invalid_post_logout_redirect_uri")
# Append state to the redirect URI if both are present
if self.post_logout_redirect_uri and state:
@@ -124,43 +91,50 @@ class EndSessionView(PolicyAccessView):
"<html><body>Logout successful</body></html>", content_type="text/html", status=200
)
# Otherwise, continue with normal policy checks
return super().dispatch(request, *args, **kwargs)
def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
"""Dispatch the flow planner for the invalidation flow"""
try:
self.validate()
except TokenError as exc:
return bad_request_message(
self.request,
exc.description,
)
planner = FlowPlanner(self.flow)
planner.allow_empty_flows = True
# Build flow context with logout parameters
context = {
PLAN_CONTEXT_APPLICATION: self.application,
}
# Get session info for logout notifications and token invalidation
auth_session = AuthenticatedSession.from_request(request, request.user)
# Add validated redirect URI (with state appended) to context if available
if self.post_logout_redirect_uri:
context[PLAN_CONTEXT_POST_LOGOUT_REDIRECT_URI] = self.post_logout_redirect_uri
# Invalidate tokens for this provider/session (RP-initiated logout:
# user stays logged into authentik, only this provider's tokens are revoked)
if request.user.is_authenticated and auth_session:
AccessToken.objects.filter(
user=request.user,
provider=self.provider,
session=auth_session,
).delete()
session_key = (
auth_session.session.session_key if auth_session and auth_session.session else None
)
# Handle frontchannel logout
frontchannel_logout_url = None
if self.provider.logout_method == OAuth2LogoutMethod.FRONTCHANNEL:
frontchannel_logout_url = build_frontchannel_logout_url(
self.provider, request, session_key
)
# Handle backchannel logout
if (
self.provider.logout_method == OAuth2LogoutMethod.BACKCHANNEL
and self.provider.logout_uri
):
# Find access token to get iss and sub for the logout token
access_token = AccessToken.objects.filter(
user=request.user,
provider=self.provider,
@@ -189,16 +163,9 @@ class EndSessionView(PolicyAccessView):
}
]
access_tokens = AccessToken.objects.filter(
user=request.user,
provider=self.provider,
)
if auth_session:
access_tokens = access_tokens.filter(session=auth_session)
access_tokens.delete()
plan = planner.plan(request, context)
# Inject iframe logout stage if frontchannel logout is configured
if frontchannel_logout_url:
plan.insert_stage(in_memory_stage(IframeLogoutStageView))

View File

@@ -1,5 +1,6 @@
"""RAC Signals"""
from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from django.core.cache import cache
from django.db.models.signals import post_delete, post_save, pre_delete
@@ -17,7 +18,7 @@ from authentik.providers.rac.models import ConnectionToken, Endpoint
@receiver(pre_delete, sender=AuthenticatedSession)
def user_session_deleted(sender, instance: AuthenticatedSession, **_):
layer = get_channel_layer()
layer.group_send_blocking(
async_to_sync(layer.group_send)(
build_rac_client_group_session(instance.session.session_key),
{"type": "event.disconnect", "reason": "session_logout"},
)
@@ -27,7 +28,7 @@ def user_session_deleted(sender, instance: AuthenticatedSession, **_):
def pre_delete_connection_token_disconnect(sender, instance: ConnectionToken, **_):
"""Disconnect session when connection token is deleted"""
layer = get_channel_layer()
layer.group_send_blocking(
async_to_sync(layer.group_send)(
build_rac_client_group_token(instance.token),
{"type": "event.disconnect", "reason": "token_delete"},
)

View File

@@ -6,7 +6,6 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_core", "0056_user_roles"), # must run before group field is removed
("authentik_rbac", "0009_remove_initialpermissions_mode"),
]

View File

@@ -172,7 +172,6 @@ SPECTACULAR_SETTINGS = {
},
"ENUM_NAME_OVERRIDES": {
"AppEnum": "authentik.lib.api.Apps",
"AuthenticationEnum": "authentik.flows.models.FlowAuthenticationRequirement",
"ConsentModeEnum": "authentik.stages.consent.models.ConsentMode",
"CountryCodeEnum": "django_countries.countries",
"DeviceClassesEnum": "authentik.stages.authenticator_validate.models.DeviceClasses",
@@ -187,7 +186,6 @@ SPECTACULAR_SETTINGS = {
"PolicyEngineMode": "authentik.policies.models.PolicyEngineMode",
"PromptTypeEnum": "authentik.stages.prompt.models.FieldTypes",
"ProxyMode": "authentik.providers.proxy.models.ProxyMode",
"RedirectURITypeEnum": "authentik.providers.oauth2.models.RedirectURIType",
"SAMLBindingsEnum": "authentik.providers.saml.models.SAMLBindings",
"SAMLLogoutMethods": "authentik.providers.saml.models.SAMLLogoutMethods",
"SAMLNameIDPolicyEnum": "authentik.sources.saml.models.SAMLNameIDPolicy",
@@ -221,7 +219,7 @@ REST_FRAMEWORK = {
"authentik.api.search.ql.QLSearch",
"authentik.rbac.filters.ObjectFilter",
"django_filters.rest_framework.DjangoFilterBackend",
"authentik.api.ordering.NullsAwareOrderingFilter",
"rest_framework.filters.OrderingFilter",
],
"DEFAULT_PERMISSION_CLASSES": ("authentik.rbac.permissions.ObjectPermissions",),
"DEFAULT_AUTHENTICATION_CLASSES": (

File diff suppressed because one or more lines are too long

View File

@@ -1,64 +0,0 @@
# Generated by Django 5.2.12 on 2026-03-14 02:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"authentik_stages_prompt",
"0011_prompt_initial_value_prompt_initial_value_expression_and_more",
),
]
operations = [
migrations.AlterField(
model_name="prompt",
name="type",
field=models.CharField(
choices=[
("text", "Text: Simple Text input"),
("text_area", "Text area: Multiline Text Input."),
(
"text_read_only",
"Text (read-only): Simple Text input, but cannot be edited.",
),
(
"text_area_read_only",
"Text area (read-only): Multiline Text input, but cannot be edited.",
),
(
"username",
"Username: Same as Text input, but checks for and prevents duplicate usernames.",
),
("email", "Email: Text field with Email type."),
(
"password",
"Password: Masked input, multiple inputs of this type on the same prompt need to be identical.",
),
("number", "Number"),
("checkbox", "Checkbox"),
(
"radio-button-group",
"Fixed choice field rendered as a group of radio buttons.",
),
("dropdown", "Fixed choice field rendered as a dropdown."),
("date", "Date"),
("date-time", "Date Time"),
(
"file",
"File: File upload for arbitrary files. File content will be available in flow context as data-URI",
),
("separator", "Separator: Static Separator Line"),
("hidden", "Hidden: Hidden field, can be used to insert data into form."),
("static", "Static: Static value, displayed as-is."),
("alert_info", "Alert (Info): Static alert box with info styling"),
("alert_warning", "Alert (Warning): Static alert box with warning styling"),
("alert_danger", "Alert (Danger): Static alert box with danger styling"),
("ak-locale", "authentik: Selection of locales authentik supports"),
],
max_length=100,
),
),
]

View File

@@ -87,11 +87,6 @@ class FieldTypes(models.TextChoices):
HIDDEN = "hidden", _("Hidden: Hidden field, can be used to insert data into form.")
STATIC = "static", _("Static: Static value, displayed as-is.")
# Alert box types for displaying styled messages
ALERT_INFO = "alert_info", _("Alert (Info): Static alert box with info styling")
ALERT_WARNING = "alert_warning", _("Alert (Warning): Static alert box with warning styling")
ALERT_DANGER = "alert_danger", _("Alert (Danger): Static alert box with danger styling")
AK_LOCALE = "ak-locale", _("authentik: Selection of locales authentik supports")
@@ -304,12 +299,7 @@ class Prompt(SerializerModel):
field_class = HiddenField
kwargs["required"] = False
kwargs["default"] = self.placeholder
case (
FieldTypes.STATIC
| FieldTypes.ALERT_INFO
| FieldTypes.ALERT_WARNING
| FieldTypes.ALERT_DANGER
):
case FieldTypes.STATIC:
kwargs["default"] = self.placeholder
kwargs["required"] = False
kwargs["label"] = ""

View File

@@ -124,9 +124,6 @@ class PromptChallengeResponse(ChallengeResponse):
type__in=[
FieldTypes.HIDDEN,
FieldTypes.STATIC,
FieldTypes.ALERT_INFO,
FieldTypes.ALERT_WARNING,
FieldTypes.ALERT_DANGER,
FieldTypes.TEXT_READ_ONLY,
FieldTypes.TEXT_AREA_READ_ONLY,
]

View File

@@ -330,20 +330,10 @@ class TestPromptStage(FlowTestCase):
def test_static_hidden_overwrite(self):
"""Test that static and hidden fields ignore any value sent to them"""
alert_prompt = Prompt.objects.create(
name=generate_id(),
field_key="alert_prompt",
type=FieldTypes.ALERT_INFO,
required=True,
placeholder="alert fallback",
initial_value="alert content",
)
self.stage.fields.add(alert_prompt)
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
plan.context[PLAN_CONTEXT_PROMPT] = {"hidden_prompt": "hidden"}
self.prompt_data["hidden_prompt"] = "foo"
self.prompt_data["static_prompt"] = "foo"
self.prompt_data["alert_prompt"] = "foo"
challenge_response = PromptChallengeResponse(
None, stage_instance=self.stage, plan=plan, data=self.prompt_data, stage=self.stage_view
)
@@ -351,7 +341,6 @@ class TestPromptStage(FlowTestCase):
self.assertNotEqual(challenge_response.validated_data["hidden_prompt"], "foo")
self.assertEqual(challenge_response.validated_data["hidden_prompt"], "hidden")
self.assertNotEqual(challenge_response.validated_data["static_prompt"], "foo")
self.assertEqual(challenge_response.validated_data["alert_prompt"], "alert content")
def test_prompt_placeholder(self):
"""Test placeholder and expression"""

View File

@@ -16,7 +16,7 @@ class RedirectMode(models.TextChoices):
class RedirectStage(Stage):
"""Redirect the user to a static URL or another flow, optionally with all gathered context."""
"""Redirect the user to another flow, potentially with all gathered context."""
keep_context = models.BooleanField(default=True)
mode = models.TextField(choices=RedirectMode.choices)

View File

@@ -7,7 +7,7 @@ from dramatiq.broker import Broker, MessageProxy, get_broker
from dramatiq.middleware.middleware import Middleware
from dramatiq.middleware.retries import Retries
from dramatiq.results.middleware import Results
from dramatiq.worker import ConsumerThread, Worker, WorkerThread
from dramatiq.worker import Worker, _ConsumerThread, _WorkerThread
from authentik.tasks.broker import PostgresBroker
@@ -20,7 +20,7 @@ class TestWorker(Worker):
self.worker_id = 1000
self.work_queue = PriorityQueue()
self.consumers = {
TESTING_QUEUE: ConsumerThread(
TESTING_QUEUE: _ConsumerThread(
broker=self.broker,
queue_name=TESTING_QUEUE,
prefetch=2,
@@ -33,7 +33,7 @@ class TestWorker(Worker):
prefetch=2,
timeout=1,
)
self._worker = WorkerThread(
self._worker = _WorkerThread(
broker=self.broker,
consumers=self.consumers,
work_queue=self.work_queue,
@@ -78,18 +78,17 @@ def use_test_broker():
actor.broker = broker
actor.broker.declare_actor(actor)
for middleware_class_path, middleware_kwargs in Conf().middlewares:
middleware_class = import_string(middleware_class_path)
if issubclass(middleware_class, Results):
middleware_kwargs["backend"] = import_string(Conf().result_backend)(
*Conf().result_backend_args,
**Conf().result_backend_kwargs,
)
middleware: Middleware = middleware_class(
for middleware_class, middleware_kwargs in Conf().middlewares:
middleware: Middleware = import_string(middleware_class)(
**middleware_kwargs,
)
if isinstance(middleware, Retries):
middleware.max_retries = 0
if isinstance(middleware, Results):
middleware.backend = import_string(Conf().result_backend)(
*Conf().result_backend_args,
**Conf().result_backend_kwargs,
)
broker.add_middleware(middleware)
broker.start()

View File

@@ -59,8 +59,6 @@ class FlagsJSONExtension(OpenApiSerializerFieldExtension):
props[_flag.key] = build_basic_type(get_args(_flag.__orig_bases__[0])[0])
if _flag.description:
props[_flag.key]["description"] = _flag.description
if _flag.deprecated:
props[_flag.key]["deprecated"] = _flag.deprecated
return build_object_type(props, required=props.keys())

View File

@@ -18,7 +18,6 @@ class Flag[T]:
Literal["none"] | Literal["public"] | Literal["authenticated"] | Literal["system"]
) = "none"
description: str | None = None
deprecated = False
def __init_subclass__(cls, key: str, **kwargs):
cls.__key = key

View File

@@ -1,306 +0,0 @@
version: 1
metadata:
name: Example - Account lockdown flow
labels:
blueprints.goauthentik.io/instantiate: "false"
entries:
flows:
# Main lockdown flow - requires authentication
- conditions:
- !Context goauthentik.io/enterprise/licensed
attrs:
designation: stage_configuration
name: Account Lockdown
title: Lock Account
authentication: require_authenticated
identifiers:
slug: default-account-lockdown
model: authentik_flows.flow
id: flow
# Self-service completion flow - no authentication required
- conditions:
- !Context goauthentik.io/enterprise/licensed
attrs:
designation: stage_configuration
name: Account Lockdown Complete
title: Account Locked
authentication: none
identifiers:
slug: default-account-lockdown-complete
model: authentik_flows.flow
id: completion-flow
prompt_fields:
# Warning field - danger alert box (content varies based on self-service vs admin)
- conditions:
- !Context goauthentik.io/enterprise/licensed
attrs:
order: 50
initial_value: |
target_uuid = (http_request.session.get("authentik/flows/get", {}) or {}).get("user_uuid")
current_user_uuid = str(getattr(user, "pk", "") or getattr(http_request.user, "pk", ""))
is_self_service = not target_uuid or target_uuid == current_user_uuid
pending_user = None
if target_uuid and not is_self_service:
from authentik.core.models import User
pending_user = User.objects.filter(pk=target_uuid).first()
if is_self_service:
return (
"<p><strong>You are about to lock down your own account.</strong></p>"
"<p>This is an emergency action for cutting off access to your account right away.</p>"
"<p><strong>This will immediately:</strong></p>"
"<ul>"
"<li><strong>Invalidate your password</strong> - Your password will be set to a random value "
"and cannot be recovered</li>"
"<li><strong>Deactivate your account</strong> - Your account will be disabled</li>"
"<li><strong>Terminate all your sessions</strong> - You will be logged out everywhere</li>"
"<li><strong>Revoke all your tokens</strong> - All your API, app password, recovery, "
"verification, and OAuth2 tokens and grants will be revoked</li>"
"</ul>"
"<p><strong>This action cannot be easily undone.</strong></p>"
)
from django.utils.html import escape
if pending_user:
email = escape(pending_user.email or pending_user.name or "No email")
user_html = f"<p><code>{escape(pending_user.username)}</code> ({email})</p>"
else:
user_html = "<p>the account selected when this one-time lockdown link was created</p>"
return (
"<p><strong>You are about to lock down the following account:</strong></p>"
f"{user_html}"
"<p>This is an emergency action for cutting off access to the account right away. "
"It does not lock the administrator who opened this page.</p>"
"<p><strong>This will immediately:</strong></p>"
"<ul>"
"<li>Invalidate the user's password</li>"
"<li>Deactivate the user</li>"
"<li>Terminate all sessions - All active sessions will be ended</li>"
"<li>Revoke all tokens - All API, app password, recovery, verification, and OAuth2 "
"tokens and grants will be revoked</li>"
"</ul>"
"<p><strong>This action cannot be easily undone.</strong></p>"
)
initial_value_expression: true
required: false
type: alert_danger
field_key: lockdown_warning
label: Warning
sub_text: ""
identifiers:
name: default-account-lockdown-field-warning
id: prompt-field-warning
model: authentik_stages_prompt.prompt
# Info field - when to use lockdown (content varies based on self-service vs admin)
- conditions:
- !Context goauthentik.io/enterprise/licensed
attrs:
order: 100
initial_value: |
target_uuid = (http_request.session.get("authentik/flows/get", {}) or {}).get("user_uuid")
current_user_uuid = str(getattr(user, "pk", "") or getattr(http_request.user, "pk", ""))
is_self_service = not target_uuid or target_uuid == current_user_uuid
if is_self_service:
info = (
"Use this if you no longer trust your current password or sessions. "
"After lockdown, you will need help from your administrator or security team to regain access."
)
else:
info = (
"Use this for incident response on the listed account, for example after a compromise report "
"or suspicious activity. The reason you enter below will be recorded in the audit log."
)
return (
f"<p>{info}</p>"
'<p><a href="https://docs.goauthentik.io/docs/security/'
'account-lockdown?utm_source=authentik" '
'target="_blank" rel="noopener noreferrer">Learn more about account lockdown</a></p>'
)
initial_value_expression: true
required: false
type: alert_info
field_key: lockdown_info
label: Information
sub_text: ""
identifiers:
name: default-account-lockdown-field-info
id: prompt-field-info
model: authentik_stages_prompt.prompt
# Reason field - text area for lockdown reason
- conditions:
- !Context goauthentik.io/enterprise/licensed
attrs:
order: 200
placeholder: |
target_uuid = (http_request.session.get("authentik/flows/get", {}) or {}).get("user_uuid")
current_user_uuid = str(getattr(user, "pk", "") or getattr(http_request.user, "pk", ""))
is_self_service = not target_uuid or target_uuid == current_user_uuid
if is_self_service:
return "Describe why you are locking your account..."
return "Describe why this account is being locked down..."
placeholder_expression: true
required: true
type: text_area
field_key: lockdown_reason
label: Reason
sub_text: This explanation will be recorded in the audit log.
identifiers:
name: default-account-lockdown-field-reason
id: prompt-field-reason
model: authentik_stages_prompt.prompt
prompt_stages:
# Prompt stage for warnings and reason input
- conditions:
- !Context goauthentik.io/enterprise/licensed
attrs:
fields:
- !KeyOf prompt-field-warning
- !KeyOf prompt-field-info
- !KeyOf prompt-field-reason
identifiers:
name: default-account-lockdown-prompt
id: default-account-lockdown-prompt
model: authentik_stages_prompt.promptstage
lockdown_stage:
# Account lockdown stage - performs the actual lockdown
- conditions:
- !Context goauthentik.io/enterprise/licensed
identifiers:
name: default-account-lockdown-stage
id: default-account-lockdown-stage
model: authentik_stages_account_lockdown.accountlockdownstage
attrs:
deactivate_user: true
set_unusable_password: true
delete_sessions: true
revoke_tokens: true
self_service_completion_flow: !Find [authentik_flows.flow, [slug, default-account-lockdown-complete]]
completion_prompt:
# Completion message field - confirmation shown after an admin-triggered lockdown
- conditions:
- !Context goauthentik.io/enterprise/licensed
attrs:
order: 300
initial_value: |
target_uuid = (http_request.session.get("authentik/flows/get", {}) or {}).get("user_uuid")
from django.utils.html import escape
from authentik.core.models import User
if target_uuid:
target = User.objects.filter(pk=target_uuid).first()
if target:
return f"<p><code>{escape(target.username)}</code> has been locked down.</p>"
return "<p>The selected account has been locked down.</p>"
initial_value_expression: true
required: false
type: alert_info
field_key: lockdown_complete
label: Result
sub_text: ""
identifiers:
name: default-account-lockdown-field-complete
id: prompt-field-complete
model: authentik_stages_prompt.prompt
# Prompt stage for admin completion message
- conditions:
- !Context goauthentik.io/enterprise/licensed
attrs:
fields:
- !KeyOf prompt-field-complete
identifiers:
name: default-account-lockdown-complete-prompt
id: default-account-lockdown-complete-prompt
model: authentik_stages_prompt.promptstage
policies:
# Expression policy to check if this is NOT a self-service lockdown (admin)
- conditions:
- !Context goauthentik.io/enterprise/licensed
attrs:
name: default-account-lockdown-admin-policy
expression: |
target_uuid = (request.http_request.session.get("authentik/flows/get", {}) or {}).get("user_uuid")
current_user_uuid = str(getattr(request.user, "pk", "") or getattr(request.http_request.user, "pk", ""))
return bool(target_uuid) and target_uuid != current_user_uuid
identifiers:
name: default-account-lockdown-admin-policy
id: admin-policy
model: authentik_policies_expression.expressionpolicy
bindings:
# Stage bindings
- conditions:
- !Context goauthentik.io/enterprise/licensed
identifiers:
order: 0
stage: !KeyOf default-account-lockdown-prompt
target: !KeyOf flow
model: authentik_flows.flowstagebinding
- conditions:
- !Context goauthentik.io/enterprise/licensed
identifiers:
order: 10
stage: !KeyOf default-account-lockdown-stage
target: !KeyOf flow
model: authentik_flows.flowstagebinding
# Admin completion stage binding - shown for admin lockdown only
- conditions:
- !Context goauthentik.io/enterprise/licensed
identifiers:
order: 20
stage: !KeyOf default-account-lockdown-complete-prompt
target: !KeyOf flow
id: admin-completion-binding
model: authentik_flows.flowstagebinding
# Bind the admin policy to the admin completion stage
- conditions:
- !Context goauthentik.io/enterprise/licensed
attrs:
enabled: true
negate: false
order: 0
identifiers:
policy: !KeyOf admin-policy
target: !KeyOf admin-completion-binding
model: authentik_policies.policybinding
self_service_completion:
# Self-service completion message field (for the unauthenticated completion flow)
- conditions:
- !Context goauthentik.io/enterprise/licensed
attrs:
order: 100
initial_value: |
return (
"<h1>Your account has been locked</h1>"
"<p>You have been logged out of all sessions and your password has been invalidated.</p>"
"<p>To regain access to your account, please contact your IT administrator or security team.</p>"
)
initial_value_expression: true
required: false
type: alert_warning
field_key: self_lockdown_complete
label: Account locked
sub_text: ""
identifiers:
name: default-account-lockdown-self-field-complete
id: self-prompt-field-complete
model: authentik_stages_prompt.prompt
# Prompt stage for self-service completion (unauthenticated)
- conditions:
- !Context goauthentik.io/enterprise/licensed
attrs:
fields:
- !KeyOf self-prompt-field-complete
identifiers:
name: default-account-lockdown-self-complete-prompt
id: default-account-lockdown-self-complete-prompt
model: authentik_stages_prompt.promptstage
# Bind self-service completion stage to the completion flow
- conditions:
- !Context goauthentik.io/enterprise/licensed
identifiers:
order: 0
stage: !KeyOf default-account-lockdown-self-complete-prompt
target: !Find [authentik_flows.flow, [slug, default-account-lockdown-complete]]
model: authentik_flows.flowstagebinding

View File

@@ -1,211 +0,0 @@
# Minimal Invitation-based Enrollment Blueprint
#
# Companion to flows-invitation-enrollment.yaml, intended for the "New Invitation"
# wizard in the admin UI. Creates a single enrollment flow with an invitation stage
# bound to it, plus the supporting prompt/user-write/user-login stages.
#
# All user-facing fields are parameterized via !Context with fallback defaults, so
# this blueprint can be imported directly (without context) or through the wizard
# with custom values.
#
# Context keys (all optional):
# flow_name Display name of the enrollment flow.
# flow_slug URL slug of the flow and suffix for sub-entity
# identifiers (so repeated imports with different
# slugs don't overwrite each other).
# stage_name Name of the invitation stage.
# continue_flow_without_invitation Whether the flow continues when no invitation
# is supplied (default: false).
# user_type "external" or "internal" (default: "external").
# Drives the user-write stage's user_type and
# user_path_template.
version: 1
metadata:
labels:
blueprints.goauthentik.io/instantiate: "false"
name: Invitation-based Enrollment (minimal)
entries:
- identifiers:
slug: !Context [flow_slug, invitation-enrollment-flow]
model: authentik_flows.flow
id: flow
attrs:
name: !Context [flow_name, Invitation Enrollment Flow]
title: !Context [flow_name, Invitation Enrollment Flow]
designation: enrollment
authentication: require_unauthenticated
- identifiers:
name: !Context [stage_name, invitation-stage]
id: invitation-stage
model: authentik_stages_invitation.invitationstage
attrs:
continue_flow_without_invitation: !Context [continue_flow_without_invitation, false]
- identifiers:
name:
!Format [
"invitation-enrollment-field-username-%s",
!Context [flow_slug, invitation-enrollment-flow],
]
id: prompt-field-username
model: authentik_stages_prompt.prompt
attrs:
field_key: username
label: Username
type: username
required: true
placeholder: Username
placeholder_expression: false
order: 0
- identifiers:
name:
!Format [
"invitation-enrollment-field-password-%s",
!Context [flow_slug, invitation-enrollment-flow],
]
id: prompt-field-password
model: authentik_stages_prompt.prompt
attrs:
field_key: password
label: Password
type: password
required: true
placeholder: Password
placeholder_expression: false
order: 1
- identifiers:
name:
!Format [
"invitation-enrollment-field-password-repeat-%s",
!Context [flow_slug, invitation-enrollment-flow],
]
id: prompt-field-password-repeat
model: authentik_stages_prompt.prompt
attrs:
field_key: password_repeat
label: Password (repeat)
type: password
required: true
placeholder: Password (repeat)
placeholder_expression: false
order: 2
- identifiers:
name:
!Format [
"invitation-enrollment-field-name-%s",
!Context [flow_slug, invitation-enrollment-flow],
]
id: prompt-field-name
model: authentik_stages_prompt.prompt
attrs:
field_key: name
label: Name
type: text
required: true
placeholder: Name
placeholder_expression: false
order: 0
- identifiers:
name:
!Format [
"invitation-enrollment-field-email-%s",
!Context [flow_slug, invitation-enrollment-flow],
]
id: prompt-field-email
model: authentik_stages_prompt.prompt
attrs:
field_key: email
label: Email
type: email
required: true
placeholder: Email
placeholder_expression: false
order: 1
- identifiers:
name:
!Format [
"invitation-enrollment-prompt-credentials-%s",
!Context [flow_slug, invitation-enrollment-flow],
]
id: prompt-stage-credentials
model: authentik_stages_prompt.promptstage
attrs:
fields:
- !KeyOf prompt-field-username
- !KeyOf prompt-field-password
- !KeyOf prompt-field-password-repeat
- identifiers:
name:
!Format [
"invitation-enrollment-prompt-details-%s",
!Context [flow_slug, invitation-enrollment-flow],
]
id: prompt-stage-details
model: authentik_stages_prompt.promptstage
attrs:
fields:
- !KeyOf prompt-field-name
- !KeyOf prompt-field-email
- identifiers:
name:
!Format [
"invitation-enrollment-user-write-%s",
!Context [flow_slug, invitation-enrollment-flow],
]
id: user-write-stage
model: authentik_stages_user_write.userwritestage
attrs:
user_creation_mode: always_create
user_type: !Context [user_type, external]
user_path_template:
!Format ["users/%s", !Context [user_type, external]]
- identifiers:
name:
!Format [
"invitation-enrollment-user-login-%s",
!Context [flow_slug, invitation-enrollment-flow],
]
id: user-login-stage
model: authentik_stages_user_login.userloginstage
- identifiers:
target: !KeyOf flow
stage: !KeyOf invitation-stage
order: 5
model: authentik_flows.flowstagebinding
attrs:
evaluate_on_plan: true
re_evaluate_policies: true
- identifiers:
target: !KeyOf flow
stage: !KeyOf prompt-stage-credentials
order: 10
model: authentik_flows.flowstagebinding
- identifiers:
target: !KeyOf flow
stage: !KeyOf prompt-stage-details
order: 15
model: authentik_flows.flowstagebinding
- identifiers:
target: !KeyOf flow
stage: !KeyOf user-write-stage
order: 20
model: authentik_flows.flowstagebinding
- identifiers:
target: !KeyOf flow
stage: !KeyOf user-login-stage
order: 100
model: authentik_flows.flowstagebinding

View File

@@ -1216,46 +1216,6 @@
}
}
},
{
"type": "object",
"required": [
"model",
"identifiers"
],
"properties": {
"model": {
"const": "authentik_stages_account_lockdown.accountlockdownstage"
},
"id": {
"type": "string"
},
"state": {
"type": "string",
"enum": [
"absent",
"created",
"must_created",
"present"
],
"default": "present"
},
"conditions": {
"type": "array",
"items": {
"type": "boolean"
}
},
"permissions": {
"$ref": "#/$defs/model_authentik_stages_account_lockdown.accountlockdownstage_permissions"
},
"attrs": {
"$ref": "#/$defs/model_authentik_stages_account_lockdown.accountlockdownstage"
},
"identifiers": {
"$ref": "#/$defs/model_authentik_stages_account_lockdown.accountlockdownstage"
}
}
},
{
"type": "object",
"required": [
@@ -5140,11 +5100,6 @@
"format": "uuid",
"title": "Flow device code"
},
"flow_lockdown": {
"type": "string",
"format": "uuid",
"title": "Flow lockdown"
},
"default_application": {
"type": "string",
"format": "uuid",
@@ -6139,10 +6094,6 @@
"authentik_sources_telegram.view_telegramsource",
"authentik_sources_telegram.view_telegramsourcepropertymapping",
"authentik_sources_telegram.view_usertelegramsourceconnection",
"authentik_stages_account_lockdown.add_accountlockdownstage",
"authentik_stages_account_lockdown.change_accountlockdownstage",
"authentik_stages_account_lockdown.delete_accountlockdownstage",
"authentik_stages_account_lockdown.view_accountlockdownstage",
"authentik_stages_authenticator_duo.add_authenticatorduostage",
"authentik_stages_authenticator_duo.add_duodevice",
"authentik_stages_authenticator_duo.change_authenticatorduostage",
@@ -7806,69 +7757,6 @@
}
}
},
"model_authentik_stages_account_lockdown.accountlockdownstage": {
"type": "object",
"properties": {
"name": {
"type": "string",
"minLength": 1,
"title": "Name"
},
"deactivate_user": {
"type": "boolean",
"title": "Deactivate user",
"description": "Deactivate the user account (set is_active to False)"
},
"set_unusable_password": {
"type": "boolean",
"title": "Set unusable password",
"description": "Set an unusable password for the user"
},
"delete_sessions": {
"type": "boolean",
"title": "Delete sessions",
"description": "Delete all active sessions for the user"
},
"revoke_tokens": {
"type": "boolean",
"title": "Revoke tokens",
"description": "Revoke all tokens for the user (API, app password, recovery, verification, OAuth)"
},
"self_service_completion_flow": {
"type": "string",
"format": "uuid",
"title": "Self service completion flow",
"description": "Flow to redirect users to after self-service lockdown. This flow should not require authentication since the user's session is deleted."
}
},
"required": []
},
"model_authentik_stages_account_lockdown.accountlockdownstage_permissions": {
"type": "array",
"items": {
"type": "object",
"required": [
"permission"
],
"properties": {
"permission": {
"type": "string",
"enum": [
"add_accountlockdownstage",
"change_accountlockdownstage",
"delete_accountlockdownstage",
"view_accountlockdownstage"
]
},
"user": {
"type": "integer"
},
"role": {
"type": "string"
}
}
}
},
"model_authentik_stages_authenticator_endpoint_gdtc.authenticatorendpointgdtcstage": {
"type": "object",
"properties": {
@@ -9064,7 +8952,6 @@
"authentik.enterprise.providers.ssf",
"authentik.enterprise.providers.ws_federation",
"authentik.enterprise.reports",
"authentik.enterprise.stages.account_lockdown",
"authentik.enterprise.stages.authenticator_endpoint_gdtc",
"authentik.enterprise.stages.mtls",
"authentik.enterprise.stages.source"
@@ -9197,7 +9084,6 @@
"authentik_providers_ssf.ssfprovider",
"authentik_providers_ws_federation.wsfederationprovider",
"authentik_reports.dataexport",
"authentik_stages_account_lockdown.accountlockdownstage",
"authentik_stages_authenticator_endpoint_gdtc.authenticatorendpointgdtcstage",
"authentik_stages_mtls.mutualtlsstage",
"authentik_stages_source.sourcestage"
@@ -11905,10 +11791,6 @@
"authentik_sources_telegram.view_telegramsource",
"authentik_sources_telegram.view_telegramsourcepropertymapping",
"authentik_sources_telegram.view_usertelegramsourceconnection",
"authentik_stages_account_lockdown.add_accountlockdownstage",
"authentik_stages_account_lockdown.change_accountlockdownstage",
"authentik_stages_account_lockdown.delete_accountlockdownstage",
"authentik_stages_account_lockdown.view_accountlockdownstage",
"authentik_stages_authenticator_duo.add_authenticatorduostage",
"authentik_stages_authenticator_duo.add_duodevice",
"authentik_stages_authenticator_duo.change_authenticatorduostage",
@@ -15775,9 +15657,6 @@
"separator",
"hidden",
"static",
"alert_info",
"alert_warning",
"alert_danger",
"ak-locale"
],
"title": "Type"

View File

@@ -73,16 +73,8 @@ entries:
redirect_uris:
- matching_mode: strict
url: https://localhost:8443/test/a/authentik/callback
redirect_uri_type: authorization
- matching_mode: strict
url: https://host.docker.internal:8443/test/a/authentik/callback
redirect_uri_type: authorization
- matching_mode: strict
url: https://localhost:8443/test/a/authentik/post_logout_redirect
redirect_uri_type: logout
- matching_mode: strict
url: https://host.docker.internal:8443/test/a/authentik/post_logout_redirect
redirect_uri_type: logout
grant_types:
- authorization_code
- implicit
@@ -116,16 +108,8 @@ entries:
redirect_uris:
- matching_mode: strict
url: https://localhost:8443/test/a/authentik/callback
redirect_uri_type: authorization
- matching_mode: strict
url: https://host.docker.internal:8443/test/a/authentik/callback
redirect_uri_type: authorization
- matching_mode: strict
url: https://localhost:8443/test/a/authentik/post_logout_redirect
redirect_uri_type: logout
- matching_mode: strict
url: https://host.docker.internal:8443/test/a/authentik/post_logout_redirect
redirect_uri_type: logout
grant_types:
- authorization_code
- implicit

View File

@@ -26,8 +26,6 @@ var healthcheckCmd = &cobra.Command{
exitCode := 1
log.WithField("mode", mode).Debug("checking health")
switch strings.ToLower(mode) {
case "allinone":
fallthrough
case "server":
exitCode = check(fmt.Sprintf("http://localhost%s-/health/live/", config.Get().Web.Path))
case "worker":

2
go.mod
View File

@@ -7,7 +7,7 @@ require (
beryju.io/radius-eap v0.1.0
github.com/avast/retry-go/v4 v4.7.0
github.com/coreos/go-oidc/v3 v3.18.0
github.com/getsentry/sentry-go v0.46.1
github.com/getsentry/sentry-go v0.46.0
github.com/go-http-utils/etag v0.0.0-20161124023236-513ea8f21eb1
github.com/go-ldap/ldap/v3 v3.4.13
github.com/go-openapi/runtime v0.29.4

4
go.sum
View File

@@ -20,8 +20,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk=
github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/getsentry/sentry-go v0.46.1 h1:mZyQFaQYkPxAdDG4HR8gDg6j4CnKYVWt4TF92N7i3XY=
github.com/getsentry/sentry-go v0.46.1/go.mod h1:evVbw2qotNUdYG8KxXbAdjOQWWvWIwKxpjdZZIvcIPw=
github.com/getsentry/sentry-go v0.46.0 h1:mbdDaarbUdOt9X+dx6kDdntkShLEX3/+KyOsVDTPDj0=
github.com/getsentry/sentry-go v0.46.0/go.mod h1:evVbw2qotNUdYG8KxXbAdjOQWWvWIwKxpjdZZIvcIPw=
github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 h1:BP4M0CvQ4S3TGls2FvczZtj5Re/2ZzkV9VwqPHH/3Bo=
github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0=
github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA=

View File

@@ -31,7 +31,7 @@ function run_authentik {
echo go run ./cmd/server "$@"
fi
;;
allinone | worker)
worker)
if [[ -x "$(command -v authentik)" ]]; then
echo authentik "$@"
else
@@ -79,7 +79,7 @@ function prepare_debug {
apt-get update
apt-get install -y --no-install-recommends krb5-kdc krb5-user krb5-admin-server libkrb5-dev gcc
source "${VENV_PATH}/bin/activate"
uv sync --active --locked
uv sync --active --frozen
touch /unittest.xml
chown authentik:authentik /unittest.xml
}
@@ -105,7 +105,7 @@ elif [[ "$1" == "test-all" ]]; then
prepare_debug
chmod 777 /root
check_if_root_and_run manage test authentik
elif [[ "$1" == "allinone" ]] || [[ "$1" == "server" ]] || [[ "$1" == "worker" ]]; then
elif [[ "$1" == "server" ]] || [[ "$1" == "worker" ]]; then
wait_for_db
check_if_root_and_run "$@"
elif [[ "$1" == "healthcheck" ]]; then

View File

@@ -9,12 +9,12 @@
"version": "0.0.0",
"license": "MIT",
"devDependencies": {
"aws-cdk": "^2.1119.0",
"aws-cdk": "^2.1118.4",
"cross-env": "^10.1.0"
},
"engines": {
"node": ">=24",
"npm": ">=11.10.1"
"node": ">=20",
"npm": ">=11.6.2"
}
},
"node_modules/@epic-web/invariant": {
@@ -25,9 +25,9 @@
"license": "MIT"
},
"node_modules/aws-cdk": {
"version": "2.1119.0",
"resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.1119.0.tgz",
"integrity": "sha512-XBxZEKH3BY4M1EX6x0qBkmOAj8viErjpww14iH6Z3z6nI0YzjZeJ05eEl7eJwzUgv7NTGagWBS9m/eDJW5+dAg==",
"version": "2.1118.4",
"resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.1118.4.tgz",
"integrity": "sha512-wJfRQdvb+FJ2cni059mYdmjhfwhMskP+PAB59BL9jhon+jYtjy8X3pbj3uzHgAOJwNhh6jGkP8xq36Cffccbbw==",
"dev": true,
"license": "Apache-2.0",
"bin": {

View File

@@ -7,24 +7,11 @@
"aws-cfn": "cross-env CI=false cdk synth --version-reporting=false > template.yaml"
},
"devDependencies": {
"aws-cdk": "^2.1119.0",
"aws-cdk": "^2.1118.4",
"cross-env": "^10.1.0"
},
"engines": {
"node": ">=24",
"npm": ">=11.10.1"
},
"devEngines": {
"runtime": {
"name": "node",
"onFail": "warn",
"version": ">=24"
},
"packageManager": {
"name": "npm",
"version": ">=11.10.1",
"onFail": "warn"
}
},
"packageManager": "npm@11.13.0+sha512.7119a16a0843580d65160977520e3f5710c974f04afd4fad36d9eb97d917ba716a856c35c78c4be6dc64367eeaccfb957ef5ce997ca31e9330b2e936ba2b1b92"
"node": ">=20",
"npm": ">=11.6.2"
}
}

View File

@@ -7,17 +7,6 @@ ARG GIT_BUILD_HASH
ENV GIT_BUILD_HASH=$GIT_BUILD_HASH
ENV NODE_ENV=production
WORKDIR /work
RUN --mount=type=bind,target=/work/package.json,src=./package.json \
--mount=type=bind,target=/work/package-lock.json,src=./package-lock.json \
--mount=type=bind,target=/work/web/package.json,src=./web/package.json \
--mount=type=bind,target=/work/web/package-lock.json,src=./web/package-lock.json \
--mount=type=bind,target=/work/scripts/node/,src=./scripts/node/ \
--mount=type=bind,target=/work/packages/logger-js/,src=./packages/logger-js/ \
node ./scripts/node/setup-corepack.mjs --force && \
node ./scripts/node/lint-runtime.mjs ./web
WORKDIR /work/web
# These files need to be copied and cannot be mounted as `npm ci` will build the client's typescript
@@ -29,7 +18,7 @@ RUN --mount=type=bind,target=/work/web/package.json,src=./web/package.json \
--mount=type=bind,target=/work/web/packages/sfe/package.json,src=./web/packages/sfe/package.json \
--mount=type=bind,target=/work/web/scripts,src=./web/scripts \
--mount=type=cache,id=npm-ak,sharing=shared,target=/root/.npm \
corepack npm ci
npm ci
COPY ./package.json /work
COPY ./web /work/web/
@@ -211,7 +200,7 @@ RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \
--mount=type=bind,target=packages/django-postgres-cache,src=packages/django-postgres-cache \
--mount=type=bind,target=rust-toolchain.toml,src=rust-toolchain.toml \
--mount=type=cache,id=uv-python-deps-$TARGETARCH$TARGETVARIANT,target=/root/.cache/uv \
uv sync --locked --no-install-project --no-dev
uv sync --frozen --no-install-project --no-dev
# Stage: Run
FROM python-base AS final-image

View File

@@ -10,22 +10,12 @@ WORKDIR /static
COPY ./packages /packages
COPY ./web/packages /static/packages
RUN --mount=type=bind,target=/static/package.json,src=./package.json \
--mount=type=bind,target=/static/package-lock.json,src=./package-lock.json \
--mount=type=bind,target=/static/web/package.json,src=./web/package.json \
--mount=type=bind,target=/static/web/package-lock.json,src=./web/package-lock.json \
--mount=type=bind,target=/static/scripts/node/,src=./scripts/node/ \
--mount=type=bind,target=/static/packages/logger-js/,src=./packages/logger-js/ \
node ./scripts/node/setup-corepack.mjs --force && \
node ./scripts/node/lint-runtime.mjs ./web
COPY package.json /
RUN --mount=type=bind,target=/static/package.json,src=./web/package.json \
--mount=type=bind,target=/static/package-lock.json,src=./web/package-lock.json \
--mount=type=bind,target=/static/scripts,src=./web/scripts \
--mount=type=cache,target=/root/.npm \
corepack npm ci
npm ci
COPY web .
RUN npm run build-proxy

View File

@@ -1,5 +1,6 @@
#!/usr/bin/env python3
import faulthandler
import os
import random
import signal
@@ -28,39 +29,20 @@ class HttpHandler(BaseHTTPRequestHandler):
_ = db_conn.cursor()
def do_GET(self):
from django.db import DatabaseError, InterfaceError, OperationalError, connections
from psycopg.errors import AdminShutdown
from authentik.root.monitoring import monitoring_set
DATABASE_ERRORS = (
AdminShutdown,
InterfaceError,
DatabaseError,
OperationalError,
)
if self.path == "/-/metrics/":
try:
monitoring_set.send(self)
except DATABASE_ERRORS as exc:
LOGGER.warning("failed to send monitoring_set", exc=exc)
for db_conn in connections.all():
db_conn.close()
self.send_response(503)
else:
self.send_response(200)
from authentik.root.monitoring import monitoring_set
monitoring_set.send_robust(self)
self.send_response(200)
self.end_headers()
elif self.path == "/-/health/ready/":
from django.db.utils import OperationalError
try:
self.check_db()
except DATABASE_ERRORS as exc:
LOGGER.warning("failed to check database health", exc=exc)
for db_conn in connections.all():
db_conn.close()
except OperationalError:
self.send_response(503)
else:
self.send_response(200)
self.send_response(200)
self.end_headers()
else:
self.send_response(200)
@@ -95,6 +77,12 @@ def main(worker_id: int, socket_path: str):
signal.signal(signal.SIGINT, immediate_shutdown)
signal.signal(signal.SIGQUIT, immediate_shutdown)
signal.signal(signal.SIGTERM, graceful_shutdown)
# SIGUSR1 dumps every thread's traceback to stderr. Without this, the default
# action is "terminate", which kills the worker (and trips the Rust supervisor).
# Side-benefit: signal delivery wakes the eval loop, so `pdb -p` can attach to
# an otherwise-idle worker parked in a C-level syscall.
faulthandler.enable()
faulthandler.register(signal.SIGUSR1)
random.seed()
@@ -116,7 +104,11 @@ def main(worker_id: int, socket_path: str):
# Notify rust process that we are ready
os.kill(os.getppid(), signal.SIGUSR2)
shutdown.wait()
# Poll instead of waiting indefinitely so the main thread's eval loop ticks
# periodically — PEP 768's debugger pending hook is serviced on the main
# thread, and a permanent Event.wait() never returns to bytecode execution.
while not shutdown.wait(timeout=1.0):
pass
logger.info("Shutting down worker...")

View File

@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2026-05-06 00:27+0000\n"
"POT-Creation-Date: 2026-04-29 00:28+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
@@ -101,14 +101,6 @@ msgstr ""
msgid "Blueprint file does not exist"
msgstr ""
#: authentik/blueprints/api.py
msgid "Context must be valid JSON"
msgstr ""
#: authentik/blueprints/api.py
msgid "Context must be a JSON object"
msgstr ""
#: authentik/blueprints/api.py
msgid "Failed to validate blueprint"
msgstr ""
@@ -232,14 +224,6 @@ msgid ""
"providers are returned. When set to false, backchannel providers are excluded"
msgstr ""
#: authentik/core/api/users.py
msgid "Invalid password hash format. Must be a valid Django password hash."
msgstr ""
#: authentik/core/api/users.py
msgid "Cannot set both password and password_hash. Use only one."
msgstr ""
#: authentik/core/api/users.py
msgid "No leading or trailing slashes allowed."
msgstr ""
@@ -880,6 +864,10 @@ msgstr ""
msgid "Grace period must be shorter than the interval."
msgstr ""
#: authentik/enterprise/lifecycle/api/rules.py
msgid "Only one type-wide rule for each object type is allowed."
msgstr ""
#: authentik/enterprise/lifecycle/models.py
msgid ""
"Select which transports should be used to notify the reviewers. If none are "
@@ -907,8 +895,7 @@ msgid "Go to {self._get_model_name()}"
msgstr ""
#: authentik/enterprise/lifecycle/models.py
msgid ""
"Access review is due for {self.content_type.name.lower()} {object_label}"
msgid "Access review is due for {self.content_type.name} {str(self.object)}"
msgstr ""
#: authentik/enterprise/lifecycle/models.py
@@ -921,7 +908,7 @@ msgid "Access review completed for {self.content_type.name} {str(self.object)}"
msgstr ""
#: authentik/enterprise/lifecycle/tasks.py
msgid "Dispatch tasks to apply lifecycle rules."
msgid "Dispatch tasks to validate lifecycle rules."
msgstr ""
#: authentik/enterprise/lifecycle/tasks.py
@@ -1234,78 +1221,6 @@ msgstr ""
msgid "Generate data export."
msgstr ""
#: authentik/enterprise/stages/account_lockdown/api.py
msgid "User to lock. If omitted, locks the current user (self-service)."
msgstr ""
#: authentik/enterprise/stages/account_lockdown/api.py
msgid "No lockdown flow configured."
msgstr ""
#: authentik/enterprise/stages/account_lockdown/api.py
msgid "Lockdown flow is not applicable."
msgstr ""
#: authentik/enterprise/stages/account_lockdown/api.py
msgid "Choose the target account, then return a flow link."
msgstr ""
#: authentik/enterprise/stages/account_lockdown/api.py
msgid "No lockdown flow configured or the flow is not applicable"
msgstr ""
#: authentik/enterprise/stages/account_lockdown/api.py
msgid "Permission denied (when targeting another user)"
msgstr ""
#: authentik/enterprise/stages/account_lockdown/models.py
msgid "Deactivate the user account (set is_active to False)"
msgstr ""
#: authentik/enterprise/stages/account_lockdown/models.py
msgid "Set an unusable password for the user"
msgstr ""
#: authentik/enterprise/stages/account_lockdown/models.py
msgid "Delete all active sessions for the user"
msgstr ""
#: authentik/enterprise/stages/account_lockdown/models.py
msgid ""
"Revoke all tokens for the user (API, app password, recovery, verification, "
"OAuth)"
msgstr ""
#: authentik/enterprise/stages/account_lockdown/models.py
msgid ""
"Flow to redirect users to after self-service lockdown. This flow should not "
"require authentication since the user's session is deleted."
msgstr ""
#: authentik/enterprise/stages/account_lockdown/models.py
msgid "Account Lockdown Stage"
msgstr ""
#: authentik/enterprise/stages/account_lockdown/models.py
msgid "Account Lockdown Stages"
msgstr ""
#: authentik/enterprise/stages/account_lockdown/stage.py
msgid "No target user specified for account lockdown"
msgstr ""
#: authentik/enterprise/stages/account_lockdown/stage.py
msgid "You do not have permission to lock down this account."
msgstr ""
#: authentik/enterprise/stages/account_lockdown/stage.py
msgid "Account lockdown failed for this account."
msgstr ""
#: authentik/enterprise/stages/account_lockdown/stage.py
msgid "Self-service account lockdown requires a completion flow."
msgstr ""
#: authentik/enterprise/stages/authenticator_endpoint_gdtc/models.py
msgid "Endpoint Authenticator Google Device Trust Connector Stage"
msgstr ""
@@ -4533,18 +4448,6 @@ msgstr ""
msgid "Static: Static value, displayed as-is."
msgstr ""
#: authentik/stages/prompt/models.py
msgid "Alert (Info): Static alert box with info styling"
msgstr ""
#: authentik/stages/prompt/models.py
msgid "Alert (Warning): Static alert box with warning styling"
msgstr ""
#: authentik/stages/prompt/models.py
msgid "Alert (Danger): Static alert box with danger styling"
msgstr ""
#: authentik/stages/prompt/models.py
msgid "authentik: Selection of locales authentik supports"
msgstr ""

View File

@@ -19,7 +19,6 @@ Forti
Fortigate
Gatus
Gestionnaire
ghec
Gitea
Gravitee
Homarr
@@ -53,7 +52,6 @@ Relatedly
Sidero
snipeit
sonarqube
Technitium
Terrakube
Ueberauth
Veeam

Binary file not shown.

View File

@@ -15,7 +15,7 @@ msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2026-05-01 03:47+0000\n"
"POT-Creation-Date: 2026-04-08 00:28+0000\n"
"PO-Revision-Date: 2025-12-01 19:09+0000\n"
"Last-Translator: Sp P, 2026\n"
"Language-Team: French (France) (https://app.transifex.com/authentik/teams/119923/fr_FR/)\n"
@@ -263,18 +263,6 @@ msgstr ""
"fournisseurs backchannels sont retournés. Si faux, les fournisseurs "
"backchannels sont exclus"
#: authentik/core/api/users.py
msgid "Invalid password hash format. Must be a valid Django password hash."
msgstr ""
"Format de hachage de mot de passe invalide. Cela doit être un hachage de mot"
" de passe Django valide."
#: authentik/core/api/users.py
msgid "Cannot set both password and password_hash. Use only one."
msgstr ""
"Impossible de définir à la fois password (mot de passe) et password_hash "
"(hachage de mot de passe). N'en utiliser qu'un seul."
#: authentik/core/api/users.py
msgid "No leading or trailing slashes allowed."
msgstr ""
@@ -455,11 +443,6 @@ msgid "Open launch URL in a new browser tab or window."
msgstr ""
"Ouvrir l'URL de lancement dans une nouvelle fenêtre ou un nouvel onglet."
#: authentik/core/models.py
msgid "Hide this application from the user's My applications page."
msgstr ""
"Masquer cette application dans la page Mes applications de l'utilisateur."
#: authentik/core/models.py
msgid "Application"
msgstr "Application"
@@ -827,14 +810,6 @@ msgstr "Nonce Apple"
msgid "Apple Nonces"
msgstr "Nonces Apple"
#: authentik/endpoints/connectors/agent/models.py
msgid "Apple Independent Secure Enclave"
msgstr "Secure Enclave indépendante d'Apple"
#: authentik/endpoints/connectors/agent/models.py
msgid "Apple Independent Secure Enclaves"
msgstr "Secure Enclaves indépendantes d'Apple"
#: authentik/endpoints/facts.py
msgid "Operating System name, such as 'Server 2022' or 'Ubuntu'"
msgstr "Nom du système d'exploitation, comme 'Server 2022' ou 'Ubuntu'"
@@ -961,6 +936,12 @@ msgstr "Soit un groupe de réviseurs soit un réviseur doit être défini."
msgid "Grace period must be shorter than the interval."
msgstr "La période de grâce doit être plus courte que l'intervalle."
#: authentik/enterprise/lifecycle/api/rules.py
msgid "Only one type-wide rule for each object type is allowed."
msgstr ""
"Une seule règle pour l'ensemble du type est autorisée pour chaque type "
"d'objet."
#: authentik/enterprise/lifecycle/models.py
msgid ""
"Select which transports should be used to notify the reviewers. If none are "
@@ -991,11 +972,10 @@ msgid "Go to {self._get_model_name()}"
msgstr "Aller à {self._get_model_name()}"
#: authentik/enterprise/lifecycle/models.py
msgid ""
"Access review is due for {self.content_type.name.lower()} {object_label}"
msgid "Access review is due for {self.content_type.name} {str(self.object)}"
msgstr ""
"La révision de l'accès doit être effectuée pour "
"{self.content_type.name.lower()} {object_label}"
"La révision d'accès est attendue pour {self.content_type.name} "
"{str(self.object)}"
#: authentik/enterprise/lifecycle/models.py
msgid ""
@@ -1012,8 +992,8 @@ msgstr ""
"{str(self.object)}"
#: authentik/enterprise/lifecycle/tasks.py
msgid "Dispatch tasks to apply lifecycle rules."
msgstr "Déclencher les tâches pour appliquer les règles de cycle de vie"
msgid "Dispatch tasks to validate lifecycle rules."
msgstr "Déclenche les tâches pour valider les règles de cycle de vie"
#: authentik/enterprise/lifecycle/tasks.py
msgid "Apply lifecycle rule."
@@ -1356,86 +1336,6 @@ msgstr "Télécharger"
msgid "Generate data export."
msgstr "Générer un export de données."
#: authentik/enterprise/stages/account_lockdown/api.py
msgid "User to lock. If omitted, locks the current user (self-service)."
msgstr ""
"Utilisateur à bloquer. Si non renseigné, bloque l'utilisateur actuel (libre "
"service)."
#: authentik/enterprise/stages/account_lockdown/api.py
msgid "No lockdown flow configured."
msgstr "Aucun flux de blocage configuré."
#: authentik/enterprise/stages/account_lockdown/api.py
msgid "Lockdown flow is not applicable."
msgstr "Le flux de blocage n'est pas applicable."
#: authentik/enterprise/stages/account_lockdown/api.py
msgid "Choose the target account, then return a flow link."
msgstr "Choisit le compte cible, puis renvoie un lien de flux."
#: authentik/enterprise/stages/account_lockdown/api.py
msgid "No lockdown flow configured or the flow is not applicable"
msgstr "Aucun flux de blocage configuré, ou le flux n'est pas applicable"
#: authentik/enterprise/stages/account_lockdown/api.py
msgid "Permission denied (when targeting another user)"
msgstr "Permission refusée (lors du ciblage d'un autre utilisateur)"
#: authentik/enterprise/stages/account_lockdown/models.py
msgid "Deactivate the user account (set is_active to False)"
msgstr "Désactiver le compte de l'utilisateur (définir is_active à False)."
#: authentik/enterprise/stages/account_lockdown/models.py
msgid "Set an unusable password for the user"
msgstr "Définit un mot de passe inutilisable pour cet utilisateur."
#: authentik/enterprise/stages/account_lockdown/models.py
msgid "Delete all active sessions for the user"
msgstr "Supprimer toutes les sessions actives pour cet utilisateur."
#: authentik/enterprise/stages/account_lockdown/models.py
msgid ""
"Revoke all tokens for the user (API, app password, recovery, verification, "
"OAuth)"
msgstr ""
"Révoquer tous les jetons pour cet utilisateur (API, mot de passe applicatif,"
" récupération, vérification, OAuth)"
#: authentik/enterprise/stages/account_lockdown/models.py
msgid ""
"Flow to redirect users to after self-service lockdown. This flow should not "
"require authentication since the user's session is deleted."
msgstr ""
"Flux vers lequel rediriger les utilisateurs après le blocage en libre "
"service. Ce flux ne doit pas nécessiter d'authentification car la session "
"utilisateur est supprimée."
#: authentik/enterprise/stages/account_lockdown/models.py
msgid "Account Lockdown Stage"
msgstr "Étape de blocage de compte"
#: authentik/enterprise/stages/account_lockdown/models.py
msgid "Account Lockdown Stages"
msgstr "Étapes de blocage de compte"
#: authentik/enterprise/stages/account_lockdown/stage.py
msgid "No target user specified for account lockdown"
msgstr "Aucun utilisateur ciblé défini pour le blocage de compte"
#: authentik/enterprise/stages/account_lockdown/stage.py
msgid "You do not have permission to lock down this account."
msgstr "Vous n'avez pas la permission de bloquer ce compte."
#: authentik/enterprise/stages/account_lockdown/stage.py
msgid "Account lockdown failed for this account."
msgstr "Échec du blocage de compte pour ce compte."
#: authentik/enterprise/stages/account_lockdown/stage.py
msgid "Self-service account lockdown requires a completion flow."
msgstr ""
"Le blocage de compte en libre service nécessite un flux de finalisation."
#: authentik/enterprise/stages/authenticator_endpoint_gdtc/models.py
msgid "Endpoint Authenticator Google Device Trust Connector Stage"
msgstr ""
@@ -1569,11 +1469,11 @@ msgstr "Évènement utilisateur"
#: authentik/events/models.py
msgid "Notification Transport"
msgstr "Transport de notification"
msgstr "Transport de Notification"
#: authentik/events/models.py
msgid "Notification Transports"
msgstr "Transports de notifications"
msgstr "Transports de notification"
#: authentik/events/models.py
msgid "Notice"
@@ -1845,10 +1745,6 @@ msgstr "Jeton du flux"
msgid "Flow Tokens"
msgstr "Jetons du flux"
#: authentik/flows/planner.py
msgid "This link is invalid or has expired. Please request a new one."
msgstr "Ce lien est invalide ou a expiré. Veuillez en demander un nouveau."
#: authentik/flows/views/executor.py
msgid "Invalid next URL"
msgstr "URL suivante invalide"
@@ -2876,12 +2772,8 @@ msgstr ""
"restriction d'audience ne sera ajoutée."
#: authentik/providers/saml/models.py
msgid ""
"Also known as EntityID. Providing a value overrides the default issuer "
"generated by authentik."
msgstr ""
"Aussi appelé EntityID. Fournir une valeur remplace l'émetteur par défaut "
"généré par authentik."
msgid "Also known as EntityID"
msgstr "Aussi appelé EntityID"
#: authentik/providers/saml/models.py
msgid "SLS URL"
@@ -3102,10 +2994,6 @@ msgstr "SAML NameID pour cette session"
msgid "SAML NameID format"
msgstr "Format SAML NameID"
#: authentik/providers/saml/models.py
msgid "SAML Issuer used for this session"
msgstr "Émetteur SAML utilisé pour cette session"
#: authentik/providers/saml/models.py
msgid "SAML Session"
msgstr "Session SAML"
@@ -3138,10 +3026,6 @@ msgstr "Salesforce"
msgid "Webex"
msgstr "Webex"
#: authentik/providers/scim/models.py
msgid "vCenter"
msgstr "vCenter"
#: authentik/providers/scim/models.py
msgid "Group filters used to define sync-scope for groups."
msgstr ""
@@ -3865,8 +3749,8 @@ msgid ""
"Which servers a user has to be a member of to be granted access. Empty list "
"allows every server."
msgstr ""
"De quels serveurs un utilisateur doit être membre afin d'obtenir l'accès. "
"Une liste vide autorise tous les serveurs."
"De quels serveurs un utilisateur doit être membre afin d'être autorisé. Une "
"liste vide autorise tous les serveurs."
#: authentik/sources/plex/models.py
msgid "Allow friends to authenticate, even if you don't share a server."
@@ -4571,11 +4455,11 @@ msgstr "Activer les utilisateurs à la complétion de l'étape."
#: authentik/stages/email/models.py
msgid "Email Stage"
msgstr "Étape de Courriel"
msgstr "Étape Courriel"
#: authentik/stages/email/models.py
msgid "Email Stages"
msgstr "Étapes de Courriel"
msgstr "Étapes Courriel"
#: authentik/stages/email/stage.py
msgid "Successfully verified Email."
@@ -5049,19 +4933,6 @@ msgstr ""
msgid "Static: Static value, displayed as-is."
msgstr "Statique : valeur statique, affichée comme telle."
#: authentik/stages/prompt/models.py
msgid "Alert (Info): Static alert box with info styling"
msgstr "Alerte (Info) : message d'alerte statique au format information"
#: authentik/stages/prompt/models.py
msgid "Alert (Warning): Static alert box with warning styling"
msgstr ""
"Alerte (Avertissement) : message d'alerte statique au format avertissement"
#: authentik/stages/prompt/models.py
msgid "Alert (Danger): Static alert box with danger styling"
msgstr "Alerte (Danger) : message d'alerte statique au format danger"
#: authentik/stages/prompt/models.py
msgid "authentik: Selection of locales authentik supports"
msgstr "authentik : sélection des locales prises en charges par authentik"

View File

@@ -45,6 +45,7 @@
"onFail": "warn"
}
},
"packageManager": "npm@11.11.0+sha512.f36811c4aae1fde639527368ae44c571d050006a608d67a191f195a801a52637a312d259186254aa3a3799b05335b7390539cf28656d18f0591a1125ba35f973",
"prettier": "@goauthentik/prettier-config",
"overrides": {
"@typescript-eslint/eslint-plugin": {
@@ -59,6 +60,5 @@
"typescript-eslint": {
"typescript": "$typescript"
}
},
"packageManager": "npm@11.13.0+sha512.7119a16a0843580d65160977520e3f5710c974f04afd4fad36d9eb97d917ba716a856c35c78c4be6dc64367eeaccfb957ef5ce997ca31e9330b2e936ba2b1b92"
}
}

Some files were not shown because too many files have changed in this diff Show More