Compare commits

..

10 Commits

Author SHA1 Message Date
Teffen Ellis
e8029061e1 Bump package lock. 2026-01-30 01:58:34 +01:00
Teffen Ellis
2ce825ecb0 Flesh out stories. 2026-01-30 01:56:42 +01:00
Teffen Ellis
90278d0210 Bump package. 2026-01-30 01:56:42 +01:00
Teffen Ellis
10393d5c7f Flesh out captcha stories. 2026-01-30 01:56:41 +01:00
Teffen Ellis
5d29d720fc Fix incompatibilities with Storybook. 2026-01-30 01:56:41 +01:00
Teffen Ellis
661f8e6b95 Tidy refresh. 2026-01-30 01:56:41 +01:00
Teffen Ellis
57a043d07d Flesh out controllers. 2026-01-30 01:56:41 +01:00
Teffen Ellis
dd35d0e88b Clean up mutation and resize observer lifecycle. 2026-01-30 01:56:41 +01:00
Teffen Ellis
557727adaa Fix preferred order of captcha vendor discovery. 2026-01-30 01:56:40 +01:00
Teffen Ellis
97f91362f7 Move inline styles into separate file. 2026-01-30 01:56:40 +01:00
1000 changed files with 16601 additions and 163738 deletions

View File

@@ -215,9 +215,6 @@ runs:
--head "$CHERRY_PICK_BRANCH" \
--label "cherry-pick")
# Assign the PR to the original author
gh pr edit "$NEW_PR" --add-assignee "$PR_AUTHOR" || true
echo "✅ Created cherry-pick PR $NEW_PR for $TARGET_BRANCH"
# Comment on original PR
@@ -257,9 +254,6 @@ runs:
--head "$CHERRY_PICK_BRANCH" \
--label "cherry-pick")
# Assign the PR to the original author
gh pr edit "$NEW_PR" --add-assignee "$PR_AUTHOR" || true
echo "⚠️ Created conflict resolution PR $NEW_PR for $TARGET_BRANCH"
# Comment on original PR

View File

@@ -22,7 +22,7 @@ runs:
sudo rm -rf /usr/local/lib/android
- name: Install uv
if: ${{ contains(inputs.dependencies, 'python') }}
uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # v5
uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v5
with:
enable-cache: true
- name: Setup python
@@ -36,7 +36,7 @@ runs:
run: uv sync --all-extras --dev --frozen
- name: Setup node
if: ${{ contains(inputs.dependencies, 'node') }}
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v4
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v4
with:
node-version-file: web/package.json
cache: "npm"
@@ -44,7 +44,7 @@ runs:
registry-url: 'https://registry.npmjs.org'
- name: Setup go
if: ${{ contains(inputs.dependencies, 'go') }}
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v5
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v5
with:
go-version-file: "go.mod"
- name: Setup docker cache
@@ -58,7 +58,7 @@ runs:
run: |
export PSQL_TAG=${{ inputs.postgresql_version }}
docker compose -f .github/actions/setup/compose.yml up -d
cd web && npm ci
cd web && npm i
- name: Generate config
if: ${{ contains(inputs.dependencies, 'python') }}
shell: uv run python {0}

View File

@@ -38,21 +38,6 @@ updates:
#endregion
#region Rust
- package-ecosystem: rust-toolchain
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies
#endregion
#region Web
- package-ecosystem: npm
@@ -249,7 +234,7 @@ updates:
- package-ecosystem: docker
directories:
- /lifecycle/container
- /
- /website
schedule:
interval: daily

View File

@@ -43,8 +43,8 @@ jobs:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
- uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
- uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
@@ -56,31 +56,32 @@ jobs:
release: ${{ inputs.release }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- name: make empty clients
if: ${{ inputs.release }}
run: |
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: Setup node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version-file: "go.mod"
- name: Generate API Clients
run: |
make gen-client-ts
make gen-client-go
- name: generate ts client
run: make gen-client-ts
- name: Build Docker Image
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
id: push
with:
context: .
@@ -95,7 +96,7 @@ jobs:
platforms: linux/${{ inputs.image_arch }}
cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
cache-to: ${{ steps.ev.outputs.cacheTo }}
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:

View File

@@ -79,25 +79,25 @@ jobs:
image-name: ${{ inputs.image_name }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: int128/docker-manifest-create-action@8aac06098a12365ccdf99372dcfb453ccce8a0b0 # v2
- uses: int128/docker-manifest-create-action@a39573caa37b6a8a03302d43b57c3f48635096e2 # v2
id: build
with:
tags: ${{ matrix.tag }}
sources: |
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-amd64.outputs.image-digest }}
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-arm64.outputs.image-digest }}
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}

View File

@@ -25,7 +25,7 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
token: ${{ steps.generate_token.outputs.token }}
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
registry-url: "https://registry.npmjs.org"

View File

@@ -33,7 +33,7 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: website/package.json
cache: "npm"
@@ -41,7 +41,7 @@ jobs:
- working-directory: website/
name: Install Dependencies
run: npm ci
- uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v4
- uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v4
with:
path: |
${{ github.workspace }}/website/api/.docusaurus
@@ -55,7 +55,7 @@ jobs:
env:
NODE_ENV: production
run: npm run build -w api
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v4
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v4
with:
name: api-docs
path: website/api/build
@@ -67,11 +67,11 @@ jobs:
- build
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v5
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v5
with:
name: api-docs
path: website/api/build
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: website/package.json
cache: "npm"

View File

@@ -24,7 +24,7 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: lifecycle/aws/package.json
cache: "npm"

View File

@@ -15,8 +15,6 @@ on:
jobs:
lint:
runs-on: ubuntu-latest
env:
NODE_ENV: production
strategy:
fail-fast: false
matrix:
@@ -32,11 +30,10 @@ jobs:
run: npm run ${{ matrix.command }}
build-docs:
runs-on: ubuntu-latest
env:
NODE_ENV: production
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: website/package.json
cache: "npm"
@@ -49,11 +46,10 @@ jobs:
run: npm run build
build-integrations:
runs-on: ubuntu-latest
env:
NODE_ENV: production
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: website/package.json
cache: "npm"
@@ -77,9 +73,9 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
@@ -89,14 +85,14 @@ jobs:
image-name: ghcr.io/goauthentik/dev-docs
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
id: push
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: website/Dockerfile
@@ -105,7 +101,7 @@ jobs:
context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }}
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:

View File

@@ -6,10 +6,6 @@ on:
schedule:
# Every night at 3am
- cron: "0 3 * * *"
pull_request:
paths:
# Needs to refer to itself
- .github/workflows/ci-main-daily.yml
jobs:
test-container:
@@ -19,14 +15,14 @@ jobs:
matrix:
version:
- docs
- version-2025-12
- version-2025-10
- version-2025-4
- version-2025-2
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- run: |
current="$(pwd)"
dir="/tmp/authentik/${{ matrix.version }}"
mkdir -p "${dir}/lifecycle/container"
cd "${dir}"
wget "https://${{ matrix.version }}.goauthentik.io/docker-compose.yml" -O "${dir}/lifecycle/container/compose.yml"
"${current}/scripts/test_docker.sh"
mkdir -p $dir
cd $dir
wget https://${{ matrix.version }}.goauthentik.io/compose.yml
${current}/scripts/test_docker.sh

View File

@@ -42,16 +42,6 @@ jobs:
uses: ./.github/actions/setup
- name: run job
run: uv run make ci-${{ matrix.job }}
test-gen-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
uses: ./.github/actions/setup
- name: generate schema
run: make migrate gen-build
- name: ensure schema is up-to-date
run: git diff --exit-code -- schema.yml blueprints/schema.json
test-migrations:
runs-on: ubuntu-latest
steps:
@@ -170,7 +160,7 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Create k8s Kind Cluster
uses: helm/kind-action@ef37e7f390d99f746eb8b610417061a60e82a6cc # v1.14.0
uses: helm/kind-action@92086f6be054225fa813e0a4b13787fc9088faab # v1.13.0
- name: run integration
run: |
uv run coverage run manage.py test tests/integration
@@ -215,7 +205,7 @@ jobs:
run: |
docker compose -f tests/e2e/compose.yml up -d --quiet-pull
- id: cache-web
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v4
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v4
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
@@ -258,7 +248,7 @@ jobs:
run: |
docker compose -f tests/openid_conformance/compose.yml up -d --quiet-pull
- id: cache-web
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v4
uses: actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v4
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
@@ -279,7 +269,7 @@ jobs:
with:
flags: conformance
- if: ${{ !cancelled() }}
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
with:
name: conformance-certification-${{ matrix.job.name }}
path: tests/openid_conformance/exports/
@@ -287,7 +277,6 @@ jobs:
if: always()
needs:
- lint
- test-gen-build
- test-migrations
- test-migrations-from-stable
- test-unittest

View File

@@ -22,7 +22,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
with:
go-version-file: "go.mod"
- name: Prepare and generate API
@@ -43,7 +43,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
with:
go-version-file: "go.mod"
- name: Setup authentik env
@@ -90,9 +90,9 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
@@ -102,7 +102,7 @@ jobs:
image-name: ghcr.io/goauthentik/dev-${{ matrix.type }}
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -111,7 +111,7 @@ jobs:
run: make gen-client-go
- name: Build Docker Image
id: push
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: lifecycle/container/${{ matrix.type }}.Dockerfile
@@ -122,7 +122,7 @@ jobs:
context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }}
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:
@@ -148,10 +148,10 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
cache: "npm"

View File

@@ -32,7 +32,7 @@ jobs:
project: web
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: ${{ matrix.project }}/package.json
cache: "npm"
@@ -49,7 +49,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
cache: "npm"
@@ -77,7 +77,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
cache: "npm"

View File

@@ -38,7 +38,7 @@ jobs:
token: ${{ steps.generate_token.outputs.token }}
- name: Compress images
id: compress
uses: calibreapp/image-actions@d9c8ee5c3dc52ae4622c82ead88d658f4b16b65f # main
uses: calibreapp/image-actions@420075c115b26f8785e293c5bd5bef0911c506e5 # main
with:
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
compressOnly: ${{ github.event_name != 'pull_request' }}

View File

@@ -29,19 +29,18 @@ jobs:
- packages/eslint-config
- packages/prettier-config
- packages/docusaurus-config
- packages/logger-js
- packages/esbuild-plugin-live-reload
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
fetch-depth: 2
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: ${{ matrix.package }}/package.json
registry-url: "https://registry.npmjs.org"
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # 24d32ffd492484c1d75e0c0b894501ddb9d30d62
uses: tj-actions/changed-files@e0021407031f5be11a464abee9a0776171c79891 # 24d32ffd492484c1d75e0c0b894501ddb9d30d62
with:
files: |
${{ matrix.package }}/package.json

View File

@@ -33,9 +33,9 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Set up QEMU
uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
@@ -44,21 +44,21 @@ jobs:
with:
image-name: ghcr.io/goauthentik/docs
- name: Login to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
id: push
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: website/Dockerfile
push: true
platforms: linux/amd64,linux/arm64
context: .
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
if: true
with:
@@ -84,18 +84,13 @@ jobs:
- rac
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Set up QEMU
uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
@@ -103,23 +98,23 @@ jobs:
DOCKER_USERNAME: ${{ secrets.DOCKER_CORP_USERNAME }}
with:
image-name: ghcr.io/goauthentik/${{ matrix.type }},authentik/${{ matrix.type }}
- name: Generate API Clients
- name: make empty clients
run: |
make gen-client-ts
make gen-client-go
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: Docker Login Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
id: push
with:
push: true
@@ -129,7 +124,7 @@ jobs:
file: lifecycle/container/${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
context: .
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
@@ -152,25 +147,18 @@ jobs:
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Install web dependencies
working-directory: web/
run: |
npm ci
- name: Generate API Clients
run: |
make gen-client-ts
make gen-client-go
- name: Build web
working-directory: web/
run: |
npm ci
npm run build-proxy
- name: Build outpost
run: |
@@ -180,7 +168,7 @@ jobs:
export CGO_ENABLED=0
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- name: Upload binaries to release
uses: svenstaro/upload-release-action@b98a3b12e86552593f3e4e577ca8a62aa2f3f22b # v2
uses: svenstaro/upload-release-action@6b7fa9f267e90b50a19fef07b3596790bb941741 # v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
@@ -199,7 +187,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6.0.0
- uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708 # v5
with:
role-to-assume: "arn:aws:iam::016170277896:role/github_goauthentik_authentik"
aws-region: ${{ env.AWS_REGION }}
@@ -217,12 +205,12 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Run test suite in final docker images
run: |
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> lifecycle/container/.env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> lifecycle/container/.env
docker compose -f lifecycle/container/compose.yml pull -q
docker compose -f lifecycle/container/compose.yml up --no-start
docker compose -f lifecycle/container/compose.yml start postgresql
docker compose -f lifecycle/container/compose.yml run -u root server test-all
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
docker compose pull -q
docker compose up --no-start
docker compose start postgresql
docker compose run -u root server test-all
sentry-release:
needs:
- build-server

View File

@@ -55,8 +55,6 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
ref: "version-${{ needs.check-inputs.outputs.major_version }}"
- name: Setup authentik env
uses: ./.github/actions/setup
- run: make test-docker
bump-authentik:
name: Bump authentik version
@@ -91,7 +89,6 @@ jobs:
# ID from https://api.github.com/users/authentik-automation[bot]
git config --global user.name '${{ steps.app-token.outputs.app-slug }}[bot]'
git config --global user.email '${{ steps.get-user-id.outputs.user-id }}+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com'
git pull
git commit -a -m "release: ${{ inputs.version }}" --allow-empty
git tag "version/${{ inputs.version }}" HEAD -m "version/${{ inputs.version }}"
git push --follow-tags
@@ -175,25 +172,21 @@ jobs:
if: "${{ inputs.release_reason == 'feature' }}"
run: |
changelog_url="https://docs.goauthentik.io/docs/releases/${{ needs.check-inputs.outputs.major_version }}"
reason="${{ inputs.release_reason }}"
jq \
--arg version "${{ inputs.version }}" \
--arg changelog "See ${changelog_url}" \
--arg changelog_url "${changelog_url}" \
--arg reason "${reason}" \
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url | .stable.reason = $reason' version.json > version.new.json
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url' version.json > version.new.json
mv version.new.json version.json
- name: Bump version
if: "${{ inputs.release_reason != 'feature' }}"
run: |
changelog_url="https://docs.goauthentik.io/docs/releases/${{ needs.check-inputs.outputs.major_version }}#fixed-in-$(echo -n ${{ inputs.version}} | sed 's/\.//g')"
reason="${{ inputs.release_reason }}"
jq \
--arg version "${{ inputs.version }}" \
--arg changelog "See ${changelog_url}" \
--arg changelog_url "${changelog_url}" \
--arg reason "${reason}" \
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url | .stable.reason = $reason' version.json > version.new.json
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url' version.json > version.new.json
mv version.new.json version.json
- name: Create pull request
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7

View File

@@ -19,7 +19,7 @@ jobs:
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f # v10
- uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10
with:
repo-token: ${{ steps.generate_token.outputs.token }}
days-before-stale: 60

View File

@@ -34,7 +34,6 @@ packages/docusaurus-config @goauthentik/frontend
packages/esbuild-plugin-live-reload @goauthentik/frontend
packages/eslint-config @goauthentik/frontend
packages/prettier-config @goauthentik/frontend
packages/logger-js @goauthentik/frontend
packages/tsconfig @goauthentik/frontend
# Web
web/ @goauthentik/frontend

View File

@@ -5,6 +5,7 @@ SHELL := /usr/bin/env bash
PWD = $(shell pwd)
UID = $(shell id -u)
GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.generate_semver)
PY_SOURCES = authentik packages tests scripts lifecycle .github
DOCKER_IMAGE ?= "authentik:test"
@@ -49,14 +50,6 @@ ifeq ($(UNAME_S),Darwin)
endif
endif
NPM_VERSION :=
UV_EXISTS := $(shell command -v uv 2> /dev/null)
ifdef UV_EXISTS
NPM_VERSION := $(shell $(UV) run python -m scripts.generate_semver)
else
NPM_VERSION = $(shell python -m scripts.generate_semver)
endif
all: lint-fix lint gen web test ## Lint, build, and test everything
HELP_WIDTH := $(shell grep -h '^[a-z][^ ]*:.*\#\#' $(MAKEFILE_LIST) 2>/dev/null | \
@@ -84,7 +77,7 @@ lint-fix: lint-codespell ## Lint and automatically fix errors in the python sou
lint-codespell: ## Reports spelling errors.
$(UV) run codespell -w
lint: ci-bandit ci-mypy ## Lint the python and golang sources
lint: ci-bandit ## Lint the python and golang sources
golangci-lint run -v
core-install:
@@ -148,11 +141,11 @@ bump: ## Bump authentik version. Usage: make bump version=20xx.xx.xx
ifndef version
$(error Usage: make bump version=20xx.xx.xx )
endif
$(eval current_version := $(shell cat ${PWD}/internal/constants/VERSION))
$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' ${PWD}/pyproject.toml
$(SED_INPLACE) 's/^VERSION = ".*"/VERSION = "$(version)"/' ${PWD}/authentik/__init__.py
$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' pyproject.toml
$(SED_INPLACE) 's/^VERSION = ".*"/VERSION = "$(version)"/' authentik/__init__.py
$(MAKE) gen-build gen-compose aws-cfn
$(SED_INPLACE) "s/\"${current_version}\"/\"$(version)\"/" ${PWD}/package.json ${PWD}/package-lock.json ${PWD}/web/package.json ${PWD}/web/package-lock.json
npm version --no-git-tag-version --allow-same-version $(version)
cd ${PWD}/web && npm version --no-git-tag-version --allow-same-version $(version)
echo -n $(version) > ${PWD}/internal/constants/VERSION
#########################
@@ -168,22 +161,12 @@ gen-build: ## Extract the schema from the database
gen-compose:
$(UV) run scripts/generate_compose.py
gen-changelog: ## (Release) generate the changelog based from the commits since the last version
# These are best-effort guesses based on commit messages
$(eval last_version := $(shell git tag --list 'version/*' --sort 'version:refname' | grep -vE 'rc\d+$$' | tail -1))
$(eval current_commit := $(shell git rev-parse HEAD))
git log --pretty=format:"- %s" $(shell git merge-base ${last_version} ${current_commit})...${current_commit} > merged_to_current
git log --pretty=format:"- %s" $(shell git merge-base ${last_version} ${current_commit})...${last_version} > merged_to_last
grep -Eo 'cherry-pick (#\d+)' merged_to_last | cut -d ' ' -f 2 | sed 's/.*/(&)$$/' > cherry_picked_to_last
grep -vf cherry_picked_to_last merged_to_current | sort > changelog.md
rm merged_to_current
rm merged_to_last
rm cherry_picked_to_last
gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
npx prettier --write changelog.md
gen-diff: ## (Release) generate the changelog diff between the current schema and the last version
$(eval last_version := $(shell git tag --list 'version/*' --sort 'version:refname' | grep -vE 'rc\d+$$' | tail -1))
git show ${last_version}:schema.yml > schema-old.yml
gen-diff: ## (Release) generate the changelog diff between the current schema and the last tag
git show $(shell git describe --tags $(shell git rev-list --tags --max-count=1)):schema.yml > schema-old.yml
docker compose -f scripts/api/compose.yml run --rm --user "${UID}:${GID}" diff \
--markdown \
/local/diff.md \

View File

@@ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
| Version | Supported |
| ---------- | ---------- |
| 2025.10.x | ✅ |
| 2025.12.x | ✅ |
| 2026.2.x | ✅ |
## Reporting a Vulnerability

View File

@@ -3,7 +3,7 @@
from functools import lru_cache
from os import environ
VERSION = "2026.5.0-rc1"
VERSION = "2026.2.0-rc1"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

View File

@@ -18,6 +18,7 @@ from rest_framework.views import APIView
from authentik import authentik_full_version
from authentik.core.api.utils import PassiveSerializer
from authentik.enterprise.license import LicenseKey
from authentik.lib.config import CONFIG
from authentik.lib.utils.reflection import get_env
from authentik.outposts.apps import MANAGED_OUTPOST
@@ -25,15 +26,6 @@ from authentik.outposts.models import Outpost
from authentik.rbac.permissions import HasPermission
def fips_enabled():
try:
from authentik.enterprise.license import LicenseKey
return backend._fips_enabled if LicenseKey.get_total().status().is_valid else None
except ModuleNotFoundError:
return None
class RuntimeDict(TypedDict):
"""Runtime information"""
@@ -88,7 +80,9 @@ class SystemInfoSerializer(PassiveSerializer):
"architecture": platform.machine(),
"authentik_version": authentik_full_version(),
"environment": get_env(),
"openssl_fips_enabled": fips_enabled(),
"openssl_fips_enabled": (
backend._fips_enabled if LicenseKey.get_total().status().is_valid else None
),
"openssl_version": OPENSSL_VERSION,
"platform": platform.platform(),
"python_version": python_version,

View File

@@ -94,7 +94,7 @@ class Backend:
Args:
file_path: Relative file path
request: Optional Django HttpRequest for fully qualified URL building
request: Optional Django HttpRequest for fully qualifed URL building
use_cache: whether to retrieve the URL from cache
Returns:

View File

@@ -100,25 +100,13 @@ class S3Backend(ManageableBackend):
f"storage.{self.usage.value}.{self.name}.addressing_style",
CONFIG.get(f"storage.{self.name}.addressing_style", "auto"),
)
signature_version = CONFIG.get(
f"storage.{self.usage.value}.{self.name}.signature_version",
CONFIG.get(f"storage.{self.name}.signature_version", "s3v4"),
)
# Keep signature_version pass-through and let boto3/botocore handle it.
# In boto3's S3 configuration docs, `s3v4` (default) and deprecated `s3`
# are the documented values:
# https://github.com/boto/boto3/blob/791a3e8f36d83664a47b4281a0586b3546cef3ec/docs/source/guide/configuration.rst?plain=1#L398-L407
# Botocore also supports additional signer names, so we intentionally do
# not enforce a restricted allowlist here.
return self.session.client(
"s3",
endpoint_url=endpoint_url,
use_ssl=use_ssl,
region_name=region_name,
config=Config(
signature_version=signature_version, s3={"addressing_style": addressing_style}
),
config=Config(signature_version="s3v4", s3={"addressing_style": addressing_style}),
)
@property

View File

@@ -1,6 +1,5 @@
from unittest import skipUnless
from botocore.exceptions import UnsupportedSignatureVersionError
from django.test import TestCase
from authentik.admin.files.tests.utils import FileTestS3BackendMixin, s3_test_server_available
@@ -82,27 +81,6 @@ class TestS3Backend(FileTestS3BackendMixin, TestCase):
self.assertIn("X-Amz-Signature=", url)
self.assertIn("test.png", url)
def test_client_signature_version_default_v4(self):
"""Test S3 client defaults to v4 signature when not configured."""
self.assertEqual(self.media_s3_backend.client.meta.config.signature_version, "s3v4")
@CONFIG.patch("storage.s3.signature_version", "s3")
def test_client_signature_version_global_override(self):
"""Test S3 client respects globally configured signature version."""
self.assertEqual(self.media_s3_backend.client.meta.config.signature_version, "s3")
@CONFIG.patch("storage.s3.signature_version", "s3v4")
@CONFIG.patch("storage.media.s3.signature_version", "s3")
def test_client_signature_version_media_override(self):
"""Test usage-specific signature version takes precedence over global."""
self.assertEqual(self.media_s3_backend.client.meta.config.signature_version, "s3")
@CONFIG.patch("storage.media.s3.signature_version", "not-a-real-signature")
def test_client_signature_version_unsupported(self):
"""Test unsupported signature version raises botocore error."""
with self.assertRaises(UnsupportedSignatureVersionError):
self.media_s3_backend.file_url("test.png", use_cache=False)
@CONFIG.patch("storage.s3.bucket_name", "test-bucket")
def test_file_exists_true(self):
"""Test file_exists returns True for existing file"""

View File

@@ -13,10 +13,10 @@ from rest_framework.exceptions import AuthenticationFailed
from rest_framework.request import Request
from structlog.stdlib import get_logger
from authentik.common.oauth.constants import SCOPE_AUTHENTIK_API
from authentik.core.middleware import CTX_AUTH_VIA
from authentik.core.models import Token, TokenIntents, User, UserTypes
from authentik.outposts.models import Outpost
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
LOGGER = get_logger()
_tmp = Path(gettempdir())

View File

@@ -71,7 +71,7 @@ def postprocess_schema_responses(
def postprocess_schema_query_params(
result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
"""Optimize pagination parameters, instead of redeclaring parameters for each endpoint
"""Optimise pagination parameters, instead of redeclaring parameters for each endpoint
declare them globally and refer to them"""
LOGGER.debug("Deduplicating query parameters")
for path in result["paths"].values():

View File

@@ -11,12 +11,12 @@ from rest_framework.exceptions import AuthenticationFailed
from authentik.api.authentication import IPCUser, TokenAuthentication
from authentik.blueprints.tests import reconcile_app
from authentik.common.oauth.constants import SCOPE_AUTHENTIK_API
from authentik.core.models import Token, TokenIntents, UserTypes
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.lib.generators import generate_id
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.models import Outpost
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider

View File

@@ -1,8 +1,6 @@
"""Schema generation tests"""
from pathlib import Path
from tempfile import gettempdir
from uuid import uuid4
from django.core.management import call_command
from django.urls import reverse
@@ -31,14 +29,15 @@ class TestSchemaGeneration(APITestCase):
def test_build_schema(self):
"""Test schema build command"""
tmp = Path(gettempdir())
blueprint_file = tmp / f"{str(uuid4())}.json"
api_file = tmp / f"{str(uuid4())}.yml"
blueprint_file = Path("blueprints/schema.json")
api_file = Path("schema.yml")
blueprint_file.unlink()
api_file.unlink()
with (
CONFIG.patch("debug", True),
CONFIG.patch("tenants.enabled", True),
CONFIG.patch("outposts.disable_embedded_outpost", True),
):
call_command("build_schema", blueprint_file=blueprint_file, api_file=api_file)
call_command("build_schema")
self.assertTrue(blueprint_file.exists())
self.assertTrue(api_file.exists())

View File

@@ -43,6 +43,8 @@ def get_attrs(obj: SerializerModel) -> dict[str, Any]:
continue
if _field.read_only:
data.pop(field_name, None)
if _field.get_initial() == data.get(field_name, None):
data.pop(field_name, None)
if field_name.endswith("_set"):
data.pop(field_name, None)
return data

View File

@@ -41,6 +41,7 @@ from authentik.core.models import (
UserSourceConnection,
)
from authentik.endpoints.models import Connector
from authentik.enterprise.license import LicenseKey
from authentik.events.logs import LogEvent, capture_logs
from authentik.events.utils import cleanse_dict
from authentik.flows.models import Stage
@@ -139,19 +140,10 @@ class Importer:
def default_context(self):
"""Default context"""
context = {
return {
"goauthentik.io/enterprise/licensed": LicenseKey.get_total().status().is_valid,
"goauthentik.io/rbac/models": rbac_models(),
"goauthentik.io/enterprise/licensed": False,
}
try:
from authentik.enterprise.license import LicenseKey
context["goauthentik.io/enterprise/licensed"] = (
LicenseKey.get_total().status().is_valid,
)
except ModuleNotFoundError:
pass
return context
@staticmethod
def from_string(yaml_input: str, context: dict | None = None) -> Importer:
@@ -272,7 +264,7 @@ class Importer:
and entry.state != BlueprintEntryDesiredState.MUST_CREATED
):
self.logger.debug(
"Initialize serializer with instance",
"Initialise serializer with instance",
model=model,
instance=model_instance,
pk=model_instance.pk,
@@ -290,7 +282,7 @@ class Importer:
)
else:
self.logger.debug(
"Initialized new serializer instance",
"Initialised new serializer instance",
model=model,
**cleanse_dict(updated_identifiers),
)

View File

@@ -3,7 +3,7 @@
from typing import Any
from django.db.models import Case, F, IntegerField, Q, Value, When
from django.db.models.functions import Concat, Length
from django.db.models.functions import Length
from django.http.request import HttpRequest
from django.utils.html import _json_script_escapes
from django.utils.safestring import mark_safe
@@ -26,8 +26,7 @@ def get_brand_for_request(request: HttpRequest) -> Brand:
domain_length=Length("domain"),
match_priority=Case(
When(
condition=Q(host_domain__iexact=F("domain"))
| Q(host_domain__iendswith=Concat(Value("."), F("domain"))),
condition=Q(host_domain__iendswith=F("domain")),
then=F("domain_length"),
),
default=Value(-1),

View File

@@ -154,14 +154,14 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
return queryset
def _get_allowed_applications(
self, paginated_apps: Iterator[Application], user: User | None = None
self, pagined_apps: Iterator[Application], user: User | None = None
) -> list[Application]:
applications = []
request = self.request._request
if user:
request = copy(request)
request.user = user
for application in paginated_apps:
for application in pagined_apps:
engine = PolicyEngine(application, request.user, request)
engine.build()
if engine.passing:

View File

@@ -16,15 +16,11 @@ from rest_framework.viewsets import ViewSet
from authentik.api.validation import validate
from authentik.core.api.users import ParamUserSerializer
from authentik.core.api.utils import MetaNameSerializer
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
from authentik.stages.authenticator import device_classes, devices_for_user
from authentik.stages.authenticator.models import Device
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
try:
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
except ModuleNotFoundError:
EndpointDevice = None
class DeviceSerializer(MetaNameSerializer):
"""Serializer for authenticator devices"""
@@ -47,7 +43,7 @@ class DeviceSerializer(MetaNameSerializer):
"""Get extra description"""
if isinstance(instance, WebAuthnDevice):
return instance.device_type.description if instance.device_type else None
if EndpointDevice and isinstance(instance, EndpointDevice):
if isinstance(instance, EndpointDevice):
return instance.data.get("deviceSignals", {}).get("deviceModel")
return None
@@ -55,7 +51,7 @@ class DeviceSerializer(MetaNameSerializer):
"""Get external Device ID"""
if isinstance(instance, WebAuthnDevice):
return instance.device_type.aaguid if instance.device_type else None
if EndpointDevice and isinstance(instance, EndpointDevice):
if isinstance(instance, EndpointDevice):
return instance.data.get("deviceSignals", {}).get("deviceModel")
return None

View File

@@ -10,6 +10,7 @@ from rest_framework.request import Request
from rest_framework.response import Response
from authentik.core.api.utils import PassiveSerializer
from authentik.enterprise.apps import EnterpriseConfig
from authentik.lib.models import DeprecatedMixin
from authentik.lib.utils.reflection import all_subclasses
@@ -60,25 +61,19 @@ class TypesMixin:
continue
instance = subclass()
try:
type_signature = {
"name": subclass._meta.verbose_name,
"description": subclass.__doc__,
"component": instance.component,
"model_name": subclass._meta.model_name,
"icon_url": getattr(instance, "icon_url", None),
"requires_enterprise": False,
"deprecated": isinstance(instance, DeprecatedMixin),
}
try:
from authentik.enterprise.apps import EnterpriseConfig
type_signature["requires_enterprise"] = isinstance(
subclass._meta.app_config, EnterpriseConfig
)
except ModuleNotFoundError:
pass
data.append(type_signature)
data.append(
{
"name": subclass._meta.verbose_name,
"description": subclass.__doc__,
"component": instance.component,
"model_name": subclass._meta.model_name,
"icon_url": getattr(instance, "icon_url", None),
"requires_enterprise": isinstance(
subclass._meta.app_config, EnterpriseConfig
),
"deprecated": isinstance(instance, DeprecatedMixin),
}
)
except NotImplementedError:
continue
if additional:

View File

@@ -75,8 +75,7 @@ class TokenSerializer(ManagedSerializer, ModelSerializer):
except ValueError:
pass
expires = attrs.get("expires")
if expires is not None and expires > max_token_lifetime_dt:
if "expires" in attrs and attrs.get("expires") > max_token_lifetime_dt:
raise ValidationError(
{
"expires": (

View File

@@ -30,6 +30,7 @@ from drf_spectacular.utils import (
extend_schema_field,
inline_serializer,
)
from guardian.shortcuts import get_objects_for_user
from rest_framework.authentication import SessionAuthentication
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
@@ -41,7 +42,6 @@ from rest_framework.fields import (
IntegerField,
ListField,
SerializerMethodField,
UUIDField,
)
from rest_framework.permissions import IsAuthenticated
from rest_framework.request import Request
@@ -72,14 +72,12 @@ from authentik.core.middleware import (
from authentik.core.models import (
USER_ATTRIBUTE_TOKEN_EXPIRING,
USER_PATH_SERVICE_ACCOUNT,
USERNAME_MAX_LENGTH,
Group,
Session,
Token,
TokenIntents,
User,
UserTypes,
default_token_duration,
)
from authentik.endpoints.connectors.agent.auth import AgentAuth
from authentik.events.models import Event, EventAction
@@ -89,7 +87,6 @@ from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner
from authentik.flows.views.executor import QS_KEY_TOKEN
from authentik.lib.avatars import get_avatar
from authentik.lib.utils.reflection import ConditionalInheritance
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
from authentik.rbac.api.roles import RoleSerializer
from authentik.rbac.decorators import permission_required
from authentik.rbac.models import Role, get_permission_choices
@@ -145,7 +142,7 @@ class UserSerializer(ModelSerializer):
roles_obj = SerializerMethodField(allow_null=True)
uid = CharField(read_only=True)
username = CharField(
max_length=USERNAME_MAX_LENGTH,
max_length=150,
validators=[UniqueValidator(queryset=User.objects.all().order_by("username"))],
)
@@ -241,14 +238,14 @@ class UserSerializer(ModelSerializer):
and self.instance.type == UserTypes.INTERNAL_SERVICE_ACCOUNT
and user_type != UserTypes.INTERNAL_SERVICE_ACCOUNT.value
):
raise ValidationError(_("Can't change internal service account to other user type."))
raise ValidationError("Can't change internal service account to other user type.")
if not self.instance and user_type == UserTypes.INTERNAL_SERVICE_ACCOUNT.value:
raise ValidationError(_("Setting a user to internal service account is not allowed."))
raise ValidationError("Setting a user to internal service account is not allowed.")
return user_type
def validate(self, attrs: dict) -> dict:
if self.instance and self.instance.type == UserTypes.INTERNAL_SERVICE_ACCOUNT:
raise ValidationError(_("Can't modify internal service account users"))
raise ValidationError("Can't modify internal service account users")
return super().validate(attrs)
class Meta:
@@ -400,18 +397,6 @@ class UserServiceAccountSerializer(PassiveSerializer):
)
class UserRecoveryLinkSerializer(PassiveSerializer):
"""Payload to create a recovery link"""
token_duration = CharField(required=False)
class UserRecoveryEmailSerializer(UserRecoveryLinkSerializer):
"""Payload to create and email a recovery link"""
email_stage = UUIDField()
class UsersFilter(FilterSet):
"""Filter for users"""
@@ -473,14 +458,14 @@ class UsersFilter(FilterSet):
try:
value = loads(value)
except ValueError:
raise ValidationError(_("filter: failed to parse JSON")) from None
raise ValidationError(detail="filter: failed to parse JSON") from None
if not isinstance(value, dict):
raise ValidationError(_("filter: value must be key:value mapping"))
raise ValidationError(detail="filter: value must be key:value mapping")
qs = {}
for key, _value in value.items():
qs[f"attributes__{key}"] = _value
try:
__ = len(queryset.filter(**qs))
_ = len(queryset.filter(**qs))
return queryset.filter(**qs)
except ValueError:
return queryset
@@ -558,16 +543,14 @@ class UserViewSet(
def list(self, request, *args, **kwargs):
return super().list(request, *args, **kwargs)
def _create_recovery_link(
self, token_duration: str | None, for_email=False
) -> tuple[str, Token]:
def _create_recovery_link(self, for_email=False) -> tuple[str, Token]:
"""Create a recovery link (when the current brand has a recovery flow set),
that can either be shown to an admin or sent to the user directly"""
brand: Brand = self.request.brand
brand: Brand = self.request._request.brand
# Check that there is a recovery flow, if not return an error
flow = brand.flow_recovery
if not flow:
raise ValidationError({"non_field_errors": _("No recovery flow set.")})
raise ValidationError({"non_field_errors": "No recovery flow set."})
user: User = self.get_object()
planner = FlowPlanner(flow)
planner.allow_empty_flows = True
@@ -581,15 +564,11 @@ class UserViewSet(
)
except FlowNonApplicableException:
raise ValidationError(
{"non_field_errors": _("Recovery flow not applicable to user")}
{"non_field_errors": "Recovery flow not applicable to user"}
) from None
_plan = FlowToken.pickle(plan)
if for_email:
_plan = pickle_flow_token_for_email(plan)
expires = default_token_duration()
if token_duration:
timedelta_string_validator(token_duration)
expires = now() + timedelta_from_string(token_duration)
token, __ = FlowToken.objects.update_or_create(
identifier=f"{user.uid}-password-reset",
defaults={
@@ -597,7 +576,6 @@ class UserViewSet(
"flow": flow,
"_plan": _plan,
"revoke_on_execution": not for_email,
"expires": expires,
},
)
querystring = urlencode({QS_KEY_TOKEN: token.key})
@@ -745,60 +723,60 @@ class UserViewSet(
@permission_required("authentik_core.reset_user_password")
@extend_schema(
request=UserRecoveryLinkSerializer,
responses={
"200": LinkSerializer(many=False),
},
request=None,
)
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
@validate(UserRecoveryLinkSerializer)
def recovery(self, request: Request, pk: int, body: UserRecoveryLinkSerializer) -> Response:
def recovery(self, request: Request, pk: int) -> Response:
"""Create a temporary link that a user can use to recover their account"""
link, _ = self._create_recovery_link(
token_duration=body.validated_data.get("token_duration")
)
link, _ = self._create_recovery_link()
return Response({"link": link})
@permission_required("authentik_core.reset_user_password")
@extend_schema(
request=UserRecoveryEmailSerializer,
parameters=[
OpenApiParameter(
name="email_stage",
location=OpenApiParameter.QUERY,
type=OpenApiTypes.STR,
required=True,
)
],
responses={
"204": OpenApiResponse(description="Successfully sent recover email"),
},
request=None,
)
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
@validate(UserRecoveryEmailSerializer)
def recovery_email(
self, request: Request, pk: int, body: UserRecoveryEmailSerializer
) -> Response:
def recovery_email(self, request: Request, pk: int) -> Response:
"""Send an email with a temporary link that a user can use to recover their account"""
email_error_message = _("User does not have an email address set.")
stage_error_message = _("Email stage not found.")
user: User = self.get_object()
if not user.email:
for_user: User = self.get_object()
if for_user.email == "":
LOGGER.debug("User doesn't have an email address")
raise ValidationError({"non_field_errors": email_error_message})
if not (stage := EmailStage.objects.filter(pk=body.validated_data["email_stage"]).first()):
LOGGER.debug("Email stage does not exist")
raise ValidationError({"non_field_errors": stage_error_message})
if not request.user.has_perm("authentik_stages_email.view_emailstage", stage):
LOGGER.debug("User has no view access to email stage")
raise ValidationError({"non_field_errors": stage_error_message})
link, token = self._create_recovery_link(
token_duration=body.validated_data.get("token_duration"), for_email=True
)
raise ValidationError({"non_field_errors": "User does not have an email address set."})
link, token = self._create_recovery_link(for_email=True)
# Lookup the email stage to assure the current user can access it
stages = get_objects_for_user(
request.user, "authentik_stages_email.view_emailstage"
).filter(pk=request.query_params.get("email_stage"))
if not stages.exists():
LOGGER.debug("Email stage does not exist/user has no permissions")
raise ValidationError({"non_field_errors": "Email stage does not exist."})
email_stage: EmailStage = stages.first()
message = TemplateEmailMessage(
subject=_(stage.subject),
to=[(user.name, user.email)],
template_name=stage.template,
language=user.locale(request),
subject=_(email_stage.subject),
to=[(for_user.name, for_user.email)],
template_name=email_stage.template,
language=for_user.locale(request),
template_context={
"url": link,
"user": user,
"user": for_user,
"expires": token.expires,
},
)
send_mails(stage, message)
send_mails(email_stage, message)
return Response(status=204)
@permission_required("authentik_core.impersonate")

View File

@@ -1,7 +1,5 @@
"""authentik core models"""
import re
import traceback
from datetime import datetime, timedelta
from enum import StrEnum
from hashlib import sha256
@@ -17,6 +15,7 @@ from django.contrib.sessions.base_session import AbstractBaseSession
from django.core.validators import validate_slug
from django.db import models
from django.db.models import Q, QuerySet, options
from django.db.models.constants import LOOKUP_SEP
from django.http import HttpRequest
from django.utils.functional import cached_property
from django.utils.timezone import now
@@ -44,7 +43,6 @@ from authentik.lib.models import (
DomainlessFormattedURLValidator,
SerializerModel,
)
from authentik.lib.utils.inheritance import get_deepest_child
from authentik.lib.utils.time import timedelta_from_string
from authentik.policies.models import PolicyBindingModel
from authentik.rbac.models import Role
@@ -52,7 +50,6 @@ from authentik.tenants.models import DEFAULT_TOKEN_DURATION, DEFAULT_TOKEN_LENGT
from authentik.tenants.utils import get_current_tenant, get_unique_identifier
LOGGER = get_logger()
USERNAME_MAX_LENGTH = 150
USER_PATH_SYSTEM_PREFIX = "goauthentik.io"
_USER_ATTR_PREFIX = f"{USER_PATH_SYSTEM_PREFIX}/user"
USER_ATTRIBUTE_DEBUG = f"{_USER_ATTR_PREFIX}/debug"
@@ -530,35 +527,23 @@ class User(SerializerModel, AttributesMixin, AbstractUser):
"default: in 30 days). See authentik logs for every will invocation of this "
"deprecation."
)
stacktrace = traceback.format_stack()
# The last line is this function, the next-to-last line is its caller
cause = stacktrace[-2] if len(stacktrace) > 1 else "Unknown, see stacktrace in logs"
if search := re.search(r'"(.*?)"', cause):
cause = f"Property mapping or Expression policy named {search.group(1)}"
LOGGER.warning(
"deprecation used",
message=message_logger,
deprecation=deprecation,
replacement=replacement,
cause=cause,
stacktrace=stacktrace,
)
if not Event.filter_not_expired(
action=EventAction.CONFIGURATION_WARNING,
context__deprecation=deprecation,
context__cause=cause,
action=EventAction.CONFIGURATION_WARNING, context__deprecation=deprecation
).exists():
event = Event.new(
EventAction.CONFIGURATION_WARNING,
deprecation=deprecation,
replacement=replacement,
message=message_event,
cause=cause,
)
event.expires = datetime.now() + timedelta(days=30)
event.save()
return self.groups
def set_password(self, raw_password, signal=True, sender=None, request=None):
@@ -803,7 +788,25 @@ class Application(SerializerModel, PolicyBindingModel):
"""Get casted provider instance. Needs Application queryset with_provider"""
if not self.provider:
return None
return get_deepest_child(self.provider)
candidates = []
base_class = Provider
for subclass in base_class.objects.get_queryset()._get_subclasses_recurse(base_class):
parent = self.provider
for level in subclass.split(LOOKUP_SEP):
try:
parent = getattr(parent, level)
except AttributeError:
break
if parent in candidates:
continue
idx = subclass.count(LOOKUP_SEP)
if type(parent) is not base_class:
idx += 1
candidates.insert(idx, parent)
if not candidates:
return None
return candidates[-1]
def backchannel_provider_for[T: Provider](self, provider_type: type[T], **kwargs) -> T | None:
"""Get Backchannel provider for a specific type"""

View File

@@ -44,24 +44,19 @@
{% endblock %}
</div>
</main>
<footer
name="site-footer"
aria-label="{% trans 'Site footer' %}"
class="pf-c-login__footer pf-m-dark">
<div name="flow-links" aria-label="{% trans 'Flow links' %}">
<ul class="pf-c-list pf-m-inline" part="list">
{% for link in footer_links %}
<li part="list-item">
<a part="list-item-link" href="{{ link.href }}">{{ link.name }}</a>
</li>
{% endfor %}
<li part="list-item">
<span>
{% trans 'Powered by authentik' %}
</span>
</li>
</ul>
</div>
<footer aria-label="Site footer" class="pf-c-login__footer pf-m-dark">
<ul class="pf-c-list pf-m-inline">
{% for link in footer_links %}
<li>
<a href="{{ link.href }}">{{ link.name }}</a>
</li>
{% endfor %}
<li>
<span>
{% trans 'Powered by authentik' %}
</span>
</li>
</ul>
</footer>
</div>
</div>

View File

@@ -63,7 +63,7 @@ class TestPropertyMappingAPI(APITestCase):
PropertyMappingSerializer().validate_expression("/")
def test_types(self):
"""Test PropertyMapping's types endpoint"""
"""Test PropertyMappigns's types endpoint"""
response = self.client.get(
reverse("authentik_api:propertymapping-types"),
)

View File

@@ -1,10 +1,9 @@
"""Test Users API"""
from datetime import datetime, timedelta
from datetime import datetime
from json import loads
from django.urls.base import reverse
from django.utils.timezone import now
from rest_framework.test import APITestCase
from authentik.brands.models import Brand
@@ -128,62 +127,13 @@ class TestUsersAPI(APITestCase):
)
self.assertEqual(response.status_code, 200)
def test_recovery_duration(self):
"""Test user recovery token duration"""
Token.objects.all().delete()
flow = create_test_flow(
FlowDesignation.RECOVERY,
authentication=FlowAuthenticationRequirement.REQUIRE_UNAUTHENTICATED,
)
brand: Brand = create_test_brand()
brand.flow_recovery = flow
brand.save()
self.client.force_login(self.admin)
response = self.client.post(
reverse("authentik_api:user-recovery", kwargs={"pk": self.user.pk}),
data={"token_duration": "days=33"},
)
self.assertEqual(response.status_code, 200)
expires = Token.objects.first().expires
expected_expires = now() + timedelta(days=33)
self.assertTrue(timedelta(minutes=-1) < expected_expires - expires < timedelta(minutes=1))
def test_recovery_duration_update(self):
"""Test user recovery token duration update"""
Token.objects.all().delete()
flow = create_test_flow(
FlowDesignation.RECOVERY,
authentication=FlowAuthenticationRequirement.REQUIRE_UNAUTHENTICATED,
)
brand: Brand = create_test_brand()
brand.flow_recovery = flow
brand.save()
self.client.force_login(self.admin)
response = self.client.post(
reverse("authentik_api:user-recovery", kwargs={"pk": self.user.pk}),
data={"token_duration": "days=33"},
)
self.assertEqual(response.status_code, 200)
expires = Token.objects.first().expires
expected_expires = now() + timedelta(days=33)
self.assertTrue(timedelta(minutes=-1) < expected_expires - expires < timedelta(minutes=1))
response = self.client.post(
reverse("authentik_api:user-recovery", kwargs={"pk": self.user.pk}),
data={"token_duration": "days=66"},
)
expires = Token.objects.first().expires
expected_expires = now() + timedelta(days=66)
self.assertTrue(timedelta(minutes=-1) < expected_expires - expires < timedelta(minutes=1))
def test_recovery_email_no_flow(self):
"""Test user recovery link (no recovery flow set)"""
self.client.force_login(self.admin)
self.user.email = ""
self.user.save()
stage = EmailStage.objects.create(name="email")
response = self.client.post(
reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk}),
data={"email_stage": stage.pk},
reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk})
)
self.assertEqual(response.status_code, 400)
self.assertJSONEqual(
@@ -192,8 +142,7 @@ class TestUsersAPI(APITestCase):
self.user.email = "foo@bar.baz"
self.user.save()
response = self.client.post(
reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk}),
data={"email_stage": stage.pk},
reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk})
)
self.assertEqual(response.status_code, 400)
self.assertJSONEqual(response.content, {"non_field_errors": "No recovery flow set."})
@@ -211,7 +160,7 @@ class TestUsersAPI(APITestCase):
reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk})
)
self.assertEqual(response.status_code, 400)
self.assertJSONEqual(response.content, {"email_stage": ["This field is required."]})
self.assertJSONEqual(response.content, {"non_field_errors": "Email stage does not exist."})
def test_recovery_email(self):
"""Test user recovery link"""
@@ -229,8 +178,8 @@ class TestUsersAPI(APITestCase):
reverse(
"authentik_api:user-recovery-email",
kwargs={"pk": self.user.pk},
),
data={"email_stage": stage.pk},
)
+ f"?email_stage={stage.pk}"
)
self.assertEqual(response.status_code, 204)

View File

@@ -7,8 +7,6 @@ from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import ec, rsa
from cryptography.hazmat.primitives.asymmetric.ed448 import Ed448PrivateKey
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey
from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
from cryptography.x509.oid import NameOID
from django.db import models
@@ -23,8 +21,6 @@ class PrivateKeyAlg(models.TextChoices):
RSA = "rsa", _("rsa")
ECDSA = "ecdsa", _("ecdsa")
ED25519 = "ed25519", _("Ed25519")
ED448 = "ed448", _("Ed448")
class CertificateBuilder:
@@ -60,10 +56,6 @@ class CertificateBuilder:
return rsa.generate_private_key(
public_exponent=65537, key_size=4096, backend=default_backend()
)
if self.alg == PrivateKeyAlg.ED25519:
return Ed25519PrivateKey.generate()
if self.alg == PrivateKeyAlg.ED448:
return Ed448PrivateKey.generate()
raise ValueError(f"Invalid alg: {self.alg}")
def build(
@@ -106,25 +98,18 @@ class CertificateBuilder:
self.__builder = self.__builder.add_extension(
x509.SubjectAlternativeName(alt_names), critical=True
)
algo = hashes.SHA256()
# EdDSA doesn't take a hash algorithm
if isinstance(self.__private_key, (Ed25519PrivateKey | Ed448PrivateKey)):
algo = None
self.__certificate = self.__builder.sign(
private_key=self.__private_key,
algorithm=algo,
algorithm=hashes.SHA256(),
backend=default_backend(),
)
@property
def private_key(self):
"""Return private key in PEM format"""
format = serialization.PrivateFormat.TraditionalOpenSSL
if isinstance(self.__private_key, (Ed25519PrivateKey | Ed448PrivateKey)):
format = serialization.PrivateFormat.PKCS8
return self.__private_key.private_bytes(
encoding=serialization.Encoding.PEM,
format=format,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption(),
).decode("utf-8")

View File

@@ -78,7 +78,7 @@ def generate_key_id_legacy(key_data: str) -> str:
"""Generate Key ID using MD5 (legacy format for backwards compatibility)."""
if not key_data:
return ""
return md5(key_data.encode("utf-8"), usedforsecurity=False).hexdigest() # nosec
return md5(key_data.encode("utf-8")).hexdigest() # nosec
class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel):

View File

@@ -10,7 +10,6 @@ class AuthentikEndpointsConnectorAgentAppConfig(ManagedAppConfig):
label = "authentik_endpoints_connectors_agent"
verbose_name = "authentik Endpoints.Connectors.Agent"
default = True
mountpoint = "endpoints/agent/"
def import_related(self):
from authentik.endpoints.connectors.agent.models import AgentConnector

View File

@@ -1,24 +1,15 @@
from datetime import timedelta
from hmac import compare_digest
from plistlib import PlistFormat, dumps
from uuid import uuid4
from xml.etree.ElementTree import Element, SubElement, tostring # nosec
from django.http import HttpRequest
from django.urls import reverse
from django.utils.timezone import now
from jwt import PyJWTError, decode, encode
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField
from authentik.core.api.utils import PassiveSerializer
from authentik.crypto.models import CertificateKeyPair
from authentik.endpoints.connectors.agent.models import AgentConnector, DeviceToken, EnrollmentToken
from authentik.endpoints.connectors.agent.models import AgentConnector, EnrollmentToken
from authentik.endpoints.controller import BaseController
from authentik.endpoints.facts import OSFamily
from authentik.endpoints.models import Device
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.models import JWTAlgorithms
def csp_create_replace_item(loc_uri, data_value) -> Element:
@@ -45,12 +36,14 @@ def csp_create_replace_item(loc_uri, data_value) -> Element:
class MDMConfigResponseSerializer(PassiveSerializer):
config = CharField(required=True)
mime_type = CharField(required=True)
filename = CharField(required=True)
class AgentController(BaseController[AgentConnector]):
class AgentConnectorController(BaseController[AgentConnector]):
@staticmethod
def vendor_identifier() -> str:
return "goauthentik.io/platform"
@@ -58,57 +51,6 @@ class AgentController(BaseController[AgentConnector]):
def supported_enrollment_methods(self):
return []
def generate_device_challenge(self):
keypair = CertificateKeyPair.objects.get(pk=self.connector.challenge_key_id)
challenge_str = generate_id()
iat = now()
challenge = encode(
{
"atc": challenge_str,
"iss": str(self.connector.pk),
"iat": int(iat.timestamp()),
"exp": int((iat + timedelta(minutes=5)).timestamp()),
"goauthentik.io/device/check_in": self.connector.challenge_trigger_check_in,
},
headers={"kid": keypair.kid},
key=keypair.private_key,
algorithm=JWTAlgorithms.from_private_key(keypair.private_key),
)
return challenge
def validate_device_challenge(self, response: str, challenge: str):
try:
raw = decode(
response,
options={"verify_signature": False},
audience="goauthentik.io/platform/endpoint",
)
except PyJWTError as exc:
self.logger.warning("Could not parse response", exc=exc)
raise ValidationError("Invalid challenge response") from None
device = Device.filter_not_expired(identifier=raw["iss"]).first()
if not device:
self.logger.warning("Could not find device for challenge")
raise ValidationError("Invalid challenge response")
for token in DeviceToken.filter_not_expired(
device__device=device, device__connector=self.connector
).values_list("key", flat=True):
try:
decoded = decode(
response,
key=token,
algorithms="HS512",
issuer=device.identifier,
audience="goauthentik.io/platform/endpoint",
)
if not compare_digest(decoded["atc"], challenge):
self.logger.warning("mismatched challenge")
raise ValidationError("Invalid challenge response")
return device
except PyJWTError as exc:
self.logger.warning("failed to validate device challenge response", exc=exc)
raise ValidationError("Invalid challenge response")
def generate_mdm_config(
self, target_platform: OSFamily, request: HttpRequest, token: EnrollmentToken
) -> MDMConfigResponseSerializer:

View File

@@ -21,7 +21,7 @@ from authentik.lib.models import InternallyManagedMixin, SerializerModel
from authentik.lib.utils.time import timedelta_string_validator
if TYPE_CHECKING:
from authentik.endpoints.connectors.agent.controller import AgentController
from authentik.endpoints.connectors.agent.controller import AgentConnectorController
class AgentConnector(Connector):
@@ -73,10 +73,10 @@ class AgentConnector(Connector):
return AuthenticatorEndpointStageView
@property
def controller(self) -> type[AgentController]:
from authentik.endpoints.connectors.agent.controller import AgentController
def controller(self) -> type[AgentConnectorController]:
from authentik.endpoints.connectors.agent.controller import AgentConnectorController
return AgentController
return AgentConnectorController
@property
def component(self) -> str:

View File

@@ -1,18 +1,15 @@
from datetime import timedelta
from hashlib import sha256
from hmac import compare_digest
from typing import cast
from urllib.parse import urlencode
from django.http import HttpResponse
from django.urls import reverse
from django.utils.timezone import now
from jwt import PyJWTError, decode, encode
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, IntegerField
from authentik.crypto.models import CertificateKeyPair
from authentik.endpoints.connectors.agent.controller import AgentController
from authentik.endpoints.connectors.agent.models import (
AgentConnector,
DeviceAuthenticationToken,
)
from authentik.endpoints.connectors.agent.models import DeviceAuthenticationToken, DeviceToken
from authentik.endpoints.models import Device, EndpointStage, StageMode
from authentik.flows.challenge import (
Challenge,
@@ -20,7 +17,9 @@ from authentik.flows.challenge import (
)
from authentik.flows.planner import PLAN_CONTEXT_DEVICE
from authentik.flows.stage import ChallengeStageView
from authentik.lib.generators import generate_id
from authentik.lib.utils.time import timedelta_from_string
from authentik.providers.oauth2.models import JWTAlgorithms
PLAN_CONTEXT_DEVICE_AUTH_TOKEN = "goauthentik.io/endpoints/device_auth_token" # nosec
PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE = "goauthentik.io/endpoints/connectors/agent/challenge"
@@ -32,9 +31,8 @@ class EndpointAgentChallenge(Challenge):
"""Signed challenge for authentik agent to respond to"""
component = CharField(default="ak-stage-endpoint-agent")
challenge = CharField(required=True)
challenge = CharField()
challenge_idle_timeout = IntegerField()
frame_url = CharField(required=True)
class EndpointAgentChallengeResponse(ChallengeResponse):
@@ -46,23 +44,47 @@ class EndpointAgentChallengeResponse(ChallengeResponse):
def validate_response(self, response: str | None) -> Device | None:
if not response:
return None
return cast(AgentController, self.stage.controller).validate_device_challenge(
response,
self.stage.executor.plan.context[PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE],
)
try:
raw = decode(
response,
options={"verify_signature": False},
audience="goauthentik.io/platform/endpoint",
)
except PyJWTError as exc:
self.stage.logger.warning("Could not parse response", exc=exc)
raise ValidationError("Invalid challenge response") from None
device = Device.filter_not_expired(identifier=raw["iss"]).first()
if not device:
self.stage.logger.warning("Could not find device for challenge")
raise ValidationError("Invalid challenge response")
for token in DeviceToken.filter_not_expired(
device__device=device,
device__connector=self.stage.executor.current_stage.connector,
).values_list("key", flat=True):
try:
decoded = decode(
response,
key=token,
algorithms="HS512",
issuer=device.identifier,
audience="goauthentik.io/platform/endpoint",
)
if not compare_digest(
decoded["atc"],
self.stage.executor.plan.context[PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE],
):
self.stage.logger.warning("mismatched challenge")
raise ValidationError("Invalid challenge response")
return device
except PyJWTError as exc:
self.stage.logger.warning("failed to validate device challenge response", exc=exc)
raise ValidationError("Invalid challenge response")
class AuthenticatorEndpointStageView(ChallengeStageView):
"""Endpoint stage"""
response_class = EndpointAgentChallengeResponse
controller: AgentController
def dispatch(self, request, *args, **kwargs):
stage: EndpointStage = self.executor.current_stage
connector: AgentConnector = stage.connector
self.controller = connector.controller(connector)
return super().dispatch(request, *args, **kwargs)
def get(self, request, *args, **kwargs):
# Check if we're in a device interactive auth flow, in which case we use that
@@ -97,7 +119,21 @@ class AuthenticatorEndpointStageView(ChallengeStageView):
def get_challenge(self, *args, **kwargs) -> Challenge:
stage: EndpointStage = self.executor.current_stage
challenge = self.controller.generate_device_challenge()
keypair = CertificateKeyPair.objects.get(pk=stage.connector.challenge_key_id)
challenge_str = generate_id()
iat = now()
challenge = encode(
{
"atc": challenge_str,
"iss": str(stage.pk),
"iat": int(iat.timestamp()),
"exp": int((iat + timedelta(minutes=5)).timestamp()),
"goauthentik.io/device/check_in": stage.connector.challenge_trigger_check_in,
},
headers={"kid": keypair.kid},
key=keypair.private_key,
algorithm=JWTAlgorithms.from_private_key(keypair.private_key),
)
self.executor.plan.context[PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE] = challenge
return EndpointAgentChallenge(
data={
@@ -106,11 +142,6 @@ class AuthenticatorEndpointStageView(ChallengeStageView):
"challenge_idle_timeout": int(
timedelta_from_string(stage.connector.challenge_idle_timeout).total_seconds()
),
"frame_url": self.request.build_absolute_uri(
reverse("authentik_endpoints_connectors_agent:browser-backchannel")
+ "?"
+ urlencode({"xak-agent-challenge": challenge})
),
}
)

View File

@@ -1,6 +1,5 @@
from hashlib import sha256
from json import loads
from unittest.mock import PropertyMock, patch
from django.urls import reverse
from jwt import encode
@@ -233,43 +232,3 @@ class TestEndpointStage(FlowTestCase):
plan = plan()
self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
self.assertEqual(plan.context[PLAN_CONTEXT_DEVICE], self.device)
def test_endpoint_stage_connector_no_stage_optional(self):
flow = create_test_flow()
stage = EndpointStage.objects.create(connector=self.connector, mode=StageMode.OPTIONAL)
FlowStageBinding.objects.create(stage=stage, target=flow, order=0)
with patch(
"authentik.endpoints.connectors.agent.models.AgentConnector.stage",
PropertyMock(return_value=None),
):
with self.assertFlowFinishes() as plan:
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
)
self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
plan = plan()
self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
self.assertNotIn(PLAN_CONTEXT_DEVICE, plan.context)
def test_endpoint_stage_connector_no_stage_required(self):
flow = create_test_flow()
stage = EndpointStage.objects.create(connector=self.connector, mode=StageMode.REQUIRED)
FlowStageBinding.objects.create(stage=stage, target=flow, order=0)
with patch(
"authentik.endpoints.connectors.agent.models.AgentConnector.stage",
PropertyMock(return_value=None),
):
with self.assertFlowFinishes() as plan:
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
)
self.assertStageResponse(
res,
component="ak-stage-access-denied",
error_message="Invalid stage configuration",
)
plan = plan()
self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
self.assertNotIn(PLAN_CONTEXT_DEVICE, plan.context)

View File

@@ -1,12 +1,5 @@
from django.urls import path
from authentik.endpoints.connectors.agent.api.connectors import AgentConnectorViewSet
from authentik.endpoints.connectors.agent.api.enrollment_tokens import EnrollmentTokenViewSet
from authentik.endpoints.connectors.agent.views.browser_backchannel import BrowserBackchannel
urlpatterns = [
path("browser-backchannel/", BrowserBackchannel.as_view(), name="browser-backchannel"),
]
api_urlpatterns = [
("endpoints/agents/connectors", AgentConnectorViewSet),

View File

@@ -1,40 +0,0 @@
from typing import Any
from django.http import HttpRequest, HttpResponse, HttpResponseBadRequest
from django.template.response import TemplateResponse
from django.views import View
from rest_framework.exceptions import ValidationError
from authentik.endpoints.connectors.agent.controller import AgentController
from authentik.endpoints.connectors.agent.models import AgentConnector
from authentik.endpoints.connectors.agent.stage import PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE
from authentik.endpoints.models import EndpointStage
from authentik.flows.planner import PLAN_CONTEXT_DEVICE, FlowPlan
from authentik.flows.views.executor import SESSION_KEY_PLAN
class BrowserBackchannel(View):
def get_flow_plan(self) -> FlowPlan:
flow_plan: FlowPlan = self.request.session[SESSION_KEY_PLAN]
return flow_plan
def setup(self, request: HttpRequest, *args: Any, **kwargs: Any) -> None:
super().setup(request, *args, **kwargs)
stage: EndpointStage = self.get_flow_plan().bindings[0].stage
connector = AgentConnector.objects.filter(pk=stage.connector_id).first()
if not connector:
return HttpResponseBadRequest()
self.controller: AgentController = connector.controller(connector)
def get(self, request: HttpRequest) -> HttpResponse:
response = request.GET.get("xak-agent-response")
flow_plan = self.get_flow_plan()
try:
dev = self.controller.validate_device_challenge(
response, flow_plan.context.get(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE)
)
flow_plan.context[PLAN_CONTEXT_DEVICE] = dev
request.session[SESSION_KEY_PLAN] = flow_plan
except ValidationError:
return HttpResponseBadRequest()
return TemplateResponse(request, "flows/frame-submit.html")

View File

@@ -63,7 +63,7 @@ class OperatingSystemSerializer(Serializer):
"Operating System version, must always be the version number but may contain build name"
),
)
arch = CharField(required=False)
arch = CharField(required=True)
class NetworkInterfaceSerializer(Serializer):

View File

@@ -1,4 +1,4 @@
from authentik.endpoints.models import EndpointStage, StageMode
from authentik.endpoints.models import EndpointStage
from authentik.flows.stage import StageView
PLAN_CONTEXT_ENDPOINT_CONNECTOR = "endpoint_connector"
@@ -6,24 +6,15 @@ PLAN_CONTEXT_ENDPOINT_CONNECTOR = "endpoint_connector"
class EndpointStageView(StageView):
def _get_inner(self) -> StageView | None:
def _get_inner(self):
stage: EndpointStage = self.executor.current_stage
inner_stage: type[StageView] | None = stage.connector.stage
if not inner_stage:
return None
return self.executor.stage_ok()
return inner_stage(self.executor, request=self.request)
def dispatch(self, request, *args, **kwargs):
inner = self._get_inner()
if inner is None:
stage: EndpointStage = self.executor.current_stage
if stage.mode == StageMode.OPTIONAL:
return self.executor.stage_ok()
else:
return self.executor.stage_invalid("Invalid stage configuration")
return inner.dispatch(request, *args, **kwargs)
return self._get_inner().dispatch(request, *args, **kwargs)
def cleanup(self):
inner = self._get_inner()
if inner is not None:
return inner.cleanup()
return self._get_inner().cleanup()

View File

@@ -60,18 +60,20 @@ class TestEndpointFacts(APITestCase):
]
}
)
self.assertCountEqual(
device.cached_facts.data["software"],
[
{
"name": "software-a",
"version": "1.2.3.4",
"source": "package",
},
{
"name": "software-b",
"version": "5.6.7.8",
"source": "package",
},
],
self.assertEqual(
device.cached_facts.data,
{
"software": [
{
"name": "software-a",
"version": "1.2.3.4",
"source": "package",
},
{
"name": "software-b",
"version": "5.6.7.8",
"source": "package",
},
]
},
)

View File

@@ -10,7 +10,6 @@ from jwt import PyJWTError, decode, encode, get_unverified_header
from rest_framework.exceptions import ValidationError
from structlog.stdlib import get_logger
from authentik.common.oauth.constants import TOKEN_TYPE
from authentik.core.models import AuthenticatedSession, Session, User
from authentik.core.sessions import SessionStore
from authentik.crypto.apps import MANAGED_KEY
@@ -27,6 +26,7 @@ from authentik.events.models import Event, EventAction
from authentik.events.signals import SESSION_LOGIN_EVENT
from authentik.flows.planner import PLAN_CONTEXT_DEVICE
from authentik.lib.utils.time import timedelta_from_string
from authentik.providers.oauth2.constants import TOKEN_TYPE
from authentik.providers.oauth2.id_token import IDToken
from authentik.providers.oauth2.models import JWTAlgorithms
from authentik.root.middleware import SessionMiddleware

View File

@@ -1,42 +0,0 @@
"""GoogleChromeConnector API Views"""
from django.urls import reverse
from rest_framework.fields import SerializerMethodField
from rest_framework.request import Request
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.used_by import UsedByMixin
from authentik.endpoints.api.connectors import ConnectorSerializer
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.endpoints.connectors.google_chrome.models import GoogleChromeConnector
class GoogleChromeConnectorSerializer(EnterpriseRequiredMixin, ConnectorSerializer):
"""GoogleChromeConnector Serializer"""
chrome_url = SerializerMethodField()
def get_chrome_url(self, _: GoogleChromeConnector) -> str | None:
"""Full URL to be used in Google Workspace configuration"""
request: Request = self.context.get("request", None)
if not request:
return True
return request.build_absolute_uri(
reverse("authentik_endpoints_connectors_google_chrome:chrome")
)
class Meta:
model = GoogleChromeConnector
fields = ConnectorSerializer.Meta.fields + ["credentials", "chrome_url"]
class GoogleChromeConnectorViewSet(UsedByMixin, ModelViewSet):
"""GoogleChromeConnector Viewset"""
queryset = GoogleChromeConnector.objects.all()
serializer_class = GoogleChromeConnectorSerializer
filterset_fields = [
"name",
]
search_fields = ["name"]
ordering = ["name"]

View File

@@ -1,13 +0,0 @@
"""authentik Endpoint app config"""
from authentik.enterprise.apps import EnterpriseConfig
class AuthentikEndpointsConnectorGoogleChromeAppConfig(EnterpriseConfig):
"""authentik endpoint config"""
name = "authentik.enterprise.endpoints.connectors.google_chrome"
label = "authentik_endpoints_connectors_google_chrome"
verbose_name = "authentik Enterprise.Endpoints.Connectors.Google Chrome"
default = True
mountpoint = "endpoints/google/"

View File

@@ -1,116 +0,0 @@
from json import dumps, loads
from django.http import HttpRequest, HttpResponseRedirect
from django.urls import reverse
from googleapiclient.discovery import build
from authentik.endpoints.controller import BaseController, EnrollmentMethods
from authentik.endpoints.facts import DeviceFacts, OSFamily
from authentik.endpoints.models import Device, DeviceConnection
from authentik.enterprise.endpoints.connectors.google_chrome.google_schema import (
DeviceSignals,
VerifyChallengeResponseResult,
)
from authentik.enterprise.endpoints.connectors.google_chrome.models import GoogleChromeConnector
from authentik.policies.utils import delete_none_values
# Header we get from chrome that initiates verified access
HEADER_DEVICE_TRUST = "X-Device-Trust"
# Header we send to the client with the challenge
HEADER_ACCESS_CHALLENGE = "X-Verified-Access-Challenge"
# Header we get back from the client that we verify with google
HEADER_ACCESS_CHALLENGE_RESPONSE = "X-Verified-Access-Challenge-Response"
# Header value for x-device-trust that initiates the flow
DEVICE_TRUST_VERIFIED_ACCESS = "VerifiedAccess"
class GoogleChromeController(BaseController[GoogleChromeConnector]):
def __init__(self, connector):
super().__init__(connector)
self.google_client = build(
"verifiedaccess",
"v2",
cache_discovery=False,
**connector.google_credentials(),
)
@staticmethod
def vendor_identifier() -> str:
return "chrome.google.com"
def supported_enrollment_methods(self) -> list[EnrollmentMethods]:
return [EnrollmentMethods.AUTOMATIC_USER]
def generate_challenge(self, request: HttpRequest) -> HttpResponseRedirect:
challenge = self.google_client.challenge().generate().execute()
res = HttpResponseRedirect(
request.build_absolute_uri(
reverse("authentik_endpoints_connectors_google_chrome:chrome")
)
)
res[HEADER_ACCESS_CHALLENGE] = dumps(challenge)
return res
def validate_challenge(self, response: str) -> Device:
response = VerifyChallengeResponseResult(
self.google_client.challenge().verify(body=loads(response)).execute()
)
# Remove deprecated string representation of deviceSignals
response.pop("deviceSignal", None)
signals = DeviceSignals(response["deviceSignals"])
device, _ = Device.objects.update_or_create(
identifier=signals["serialNumber"],
defaults={
"name": signals["hostname"],
},
)
conn, _ = DeviceConnection.objects.update_or_create(
device=device,
connector=self.connector,
)
conn.create_snapshot(self.convert_data(signals))
return device
def convert_os_family(self, family) -> OSFamily:
return {
"CHROME_OS": OSFamily.linux,
"CHROMIUM_OS": OSFamily.linux,
"WINDOWS": OSFamily.windows,
"MAC_OS_X": OSFamily.macOS,
"LINUX": OSFamily.linux,
}.get(family, OSFamily.other)
def convert_data(self, raw_signals: DeviceSignals):
data = {
"os": delete_none_values(
{
"family": self.convert_os_family(raw_signals["operatingSystem"]),
"version": raw_signals["osVersion"],
}
),
"disks": [],
"network": delete_none_values(
{
"hostname": raw_signals["hostname"],
"interfaces": [],
"firewall_enabled": raw_signals["osFirewall"] == "OS_FIREWALL_ENABLED",
},
),
"hardware": delete_none_values(
{
"model": raw_signals["deviceModel"],
"manufacturer": raw_signals["deviceManufacturer"],
"serial": raw_signals["serialNumber"],
}
),
"vendor": {
self.vendor_identifier(): {
"agent_version": raw_signals["browserVersion"],
"raw": raw_signals,
},
},
}
facts = DeviceFacts(data=data)
facts.is_valid(raise_exception=True)
return facts.validated_data

View File

@@ -1,129 +0,0 @@
from typing import Literal, TypedDict
# Based on https://github.com/henribru/google-api-python-client-stubs/blob/master/googleapiclient-stubs/_apis/verifiedaccess/v2/schemas.pyi
class Antivirus(TypedDict, total=False):
state: Literal["STATE_UNSPECIFIED", "MISSING", "DISABLED", "ENABLED"]
class Challenge(TypedDict, total=False):
challenge: str
class CrowdStrikeAgent(TypedDict, total=False):
agentId: str
customerId: str
class DeviceSignals(TypedDict, total=False):
allowScreenLock: bool
antivirus: Antivirus
browserVersion: str
builtInDnsClientEnabled: bool
chromeRemoteDesktopAppBlocked: bool
crowdStrikeAgent: CrowdStrikeAgent
deviceAffiliationIds: list[str]
deviceEnrollmentDomain: str
deviceManufacturer: str
deviceModel: str
diskEncryption: Literal[
"DISK_ENCRYPTION_UNSPECIFIED",
"DISK_ENCRYPTION_UNKNOWN",
"DISK_ENCRYPTION_DISABLED",
"DISK_ENCRYPTION_ENCRYPTED",
]
displayName: str
hostname: str
imei: list[str]
macAddresses: list[str]
meid: list[str]
operatingSystem: Literal[
"OPERATING_SYSTEM_UNSPECIFIED",
"CHROME_OS",
"CHROMIUM_OS",
"WINDOWS",
"MAC_OS_X",
"LINUX",
]
osFirewall: Literal[
"OS_FIREWALL_UNSPECIFIED",
"OS_FIREWALL_UNKNOWN",
"OS_FIREWALL_DISABLED",
"OS_FIREWALL_ENABLED",
]
osVersion: str
passwordProtectionWarningTrigger: Literal[
"PASSWORD_PROTECTION_WARNING_TRIGGER_UNSPECIFIED",
"POLICY_UNSET",
"PASSWORD_PROTECTION_OFF",
"PASSWORD_REUSE",
"PHISHING_REUSE",
]
profileAffiliationIds: list[str]
profileEnrollmentDomain: str
realtimeUrlCheckMode: Literal[
"REALTIME_URL_CHECK_MODE_UNSPECIFIED",
"REALTIME_URL_CHECK_MODE_DISABLED",
"REALTIME_URL_CHECK_MODE_ENABLED_MAIN_FRAME",
]
safeBrowsingProtectionLevel: Literal[
"SAFE_BROWSING_PROTECTION_LEVEL_UNSPECIFIED", "INACTIVE", "STANDARD", "ENHANCED"
]
screenLockSecured: Literal[
"SCREEN_LOCK_SECURED_UNSPECIFIED",
"SCREEN_LOCK_SECURED_UNKNOWN",
"SCREEN_LOCK_SECURED_DISABLED",
"SCREEN_LOCK_SECURED_ENABLED",
]
secureBootMode: Literal[
"SECURE_BOOT_MODE_UNSPECIFIED",
"SECURE_BOOT_MODE_UNKNOWN",
"SECURE_BOOT_MODE_DISABLED",
"SECURE_BOOT_MODE_ENABLED",
]
serialNumber: str
siteIsolationEnabled: bool
systemDnsServers: list[str]
thirdPartyBlockingEnabled: bool
trigger: Literal["TRIGGER_UNSPECIFIED", "TRIGGER_BROWSER_NAVIGATION", "TRIGGER_LOGIN_SCREEN"]
windowsMachineDomain: str
windowsUserDomain: str
class Empty(TypedDict, total=False): ...
class VerifyChallengeResponseRequest(TypedDict, total=False):
challengeResponse: str
expectedIdentity: str
class VerifyChallengeResponseResult(TypedDict, total=False):
attestedDeviceId: str
customerId: str
deviceEnrollmentId: str
devicePermanentId: str
deviceSignal: str
deviceSignals: DeviceSignals
keyTrustLevel: Literal[
"KEY_TRUST_LEVEL_UNSPECIFIED",
"CHROME_OS_VERIFIED_MODE",
"CHROME_OS_DEVELOPER_MODE",
"CHROME_BROWSER_HW_KEY",
"CHROME_BROWSER_OS_KEY",
"CHROME_BROWSER_NO_KEY",
]
profileCustomerId: str
profileKeyTrustLevel: Literal[
"KEY_TRUST_LEVEL_UNSPECIFIED",
"CHROME_OS_VERIFIED_MODE",
"CHROME_OS_DEVELOPER_MODE",
"CHROME_BROWSER_HW_KEY",
"CHROME_BROWSER_OS_KEY",
"CHROME_BROWSER_NO_KEY",
]
profilePermanentId: str
signedPublicKeyAndChallenge: str
virtualDeviceId: str
virtualProfileId: str

View File

@@ -1,38 +0,0 @@
# Generated by Django 5.2.11 on 2026-03-01 18:38
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
("authentik_endpoints", "0004_deviceaccessgroup_attributes"),
]
operations = [
migrations.CreateModel(
name="GoogleChromeConnector",
fields=[
(
"connector_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="authentik_endpoints.connector",
),
),
("credentials", models.JSONField()),
],
options={
"verbose_name": "Google Device Trust Connector",
"verbose_name_plural": "Google Device Trust Connectors",
},
bases=("authentik_endpoints.connector",),
),
]

View File

@@ -1,69 +0,0 @@
"""Endpoint stage"""
from typing import TYPE_CHECKING
from django.db import models
from django.templatetags.static import static
from django.utils.translation import gettext_lazy as _
from google.oauth2.service_account import Credentials
from rest_framework.serializers import BaseSerializer
from authentik.endpoints.models import Connector
from authentik.flows.stage import StageView
if TYPE_CHECKING:
from authentik.enterprise.endpoints.connectors.google_chrome.controller import (
GoogleChromeController,
)
class GoogleChromeConnector(Connector):
"""Verify Google Chrome Device Trust connection for the user's browser."""
credentials = models.JSONField()
def google_credentials(self):
return {
"credentials": Credentials.from_service_account_info(
self.credentials, scopes=["https://www.googleapis.com/auth/verifiedaccess"]
),
}
@property
def icon_url(self):
return static("authentik/sources/google.svg")
@property
def serializer(self) -> type[BaseSerializer]:
from authentik.enterprise.endpoints.connectors.google_chrome.api import (
GoogleChromeConnectorSerializer,
)
return GoogleChromeConnectorSerializer
@property
def stage(self) -> type[StageView] | None:
from authentik.enterprise.endpoints.connectors.google_chrome.stage import (
GoogleChromeStageView,
)
return GoogleChromeStageView
@property
def controller(self) -> type[GoogleChromeController]:
from authentik.enterprise.endpoints.connectors.google_chrome.controller import (
GoogleChromeController,
)
return GoogleChromeController
@property
def component(self) -> str:
return "ak-endpoints-connector-gdtc-form"
def __str__(self) -> str:
return f"Google Device Trust Connector {self.name}"
class Meta:
verbose_name = _("Google Device Trust Connector")
verbose_name_plural = _("Google Device Trust Connectors")

View File

@@ -1,32 +0,0 @@
from django.http import HttpResponse
from django.urls import reverse
from django.utils.translation import gettext as _
from authentik.flows.challenge import (
Challenge,
ChallengeResponse,
FrameChallenge,
FrameChallengeResponse,
)
from authentik.flows.stage import ChallengeStageView
class GoogleChromeStageView(ChallengeStageView):
"""Endpoint stage"""
response_class = FrameChallengeResponse
def get_challenge(self, *args, **kwargs) -> Challenge:
return FrameChallenge(
data={
"component": "xak-flow-frame",
"url": self.request.build_absolute_uri(
reverse("authentik_endpoints_connectors_google_chrome:chrome")
),
"loading_overlay": True,
"loading_text": _("Verifying your browser..."),
}
)
def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:
return self.executor.stage_ok()

View File

@@ -1,36 +0,0 @@
{
"devicePermanentId": "6f30327d-e436-4f7a-9f89-c37a7b6bf408",
"keyTrustLevel": "CHROME_BROWSER_HW_KEY",
"virtualDeviceId": "Z5DDF07GK6",
"customerId": "qewrqer",
"deviceSignals": {
"deviceManufacturer": "Apple Inc.",
"deviceModel": "MacBookPro18,1",
"operatingSystem": "MAC_OS_X",
"osVersion": "26.2.0",
"displayName": "jens-mac-vm",
"diskEncryption": "DISK_ENCRYPTION_ENCRYPTED",
"serialNumber": "Z5DDF07GK6",
"osFirewall": "OS_FIREWALL_DISABLED",
"systemDnsServers": [
"10.120.20.250:53"
],
"hostname": "jens-mac-vm.lab.beryju.org",
"macAddresses": [
"f4:d4:88:79:07:0e"
],
"screenLockSecured": "SCREEN_LOCK_SECURED_ENABLED",
"deviceEnrollmentDomain": "beryju.org",
"browserVersion": "145.0.7632.76",
"deviceAffiliationIds": [
"qewrqer"
],
"builtInDnsClientEnabled": true,
"chromeRemoteDesktopAppBlocked": false,
"safeBrowsingProtectionLevel": "STANDARD",
"siteIsolationEnabled": true,
"passwordProtectionWarningTrigger": "POLICY_UNSET",
"realtimeUrlCheckMode": "REALTIME_URL_CHECK_MODE_DISABLED",
"trigger": "TRIGGER_BROWSER_NAVIGATION"
}
}

View File

@@ -1,67 +0,0 @@
from json import dumps
from unittest.mock import MagicMock, patch
from django.urls import reverse
from rest_framework.test import APITestCase
from authentik.core.tests.utils import RequestFactory
from authentik.endpoints.facts import OSFamily
from authentik.endpoints.models import Device
from authentik.enterprise.endpoints.connectors.google_chrome.controller import (
HEADER_ACCESS_CHALLENGE,
GoogleChromeController,
)
from authentik.enterprise.endpoints.connectors.google_chrome.models import GoogleChromeConnector
from authentik.enterprise.providers.google_workspace.clients.test_http import MockHTTP
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture
class TestGoogleChromeConnector(APITestCase):
    """Unit tests for GoogleChromeController challenge generation and validation."""

    def setUp(self):
        # Connector with empty credentials; the Google Verified Access API is
        # mocked via MockHTTP below, so no real credentials are needed.
        self.connector = GoogleChromeConnector.objects.create(
            name=generate_id(),
            credentials={},
        )
        self.factory = RequestFactory()
        self.api_key = generate_id()

    def test_generate_challenge(self):
        """generate_challenge redirects to the chrome endpoint and attaches the challenge header."""
        req = self.factory.get("/")
        challenge = generate_id()
        http = MockHTTP()
        http.add_response(
            f"https://verifiedaccess.googleapis.com/v2/challenge:generate?key={self.api_key}&alt=json",
            {"challenge": challenge},
            method="POST",
        )
        with patch(
            "authentik.enterprise.endpoints.connectors.google_chrome.models.GoogleChromeConnector.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            controller = GoogleChromeController(self.connector)
            res = controller.generate_challenge(req)
            # Response redirects back to the DTC endpoint...
            self.assertEqual(
                res["Location"],
                req.build_absolute_uri(
                    reverse("authentik_endpoints_connectors_google_chrome:chrome")
                ),
            )
            # ...and carries the generated challenge in the Verified Access header.
            self.assertEqual(res.headers[HEADER_ACCESS_CHALLENGE], dumps({"challenge": challenge}))

    def test_validate_challenge(self):
        """validate_challenge creates a Device from the verified device signals."""
        http = MockHTTP()
        http.add_response(
            f"https://verifiedaccess.googleapis.com/v2/challenge:verify?key={self.api_key}&alt=json",
            load_fixture("fixtures/host_macos.json"),
            method="POST",
        )
        with patch(
            "authentik.enterprise.endpoints.connectors.google_chrome.models.GoogleChromeConnector.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            controller = GoogleChromeController(self.connector)
            controller.validate_challenge(dumps("{}"))
            # Identifier matches serialNumber/virtualDeviceId in the fixture.
            device = Device.objects.get(identifier="Z5DDF07GK6")
            self.assertIsNotNone(device)
            self.assertEqual(device.cached_facts.data["os"]["family"], OSFamily.macOS)

View File

@@ -1,91 +0,0 @@
from json import dumps
from unittest.mock import MagicMock, patch
from django.urls import reverse
from authentik.core.tests.utils import RequestFactory, create_test_flow
from authentik.endpoints.models import Device, EndpointStage
from authentik.enterprise.endpoints.connectors.google_chrome.models import GoogleChromeConnector
from authentik.enterprise.providers.google_workspace.clients.test_http import MockHTTP
from authentik.flows.models import FlowStageBinding
from authentik.flows.planner import PLAN_CONTEXT_DEVICE
from authentik.flows.tests import FlowTestCase
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture
class TestChromeDTCView(FlowTestCase):
    """End-to-end test of the Chrome DTC view embedded in a flow execution."""

    def setUp(self):
        self.flow = create_test_flow()
        # Google API access is mocked below, so empty credentials suffice.
        self.connector = GoogleChromeConnector.objects.create(
            name=generate_id(),
            credentials={},
        )
        self.factory = RequestFactory()
        self.api_key = generate_id()
        self.stage = EndpointStage.objects.create(
            name=generate_id(),
            connector=self.connector,
        )
        FlowStageBinding.objects.create(
            target=self.flow,
            stage=self.stage,
            order=0,
        )

    def test_dtc_generate_verify(self):
        """Full handshake: frame challenge -> generate -> verify -> flow finishes with device."""
        res = self.client.get(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
        )
        # The endpoint stage first renders the iframe pointing at the DTC view.
        self.assertStageResponse(
            res,
            self.flow,
            component="xak-flow-frame",
            url="http://testserver/endpoints/google/chrome/",
        )
        challenge = generate_id()
        http = MockHTTP()
        http.add_response(
            f"https://verifiedaccess.googleapis.com/v2/challenge:generate?key={self.api_key}&alt=json",
            {"challenge": challenge},
            method="POST",
        )
        http.add_response(
            f"https://verifiedaccess.googleapis.com/v2/challenge:verify?key={self.api_key}&alt=json",
            load_fixture("fixtures/host_macos.json"),
            method="POST",
        )
        with patch(
            "authentik.enterprise.endpoints.connectors.google_chrome.models.GoogleChromeConnector.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            # Generate challenge
            res = self.client.get(
                reverse("authentik_endpoints_connectors_google_chrome:chrome"),
                HTTP_X_DEVICE_TRUST="VerifiedAccess",
            )
            self.assertEqual(res.status_code, 302)
            self.assertEqual(
                res.headers["X-Verified-Access-Challenge"],
                dumps({"challenge": challenge}),
            )
            # Validate challenge
            res = self.client.get(
                reverse("authentik_endpoints_connectors_google_chrome:chrome"),
                HTTP_X_VERIFIED_ACCESS_CHALLENGE_RESPONSE=dumps({}),
            )
            self.assertEqual(res.status_code, 200)
            device = Device.objects.get(identifier="Z5DDF07GK6")
            self.assertIsNotNone(device)
            # Continue flow
            with self.assertFlowFinishes() as plan:
                res = self.client.post(
                    reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                )
                self.assertStageRedirects(res, "/")
            plan = plan()
            # The device created during validation must be carried in the plan context.
            plan_device = plan.context[PLAN_CONTEXT_DEVICE]
            self.assertEqual(device.pk, plan_device.pk)

View File

@@ -1,16 +0,0 @@
"""API URLs"""
from django.urls import path
from authentik.enterprise.endpoints.connectors.google_chrome.api import GoogleChromeConnectorViewSet
from authentik.enterprise.endpoints.connectors.google_chrome.views.dtc import (
GoogleChromeDeviceTrustConnector,
)
# Browser-facing endpoint targeted by the flow iframe / Chrome Verified Access handshake.
urlpatterns = [
    path("chrome/", GoogleChromeDeviceTrustConnector.as_view(), name="chrome"),
]

# Mounted under the API router by the connector discovery mechanism.
api_urlpatterns = [
    ("endpoints/google_chrome/connectors", GoogleChromeConnectorViewSet),
]

View File

@@ -1,46 +0,0 @@
from typing import Any
from django.http import HttpRequest, HttpResponse, HttpResponseBadRequest
from django.template.response import TemplateResponse
from django.utils.decorators import method_decorator
from django.views import View
from django.views.decorators.clickjacking import xframe_options_sameorigin
from authentik.endpoints.models import EndpointStage
from authentik.enterprise.endpoints.connectors.google_chrome.controller import (
HEADER_ACCESS_CHALLENGE_RESPONSE,
HEADER_DEVICE_TRUST,
GoogleChromeController,
)
from authentik.enterprise.endpoints.connectors.google_chrome.models import GoogleChromeConnector
from authentik.flows.planner import PLAN_CONTEXT_DEVICE, FlowPlan
from authentik.flows.views.executor import SESSION_KEY_PLAN
@method_decorator(xframe_options_sameorigin, name="dispatch")
class GoogleChromeDeviceTrustConnector(View):
    """Google Chrome Device-trust connector based endpoint authenticator.

    Implements the Chrome Verified Access handshake inside a flow iframe:
    the first request (with the ``X-Device-Trust`` header) gets a challenge,
    the follow-up request carries the attestation response, which is
    validated and the resulting device stored in the flow plan context.
    """

    # Set in setup(); None when the stage references a missing connector.
    controller: GoogleChromeController | None

    def get_flow_plan(self) -> FlowPlan:
        """Return the flow plan stored in the session by the flow executor."""
        flow_plan: FlowPlan = self.request.session[SESSION_KEY_PLAN]
        return flow_plan

    def setup(self, request: HttpRequest, *args: Any, **kwargs: Any) -> None:
        super().setup(request, *args, **kwargs)
        stage: EndpointStage = self.get_flow_plan().bindings[0].stage
        connector = GoogleChromeConnector.objects.filter(pk=stage.connector_id).first()
        # Bug fix: Django ignores the return value of setup(), so the previous
        # `return HttpResponseBadRequest()` here never reached the client and
        # left self.controller unset (AttributeError in get()). Record the
        # missing connector instead and respond with 400 from get().
        self.controller = connector.controller(connector) if connector else None

    def get(self, request: HttpRequest) -> HttpResponse:
        """Drive the Verified Access handshake depending on the request headers."""
        if self.controller is None:
            # Stage points at a deleted/missing connector.
            return HttpResponseBadRequest()
        x_device_trust = request.headers.get(HEADER_DEVICE_TRUST)
        x_access_challenge_response = request.headers.get(HEADER_ACCESS_CHALLENGE_RESPONSE)
        if x_device_trust == "VerifiedAccess" and x_access_challenge_response is None:
            return self.controller.generate_challenge(request)
        if x_access_challenge_response:
            device = self.controller.validate_challenge(x_access_challenge_response)
            # Persist the verified device so the endpoint stage can pick it up.
            flow_plan = self.get_flow_plan()
            flow_plan.context[PLAN_CONTEXT_DEVICE] = device
            self.request.session[SESSION_KEY_PLAN] = flow_plan
        return TemplateResponse(request, "flows/frame-submit.html")

View File

@@ -15,7 +15,6 @@ from django.core.cache import cache
from django.db.models.query import QuerySet
from django.utils.timezone import now
from jwt import PyJWTError, decode, get_unverified_header
from jwt.algorithms import ECAlgorithm
from rest_framework.exceptions import ValidationError
from rest_framework.fields import (
ChoiceField,
@@ -110,20 +109,13 @@ class LicenseKey:
intermediate.verify_directly_issued_by(get_licensing_key())
except InvalidSignature, TypeError, ValueError, Error:
raise ValidationError("Unable to verify license") from None
_validate_curve_original = ECAlgorithm._validate_curve
try:
# authentik's license are generated with `algorithm="ES512"` and signed with
# a key of curve `secp384r1`. Starting with version 2.11.0, pyjwt enforces the spec, see
# https://github.com/jpadilla/pyjwt/commit/5b8622773358e56d3d3c0a9acf404809ff34433a
# authentik will change its license generation to `algorithm="ES384"` in 2026.
# TODO: remove this when the last incompatible license runs out.
ECAlgorithm._validate_curve = lambda *_: True
body = from_dict(
LicenseKey,
decode(
jwt,
our_cert.public_key(),
algorithms=["ES384", "ES512"],
algorithms=["ES512"],
audience=get_license_aud(),
options={"verify_exp": check_expiry, "verify_signature": check_expiry},
),
@@ -133,8 +125,6 @@ class LicenseKey:
if unverified["aud"] != get_license_aud():
raise ValidationError("Invalid Install ID in license") from None
raise ValidationError("Unable to verify license") from None
finally:
ECAlgorithm._validate_curve = _validate_curve_original
return body
@staticmethod

View File

@@ -1,150 +0,0 @@
from datetime import datetime
from django.db.models import BooleanField as ModelBooleanField
from django.db.models import Case, Q, Value, When
from django_filters.rest_framework import BooleanFilter, FilterSet
from drf_spectacular.utils import extend_schema
from rest_framework.decorators import action
from rest_framework.fields import IntegerField, SerializerMethodField
from rest_framework.mixins import CreateModelMixin
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from authentik.core.api.utils import ModelSerializer
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.lifecycle.api.reviews import ReviewSerializer
from authentik.enterprise.lifecycle.models import LifecycleIteration, ReviewState
from authentik.enterprise.lifecycle.utils import (
ContentTypeField,
ReviewerGroupSerializer,
ReviewerUserSerializer,
admin_link_for_model,
parse_content_type,
start_of_day,
)
from authentik.lib.utils.time import timedelta_from_string
class LifecycleIterationSerializer(EnterpriseRequiredMixin, ModelSerializer):
    """Read-only representation of a single review iteration.

    Reviewer configuration (groups, users, minimum count) is sourced from the
    iteration's rule; grace-period end and next review date are derived from
    the rule's timedelta strings relative to ``opened_on``.
    """

    content_type = ContentTypeField()
    object_verbose = SerializerMethodField()
    object_admin_url = SerializerMethodField(read_only=True)
    grace_period_end = SerializerMethodField(read_only=True)
    reviews = ReviewSerializer(many=True, read_only=True, source="review_set.all")
    user_can_review = SerializerMethodField(read_only=True)
    reviewer_groups = ReviewerGroupSerializer(
        many=True, read_only=True, source="rule.reviewer_groups"
    )
    min_reviewers = IntegerField(read_only=True, source="rule.min_reviewers")
    reviewers = ReviewerUserSerializer(many=True, read_only=True, source="rule.reviewers")
    next_review_date = SerializerMethodField(read_only=True)

    class Meta:
        model = LifecycleIteration
        fields = [
            "id",
            "content_type",
            "object_id",
            "object_verbose",
            "object_admin_url",
            "state",
            "opened_on",
            "grace_period_end",
            "next_review_date",
            "reviews",
            "user_can_review",
            "reviewer_groups",
            "min_reviewers",
            "reviewers",
        ]
        # The whole serializer is read-only; iterations are mutated via reviews.
        read_only_fields = fields

    def get_object_verbose(self, iteration: LifecycleIteration) -> str:
        """Human-readable label of the object under review."""
        return str(iteration.object)

    def get_object_admin_url(self, iteration: LifecycleIteration) -> str:
        """Admin-interface deep link to the object under review."""
        return admin_link_for_model(iteration.object)

    def get_grace_period_end(self, iteration: LifecycleIteration) -> datetime:
        """Date (start of day) after which a pending iteration becomes overdue."""
        return start_of_day(
            iteration.opened_on + timedelta_from_string(iteration.rule.grace_period)
        )

    def get_next_review_date(self, iteration: LifecycleIteration) -> datetime:
        """Date (start of day) when the next iteration is expected to open."""
        return start_of_day(iteration.opened_on + timedelta_from_string(iteration.rule.interval))

    def get_user_can_review(self, iteration: LifecycleIteration) -> bool:
        """Whether the requesting user may still submit a review for this iteration."""
        return iteration.user_can_review(self.context["request"].user)
class LifecycleIterationFilterSet(FilterSet):
    """Filter iterations by the `user_is_reviewer` annotation added in the viewset."""

    user_is_reviewer = BooleanFilter(field_name="user_is_reviewer", lookup_expr="exact")
class IterationViewSet(EnterpriseRequiredMixin, CreateModelMixin, GenericViewSet):
    """API for review iterations: create, plus custom lookup/list actions."""

    queryset = LifecycleIteration.objects.all()
    serializer_class = LifecycleIterationSerializer
    ordering = ["-opened_on"]
    ordering_fields = ["state", "content_type__model", "opened_on", "grace_period_end"]
    filterset_class = LifecycleIterationFilterSet

    def get_queryset(self):
        """Annotate each iteration with whether the requesting user is a reviewer.

        A user counts as reviewer when directly listed on the rule, or when any
        of the user's groups (including their ancestor groups) is a reviewer
        group of the rule.
        """
        user = self.request.user
        return self.queryset.annotate(
            user_is_reviewer=Case(
                When(
                    Q(rule__reviewers=user)
                    | Q(rule__reviewer_groups__in=user.groups.all().with_ancestors()),
                    then=Value(True),
                ),
                default=Value(False),
                output_field=ModelBooleanField(),
            )
            # The group join can yield duplicate rows per iteration.
        ).distinct()

    @action(
        detail=False,
        methods=["get"],
        url_path=r"latest/(?P<content_type>[^/]+)/(?P<object_id>[^/]+)",
    )
    def latest_iteration(self, request: Request, content_type: str, object_id: str) -> Response:
        """Return the most recently opened iteration for a single object, or 404."""
        ct = parse_content_type(content_type)
        try:
            obj = (
                self.get_queryset()
                .filter(
                    content_type__app_label=ct["app_label"],
                    content_type__model=ct["model"],
                    object_id=object_id,
                )
                .latest("opened_on")
            )
        except LifecycleIteration.DoesNotExist:
            return Response(status=404)
        serializer = self.get_serializer(obj)
        return Response(serializer.data)

    @extend_schema(
        operation_id="lifecycle_iterations_list_open",
        responses={200: LifecycleIterationSerializer(many=True)},
    )
    @action(
        detail=False,
        methods=["get"],
        url_path=r"open",
    )
    def open_iterations(self, request: Request):
        """Paginated list of all iterations still awaiting review (pending or overdue)."""
        iterations = self.get_queryset().filter(
            Q(state=ReviewState.PENDING) | Q(state=ReviewState.OVERDUE)
        )
        iterations = self.filter_queryset(iterations)
        page = self.paginate_queryset(iterations)
        if page is not None:
            serializer = self.get_serializer(page, many=True)
            return self.get_paginated_response(serializer.data)
        serializer = self.get_serializer(iterations, many=True)
        return Response(serializer.data)

View File

@@ -1,33 +0,0 @@
from django.utils.translation import gettext_lazy as _
from rest_framework.exceptions import ValidationError
from rest_framework.mixins import CreateModelMixin
from rest_framework.viewsets import GenericViewSet
from authentik.core.api.utils import ModelSerializer
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.lifecycle.models import LifecycleIteration, Review
from authentik.enterprise.lifecycle.utils import ReviewerUserSerializer
class ReviewSerializer(EnterpriseRequiredMixin, ModelSerializer):
    """Serializer for submitting a review; the reviewer is set server-side."""

    reviewer = ReviewerUserSerializer(read_only=True)

    class Meta:
        model = Review
        fields = ["id", "iteration", "reviewer", "timestamp", "note"]
        read_only_fields = ["id", "timestamp", "reviewer"]

    def validate_iteration(self, iteration: LifecycleIteration) -> LifecycleIteration:
        """Reject submissions from users not permitted to review this iteration."""
        user = self.context["request"].user
        if not iteration.user_can_review(user):
            raise ValidationError(_("You are not allowed to submit a review for this object."))
        return iteration
class ReviewViewSet(EnterpriseRequiredMixin, CreateModelMixin, GenericViewSet):
    """Create-only API for submitting reviews."""

    queryset = Review.objects.all()
    serializer_class = ReviewSerializer

    def perform_create(self, serializer: ReviewSerializer) -> None:
        """Attach the requesting user as reviewer, then let the iteration
        re-evaluate whether it is now fully reviewed."""
        review = serializer.save(reviewer=self.request.user)
        review.iteration.on_review(self.request)

View File

@@ -1,113 +0,0 @@
from django.utils.translation import gettext as _
from rest_framework.exceptions import ValidationError
from rest_framework.fields import SerializerMethodField
from rest_framework.relations import SlugRelatedField
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.utils import ModelSerializer
from authentik.core.models import User
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.lifecycle.models import LifecycleRule
from authentik.enterprise.lifecycle.utils import (
ContentTypeField,
ReviewerGroupSerializer,
ReviewerUserSerializer,
)
from authentik.lib.utils.time import timedelta_from_string
class LifecycleRuleSerializer(EnterpriseRequiredMixin, ModelSerializer):
    """Serializer for lifecycle rules with cross-field validation.

    Validation must tolerate partial updates, hence the repeated
    "value from attrs, else from self.instance" pattern below.
    """

    content_type = ContentTypeField()
    target_verbose = SerializerMethodField()
    reviewer_groups_obj = ReviewerGroupSerializer(
        many=True, read_only=True, source="reviewer_groups"
    )
    reviewers = SlugRelatedField(slug_field="uuid", many=True, queryset=User.objects.all())
    reviewers_obj = ReviewerUserSerializer(many=True, read_only=True, source="reviewers")

    class Meta:
        model = LifecycleRule
        fields = [
            "id",
            "name",
            "content_type",
            "object_id",
            "interval",
            "grace_period",
            "reviewer_groups",
            "reviewer_groups_obj",
            "min_reviewers",
            "min_reviewers_is_per_group",
            "reviewers",
            "reviewers_obj",
            "notification_transports",
            "target_verbose",
        ]
        read_only_fields = ["id", "reviewers_obj", "reviewer_groups_obj", "target_verbose"]

    def get_target_verbose(self, rule: LifecycleRule) -> str:
        """Label for the rule's target: model name (type-wide) or a single object."""
        if rule.object_id is None:
            return rule.content_type.model_class()._meta.verbose_name_plural
        else:
            return f"{rule.content_type.model_class()._meta.verbose_name}: {rule.object}"

    def validate_object_id(self, value: str) -> str | None:
        """Normalize an empty object_id to None (i.e. a type-wide rule)."""
        if value == "":
            return None
        return value

    def validate(self, attrs: dict) -> dict:
        """Cross-field validation; see inline comments for each invariant."""
        # An object-specific rule must point at an existing object.
        if (
            attrs.get("object_id") is not None
            and not attrs["content_type"]
            .get_all_objects_for_this_type(pk=attrs["object_id"])
            .exists()
        ):
            raise ValidationError({"object_id": _("Object does not exist")})
        # At least one reviewer (group or user) must remain configured.
        if "reviewer_groups" in attrs or "reviewers" in attrs:
            reviewer_groups = attrs.get(
                "reviewer_groups", self.instance.reviewer_groups.all() if self.instance else []
            )
            reviewers = attrs.get(
                "reviewers", self.instance.reviewers.all() if self.instance else []
            )
            if len(reviewer_groups) == 0 and len(reviewers) == 0:
                raise ValidationError(_("Either a reviewer group or a reviewer must be set."))
        # The grace period may not exceed the review interval.
        if "grace_period" in attrs or "interval" in attrs:
            grace_period = attrs.get("grace_period", getattr(self.instance, "grace_period", None))
            interval = attrs.get("interval", getattr(self.instance, "interval", None))
            if (
                grace_period is not None
                and interval is not None
                and (timedelta_from_string(grace_period) > timedelta_from_string(interval))
            ):
                raise ValidationError(
                    {"grace_period": _("Grace period must be shorter than the interval.")}
                )
        # At most one type-wide rule (object_id NULL) per content type.
        if "content_type" in attrs or "object_id" in attrs:
            content_type = attrs.get("content_type", getattr(self.instance, "content_type", None))
            object_id = attrs.get("object_id", getattr(self.instance, "object_id", None))
            if content_type is not None and object_id is None:
                existing = LifecycleRule.objects.filter(
                    content_type=content_type, object_id__isnull=True
                )
                if self.instance:
                    existing = existing.exclude(pk=self.instance.pk)
                if existing.exists():
                    raise ValidationError(
                        {
                            "content_type": _(
                                "Only one type-wide rule for each object type is allowed."
                            )
                        }
                    )
        return attrs
class LifecycleRuleViewSet(ModelViewSet):
    """Full CRUD API for lifecycle rules."""

    queryset = LifecycleRule.objects.all()
    serializer_class = LifecycleRuleSerializer
    search_fields = ["content_type__model", "reviewer_groups__name", "reviewers__username"]
    ordering = ["name"]
    ordering_fields = ["name", "content_type__model"]
    filterset_fields = ["content_type__model"]

View File

@@ -1,22 +0,0 @@
from authentik.enterprise.apps import EnterpriseConfig
from authentik.lib.utils.time import fqdn_rand
from authentik.tasks.schedules.common import ScheduleSpec
class ReportsConfig(EnterpriseConfig):
    """App config for the enterprise lifecycle (access review) app.

    NOTE(review): class name says "Reports" but the app is lifecycle —
    presumably copied from another app config; confirm before renaming,
    since the dotted path may be referenced elsewhere.
    """

    name = "authentik.enterprise.lifecycle"
    label = "authentik_lifecycle"
    verbose_name = "authentik Enterprise.Lifecycle"
    default = True

    @property
    def tenant_schedule_specs(self) -> list[ScheduleSpec]:
        """Run the daily rule-application task at a per-host random minute/hour."""
        # Imported lazily so the app registry is ready before tasks load.
        from authentik.enterprise.lifecycle.tasks import apply_lifecycle_rules

        return [
            ScheduleSpec(
                actor=apply_lifecycle_rules,
                crontab=f"{fqdn_rand('lifecycle_apply_lifecycle_rules')} "
                f"{fqdn_rand('lifecycle_apply_lifecycle_rules', 24)} * * *",
            )
        ]

View File

@@ -1,154 +0,0 @@
# Generated by Django 5.2.11 on 2026-02-09 15:57
import authentik.lib.utils.time
import django.db.models.deletion
import uuid
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial schema: LifecycleRule, LifecycleIteration and Review."""

    initial = True

    dependencies = [
        ("authentik_core", "0057_remove_user_groups_remove_user_user_permissions_and_more"),
        ("authentik_events", "0016_alter_event_action"),
        ("contenttypes", "0002_remove_content_type_name"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name="LifecycleRule",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ("name", models.TextField(unique=True)),
                ("object_id", models.TextField(default=None, null=True)),
                (
                    "interval",
                    models.TextField(
                        default="days=60",
                        validators=[authentik.lib.utils.time.timedelta_string_validator],
                    ),
                ),
                (
                    "grace_period",
                    models.TextField(
                        default="days=30",
                        validators=[authentik.lib.utils.time.timedelta_string_validator],
                    ),
                ),
                ("min_reviewers", models.PositiveSmallIntegerField(default=1)),
                ("min_reviewers_is_per_group", models.BooleanField(default=False)),
                (
                    "content_type",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="contenttypes.contenttype"
                    ),
                ),
                (
                    "notification_transports",
                    models.ManyToManyField(
                        blank=True,
                        help_text="Select which transports should be used to notify the reviewers. If none are selected, the notification will only be shown in the authentik UI.",
                        to="authentik_events.notificationtransport",
                    ),
                ),
                ("reviewer_groups", models.ManyToManyField(blank=True, to="authentik_core.group")),
                ("reviewers", models.ManyToManyField(blank=True, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name="LifecycleIteration",
            fields=[
                (
                    "managed",
                    models.TextField(
                        default=None,
                        help_text="Objects that are managed by authentik. These objects are created and updated automatically. This flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
                        null=True,
                        unique=True,
                        verbose_name="Managed by authentik",
                    ),
                ),
                ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ("object_id", models.TextField()),
                (
                    "state",
                    models.CharField(
                        choices=[
                            ("REVIEWED", "Reviewed"),
                            ("PENDING", "Pending"),
                            ("OVERDUE", "Overdue"),
                            ("CANCELED", "Canceled"),
                        ],
                        default="PENDING",
                        max_length=10,
                    ),
                ),
                # Changed to DateTimeField in migration 0002.
                ("opened_on", models.DateField(auto_now_add=True)),
                (
                    "content_type",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="contenttypes.contenttype"
                    ),
                ),
                (
                    "rule",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        to="authentik_lifecycle.lifecyclerule",
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="Review",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ("timestamp", models.DateTimeField(auto_now_add=True)),
                ("note", models.TextField(null=True)),
                (
                    "iteration",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_lifecycle.lifecycleiteration",
                    ),
                ),
                (
                    "reviewer",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
        ),
        migrations.AddIndex(
            model_name="lifecyclerule",
            index=models.Index(fields=["content_type"], name="authentik_l_content_4e3a6a_idx"),
        ),
        migrations.AddConstraint(
            model_name="lifecyclerule",
            constraint=models.UniqueConstraint(
                condition=models.Q(("object_id__isnull", True)),
                fields=("content_type",),
                name="uniq_lifecycle_rule_ct_null_object",
            ),
        ),
        migrations.AlterUniqueTogether(
            name="lifecyclerule",
            unique_together={("content_type", "object_id")},
        ),
        migrations.AddIndex(
            model_name="lifecycleiteration",
            index=models.Index(
                fields=["content_type", "opened_on"], name="authentik_l_content_09c32a_idx"
            ),
        ),
        migrations.AlterUniqueTogether(
            name="review",
            unique_together={("iteration", "reviewer")},
        ),
    ]

View File

@@ -1,18 +0,0 @@
# Generated by Django 5.2.11 on 2026-02-13 09:33
from django.db import migrations, models
class Migration(migrations.Migration):
    """Widen LifecycleIteration.opened_on from DateField to DateTimeField."""

    dependencies = [
        ("authentik_lifecycle", "0001_initial"),
    ]

    operations = [
        migrations.AlterField(
            model_name="lifecycleiteration",
            name="opened_on",
            field=models.DateTimeField(auto_now_add=True),
        ),
    ]

View File

@@ -1,292 +0,0 @@
from datetime import timedelta
from uuid import uuid4
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.db.models import Q, QuerySet
from django.db.models.fields import Field
from django.db.models.functions import Cast
from django.http import HttpRequest
from django.utils import timezone
from django.utils.translation import gettext as _
from rest_framework.serializers import BaseSerializer
from authentik.blueprints.models import ManagedModel
from authentik.core.models import Group, User
from authentik.enterprise.lifecycle.utils import link_for_model, start_of_day
from authentik.events.models import Event, EventAction, NotificationSeverity, NotificationTransport
from authentik.lib.models import SerializerModel
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
class LifecycleRule(SerializerModel):
    """Defines how often an object (or every object of a type) must be reviewed.

    A rule targets either a single object (``object_id`` set) or all objects
    of ``content_type`` (``object_id`` NULL). ``apply()`` reconciles review
    iterations: cancels stale ones, marks overdue ones, and opens new ones
    when the review interval has elapsed.
    """

    id = models.UUIDField(primary_key=True, default=uuid4)
    name = models.TextField(unique=True)

    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.TextField(null=True, default=None)
    object = GenericForeignKey("content_type", "object_id")

    # How often a review iteration is opened.
    interval = models.TextField(
        default="days=60",
        validators=[timedelta_string_validator],
    )
    # Grace period starts after a review is due
    grace_period = models.TextField(
        default="days=30",
        validators=[timedelta_string_validator],
    )

    # The review has to be conducted by `min_reviewers` members of `reviewer_groups`
    # (total or per group depending on `min_reviewers_is_per_group` flag) as well
    # as all of `reviewers`
    reviewer_groups = models.ManyToManyField("authentik_core.Group", blank=True)
    min_reviewers = models.PositiveSmallIntegerField(default=1)
    min_reviewers_is_per_group = models.BooleanField(default=False)
    reviewers = models.ManyToManyField("authentik_core.User", blank=True)

    notification_transports = models.ManyToManyField(
        NotificationTransport,
        help_text=_(
            "Select which transports should be used to notify the reviewers. If none are "
            "selected, the notification will only be shown in the authentik UI."
        ),
        blank=True,
    )

    class Meta:
        indexes = [models.Index(fields=["content_type"])]
        unique_together = [["content_type", "object_id"]]
        constraints = [
            # At most one type-wide rule (object_id NULL) per content type.
            models.UniqueConstraint(
                fields=["content_type"],
                condition=Q(object_id__isnull=True),
                name="uniq_lifecycle_rule_ct_null_object",
            )
        ]

    @property
    def serializer(self) -> type[BaseSerializer]:
        from authentik.enterprise.lifecycle.api.rules import LifecycleRuleSerializer

        return LifecycleRuleSerializer

    def _get_pk_field(self) -> Field:
        """Return a fresh instance of the target model's primary-key field class.

        Follows FK-based primary keys (``target_field``) so ``Cast`` below uses
        the concrete column type of the text-stored ``object_id``.
        """
        model = self.content_type.model_class()
        pk = model._meta.pk
        while hasattr(pk, "target_field"):
            pk = pk.target_field
        return pk.__class__()

    def get_objects(self) -> QuerySet:
        """Objects governed by this rule.

        A type-wide rule excludes objects that have their own object-specific
        rule for the same content type.
        """
        qs = self.content_type.get_all_objects_for_this_type()
        if self.object_id:
            qs = qs.filter(pk=self.object_id)
        else:
            qs = qs.exclude(
                pk__in=LifecycleRule.objects.filter(
                    content_type=self.content_type, object_id__isnull=False
                ).values_list(Cast("object_id", output_field=self._get_pk_field()), flat=True)
            )
        return qs

    def _get_stale_iterations(self) -> QuerySet["LifecycleIteration"]:
        """Open iterations of this rule that no longer match its target."""
        # Renamed from `filter` — the original shadowed the builtin.
        stale_q = ~Q(content_type=self.content_type)
        if self.object_id:
            stale_q = stale_q | ~Q(object_id=self.object_id)
        stale_q = Q(state__in=(ReviewState.PENDING, ReviewState.OVERDUE)) & stale_q
        return self.lifecycleiteration_set.filter(stale_q)

    def _get_newly_overdue_iterations(self) -> QuerySet["LifecycleIteration"]:
        """Pending iterations whose grace period has elapsed (day granularity)."""
        return self.lifecycleiteration_set.filter(
            opened_on__lt=start_of_day(
                timezone.now() + timedelta(days=1) - timedelta_from_string(self.grace_period)
            ),
            state=ReviewState.PENDING,
        )

    def _get_newly_due_objects(self) -> QuerySet:
        """Governed objects with no iteration opened within the last interval."""
        recent_iteration_ids = LifecycleIteration.objects.filter(
            content_type=self.content_type,
            object_id__isnull=False,
            opened_on__gte=start_of_day(
                timezone.now() + timedelta(days=1) - timedelta_from_string(self.interval)
            ),
        ).values_list(Cast("object_id", output_field=self._get_pk_field()), flat=True)
        return self.get_objects().exclude(pk__in=recent_iteration_ids)

    def apply(self):
        """Reconcile iterations: cancel stale, mark overdue, open newly due ones."""
        self._get_stale_iterations().update(state=ReviewState.CANCELED)
        for iteration in self._get_newly_overdue_iterations():
            iteration.make_overdue()
        for obj in self._get_newly_due_objects():
            LifecycleIteration.start(content_type=self.content_type, object_id=obj.pk, rule=self)

    def is_satisfied_for_iteration(self, iteration: "LifecycleIteration") -> bool:
        """Whether *iteration* has collected enough reviews (see field comments)."""
        reviewers = self.reviewers.all()
        # Every explicitly named reviewer must have reviewed.
        if (
            iteration.review_set.filter(reviewer__in=reviewers).distinct("reviewer").count()
            < reviewers.count()
        ):
            return False
        if self.reviewer_groups.count() == 0:
            return True
        if self.min_reviewers_is_per_group:
            # `min_reviewers` distinct reviews from EACH group (incl. descendants).
            for group in self.reviewer_groups.all():
                if (
                    iteration.review_set.filter(
                        reviewer__groups__in=Group.objects.filter(pk=group.pk).with_descendants()
                    )
                    .distinct()
                    .count()
                    < self.min_reviewers
                ):
                    return False
            return True
        # `min_reviewers` distinct reviews across ALL groups combined.
        return (
            iteration.review_set.filter(
                reviewer__groups__in=self.reviewer_groups.all().with_descendants()
            )
            .distinct()
            .count()
            >= self.min_reviewers
        )

    def get_reviewers(self) -> QuerySet[User]:
        """All eligible reviewers: explicit users plus members of reviewer groups
        (including descendant groups)."""
        return User.objects.filter(
            Q(id__in=self.reviewers.all().values_list("pk", flat=True))
            | Q(groups__in=self.reviewer_groups.all().with_descendants())
        ).distinct()

    def notify_reviewers(self, event: Event, severity: str):
        """Queue notification tasks for *event* over each configured transport."""
        # Imported lazily to avoid a circular import between models and tasks.
        from authentik.enterprise.lifecycle.tasks import send_notification

        for transport in self.notification_transports.all():
            for user in self.get_reviewers():
                send_notification.send_with_options(
                    args=(transport.pk, event.pk, user.pk, severity),
                    rel_obj=transport,
                )
                # send_once transports only deliver a single notification.
                if transport.send_once:
                    break
class ReviewState(models.TextChoices):
    """Lifecycle states of a review iteration."""

    REVIEWED = "REVIEWED", _("Reviewed")
    PENDING = "PENDING", _("Pending")
    OVERDUE = "OVERDUE", _("Overdue")
    CANCELED = "CANCELED", _("Canceled")
class LifecycleIteration(SerializerModel, ManagedModel):
id = models.UUIDField(primary_key=True, default=uuid4)
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.TextField(null=False)
object = GenericForeignKey("content_type", "object_id")
rule = models.ForeignKey(LifecycleRule, null=True, on_delete=models.SET_NULL)
state = models.CharField(max_length=10, choices=ReviewState, default=ReviewState.PENDING)
opened_on = models.DateTimeField(auto_now_add=True)
class Meta:
indexes = [models.Index(fields=["content_type", "opened_on"])]
@property
def serializer(self) -> type[BaseSerializer]:
from authentik.enterprise.lifecycle.api.iterations import LifecycleIterationSerializer
return LifecycleIterationSerializer
def _get_model_name(self) -> str:
return self.content_type.name.lower()
def _get_event_args(self) -> dict:
return {
"target": self.object,
"hyperlink": link_for_model(self.object),
"hyperlink_label": _(f"Go to {self._get_model_name()}"),
"lifecycle_iteration": self.id,
}
def initialize(self):
event = Event.new(
EventAction.REVIEW_INITIATED,
message=_(f"Access review is due for {self.content_type.name} {str(self.object)}"),
**self._get_event_args(),
)
event.save()
self.rule.notify_reviewers(event, NotificationSeverity.NOTICE)
def make_overdue(self):
self.state = ReviewState.OVERDUE
event = Event.new(
EventAction.REVIEW_OVERDUE,
message=_(f"Access review is overdue for {self.content_type.name} {str(self.object)}"),
**self._get_event_args(),
)
event.save()
self.rule.notify_reviewers(event, NotificationSeverity.ALERT)
self.save()
@staticmethod
def start(content_type: ContentType, object_id: str, rule: LifecycleRule) -> LifecycleIteration:
iteration = LifecycleIteration.objects.create(
content_type=content_type, object_id=object_id, rule=rule
)
iteration.initialize()
return iteration
def make_reviewed(self, request: HttpRequest):
self.state = ReviewState.REVIEWED
event = Event.new(
EventAction.REVIEW_COMPLETED,
message=_(f"Access review completed for {self.content_type.name} {str(self.object)}"),
**self._get_event_args(),
).from_http(request)
event.save()
self.rule.notify_reviewers(event, NotificationSeverity.NOTICE)
self.save()
def on_review(self, request: HttpRequest):
if self.state not in (ReviewState.PENDING, ReviewState.OVERDUE):
raise AssertionError("Review is not pending or overdue")
if self.rule.is_satisfied_for_iteration(self):
self.make_reviewed(request)
def user_can_review(self, user: User) -> bool:
    """Return True if `user` may still submit a review for this iteration.

    A user may review while the iteration is open (pending or overdue),
    has not already reviewed, and is either a member of one of the rule's
    reviewer groups or one of the rule's reviewers.
    """
    if self.state not in (ReviewState.PENDING, ReviewState.OVERDUE):
        return False
    if self.review_set.filter(reviewer=user).exists():
        return False
    # Members of any reviewer group (membership is hierarchy-aware via
    # Group.is_member) may review.
    if any(group.is_member(user) for group in self.rule.reviewer_groups.all()):
        return True
    # Previously explicit reviewers were ignored whenever reviewer groups
    # were configured, even though is_satisfied_for_iteration() requires
    # their reviews as well (see test_review_explicit_and_group_reviewers)
    # - fall through so they are always allowed to review.
    return user in self.rule.get_reviewers()
class Review(SerializerModel):
    """A single reviewer's sign-off on a lifecycle iteration."""

    id = models.UUIDField(primary_key=True, default=uuid4)
    iteration = models.ForeignKey(LifecycleIteration, on_delete=models.CASCADE)
    reviewer = models.ForeignKey("authentik_core.User", on_delete=models.CASCADE)
    # Set automatically when the review is recorded.
    timestamp = models.DateTimeField(auto_now_add=True)
    note = models.TextField(null=True)

    class Meta:
        # Each reviewer may review a given iteration at most once.
        unique_together = [["iteration", "reviewer"]]

    @property
    def serializer(self) -> type[BaseSerializer]:
        """Serializer class for this model; imported lazily to avoid circular imports."""
        from authentik.enterprise.lifecycle.api.reviews import ReviewSerializer

        return ReviewSerializer

View File

@@ -1,22 +0,0 @@
from django.db.models import Q
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from authentik.enterprise.lifecycle.models import LifecycleRule, ReviewState
@receiver(post_save, sender=LifecycleRule)
def post_rule_save(sender, instance: LifecycleRule, created: bool, **_):
    """Re-apply a lifecycle rule whenever it is created or updated."""
    # Imported locally to avoid a circular import between models and tasks.
    from authentik.enterprise.lifecycle.tasks import apply_lifecycle_rule

    apply_lifecycle_rule.send_with_options(args=(instance.id,), rel_obj=instance)
@receiver(pre_delete, sender=LifecycleRule)
def pre_rule_delete(sender, instance: LifecycleRule, **_):
    """Cancel every still-open iteration before its rule disappears."""
    open_iterations = instance.lifecycleiteration_set.filter(
        Q(state=ReviewState.PENDING) | Q(state=ReviewState.OVERDUE)
    )
    open_iterations.update(state=ReviewState.CANCELED)

View File

@@ -1,45 +0,0 @@
from django.utils.translation import gettext_lazy as _
from dramatiq import actor
from authentik.core.models import User
from authentik.enterprise.lifecycle.models import LifecycleRule
from authentik.events.models import Event, Notification, NotificationTransport
@actor(description=_("Dispatch tasks to validate lifecycle rules."))
def apply_lifecycle_rules():
    """Fan out one apply_lifecycle_rule task per configured rule."""
    for lifecycle_rule in LifecycleRule.objects.all():
        apply_lifecycle_rule.send_with_options(
            args=(lifecycle_rule.id,),
            rel_obj=lifecycle_rule,
        )
@actor(description=_("Apply lifecycle rule."))
def apply_lifecycle_rule(rule_id: str):
    """Apply a single rule by primary key; no-op if it has been deleted."""
    rule = LifecycleRule.objects.filter(pk=rule_id).first()
    if rule is None:
        return
    rule.apply()
@actor(description=_("Send lifecycle rule notification."))
def send_notification(transport_pk: int, event_pk: str, user_pk: int, severity: str):
    """Deliver a single lifecycle notification to one user via one transport.

    Silently drops the task if the transport, event, or user has been
    deleted in the meantime, since tasks may run well after enqueueing.
    """
    # Resolve the transport first - without it there is nothing to send, so
    # the event/user lookups and Notification construction can be skipped.
    transport = NotificationTransport.objects.filter(pk=transport_pk).first()
    if not transport:
        return
    event = Event.objects.filter(pk=event_pk).first()
    if not event:
        return
    user = User.objects.filter(pk=user_pk).first()
    if not user:
        return
    notification = Notification(
        severity=severity,
        body=event.summary,
        event=event,
        user=user,
        hyperlink=event.hyperlink,
        hyperlink_label=event.hyperlink_label,
    )
    transport.send(notification)

View File

@@ -1,425 +0,0 @@
from django.contrib.contenttypes.models import ContentType
from django.urls import reverse
from rest_framework.test import APITestCase
from authentik.core.models import Application, Group
from authentik.core.tests.utils import create_test_admin_user, create_test_user
from authentik.enterprise.lifecycle.models import LifecycleIteration, LifecycleRule, ReviewState
from authentik.enterprise.reports.tests.utils import patch_license
from authentik.lib.generators import generate_id
@patch_license
class TestLifecycleRuleAPI(APITestCase):
    """CRUD and validation tests for the lifecycle rule API endpoint."""

    def setUp(self):
        self.user = create_test_admin_user()
        self.client.force_login(self.user)
        self.app = Application.objects.create(name=generate_id(), slug=generate_id())
        self.content_type = ContentType.objects.get_for_model(Application)
        self.reviewer_group = Group.objects.create(name=generate_id())

    def test_list_rules(self):
        """Existing rules are returned by the list endpoint."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
        )
        rule.reviewer_groups.add(self.reviewer_group)
        response = self.client.get(reverse("authentik_api:lifecyclerule-list"))
        self.assertEqual(response.status_code, 200)
        self.assertGreaterEqual(len(response.data["results"]), 1)

    def test_create_rule_with_reviewer_group(self):
        """A rule can be created with a reviewer group."""
        response = self.client.post(
            reverse("authentik_api:lifecyclerule-list"),
            {
                "name": generate_id(),
                "content_type": f"{self.content_type.app_label}.{self.content_type.model}",
                "object_id": str(self.app.pk),
                "interval": "days=30",
                "grace_period": "days=10",
                "reviewer_groups": [str(self.reviewer_group.pk)],
                "reviewers": [],
                "min_reviewers": 1,
            },
        )
        self.assertEqual(response.status_code, 201)
        self.assertEqual(response.data["object_id"], str(self.app.pk))
        self.assertEqual(response.data["interval"], "days=30")

    def test_create_rule_with_explicit_reviewer(self):
        """A rule can be created with an explicit reviewer instead of a group."""
        reviewer = create_test_user()
        response = self.client.post(
            reverse("authentik_api:lifecyclerule-list"),
            {
                "name": generate_id(),
                "content_type": f"{self.content_type.app_label}.{self.content_type.model}",
                "object_id": str(self.app.pk),
                "interval": "days=60",
                "grace_period": "days=15",
                "reviewer_groups": [],
                "reviewers": [str(reviewer.uuid)],
                "min_reviewers": 1,
            },
        )
        self.assertEqual(response.status_code, 201)
        self.assertIn(reviewer.uuid, response.data["reviewers"])

    def test_create_rule_type_level(self):
        """A rule with object_id=None targets every object of the type."""
        response = self.client.post(
            reverse("authentik_api:lifecyclerule-list"),
            {
                "name": generate_id(),
                "content_type": f"{self.content_type.app_label}.{self.content_type.model}",
                "object_id": None,
                "interval": "days=90",
                "grace_period": "days=30",
                "reviewer_groups": [str(self.reviewer_group.pk)],
                "reviewers": [],
                "min_reviewers": 1,
            },
        )
        self.assertEqual(response.status_code, 201)
        self.assertIsNone(response.data["object_id"])

    def test_create_rule_fails_without_reviewers(self):
        """Creation is rejected when neither groups nor reviewers are given."""
        response = self.client.post(
            reverse("authentik_api:lifecyclerule-list"),
            {
                "name": generate_id(),
                "content_type": f"{self.content_type.app_label}.{self.content_type.model}",
                "object_id": str(self.app.pk),
                "interval": "days=30",
                "grace_period": "days=10",
                "reviewer_groups": [],
                "reviewers": [],
                "min_reviewers": 1,
            },
        )
        self.assertEqual(response.status_code, 400)

    def test_create_rule_fails_grace_period_longer_than_interval(self):
        """Creation is rejected when the grace period exceeds the interval."""
        response = self.client.post(
            reverse("authentik_api:lifecyclerule-list"),
            {
                "name": generate_id(),
                "content_type": f"{self.content_type.app_label}.{self.content_type.model}",
                "object_id": str(self.app.pk),
                "interval": "days=10",
                "grace_period": "days=30",
                "reviewer_groups": [str(self.reviewer_group.pk)],
                "reviewers": [],
                "min_reviewers": 1,
            },
        )
        self.assertEqual(response.status_code, 400)
        self.assertIn("grace_period", response.data)

    def test_create_rule_fails_invalid_object_id(self):
        """Creation is rejected when object_id matches no existing object."""
        response = self.client.post(
            reverse("authentik_api:lifecyclerule-list"),
            {
                "name": generate_id(),
                "content_type": f"{self.content_type.app_label}.{self.content_type.model}",
                "object_id": "00000000-0000-0000-0000-000000000000",
                "interval": "days=30",
                "grace_period": "days=10",
                "reviewer_groups": [str(self.reviewer_group.pk)],
                "reviewers": [],
                "min_reviewers": 1,
            },
        )
        self.assertEqual(response.status_code, 400)
        self.assertIn("object_id", response.data)

    def test_retrieve_rule(self):
        """A single rule can be fetched by primary key."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
        )
        rule.reviewer_groups.add(self.reviewer_group)
        response = self.client.get(
            reverse("authentik_api:lifecyclerule-detail", kwargs={"pk": rule.pk})
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data["id"], str(rule.pk))

    def test_update_rule(self):
        """A rule's interval can be changed via PATCH."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
            interval="days=30",
        )
        rule.reviewer_groups.add(self.reviewer_group)
        response = self.client.patch(
            reverse("authentik_api:lifecyclerule-detail", kwargs={"pk": rule.pk}),
            {"interval": "days=60"},
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data["interval"], "days=60")

    def test_delete_rule(self):
        """Deleting a rule removes it from the database."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
        )
        rule.reviewer_groups.add(self.reviewer_group)
        response = self.client.delete(
            reverse("authentik_api:lifecyclerule-detail", kwargs={"pk": rule.pk})
        )
        self.assertEqual(response.status_code, 204)
        self.assertFalse(LifecycleRule.objects.filter(pk=rule.pk).exists())
@patch_license
class TestIterationAPI(APITestCase):
    """Tests for the lifecycle iteration API endpoints."""

    def setUp(self):
        self.user = create_test_admin_user()
        self.client.force_login(self.user)
        self.app = Application.objects.create(name=generate_id(), slug=generate_id())
        self.content_type = ContentType.objects.get_for_model(Application)
        self.reviewer_group = Group.objects.create(name=generate_id())
        self.reviewer_group.users.add(self.user)

    def test_open_iterations(self):
        """The open-iterations endpoint returns only pending iterations."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
        )
        rule.reviewer_groups.add(self.reviewer_group)
        response = self.client.get(reverse("authentik_api:lifecycleiteration-open-iterations"))
        self.assertEqual(response.status_code, 200)
        self.assertGreaterEqual(len(response.data["results"]), 1)
        for iteration in response.data["results"]:
            self.assertEqual(iteration["state"], ReviewState.PENDING)

    def test_open_iterations_filter_user_is_reviewer(self):
        """Filtering by user_is_reviewer includes iterations the user may review."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
        )
        rule.reviewer_groups.add(self.reviewer_group)
        response = self.client.get(
            reverse("authentik_api:lifecycleiteration-open-iterations"),
            {"user_is_reviewer": "true"},
        )
        self.assertEqual(response.status_code, 200)
        # User is in reviewer_group, so should see the iteration
        self.assertGreaterEqual(len(response.data["results"]), 1)

    def test_latest_iteration(self):
        """The latest iteration for a given object can be fetched."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
        )
        rule.reviewer_groups.add(self.reviewer_group)
        response = self.client.get(
            reverse(
                "authentik_api:lifecycleiteration-latest-iteration",
                kwargs={
                    "content_type": f"{self.content_type.app_label}.{self.content_type.model}",
                    "object_id": str(self.app.pk),
                },
            )
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data["object_id"], str(self.app.pk))

    def test_latest_iteration_not_found(self):
        """Requesting the latest iteration of an unknown object yields 404."""
        response = self.client.get(
            reverse(
                "authentik_api:lifecycleiteration-latest-iteration",
                kwargs={
                    "content_type": f"{self.content_type.app_label}.{self.content_type.model}",
                    "object_id": "00000000-0000-0000-0000-000000000000",
                },
            )
        )
        self.assertEqual(response.status_code, 404)

    def test_iteration_includes_user_can_review(self):
        """Serialized iterations expose the user_can_review flag."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
        )
        rule.reviewer_groups.add(self.reviewer_group)
        response = self.client.get(reverse("authentik_api:lifecycleiteration-open-iterations"))
        self.assertEqual(response.status_code, 200)
        self.assertGreaterEqual(len(response.data["results"]), 1)
        # user_can_review should be present
        self.assertIn("user_can_review", response.data["results"][0])
@patch_license
class TestReviewAPI(APITestCase):
    """Tests for submitting reviews through the review API endpoint."""

    def setUp(self):
        self.user = create_test_admin_user()
        self.client.force_login(self.user)
        self.app = Application.objects.create(name=generate_id(), slug=generate_id())
        self.content_type = ContentType.objects.get_for_model(Application)
        self.reviewer_group = Group.objects.create(name=generate_id())
        self.reviewer_group.users.add(self.user)

    def test_create_review(self):
        """A reviewer can create a review with an optional note."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
            min_reviewers=1,
        )
        rule.reviewer_groups.add(self.reviewer_group)
        # Get the auto-created iteration
        iteration = LifecycleIteration.objects.get(
            content_type=self.content_type, object_id=str(self.app.pk), rule=rule
        )
        response = self.client.post(
            reverse("authentik_api:review-list"),
            {
                "iteration": str(iteration.pk),
                "note": "Reviewed and approved",
            },
        )
        self.assertEqual(response.status_code, 201)
        self.assertEqual(response.data["iteration"], iteration.pk)
        self.assertEqual(response.data["note"], "Reviewed and approved")
        self.assertEqual(response.data["reviewer"]["pk"], self.user.pk)

    def test_create_review_completes_iteration(self):
        """With min_reviewers=1, a single review completes the iteration."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
            min_reviewers=1,
        )
        rule.reviewer_groups.add(self.reviewer_group)
        iteration = LifecycleIteration.objects.get(
            content_type=self.content_type, object_id=str(self.app.pk), rule=rule
        )
        self.assertEqual(iteration.state, ReviewState.PENDING)
        response = self.client.post(
            reverse("authentik_api:review-list"),
            {
                "iteration": str(iteration.pk),
            },
        )
        self.assertEqual(response.status_code, 201)
        iteration.refresh_from_db()
        self.assertEqual(iteration.state, ReviewState.REVIEWED)

    def test_create_review_sets_reviewer_from_request(self):
        """The reviewer is taken from the request, not the payload."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
            min_reviewers=1,
        )
        rule.reviewer_groups.add(self.reviewer_group)
        iteration = LifecycleIteration.objects.get(
            content_type=self.content_type, object_id=str(self.app.pk), rule=rule
        )
        response = self.client.post(
            reverse("authentik_api:review-list"),
            {
                "iteration": str(iteration.pk),
            },
        )
        self.assertEqual(response.status_code, 201)
        # Reviewer should be the logged-in user
        self.assertEqual(response.data["reviewer"]["pk"], self.user.pk)

    def test_non_reviewer_cannot_review(self):
        """user_can_review is False for users outside the reviewer groups."""
        other_group = Group.objects.create(name=generate_id())
        other_user = create_test_user()
        other_group.users.add(other_user)
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
            min_reviewers=1,
        )
        rule.reviewer_groups.add(other_group)
        iteration = LifecycleIteration.objects.get(
            content_type=self.content_type, object_id=str(self.app.pk), rule=rule
        )
        # Current user is not in the reviewer group
        self.assertFalse(iteration.user_can_review(self.user))

    def test_non_reviewer_review_via_api_rejected(self):
        """The API rejects a review from a user who is not a reviewer."""
        other_group = Group.objects.create(name=generate_id())
        other_user = create_test_user()
        other_group.users.add(other_user)
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
            min_reviewers=1,
        )
        rule.reviewer_groups.add(other_group)
        iteration = LifecycleIteration.objects.get(
            content_type=self.content_type, object_id=str(self.app.pk), rule=rule
        )
        # Current user (self.user) is NOT in the reviewer group
        response = self.client.post(
            reverse("authentik_api:review-list"),
            {"iteration": str(iteration.pk)},
        )
        self.assertEqual(response.status_code, 400)

    def test_duplicate_review_via_api_rejected(self):
        """The API rejects a second review of the same iteration by one user."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
            min_reviewers=2,
        )
        rule.reviewer_groups.add(self.reviewer_group)
        iteration = LifecycleIteration.objects.get(
            content_type=self.content_type, object_id=str(self.app.pk), rule=rule
        )
        # First review should succeed
        response = self.client.post(
            reverse("authentik_api:review-list"),
            {"iteration": str(iteration.pk)},
        )
        self.assertEqual(response.status_code, 201)
        # Second review by same user should be rejected
        response = self.client.post(
            reverse("authentik_api:review-list"),
            {"iteration": str(iteration.pk)},
        )
        self.assertEqual(response.status_code, 400)

View File

@@ -1,845 +0,0 @@
import datetime as dt
from datetime import timedelta
from unittest.mock import patch
from django.contrib.contenttypes.models import ContentType
from django.test import RequestFactory, TestCase
from django.utils import timezone
from authentik.core.models import Application, Group
from authentik.core.tests.utils import create_test_user
from authentik.enterprise.lifecycle.models import (
LifecycleIteration,
LifecycleRule,
Review,
ReviewState,
)
from authentik.events.models import (
Event,
EventAction,
NotificationSeverity,
NotificationTransport,
)
from authentik.lib.generators import generate_id
from authentik.rbac.models import Role
class TestLifecycleModels(TestCase):
def setUp(self):
    # Request factory for building plain HTTP requests used by reviews.
    self.factory = RequestFactory()
def _get_request(self):
    """Return a minimal GET request for passing into review transitions."""
    return self.factory.get("/")
def _create_object(self, model):
    """Create a reviewable test object for one of the supported models."""
    if model is Application:
        return Application.objects.create(name=generate_id(), slug=generate_id())
    if model is Role:
        return Role.objects.create(name=generate_id())
    if model is Group:
        return Group.objects.create(name=generate_id())
    raise AssertionError(f"Unsupported model {model}")
def _create_rule_for_object(self, obj, **kwargs) -> LifecycleRule:
    """Create a rule that targets a single object instance."""
    content_type = ContentType.objects.get_for_model(obj)
    return LifecycleRule.objects.create(
        name=generate_id(),
        content_type=content_type,
        object_id=str(obj.pk),
        **kwargs,
    )
def _create_rule_for_type(self, model, **kwargs) -> LifecycleRule:
    """Create a type-level rule (object_id=None) that targets every instance."""
    content_type = ContentType.objects.get_for_model(model)
    return LifecycleRule.objects.create(
        name=generate_id(),
        content_type=content_type,
        object_id=None,
        **kwargs,
    )
def test_iteration_start_supported_objects(self):
    """Ensure iterations are automatically started for applications, roles, and groups."""
    for model in (Application, Role, Group):
        with self.subTest(model=model.__name__):
            obj = self._create_object(model)
            content_type = ContentType.objects.get_for_model(obj)
            before_events = Event.objects.filter(action=EventAction.REVIEW_INITIATED).count()
            rule = self._create_rule_for_object(obj)
            # Verify iteration was created automatically
            iteration = LifecycleIteration.objects.get(
                content_type=content_type, object_id=str(obj.pk), rule=rule
            )
            self.assertEqual(iteration.state, ReviewState.PENDING)
            self.assertEqual(iteration.object, obj)
            self.assertEqual(iteration.rule, rule)
            # Exactly one REVIEW_INITIATED event per created rule
            self.assertEqual(
                Event.objects.filter(action=EventAction.REVIEW_INITIATED).count(),
                before_events + 1,
            )
def test_review_requires_all_explicit_reviewers(self):
    """An iteration only completes once every explicit reviewer has reviewed."""
    obj = Group.objects.create(name=generate_id())
    rule = self._create_rule_for_object(obj)
    reviewer_one = create_test_user()
    reviewer_two = create_test_user()
    rule.reviewers.add(reviewer_one, reviewer_two)
    content_type = ContentType.objects.get_for_model(obj)
    iteration = LifecycleIteration.objects.get(
        content_type=content_type, object_id=str(obj.pk), rule=rule
    )
    request = self._get_request()
    # One of two explicit reviewers is not enough
    Review.objects.create(iteration=iteration, reviewer=reviewer_one)
    iteration.on_review(request)
    iteration.refresh_from_db()
    self.assertEqual(iteration.state, ReviewState.PENDING)
    # Second explicit reviewer completes the iteration
    Review.objects.create(iteration=iteration, reviewer=reviewer_two)
    iteration.on_review(request)
    iteration.refresh_from_db()
    self.assertEqual(iteration.state, ReviewState.REVIEWED)
    self.assertTrue(Event.objects.filter(action=EventAction.REVIEW_COMPLETED).exists())
def test_review_min_reviewers_from_groups(self):
    """Group-based reviews complete once the minimum number of reviewers review."""
    obj = Application.objects.create(name=generate_id(), slug=generate_id())
    rule = self._create_rule_for_object(obj, min_reviewers=2)
    reviewer_group = Group.objects.create(name=generate_id())
    reviewer_one = create_test_user()
    reviewer_two = create_test_user()
    reviewer_group.users.add(reviewer_one, reviewer_two)
    rule.reviewer_groups.add(reviewer_group)
    content_type = ContentType.objects.get_for_model(obj)
    iteration = LifecycleIteration.objects.get(
        content_type=content_type, object_id=str(obj.pk), rule=rule
    )
    request = self._get_request()
    # One review < min_reviewers=2, so the iteration stays pending
    Review.objects.create(iteration=iteration, reviewer=reviewer_one)
    iteration.on_review(request)
    iteration.refresh_from_db()
    self.assertEqual(iteration.state, ReviewState.PENDING)
    # Second review reaches the minimum
    Review.objects.create(iteration=iteration, reviewer=reviewer_two)
    iteration.on_review(request)
    iteration.refresh_from_db()
    self.assertEqual(iteration.state, ReviewState.REVIEWED)
def test_review_explicit_and_group_reviewers(self):
    """Reviews require both explicit reviewers AND min_reviewers from groups."""
    obj = Application.objects.create(name=generate_id(), slug=generate_id())
    rule = self._create_rule_for_object(obj, min_reviewers=1)
    reviewer_group = Group.objects.create(name=generate_id())
    group_member = create_test_user()
    reviewer_group.users.add(group_member)
    rule.reviewer_groups.add(reviewer_group)
    explicit_reviewer = create_test_user()
    rule.reviewers.add(explicit_reviewer)
    content_type = ContentType.objects.get_for_model(obj)
    iteration = LifecycleIteration.objects.get(
        content_type=content_type, object_id=str(obj.pk), rule=rule
    )
    request = self._get_request()
    # Only group member reviews - not satisfied (explicit reviewer missing)
    Review.objects.create(iteration=iteration, reviewer=group_member)
    iteration.on_review(request)
    iteration.refresh_from_db()
    self.assertEqual(iteration.state, ReviewState.PENDING)
    # Explicit reviewer reviews - now satisfied
    Review.objects.create(iteration=iteration, reviewer=explicit_reviewer)
    iteration.on_review(request)
    iteration.refresh_from_db()
    self.assertEqual(iteration.state, ReviewState.REVIEWED)
def test_review_min_reviewers_per_group(self):
    """With min_reviewers_is_per_group, each group must reach the minimum."""
    obj = Application.objects.create(name=generate_id(), slug=generate_id())
    rule = self._create_rule_for_object(obj, min_reviewers=1, min_reviewers_is_per_group=True)
    group_one = Group.objects.create(name=generate_id())
    group_two = Group.objects.create(name=generate_id())
    member_group_one = create_test_user()
    member_group_two = create_test_user()
    group_one.users.add(member_group_one)
    group_two.users.add(member_group_two)
    rule.reviewer_groups.add(group_one, group_two)
    content_type = ContentType.objects.get_for_model(obj)
    iteration = LifecycleIteration.objects.get(
        content_type=content_type, object_id=str(obj.pk), rule=rule
    )
    request = self._get_request()
    # Only member from group_one reviews - not satisfied (need member from each group)
    Review.objects.create(iteration=iteration, reviewer=member_group_one)
    iteration.on_review(request)
    iteration.refresh_from_db()
    self.assertEqual(iteration.state, ReviewState.PENDING)
    # Member from group_two reviews - now satisfied
    Review.objects.create(iteration=iteration, reviewer=member_group_two)
    iteration.on_review(request)
    iteration.refresh_from_db()
    self.assertEqual(iteration.state, ReviewState.REVIEWED)
def test_review_reviewers_from_child_groups(self):
    """Members of child groups of a reviewer group count as reviewers."""
    obj = Application.objects.create(name=generate_id(), slug=generate_id())
    rule = self._create_rule_for_object(obj, min_reviewers=1)
    parent_group = Group.objects.create(name=generate_id())
    child_group = Group.objects.create(name=generate_id())
    child_group.parents.add(parent_group)
    child_member = create_test_user()
    child_group.users.add(child_member)
    rule.reviewer_groups.add(parent_group)
    content_type = ContentType.objects.get_for_model(obj)
    iteration = LifecycleIteration.objects.get(
        content_type=content_type, object_id=str(obj.pk), rule=rule
    )
    request = self._get_request()
    # Child group member should be able to review
    self.assertTrue(iteration.user_can_review(child_member))
    Review.objects.create(iteration=iteration, reviewer=child_member)
    iteration.on_review(request)
    iteration.refresh_from_db()
    self.assertEqual(iteration.state, ReviewState.REVIEWED)
def test_review_reviewers_from_nested_child_groups(self):
    """Membership is resolved through multiple levels of group nesting."""
    obj = Application.objects.create(name=generate_id(), slug=generate_id())
    rule = self._create_rule_for_object(obj, min_reviewers=2)
    grandparent = Group.objects.create(name=generate_id())
    parent = Group.objects.create(name=generate_id())
    child = Group.objects.create(name=generate_id())
    parent.parents.add(grandparent)
    child.parents.add(parent)
    parent_member = create_test_user()
    child_member = create_test_user()
    parent.users.add(parent_member)
    child.users.add(child_member)
    rule.reviewer_groups.add(grandparent)
    content_type = ContentType.objects.get_for_model(obj)
    iteration = LifecycleIteration.objects.get(
        content_type=content_type, object_id=str(obj.pk), rule=rule
    )
    request = self._get_request()
    # Both nested members should be able to review
    self.assertTrue(iteration.user_can_review(parent_member))
    self.assertTrue(iteration.user_can_review(child_member))
    Review.objects.create(iteration=iteration, reviewer=parent_member)
    iteration.on_review(request)
    iteration.refresh_from_db()
    self.assertEqual(iteration.state, ReviewState.PENDING)
    Review.objects.create(iteration=iteration, reviewer=child_member)
    iteration.on_review(request)
    iteration.refresh_from_db()
    self.assertEqual(iteration.state, ReviewState.REVIEWED)
def test_notify_reviewers_send_once(self):
    """A send_once transport notifies one reviewer; others notify all."""
    obj = Group.objects.create(name=generate_id())
    rule = self._create_rule_for_object(obj)
    reviewer_one = create_test_user()
    reviewer_two = create_test_user()
    rule.reviewers.add(reviewer_one, reviewer_two)
    transport_once = NotificationTransport.objects.create(
        name=generate_id(),
        send_once=True,
    )
    transport_all = NotificationTransport.objects.create(
        name=generate_id(),
        send_once=False,
    )
    rule.notification_transports.add(transport_once, transport_all)
    event = Event.new(EventAction.REVIEW_INITIATED, target=obj)
    event.save()
    with patch(
        "authentik.enterprise.lifecycle.tasks.send_notification.send_with_options"
    ) as send_with_options:
        rule.notify_reviewers(event, NotificationSeverity.NOTICE)
        reviewer_pks = {reviewer_one.pk, reviewer_two.pk}
        # send_once transport dispatches once; the other once per reviewer
        self.assertEqual(send_with_options.call_count, len(reviewer_pks) + 1)
        calls = [call.kwargs["args"] for call in send_with_options.call_args_list]
        once_calls = [args for args in calls if args[0] == transport_once.pk]
        all_calls = [args for args in calls if args[0] == transport_all.pk]
        self.assertEqual(len(once_calls), 1)
        self.assertEqual(len(all_calls), len(reviewer_pks))
        # The single send_once recipient is one of the reviewers
        self.assertIn(once_calls[0][2], reviewer_pks)
        self.assertEqual({args[2] for args in all_calls}, reviewer_pks)
def test_apply_marks_overdue_and_opens_due_reviews(self):
    """apply() marks past-grace iterations overdue and opens due reviews."""
    app_one = Application.objects.create(name=generate_id(), slug=generate_id())
    app_two = Application.objects.create(name=generate_id(), slug=generate_id())
    content_type = ContentType.objects.get_for_model(Application)
    rule_overdue = LifecycleRule.objects.create(
        name=generate_id(),
        content_type=content_type,
        object_id=str(app_one.pk),
        interval="days=365",
        grace_period="days=10",
    )
    # Get the automatically created iteration and backdate it past the grace period
    iteration = LifecycleIteration.objects.get(
        content_type=content_type, object_id=str(app_one.pk), rule=rule_overdue
    )
    LifecycleIteration.objects.filter(pk=iteration.pk).update(
        opened_on=(timezone.now() - timedelta(days=20))
    )
    # Apply again to trigger overdue logic
    rule_overdue.apply()
    iteration.refresh_from_db()
    self.assertEqual(iteration.state, ReviewState.OVERDUE)
    # No duplicate iteration is created for the overdue object
    self.assertEqual(
        LifecycleIteration.objects.filter(
            content_type=content_type, object_id=str(app_one.pk)
        ).count(),
        1,
    )
    LifecycleRule.objects.create(
        name=generate_id(),
        content_type=content_type,
        object_id=str(app_two.pk),
        interval="days=30",
        grace_period="days=10",
    )
    self.assertEqual(
        LifecycleIteration.objects.filter(
            content_type=content_type, object_id=str(app_two.pk)
        ).count(),
        1,
    )
    new_iteration = LifecycleIteration.objects.get(
        content_type=content_type, object_id=str(app_two.pk)
    )
    self.assertEqual(new_iteration.state, ReviewState.PENDING)
def test_apply_idempotent(self):
    """Re-running apply() neither changes states nor emits duplicate events."""
    app_due = Application.objects.create(name=generate_id(), slug=generate_id())
    app_overdue = Application.objects.create(name=generate_id(), slug=generate_id())
    content_type = ContentType.objects.get_for_model(Application)
    initiated_before = Event.objects.filter(action=EventAction.REVIEW_INITIATED).count()
    overdue_before = Event.objects.filter(action=EventAction.REVIEW_OVERDUE).count()
    rule_due = LifecycleRule.objects.create(
        name=generate_id(),
        content_type=content_type,
        object_id=str(app_due.pk),
        interval="days=30",
        grace_period="days=30",
    )
    reviewer = create_test_user()
    rule_due.reviewers.add(reviewer)
    transport = NotificationTransport.objects.create(name=generate_id())
    rule_due.notification_transports.add(transport)
    rule_overdue = LifecycleRule.objects.create(
        name=generate_id(),
        content_type=content_type,
        object_id=str(app_overdue.pk),
        interval="days=365",
        grace_period="days=10",
    )
    overdue_iteration = LifecycleIteration.objects.get(
        content_type=content_type, object_id=str(app_overdue.pk), rule=rule_overdue
    )
    # Backdate past the grace period so the next apply() marks it overdue
    LifecycleIteration.objects.filter(pk=overdue_iteration.pk).update(
        opened_on=(timezone.now() - timedelta(days=20))
    )
    # Apply overdue rule to mark iteration as overdue
    rule_overdue.apply()
    due_iteration = LifecycleIteration.objects.get(
        content_type=content_type, object_id=str(app_due.pk)
    )
    overdue_iteration.refresh_from_db()
    self.assertEqual(due_iteration.state, ReviewState.PENDING)
    self.assertEqual(overdue_iteration.state, ReviewState.OVERDUE)
    initiated_after_first = Event.objects.filter(action=EventAction.REVIEW_INITIATED).count()
    overdue_after_first = Event.objects.filter(action=EventAction.REVIEW_OVERDUE).count()
    # Both rules created iterations on save
    self.assertEqual(initiated_after_first, initiated_before + 2)
    self.assertEqual(overdue_after_first, overdue_before + 1)
    # Apply again - should be idempotent
    rule_due.apply()
    rule_overdue.apply()
    due_iteration.refresh_from_db()
    overdue_iteration.refresh_from_db()
    self.assertEqual(due_iteration.state, ReviewState.PENDING)
    self.assertEqual(overdue_iteration.state, ReviewState.OVERDUE)
    self.assertEqual(
        Event.objects.filter(action=EventAction.REVIEW_INITIATED).count(),
        initiated_after_first,
    )
    self.assertEqual(
        Event.objects.filter(action=EventAction.REVIEW_OVERDUE).count(),
        overdue_after_first,
    )
def test_rule_matches_entire_type(self):
    """A rule with object_id=None matches all objects of that type."""
    app_one = Application.objects.create(name=generate_id(), slug=generate_id())
    app_two = Application.objects.create(name=generate_id(), slug=generate_id())
    content_type = ContentType.objects.get_for_model(Application)
    rule = LifecycleRule.objects.create(
        name=generate_id(),
        content_type=content_type,
        object_id=None,
        interval="days=30",
        grace_period="days=10",
    )
    objects = list(rule.get_objects())
    self.assertIn(app_one, objects)
    self.assertIn(app_two, objects)
def test_rule_type_excludes_objects_with_specific_rules(self):
    """Type-level rules skip objects that already have an object-level rule."""
    app_with_rule = Application.objects.create(name=generate_id(), slug=generate_id())
    app_without_rule = Application.objects.create(name=generate_id(), slug=generate_id())
    content_type = ContentType.objects.get_for_model(Application)
    # Create a specific rule for app_with_rule
    LifecycleRule.objects.create(
        name=generate_id(),
        content_type=content_type,
        object_id=str(app_with_rule.pk),
        interval="days=30",
    )
    # Create a type-level rule
    type_rule = LifecycleRule.objects.create(
        name=generate_id(),
        content_type=content_type,
        object_id=None,
        interval="days=60",
    )
    objects = list(type_rule.get_objects())
    self.assertNotIn(app_with_rule, objects)
    self.assertIn(app_without_rule, objects)
def test_rule_type_apply_creates_iterations_for_all_objects(self):
    """Saving a type-level rule opens an iteration for every matching object."""
    app_one = Application.objects.create(name=generate_id(), slug=generate_id())
    app_two = Application.objects.create(name=generate_id(), slug=generate_id())
    content_type = ContentType.objects.get_for_model(Application)
    LifecycleRule.objects.create(
        name=generate_id(),
        content_type=content_type,
        object_id=None,
        interval="days=30",
        grace_period="days=10",
    )
    self.assertTrue(
        LifecycleIteration.objects.filter(
            content_type=content_type, object_id=str(app_one.pk)
        ).exists()
    )
    self.assertTrue(
        LifecycleIteration.objects.filter(
            content_type=content_type, object_id=str(app_two.pk)
        ).exists()
    )
def test_delete_rule_cancels_open_iterations(self):
    """Deleting a rule cancels its pending/overdue iterations, not reviewed ones."""
    obj = Application.objects.create(name=generate_id(), slug=generate_id())
    rule = self._create_rule_for_object(obj)
    content_type = ContentType.objects.get_for_model(obj)
    pending_iteration = LifecycleIteration.objects.get(
        content_type=content_type, object_id=str(obj.pk), rule=rule
    )
    self.assertEqual(pending_iteration.state, ReviewState.PENDING)
    overdue_iteration = LifecycleIteration.objects.create(
        content_type=content_type,
        object_id=str(obj.pk),
        rule=rule,
        state=ReviewState.OVERDUE,
    )
    reviewed_iteration = LifecycleIteration.objects.create(
        content_type=content_type,
        object_id=str(obj.pk),
        rule=rule,
        state=ReviewState.REVIEWED,
    )
    rule.delete()
    pending_iteration.refresh_from_db()
    overdue_iteration.refresh_from_db()
    reviewed_iteration.refresh_from_db()
    self.assertEqual(pending_iteration.state, ReviewState.CANCELED)
    self.assertEqual(overdue_iteration.state, ReviewState.CANCELED)
    self.assertEqual(reviewed_iteration.state, ReviewState.REVIEWED)  # Not affected
def test_update_rule_target_cancels_stale_iterations(self):
    """Re-pointing a rule at a different object cancels iterations opened for
    the previous target."""
    app_one = Application.objects.create(name=generate_id(), slug=generate_id())
    app_two = Application.objects.create(name=generate_id(), slug=generate_id())
    content_type = ContentType.objects.get_for_model(Application)
    rule = LifecycleRule.objects.create(
        name=generate_id(),
        content_type=content_type,
        object_id=str(app_one.pk),
        interval="days=30",
    )
    # Creating the rule opened an iteration for app_one.
    iteration_for_app_one = LifecycleIteration.objects.get(
        content_type=content_type, object_id=str(app_one.pk), rule=rule
    )
    self.assertEqual(iteration_for_app_one.state, ReviewState.PENDING)
    # Change rule target to app_two - save() triggers apply() which cancels stale iterations
    rule.object_id = str(app_two.pk)
    rule.save()
    iteration_for_app_one.refresh_from_db()
    self.assertEqual(iteration_for_app_one.state, ReviewState.CANCELED)
def test_update_rule_content_type_cancels_stale_iterations(self):
    """Changing a rule's content type cancels iterations opened under the
    previous content type."""
    app = Application.objects.create(name=generate_id(), slug=generate_id())
    group = Group.objects.create(name=generate_id())
    app_content_type = ContentType.objects.get_for_model(Application)
    group_content_type = ContentType.objects.get_for_model(Group)
    # Creating rule triggers automatic apply() which creates an iteration for app
    rule = LifecycleRule.objects.create(
        name=generate_id(),
        content_type=app_content_type,
        object_id=str(app.pk),
        interval="days=30",
    )
    iteration = LifecycleIteration.objects.get(
        content_type=app_content_type, object_id=str(app.pk), rule=rule
    )
    self.assertEqual(iteration.state, ReviewState.PENDING)
    # Change content type to Group - save() triggers apply() which cancels stale iterations
    rule.content_type = group_content_type
    rule.object_id = str(group.pk)
    rule.save()
    iteration.refresh_from_db()
    self.assertEqual(iteration.state, ReviewState.CANCELED)
def test_user_can_review_checks_group_hierarchy(self):
    """Reviewer-group membership is resolved through the group hierarchy:
    members of the configured group and of its child groups may review."""
    target = Application.objects.create(name=generate_id(), slug=generate_id())
    rule = self._create_rule_for_object(target)
    parent = Group.objects.create(name=generate_id())
    child = Group.objects.create(name=generate_id())
    child.parents.add(parent)
    user_in_parent = create_test_user()
    user_in_child = create_test_user()
    outsider = create_test_user()
    parent.users.add(user_in_parent)
    child.users.add(user_in_child)
    rule.reviewer_groups.add(parent)
    ct = ContentType.objects.get_for_model(target)
    # The iteration is opened automatically when the rule is saved.
    iteration = LifecycleIteration.objects.get(
        content_type=ct, object_id=str(target.pk), rule=rule
    )
    self.assertTrue(iteration.user_can_review(user_in_parent))
    self.assertTrue(iteration.user_can_review(user_in_child))
    self.assertFalse(iteration.user_can_review(outsider))
def test_user_cannot_review_twice(self):
    """A reviewer loses eligibility once they have submitted a review."""
    target = Application.objects.create(name=generate_id(), slug=generate_id())
    rule = self._create_rule_for_object(target)
    user = create_test_user()
    rule.reviewers.add(user)
    ct = ContentType.objects.get_for_model(target)
    # The iteration is opened automatically when the rule is saved.
    iteration = LifecycleIteration.objects.get(
        content_type=ct, object_id=str(target.pk), rule=rule
    )
    self.assertTrue(iteration.user_can_review(user))
    Review.objects.create(iteration=iteration, reviewer=user)
    self.assertFalse(iteration.user_can_review(user))
def test_user_cannot_review_completed_iteration(self):
    """No review may be submitted against an iteration in a terminal state
    (REVIEWED or CANCELED)."""
    target = Application.objects.create(name=generate_id(), slug=generate_id())
    rule = self._create_rule_for_object(target)
    user = create_test_user()
    rule.reviewers.add(user)
    ct = ContentType.objects.get_for_model(target)
    # Start from the automatically created pending iteration.
    iteration = LifecycleIteration.objects.get(
        content_type=ct, object_id=str(target.pk), rule=rule
    )
    for terminal_state in (ReviewState.REVIEWED, ReviewState.CANCELED):
        with self.subTest(state=terminal_state):
            iteration.state = terminal_state
            iteration.save()
            self.assertFalse(iteration.user_can_review(user))
def test_get_reviewers_includes_child_group_members(self):
    """get_reviewers() expands reviewer groups through the group hierarchy,
    including members of child groups."""
    target = Application.objects.create(name=generate_id(), slug=generate_id())
    rule = self._create_rule_for_object(target)
    parent = Group.objects.create(name=generate_id())
    child = Group.objects.create(name=generate_id())
    child.parents.add(parent)
    member_of_parent = create_test_user()
    member_of_child = create_test_user()
    parent.users.add(member_of_parent)
    child.users.add(member_of_child)
    rule.reviewer_groups.add(parent)
    resolved = set(rule.get_reviewers())
    self.assertIn(member_of_parent, resolved)
    self.assertIn(member_of_child, resolved)
def test_get_reviewers_includes_explicit_reviewers(self):
    """get_reviewers() merges directly-assigned users with reviewer-group
    members."""
    target = Application.objects.create(name=generate_id(), slug=generate_id())
    rule = self._create_rule_for_object(target)
    direct_user = create_test_user()
    rule.reviewers.add(direct_user)
    team = Group.objects.create(name=generate_id())
    team_member = create_test_user()
    team.users.add(team_member)
    rule.reviewer_groups.add(team)
    resolved = set(rule.get_reviewers())
    self.assertIn(direct_user, resolved)
    self.assertIn(team_member, resolved)
class TestLifecycleDateBoundaries(TestCase):
    """Verify that start_of_day normalization ensures correct overdue/due
    detection regardless of exact task execution time within a day.

    The daily task may run at any point during the day. The start_of_day
    normalization in _get_newly_overdue_iterations and _get_newly_due_objects
    ensures that the boundary is always at midnight, so millisecond variations
    in task execution time do not affect results."""

    def _create_rule_and_iteration(self, grace_period="days=1", interval="days=365"):
        """Create an Application, a rule targeting it, and return both together
        with the iteration that rule creation opened automatically."""
        app = Application.objects.create(name=generate_id(), slug=generate_id())
        content_type = ContentType.objects.get_for_model(Application)
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=content_type,
            object_id=str(app.pk),
            interval=interval,
            grace_period=grace_period,
        )
        iteration = LifecycleIteration.objects.get(
            content_type=content_type, object_id=str(app.pk), rule=rule
        )
        return app, rule, iteration

    def test_overdue_iteration_opened_yesterday(self):
        """grace_period=1 day: iteration opened yesterday at any time is overdue today."""
        _, rule, iteration = self._create_rule_and_iteration(grace_period="days=1")
        fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
        # Any time-of-day on the 14th — including one microsecond before
        # midnight — must count as "yesterday".
        for opened_on in [
            dt.datetime(2025, 6, 14, 0, 0, 0, tzinfo=dt.UTC),
            dt.datetime(2025, 6, 14, 12, 0, 0, tzinfo=dt.UTC),
            dt.datetime(2025, 6, 14, 23, 59, 59, 999999, tzinfo=dt.UTC),
        ]:
            with self.subTest(opened_on=opened_on):
                # queryset.update() bypasses save() hooks, setting state directly.
                LifecycleIteration.objects.filter(pk=iteration.pk).update(
                    opened_on=opened_on, state=ReviewState.PENDING
                )
                with patch("django.utils.timezone.now", return_value=fixed_now):
                    self.assertIn(iteration, list(rule._get_newly_overdue_iterations()))

    def test_not_overdue_iteration_opened_today(self):
        """grace_period=1 day: iteration opened today at any time is NOT overdue."""
        _, rule, iteration = self._create_rule_and_iteration(grace_period="days=1")
        fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
        for opened_on in [
            dt.datetime(2025, 6, 15, 0, 0, 0, tzinfo=dt.UTC),
            dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC),
            # Even an opened_on later than "now" on the same day stays in-grace.
            dt.datetime(2025, 6, 15, 23, 59, 59, 999999, tzinfo=dt.UTC),
        ]:
            with self.subTest(opened_on=opened_on):
                LifecycleIteration.objects.filter(pk=iteration.pk).update(
                    opened_on=opened_on, state=ReviewState.PENDING
                )
                with patch("django.utils.timezone.now", return_value=fixed_now):
                    self.assertNotIn(iteration, list(rule._get_newly_overdue_iterations()))

    def test_overdue_independent_of_task_execution_time(self):
        """Overdue detection gives the same result whether the task runs at 00:00:01 or 23:59:59."""
        _, rule, iteration = self._create_rule_and_iteration(grace_period="days=1")
        opened_on = dt.datetime(2025, 6, 14, 18, 0, 0, tzinfo=dt.UTC)
        LifecycleIteration.objects.filter(pk=iteration.pk).update(
            opened_on=opened_on, state=ReviewState.PENDING
        )
        for task_time in [
            dt.datetime(2025, 6, 15, 0, 0, 1, tzinfo=dt.UTC),
            dt.datetime(2025, 6, 15, 12, 0, 0, tzinfo=dt.UTC),
            dt.datetime(2025, 6, 15, 23, 59, 59, tzinfo=dt.UTC),
        ]:
            with self.subTest(task_time=task_time):
                with patch("django.utils.timezone.now", return_value=task_time):
                    self.assertIn(iteration, list(rule._get_newly_overdue_iterations()))

    def test_overdue_boundary_multi_day_grace_period(self):
        """grace_period=30 days: overdue after 30 full days, not after 29."""
        _, rule, iteration = self._create_rule_and_iteration(grace_period="days=30")
        fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
        # Opened 30 days ago (May 16), should go overdue
        LifecycleIteration.objects.filter(pk=iteration.pk).update(
            opened_on=dt.datetime(2025, 5, 16, 12, 0, 0, tzinfo=dt.UTC),
            state=ReviewState.PENDING,
        )
        with patch("django.utils.timezone.now", return_value=fixed_now):
            self.assertIn(iteration, list(rule._get_newly_overdue_iterations()))
        # Opened 29 days ago (May 17), should NOT go overdue
        LifecycleIteration.objects.filter(pk=iteration.pk).update(
            opened_on=dt.datetime(2025, 5, 17, 12, 0, 0, tzinfo=dt.UTC),
            state=ReviewState.PENDING,
        )
        with patch("django.utils.timezone.now", return_value=fixed_now):
            self.assertNotIn(iteration, list(rule._get_newly_overdue_iterations()))

    def test_due_object_iteration_opened_yesterday(self):
        """interval=1 day: object with iteration opened yesterday is due for a new review."""
        app, rule, iteration = self._create_rule_and_iteration(interval="days=1")
        fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
        for opened_on in [
            dt.datetime(2025, 6, 14, 0, 0, 0, tzinfo=dt.UTC),
            dt.datetime(2025, 6, 14, 12, 0, 0, tzinfo=dt.UTC),
            dt.datetime(2025, 6, 14, 23, 59, 59, 999999, tzinfo=dt.UTC),
        ]:
            with self.subTest(opened_on=opened_on):
                LifecycleIteration.objects.filter(pk=iteration.pk).update(opened_on=opened_on)
                with patch("django.utils.timezone.now", return_value=fixed_now):
                    self.assertIn(app, list(rule._get_newly_due_objects()))

    def test_not_due_object_iteration_opened_today(self):
        """interval=1 day: object with iteration opened today is NOT due."""
        app, rule, iteration = self._create_rule_and_iteration(interval="days=1")
        fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
        for opened_on in [
            dt.datetime(2025, 6, 15, 0, 0, 0, tzinfo=dt.UTC),
            dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC),
            dt.datetime(2025, 6, 15, 23, 59, 59, 999999, tzinfo=dt.UTC),
        ]:
            with self.subTest(opened_on=opened_on):
                LifecycleIteration.objects.filter(pk=iteration.pk).update(opened_on=opened_on)
                with patch("django.utils.timezone.now", return_value=fixed_now):
                    self.assertNotIn(app, list(rule._get_newly_due_objects()))

    def test_due_independent_of_task_execution_time(self):
        """Due detection gives the same result whether the task runs at 00:00:01 or 23:59:59."""
        app, rule, iteration = self._create_rule_and_iteration(interval="days=1")
        opened_on = dt.datetime(2025, 6, 14, 18, 0, 0, tzinfo=dt.UTC)
        LifecycleIteration.objects.filter(pk=iteration.pk).update(opened_on=opened_on)
        for task_time in [
            dt.datetime(2025, 6, 15, 0, 0, 1, tzinfo=dt.UTC),
            dt.datetime(2025, 6, 15, 12, 0, 0, tzinfo=dt.UTC),
            dt.datetime(2025, 6, 15, 23, 59, 59, tzinfo=dt.UTC),
        ]:
            with self.subTest(task_time=task_time):
                with patch("django.utils.timezone.now", return_value=task_time):
                    self.assertIn(app, list(rule._get_newly_due_objects()))

    def test_due_boundary_multi_day_interval(self):
        """interval=30 days: due after 30 full days, not after 29."""
        app, rule, iteration = self._create_rule_and_iteration(interval="days=30")
        fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
        # Previous review opened 30 days ago (May 16), review is due for the object
        LifecycleIteration.objects.filter(pk=iteration.pk).update(
            opened_on=dt.datetime(2025, 5, 16, 12, 0, 0, tzinfo=dt.UTC)
        )
        with patch("django.utils.timezone.now", return_value=fixed_now):
            self.assertIn(app, list(rule._get_newly_due_objects()))
        # Previous review opened 29 days ago (May 17), new review is NOT due
        LifecycleIteration.objects.filter(pk=iteration.pk).update(
            opened_on=dt.datetime(2025, 5, 17, 12, 0, 0, tzinfo=dt.UTC)
        )
        with patch("django.utils.timezone.now", return_value=fixed_now):
            self.assertNotIn(app, list(rule._get_newly_due_objects()))

    def test_apply_overdue_at_boundary(self):
        """apply() marks iteration overdue when grace period just expired,
        regardless of what time the daily task runs."""
        _, rule, iteration = self._create_rule_and_iteration(
            grace_period="days=1", interval="days=365"
        )
        opened_on = dt.datetime(2025, 6, 14, 20, 0, 0, tzinfo=dt.UTC)
        for task_time in [
            dt.datetime(2025, 6, 15, 0, 0, 1, tzinfo=dt.UTC),
            dt.datetime(2025, 6, 15, 23, 59, 59, tzinfo=dt.UTC),
        ]:
            with self.subTest(task_time=task_time):
                # Reset to PENDING before each run so apply() has work to do.
                LifecycleIteration.objects.filter(pk=iteration.pk).update(
                    opened_on=opened_on, state=ReviewState.PENDING
                )
                with patch("django.utils.timezone.now", return_value=task_time):
                    rule.apply()
                iteration.refresh_from_db()
                self.assertEqual(iteration.state, ReviewState.OVERDUE)

View File

@@ -1,11 +0,0 @@
"""API URLs"""
from authentik.enterprise.lifecycle.api.iterations import IterationViewSet
from authentik.enterprise.lifecycle.api.reviews import ReviewViewSet
from authentik.enterprise.lifecycle.api.rules import LifecycleRuleViewSet
api_urlpatterns = [
("lifecycle/iterations", IterationViewSet),
("lifecycle/reviews", ReviewViewSet),
("lifecycle/rules", LifecycleRuleViewSet),
]

View File

@@ -1,75 +0,0 @@
from datetime import datetime
from urllib import parse
from django.contrib.contenttypes.models import ContentType
from django.db.models import Model
from django.urls import reverse
from rest_framework.serializers import ChoiceField, Serializer, UUIDField
from authentik.core.api.utils import ModelSerializer
from authentik.core.models import Application, Group, User
from authentik.rbac.models import Role
def parse_content_type(value: str) -> dict:
    """Split a dotted "app_label.model" string into ContentType lookup kwargs."""
    parts = value.split(".")
    app_label, model = parts
    return {"app_label": app_label, "model": model}
def model_choices() -> list[tuple[str, str]]:
    """Return the (content-type key, human label) pairs lifecycle rules may target."""
    supported = {
        "authentik_core.application": "Application",
        "authentik_core.group": "Group",
        "authentik_rbac.role": "Role",
    }
    return list(supported.items())
def admin_link_for_model(model: Model) -> str:
    """Build the admin-UI route fragment for *model*, with the URL-encoded
    state that pre-selects the lifecycle page appended after a semicolon.

    Raises TypeError for model classes without a lifecycle admin page.
    """
    if isinstance(model, Application):
        base = f"/core/applications/{model.slug}"
    elif isinstance(model, Group):
        base = f"/identity/groups/{model.pk}"
    elif isinstance(model, Role):
        base = f"/identity/roles/{model.pk}"
    else:
        raise TypeError("Unsupported model")
    suffix = parse.quote('{"page":"page-lifecycle"}')
    return f"{base};{suffix}"
def link_for_model(model: Model) -> str:
    """Return the full admin-interface link for *model* (route + fragment)."""
    admin_root = reverse("authentik_core:if-admin")
    return f"{admin_root}#{admin_link_for_model(model)}"
def start_of_day(dt: datetime) -> datetime:
    """Return *dt* normalized to midnight of the same calendar day
    (tzinfo is preserved)."""
    midnight = {"hour": 0, "minute": 0, "second": 0, "microsecond": 0}
    return dt.replace(**midnight)
class ContentTypeField(ChoiceField):
    """Serializer field mapping a ContentType row to/from its dotted
    "app_label.model" string key."""

    def __init__(self, **kwargs):
        super().__init__(choices=model_choices(), **kwargs)

    def to_representation(self, content_type: ContentType) -> str:
        """Render the ContentType as its dotted key."""
        return f"{content_type.app_label}.{content_type.model}"

    def to_internal_value(self, data: str) -> ContentType:
        """Resolve a dotted key back to its ContentType row."""
        lookup = parse_content_type(data)
        return ContentType.objects.get(**lookup)
class GenericForeignKeySerializer(Serializer):
    """Serializer for a generic foreign key: a content-type key plus the
    target object's UUID."""

    content_type = ContentTypeField()
    object_id = UUIDField()
class ReviewerGroupSerializer(ModelSerializer):
    """Minimal Group representation used when listing reviewer groups."""

    class Meta:
        model = Group
        fields = [
            "pk",
            "name",
        ]
class ReviewerUserSerializer(ModelSerializer):
    """Minimal User representation used when listing reviewers."""

    class Meta:
        model = User
        fields = ["pk", "uuid", "username", "name"]

View File

@@ -331,7 +331,7 @@ class GoogleWorkspaceGroupTests(TestCase):
).exists()
)
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
self.assertEqual(len(http.requests()), 7)
self.assertEqual(len(http.requests()), 5)
def test_sync_discover_multiple(self):
"""Test group discovery"""
@@ -372,7 +372,7 @@ class GoogleWorkspaceGroupTests(TestCase):
).exists()
)
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
self.assertEqual(len(http.requests()), 7)
self.assertEqual(len(http.requests()), 5)
# Change response to trigger update
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/groups?customer=my_customer&maxResults=500&orderBy=email&key={self.api_key}&alt=json",

View File

@@ -309,7 +309,7 @@ class GoogleWorkspaceUserTests(TestCase):
).exists()
)
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
self.assertEqual(len(http.requests()), 7)
self.assertEqual(len(http.requests()), 5)
def test_sync_discover_multiple(self):
"""Test user discovery, running multiple times"""
@@ -352,7 +352,7 @@ class GoogleWorkspaceUserTests(TestCase):
).exists()
)
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
self.assertEqual(len(http.requests()), 7)
self.assertEqual(len(http.requests()), 5)
# Change response, which will trigger a discovery update
http.add_response(
f"https://admin.googleapis.com/admin/directory/v1/users?customer=my_customer&maxResults=500&orderBy=email&key={self.api_key}&alt=json",

View File

@@ -78,8 +78,7 @@ class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProv
def create(self, user: User):
"""Create user from scratch and create a connection object"""
microsoft_user = self.to_schema(user, None)
if microsoft_user.user_principal_name:
self.check_email_valid(microsoft_user.user_principal_name)
self.check_email_valid(microsoft_user.user_principal_name)
with transaction.atomic():
try:
response = self._request(self.client.users.post(microsoft_user))
@@ -119,8 +118,7 @@ class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProv
def update(self, user: User, connection: MicrosoftEntraProviderUser):
"""Update existing user"""
microsoft_user = self.to_schema(user, connection)
if microsoft_user.user_principal_name:
self.check_email_valid(microsoft_user.user_principal_name)
self.check_email_valid(microsoft_user.user_principal_name)
response = self._request(
self.client.users.by_user_id(connection.microsoft_id).patch(microsoft_user)
)

View File

@@ -2,10 +2,10 @@
from django.http import HttpRequest
from django.urls import reverse
from rest_framework.fields import CharField, SerializerMethodField, URLField
from rest_framework.fields import SerializerMethodField, URLField
from authentik.core.api.providers import ProviderSerializer
from authentik.core.models import Provider
from authentik.core.models import Application
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.providers.ws_federation.models import WSFederationProvider
from authentik.enterprise.providers.ws_federation.processors.metadata import MetadataProcessor
@@ -16,31 +16,8 @@ class WSFederationProviderSerializer(EnterpriseRequiredMixin, SAMLProviderSerial
"""WSFederationProvider Serializer"""
reply_url = URLField(source="acs_url")
wtrealm = CharField(source="audience")
url_wsfed = SerializerMethodField()
def get_url_download_metadata(self, instance: WSFederationProvider) -> str:
"""Get metadata download URL"""
if "request" not in self._context:
return ""
request: HttpRequest = self._context["request"]._request
try:
return request.build_absolute_uri(
reverse(
"authentik_providers_ws_federation:metadata-download",
kwargs={"application_slug": instance.application.slug},
)
)
except Provider.application.RelatedObjectDoesNotExist:
return request.build_absolute_uri(
reverse(
"authentik_api:wsfederationprovider-metadata",
kwargs={
"pk": instance.pk,
},
)
+ "?download"
)
wtrealm = SerializerMethodField()
def get_url_wsfed(self, instance: WSFederationProvider) -> str:
"""Get WS-Fed url"""
@@ -49,11 +26,16 @@ class WSFederationProviderSerializer(EnterpriseRequiredMixin, SAMLProviderSerial
request: HttpRequest = self._context["request"]._request
return request.build_absolute_uri(reverse("authentik_providers_ws_federation:wsfed"))
def get_wtrealm(self, instance: WSFederationProvider) -> str:
try:
return f"goauthentik.io://app/{instance.application.slug}"
except Application.DoesNotExist:
return None
class Meta(SAMLProviderSerializer.Meta):
model = WSFederationProvider
fields = ProviderSerializer.Meta.fields + [
"reply_url",
"wtrealm",
"assertion_valid_not_before",
"assertion_valid_not_on_or_after",
"session_valid_not_on_or_after",
@@ -69,6 +51,7 @@ class WSFederationProviderSerializer(EnterpriseRequiredMixin, SAMLProviderSerial
"default_name_id_policy",
"url_download_metadata",
"url_wsfed",
"wtrealm",
]
extra_kwargs = ProviderSerializer.Meta.extra_kwargs

View File

@@ -8,10 +8,6 @@ from authentik.providers.saml.models import SAMLProvider
class WSFederationProvider(SAMLProvider):
"""WS-Federation for applications which support WS-Fed."""
# Alias'd fields:
# - acs_url -> reply_url
# - audience -> realm / wtrealm
@property
def serializer(self) -> type[Serializer]:
from authentik.enterprise.providers.ws_federation.api.providers import (

View File

@@ -1,4 +1,4 @@
from authentik.common.saml.constants import NS_MAP as _map
from authentik.sources.saml.processors.constants import NS_MAP as _map
WS_FED_ACTION_SIGN_IN = "wsignin1.0"
WS_FED_ACTION_SIGN_OUT = "wsignout1.0"

View File

@@ -1,7 +1,6 @@
from django.urls import reverse
from lxml.etree import SubElement, _Element # nosec
from authentik.common.saml.constants import NS_SAML_METADATA
from authentik.enterprise.providers.ws_federation.processors.constants import (
NS_ADDRESSING,
NS_MAP,
@@ -9,6 +8,7 @@ from authentik.enterprise.providers.ws_federation.processors.constants import (
NS_WSI,
)
from authentik.providers.saml.processors.metadata import MetadataProcessor as BaseMetadataProcessor
from authentik.sources.saml.processors.constants import NS_SAML_METADATA
class MetadataProcessor(BaseMetadataProcessor):

Some files were not shown because too many files have changed in this diff Show More