Compare commits

..

41 Commits

Author SHA1 Message Date
Teffen Ellis
a440cd3669 Tidy. Fix colors. 2026-02-04 00:32:59 +01:00
Tana M Berry
d6b07d5348 more dewi and dominic edits 2026-02-03 11:28:58 -06:00
Tana M Berry
6f87b9c165 Merge branch 'main' into docs-first-steps 2026-02-03 10:55:49 -05:00
Tana M Berry
bd72cb2815 Merge branch 'main' into docs-first-steps
Signed-off-by: Tana M Berry <tanamarieberry@yahoo.com>
2026-02-02 14:58:55 -05:00
Tana M Berry
c42dad58b0 more dewi and dominic edits 2026-02-02 13:56:32 -06:00
Tana M Berry
1e7e4d11d7 fix heading size 2026-02-02 13:48:59 -06:00
Tana M Berry
45cdf97f0e more link fixing 2026-02-02 13:44:36 -06:00
Tana M Berry
86e37201c1 tweak 2026-02-02 13:38:33 -06:00
Tana M Berry
ed8c373427 fix sidebar, tweaks 2026-02-02 13:36:56 -06:00
Tana M Berry
2e4abd410c add mermaid diagram, more links, more content 2026-02-02 13:35:02 -06:00
Tana M Berry
dbaff40c60 more edits, more TODOs done 2026-01-30 17:37:00 -06:00
Tana M Berry
f956ca1300 links, more tweaks 2026-01-28 21:31:35 -06:00
Tana M Berry
d990f53596 work on bindings docs that was needed for the first steps docs 2026-01-28 21:12:41 -06:00
Tana M Berry
dfbbbe03c2 changes from Jens 2026-01-28 11:56:13 -06:00
Jens Langhammer
4da35e143e fix some alignments
Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-01-28 18:50:33 +01:00
Tana M Berry
bc06779f4f formatting fights on tips 2026-01-27 21:13:11 -06:00
Tana M Berry
727ba71280 few more dominic edits, tweaks 2026-01-27 20:46:58 -06:00
Tana M Berry
7480cb3e86 Merge branch 'main' into docs-first-steps 2026-01-27 21:39:26 -05:00
Tana M Berry
7027b7c4bf new styles, more content 2026-01-27 20:35:18 -06:00
Tana M Berry
b47016cd30 thanks Teffen 2026-01-27 09:34:27 -06:00
Tana M Berry
d18ab068c4 tweaks 2026-01-26 10:18:56 -06:00
Jens Langhammer
3b552530f1 a bunch of things
Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-01-26 17:04:19 +01:00
Tana M Berry
cd46166c8e more dewi and dominic edits, links 2026-01-23 15:03:49 -06:00
Dewi Roberts
8e37908ecb Merge branch 'main' into docs-first-steps 2026-01-22 09:58:18 +00:00
Tana M Berry
9ec9f116b0 another fine Dominic edit 2026-01-21 18:01:23 -06:00
Tana M Berry
942a0029a0 dominic's eedits, more content 2026-01-21 17:56:00 -06:00
Tana M Berry
597a67075c conflicts? 2026-01-21 14:30:12 -06:00
Tana M Berry
b31517cd79 Merge branch 'main' into docs-first-steps 2026-01-21 15:19:28 -05:00
authentik-automation[bot]
a473d0618a Optimised images with calibre/image-actions 2026-01-14 23:10:00 +00:00
authentik-automation[bot]
7858ab874d Optimised images with calibre/image-actions 2026-01-14 23:09:33 +00:00
authentik-automation[bot]
0fd8069593 Optimised images with calibre/image-actions 2026-01-14 23:09:02 +00:00
Tana M Berry
9d210d92f3 Merge branch 'main' into docs-first-steps
Signed-off-by: Tana M Berry <tanamarieberry@yahoo.com>
2026-01-14 18:08:35 -05:00
Tana M Berry
9ce75e06be more content, green tips, other fixes 2026-01-14 16:52:18 -06:00
Tana M Berry
ee11194957 added Dewi ideas, more content, tweaks 2026-01-08 20:19:54 -06:00
Tana M Berry
297d4ca38b dewis edits 2026-01-07 16:13:41 -06:00
Tana M Berry
f120e179f4 more content, tweaks 2026-01-07 16:02:48 -06:00
Tana M Berry
c9a2405247 moved sections and retitled some 2026-01-06 17:12:38 -06:00
Tana M Berry
c58d7ff13e first draft 2026-01-06 17:02:10 -06:00
Tana M Berry
e8996c9b8d Merge branch 'main' into docs-first-steps 2026-01-06 08:35:44 -06:00
Tana M Berry
ec2b9ba0e0 moved email config up to match Docker 2026-01-05 17:32:47 -06:00
Tana M Berry
8bb702b95a new first steps docs 2026-01-05 17:30:27 -06:00
1007 changed files with 18119 additions and 177641 deletions

View File

@@ -1,2 +0,0 @@
[build]
rustflags = ["--cfg", "tokio_unstable"]

View File

@@ -1,47 +0,0 @@
[licenses]
allow = [
"Apache-2.0 WITH LLVM-exception",
"Apache-2.0",
"BSD-3-Clause",
"CC0-1.0",
"CDLA-Permissive-2.0",
"ISC",
"MIT",
"MPL-2.0",
"OpenSSL",
"Unicode-3.0",
"Zlib",
]
[licenses.private]
ignore = true
[bans]
multiple-versions = "allow"
wildcards = "deny"
[bans.workspace-dependencies]
duplicates = "deny"
include-path-dependencies = true
unused = "deny"
# No non-FIPS compliant dependencies
[[bans.deny]]
name = "native-tls"
[[bans.deny]]
name = "openssl"
[[bans.deny]]
name = "openssl-sys"
[[bans.deny]]
name = "ring"
[[bans.features]]
allow = [
"alloc",
"aws-lc-sys",
"default",
"fips",
"prebuilt-nasm",
"ring-io",
"ring-sig-verify",
]
name = "aws-lc-rs"
exact = true

View File

@@ -1,16 +0,0 @@
comment_width = 100
format_code_in_doc_comments = true
format_strings = true
group_imports = "StdExternalCrate"
hex_literal_case = "Lower"
imports_granularity = "Crate"
max_width = 100
newline_style = "Unix"
normalize_comments = true
normalize_doc_attributes = true
reorder_impl_items = true
style_edition = "2024"
use_field_init_shorthand = true
use_try_shorthand = true
where_single_line = true
wrap_comments = true

View File

@@ -216,7 +216,7 @@ runs:
--label "cherry-pick")
# Assign the PR to the original author
gh pr edit "$NEW_PR" --add-assignee "$PR_AUTHOR" || true
gh pr edit "$NEW_PR" --add-assignee "$PR_AUTHOR"
echo "✅ Created cherry-pick PR $NEW_PR for $TARGET_BRANCH"
@@ -258,7 +258,7 @@ runs:
--label "cherry-pick")
# Assign the PR to the original author
gh pr edit "$NEW_PR" --add-assignee "$PR_AUTHOR" || true
gh pr edit "$NEW_PR" --add-assignee "$PR_AUTHOR"
echo "⚠️ Created conflict resolution PR $NEW_PR for $TARGET_BRANCH"

View File

@@ -4,7 +4,7 @@ description: "Setup authentik testing environment"
inputs:
dependencies:
description: "List of dependencies to setup"
default: "system,python,rust,node,go,runtime"
default: "system,python,node,go,runtime"
postgresql_version:
description: "Optional postgresql image tag"
default: "16"
@@ -22,7 +22,7 @@ runs:
sudo rm -rf /usr/local/lib/android
- name: Install uv
if: ${{ contains(inputs.dependencies, 'python') }}
uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v5
uses: astral-sh/setup-uv@803947b9bd8e9f986429fa0c5a41c367cd732b41 # v5
with:
enable-cache: true
- name: Setup python
@@ -34,43 +34,17 @@ runs:
if: ${{ contains(inputs.dependencies, 'python') }}
shell: bash
run: uv sync --all-extras --dev --frozen
- name: Setup rust (stable)
if: ${{ contains(inputs.dependencies, 'rust') && !contains(inputs.dependencies, 'rust-nightly') }}
uses: actions-rust-lang/setup-rust-toolchain@a0b538fa0b742a6aa35d6e2c169b4bd06d225a98 # v1
- name: Setup rust (nightly)
if: ${{ contains(inputs.dependencies, 'rust-nightly') }}
uses: actions-rust-lang/setup-rust-toolchain@a0b538fa0b742a6aa35d6e2c169b4bd06d225a98 # v1
with:
toolchain: nightly
components: rustfmt
- name: Setup rust dependencies
if: ${{ contains(inputs.dependencies, 'rust') }}
uses: taiki-e/install-action@64c5c20c872907b6f7cd50994ac189e7274160f2 # v2
with:
tool: cargo-deny cargo-machete cargo-llvm-cov nextest
- name: Setup node (web)
- name: Setup node
if: ${{ contains(inputs.dependencies, 'node') }}
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v4
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v4
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
registry-url: "https://registry.npmjs.org"
- name: Setup node (root)
if: ${{ contains(inputs.dependencies, 'node') }}
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v4
with:
node-version-file: package.json
cache: "npm"
cache-dependency-path: package-lock.json
registry-url: "https://registry.npmjs.org"
- name: Install Node deps
if: ${{ contains(inputs.dependencies, 'node') }}
shell: bash
run: npm ci
registry-url: 'https://registry.npmjs.org'
- name: Setup go
if: ${{ contains(inputs.dependencies, 'go') }}
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v5
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v5
with:
go-version-file: "go.mod"
- name: Setup docker cache
@@ -84,7 +58,7 @@ runs:
run: |
export PSQL_TAG=${{ inputs.postgresql_version }}
docker compose -f .github/actions/setup/compose.yml up -d
cd web && npm ci
cd web && npm i
- name: Generate config
if: ${{ contains(inputs.dependencies, 'python') }}
shell: uv run python {0}

View File

@@ -2,22 +2,18 @@ name: "Process test results"
description: Convert test results to JUnit, add them to GitHub Actions and codecov
inputs:
files:
description: Comma-separated explicit list of files to upload
flags:
description: Codecov flags
runs:
using: "composite"
steps:
- uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
- uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5
with:
files: ${{ inputs.files }}
flags: ${{ inputs.flags }}
use_oidc: true
- uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
- uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5
with:
files: ${{ inputs.files }}
flags: ${{ inputs.flags }}
use_oidc: true
report_type: test_results

1
.github/codespell-dictionary.txt vendored Normal file
View File

@@ -0,0 +1 @@
authentic->authentik

32
.github/codespell-words.txt vendored Normal file
View File

@@ -0,0 +1,32 @@
akadmin
asgi
assertIn
authentik
authn
crate
docstrings
entra
goauthentik
gunicorn
hass
jwe
jwks
keypair
keypairs
kubernetes
oidc
ontext
openid
passwordless
plex
saml
scim
singed
slo
sso
totp
traefik
# https://github.com/codespell-project/codespell/issues/1224
upToDate
warmup
webauthn

View File

@@ -38,21 +38,6 @@ updates:
#endregion
#region Rust
- package-ecosystem: rust-toolchain
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies
#endregion
#region Web
- package-ecosystem: npm
@@ -249,7 +234,7 @@ updates:
- package-ecosystem: docker
directories:
- /lifecycle/container
- /
- /website
schedule:
interval: daily

View File

@@ -43,8 +43,8 @@ jobs:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
- uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
- uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
@@ -56,23 +56,23 @@ jobs:
release: ${{ inputs.release }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
with:
go-version-file: "go.mod"
- name: Generate API Clients
@@ -80,7 +80,7 @@ jobs:
make gen-client-ts
make gen-client-go
- name: Build Docker Image
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
id: push
with:
context: .
@@ -95,7 +95,7 @@ jobs:
platforms: linux/${{ inputs.image_arch }}
cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
cache-to: ${{ steps.ev.outputs.cacheTo }}
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:

View File

@@ -79,25 +79,25 @@ jobs:
image-name: ${{ inputs.image_name }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: int128/docker-manifest-create-action@8aac06098a12365ccdf99372dcfb453ccce8a0b0 # v2
- uses: int128/docker-manifest-create-action@a39573caa37b6a8a03302d43b57c3f48635096e2 # v2
id: build
with:
tags: ${{ matrix.tag }}
sources: |
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-amd64.outputs.image-digest }}
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-arm64.outputs.image-digest }}
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}

View File

@@ -18,14 +18,14 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
token: ${{ steps.generate_token.outputs.token }}
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
registry-url: "https://registry.npmjs.org"

View File

@@ -33,7 +33,7 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: website/package.json
cache: "npm"
@@ -41,7 +41,7 @@ jobs:
- working-directory: website/
name: Install Dependencies
run: npm ci
- uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v4
- uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v4
with:
path: |
${{ github.workspace }}/website/api/.docusaurus
@@ -55,7 +55,7 @@ jobs:
env:
NODE_ENV: production
run: npm run build -w api
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v4
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v4
with:
name: api-docs
path: website/api/build
@@ -67,11 +67,11 @@ jobs:
- build
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v5
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v5
with:
name: api-docs
path: website/api/build
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: website/package.json
cache: "npm"

View File

@@ -24,7 +24,7 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: lifecycle/aws/package.json
cache: "npm"

View File

@@ -36,7 +36,7 @@ jobs:
NODE_ENV: production
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: website/package.json
cache: "npm"
@@ -53,7 +53,7 @@ jobs:
NODE_ENV: production
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: website/package.json
cache: "npm"
@@ -77,9 +77,9 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
@@ -89,14 +89,14 @@ jobs:
image-name: ghcr.io/goauthentik/dev-docs
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
id: push
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: website/Dockerfile
@@ -105,7 +105,7 @@ jobs:
context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }}
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:

View File

@@ -6,10 +6,6 @@ on:
schedule:
# Every night at 3am
- cron: "0 3 * * *"
pull_request:
paths:
# Needs to refer to itself
- .github/workflows/ci-main-daily.yml
jobs:
test-container:
@@ -19,14 +15,14 @@ jobs:
matrix:
version:
- docs
- version-2025-12
- version-2025-10
- version-2025-4
- version-2025-2
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- run: |
current="$(pwd)"
dir="/tmp/authentik/${{ matrix.version }}"
mkdir -p "${dir}/lifecycle/container"
cd "${dir}"
wget "https://${{ matrix.version }}.goauthentik.io/docker-compose.yml" -O "${dir}/lifecycle/container/compose.yml"
"${current}/scripts/test_docker.sh"
mkdir -p $dir
cd $dir
wget https://${{ matrix.version }}.goauthentik.io/compose.yml
${current}/scripts/test_docker.sh

View File

@@ -16,7 +16,6 @@ env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
RUSTFLAGS: "-Dwarnings"
permissions:
# Needed for checkout
@@ -29,46 +28,20 @@ jobs:
strategy:
fail-fast: false
matrix:
include:
- job: bandit
deps: python
- job: black
deps: python
- job: spellcheck
deps: node
- job: pending-migrations
deps: python,runtime
- job: ruff
deps: python
- job: mypy
deps: python
- job: cargo-deny
deps: rust
- job: cargo-machete
deps: rust
- job: clippy
deps: rust
- job: rustfmt
deps: rust-nightly
job:
- bandit
- black
- codespell
- pending-migrations
- ruff
- mypy
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
uses: ./.github/actions/setup
with:
dependencies: ${{ matrix.deps }}
- name: run job
run: make ci-lint-${{ matrix.job }}
test-gen-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
uses: ./.github/actions/setup
- name: generate schema
run: make migrate gen-build
- name: ensure schema is up-to-date
run: git diff --exit-code -- schema.yml blueprints/schema.json
run: uv run make ci-${{ matrix.job }}
test-migrations:
runs-on: ubuntu-latest
steps:
@@ -144,7 +117,6 @@ jobs:
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
PROMETHEUS_MULTIPROC_DIR: /tmp
run: |
uv run make ci-test
- uses: ./.github/actions/test-results
@@ -174,7 +146,6 @@ jobs:
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
PROMETHEUS_MULTIPROC_DIR: /tmp
run: |
uv run make ci-test
- uses: ./.github/actions/test-results
@@ -189,10 +160,8 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Create k8s Kind Cluster
uses: helm/kind-action@ef37e7f390d99f746eb8b610417061a60e82a6cc # v1.14.0
uses: helm/kind-action@92086f6be054225fa813e0a4b13787fc9088faab # v1.13.0
- name: run integration
env:
PROMETHEUS_MULTIPROC_DIR: /tmp
run: |
uv run coverage run manage.py test tests/integration
uv run coverage xml
@@ -236,7 +205,7 @@ jobs:
run: |
docker compose -f tests/e2e/compose.yml up -d --quiet-pull
- id: cache-web
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v4
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v4
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
@@ -249,8 +218,6 @@ jobs:
npm run build
npm run build:sfe
- name: run e2e
env:
PROMETHEUS_MULTIPROC_DIR: /tmp
run: |
uv run coverage run manage.py test ${{ matrix.job.glob }}
uv run coverage xml
@@ -281,7 +248,7 @@ jobs:
run: |
docker compose -f tests/openid_conformance/compose.yml up -d --quiet-pull
- id: cache-web
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v4
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v4
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
@@ -294,8 +261,6 @@ jobs:
npm run build
npm run build:sfe
- name: run conformance
env:
PROMETHEUS_MULTIPROC_DIR: /tmp
run: |
uv run coverage run manage.py test ${{ matrix.job.glob }}
uv run coverage xml
@@ -304,38 +269,14 @@ jobs:
with:
flags: conformance
- if: ${{ !cancelled() }}
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
with:
name: conformance-certification-${{ matrix.job.name }}
path: tests/openid_conformance/exports/
test-rust:
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
uses: ./.github/actions/setup
with:
dependencies: rust
- name: run tests
run: |
cargo llvm-cov --no-report nextest --workspace
cargo llvm-cov report --codecov --output-path target/llvm-cov-target/rust.json
- uses: ./.github/actions/test-results
if: ${{ always() }}
with:
files: target/llvm-cov-target/rust.json
flags: rust
- if: ${{ !cancelled() }}
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: test-rust
path: target/llvm-cov-target/rust.json
ci-core-mark:
if: always()
needs:
- lint
- test-gen-build
- test-migrations
- test-migrations-from-stable
- test-unittest

View File

@@ -22,7 +22,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
with:
go-version-file: "go.mod"
- name: Prepare and generate API
@@ -43,7 +43,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
with:
go-version-file: "go.mod"
- name: Setup authentik env
@@ -90,9 +90,9 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
@@ -102,7 +102,7 @@ jobs:
image-name: ghcr.io/goauthentik/dev-${{ matrix.type }}
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -111,7 +111,7 @@ jobs:
run: make gen-client-go
- name: Build Docker Image
id: push
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: lifecycle/container/${{ matrix.type }}.Dockerfile
@@ -122,7 +122,7 @@ jobs:
context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }}
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:
@@ -148,10 +148,10 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
cache: "npm"

View File

@@ -32,7 +32,7 @@ jobs:
project: web
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: ${{ matrix.project }}/package.json
cache: "npm"
@@ -49,7 +49,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
cache: "npm"
@@ -77,7 +77,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
cache: "npm"

View File

@@ -29,7 +29,7 @@ jobs:
github.event.pull_request.head.repo.full_name == github.repository)
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
@@ -38,7 +38,7 @@ jobs:
token: ${{ steps.generate_token.outputs.token }}
- name: Compress images
id: compress
uses: calibreapp/image-actions@03c976c29803442fc4040a9de5509669e7759b81 # main
uses: calibreapp/image-actions@420075c115b26f8785e293c5bd5bef0911c506e5 # main
with:
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
compressOnly: ${{ github.event_name != 'pull_request' }}

View File

@@ -16,7 +16,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}

View File

@@ -10,7 +10,7 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
if: ${{ env.GH_APP_ID != '' }}
with:
app-id: ${{ secrets.GH_APP_ID }}

View File

@@ -16,7 +16,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}

View File

@@ -29,19 +29,18 @@ jobs:
- packages/eslint-config
- packages/prettier-config
- packages/docusaurus-config
- packages/logger-js
- packages/esbuild-plugin-live-reload
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
fetch-depth: 2
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: ${{ matrix.package }}/package.json
registry-url: "https://registry.npmjs.org"
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # 24d32ffd492484c1d75e0c0b894501ddb9d30d62
uses: tj-actions/changed-files@e0021407031f5be11a464abee9a0776171c79891 # 24d32ffd492484c1d75e0c0b894501ddb9d30d62
with:
files: |
${{ matrix.package }}/package.json

View File

@@ -29,7 +29,7 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
@@ -57,7 +57,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}

View File

@@ -33,9 +33,9 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Set up QEMU
uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
@@ -44,21 +44,21 @@ jobs:
with:
image-name: ghcr.io/goauthentik/docs
- name: Login to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
id: push
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: website/Dockerfile
push: true
platforms: linux/amd64,linux/arm64
context: .
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
if: true
with:
@@ -84,18 +84,18 @@ jobs:
- rac
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Set up QEMU
uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev
@@ -108,18 +108,18 @@ jobs:
make gen-client-ts
make gen-client-go
- name: Docker Login Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
id: push
with:
push: true
@@ -129,7 +129,7 @@ jobs:
file: lifecycle/container/${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
context: .
- uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v3
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
@@ -152,25 +152,18 @@ jobs:
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Install web dependencies
working-directory: web/
run: |
npm ci
- name: Generate API Clients
run: |
make gen-client-ts
make gen-client-go
- name: Build web
working-directory: web/
run: |
npm ci
npm run build-proxy
- name: Build outpost
run: |
@@ -180,7 +173,7 @@ jobs:
export CGO_ENABLED=0
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- name: Upload binaries to release
uses: svenstaro/upload-release-action@29e53e917877a24fad85510ded594ab3c9ca12de # v2
uses: svenstaro/upload-release-action@6b7fa9f267e90b50a19fef07b3596790bb941741 # v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
@@ -199,7 +192,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6.0.0
- uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708 # v5
with:
role-to-assume: "arn:aws:iam::016170277896:role/github_goauthentik_authentik"
aws-region: ${{ env.AWS_REGION }}
@@ -217,12 +210,12 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Run test suite in final docker images
run: |
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> lifecycle/container/.env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> lifecycle/container/.env
docker compose -f lifecycle/container/compose.yml pull -q
docker compose -f lifecycle/container/compose.yml up --no-start
docker compose -f lifecycle/container/compose.yml start postgresql
docker compose -f lifecycle/container/compose.yml run -u root server test-all
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
docker compose pull -q
docker compose up --no-start
docker compose start postgresql
docker compose run -u root server test-all
sentry-release:
needs:
- build-server

View File

@@ -67,7 +67,7 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
@@ -91,12 +91,11 @@ jobs:
# ID from https://api.github.com/users/authentik-automation[bot]
git config --global user.name '${{ steps.app-token.outputs.app-slug }}[bot]'
git config --global user.email '${{ steps.get-user-id.outputs.user-id }}+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com'
git pull
git commit -a -m "release: ${{ inputs.version }}" --allow-empty
git tag "version/${{ inputs.version }}" HEAD -m "version/${{ inputs.version }}"
git push --follow-tags
- name: Create Release
uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2.6.1
uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2.5.0
with:
token: "${{ steps.app-token.outputs.token }}"
tag_name: "version/${{ inputs.version }}"
@@ -115,7 +114,7 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
@@ -157,7 +156,7 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
@@ -175,25 +174,21 @@ jobs:
if: "${{ inputs.release_reason == 'feature' }}"
run: |
changelog_url="https://docs.goauthentik.io/docs/releases/${{ needs.check-inputs.outputs.major_version }}"
reason="${{ inputs.release_reason }}"
jq \
--arg version "${{ inputs.version }}" \
--arg changelog "See ${changelog_url}" \
--arg changelog_url "${changelog_url}" \
--arg reason "${reason}" \
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url | .stable.reason = $reason' version.json > version.new.json
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url' version.json > version.new.json
mv version.new.json version.json
- name: Bump version
if: "${{ inputs.release_reason != 'feature' }}"
run: |
changelog_url="https://docs.goauthentik.io/docs/releases/${{ needs.check-inputs.outputs.major_version }}#fixed-in-$(echo -n ${{ inputs.version}} | sed 's/\.//g')"
reason="${{ inputs.release_reason }}"
jq \
--arg version "${{ inputs.version }}" \
--arg changelog "See ${changelog_url}" \
--arg changelog_url "${changelog_url}" \
--arg reason "${reason}" \
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url | .stable.reason = $reason' version.json > version.new.json
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url' version.json > version.new.json
mv version.new.json version.json
- name: Create pull request
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7

View File

@@ -15,11 +15,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f # v10
- uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10
with:
repo-token: ${{ steps.generate_token.outputs.token }}
days-before-stale: 60

View File

@@ -21,7 +21,7 @@ jobs:
steps:
- id: generate_token
if: ${{ github.event_name != 'pull_request' }}
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}

21
.gitignore vendored
View File

@@ -15,9 +15,6 @@ media
node_modules
.cspellcache
cspell-report.*
# If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/
# in your Git repository. Update and uncomment the following line accordingly.
# <django-project-name>/staticfiles/
@@ -195,24 +192,6 @@ pyvenv.cfg
pip-selfcheck.json
# End of https://www.gitignore.io/api/python,django
# Created by https://www.toptal.com/developers/gitignore/api/rust
# Edit at https://www.toptal.com/developers/gitignore?templates=rust
### Rust ###
# Generated by Cargo
# will have compiled files and executables
debug/
target/
# These are backup files generated by rustfmt
**/*.rs.bk
# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
# End of https://www.toptal.com/developers/gitignore/api/rust
/static/
local.env.yml

12
.vscode/settings.json vendored
View File

@@ -14,10 +14,6 @@
"[xml]": {
"editor.minimap.markSectionHeaderRegex": "<!--\\s*#\\bregion\\s*(?<separator>-?)\\s*(?<label>.*)\\s*-->"
},
"files.associations": {
// The built-in "ignore" language gives us enough syntax highlighting to make these files readable.
"**/dictionaries/*.txt": "ignore"
},
"todo-tree.tree.showCountsInTree": true,
"todo-tree.tree.showBadges": true,
"yaml.customTags": [
@@ -53,9 +49,13 @@
"ignoreCase": false
}
],
"go.testFlags": ["-count=1"],
"go.testFlags": [
"-count=1"
],
"go.testEnvVars": {
"WORKSPACE_DIR": "${workspaceFolder}"
},
"github-actions.workflows.pinned.workflows": [".github/workflows/ci-main.yml"]
"github-actions.workflows.pinned.workflows": [
".github/workflows/ci-main.yml"
]
}

View File

@@ -3,7 +3,6 @@
# Backend
authentik/ @goauthentik/backend
blueprints/ @goauthentik/backend
src/ @goauthentik/backend
cmd/ @goauthentik/backend
internal/ @goauthentik/backend
lifecycle/ @goauthentik/backend
@@ -12,12 +11,8 @@ scripts/ @goauthentik/backend
tests/ @goauthentik/backend
pyproject.toml @goauthentik/backend
uv.lock @goauthentik/backend
Cargo.toml @goauthentik/backend
Cargo.lock @goauthentik/backend
go.mod @goauthentik/backend
go.sum @goauthentik/backend
.config/ @goauthentik/backend
rust-toolchain.toml @goauthentik/backend
# Infrastructure
.github/ @goauthentik/infrastructure
lifecycle/aws/ @goauthentik/infrastructure
@@ -39,7 +34,6 @@ packages/docusaurus-config @goauthentik/frontend
packages/esbuild-plugin-live-reload @goauthentik/frontend
packages/eslint-config @goauthentik/frontend
packages/prettier-config @goauthentik/frontend
packages/logger-js @goauthentik/frontend
packages/tsconfig @goauthentik/frontend
# Web
web/ @goauthentik/frontend

5181
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,296 +0,0 @@
[workspace]
members = [".", "website/scripts/docsmg"]
resolver = "3"
[workspace.package]
authors = ["authentik Team <hello@goauthentik.io>"]
edition = "2024"
readme = "README.md"
homepage = "https://goauthentik.io"
repository = "https://github.com/goauthentik/authentik.git"
license-file = "LICENSE"
publish = false
[workspace.dependencies]
arc-swap = "1.8.2"
argh = "0.1.17"
async-trait = "0.1.89"
aws-lc-rs = { version = "1.16.1", features = ["fips"] }
axum = { version = "0.8.8", features = ["http2", "macros", "ws"] }
axum-server = { version = "0.8.0", features = ["tls-rustls-no-provider"] }
bytes = "1.11.1"
chrono = "0.4.44"
clap = { version = "4.5.59", features = ["derive", "env"] }
client-ip = { version = "0.2.1", features = ["forwarded-header"] }
color-eyre = "0.6.5"
colored = "3.1.1"
config = { version = "0.15.19", default-features = false, features = [
"yaml",
"async",
] }
console-subscriber = "0.5.0"
dotenvy = "0.15.7"
durstr = "0.4.0"
eyre = "0.6.12"
forwarded-header-value = "0.1.1"
futures = "0.3.32"
glob = "0.3.3"
http-body-util = "0.1.3"
hyper = "1.8.1"
hyper-unix-socket = "0.3.0"
hyper-util = "0.1.20"
ipnet = { version = "2.12.0", features = ["serde"] }
# See https://github.com/mladedav/json-subscriber/pull/23
json-subscriber = { git = "https://github.com/rissson/json-subscriber.git", rev = "950ad7cb887a0a14fd5cb8afb8e76db1f456c032" }
jsonwebtoken = { version = "10.3.0", default-features = false, features = [
"aws_lc_rs",
] }
metrics = "0.24.3"
metrics-exporter-prometheus = { version = "0.18.1", default-features = false }
nix = { version = "0.31.2", features = ["hostname", "signal"] }
notify = "8.2.0"
pem = "3.0.6"
pin-project-lite = "0.2.17"
pyo3 = "0.28.2"
percent-encoding = "2.3.2"
rcgen = { version = "0.14.7", default-features = false, features = [
"aws_lc_rs",
"fips",
] }
regex = "1.12.3"
rustls = { version = "0.23.37", features = ["fips"] }
sentry = { version = "0.47.0", default-features = false, features = [
"backtrace",
"contexts",
"debug-images",
"panic",
"rustls",
"reqwest",
"tower",
"tracing",
] }
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
sqlx = { version = "0.8.6", default-features = false, features = [
"runtime-tokio",
"tls-rustls-aws-lc-rs",
"postgres",
"derive",
"macros",
"uuid",
"chrono",
"ipnet",
"json",
] }
time = "0.3.47"
thiserror = "2.0.18"
tokio = { version = "1.50.0", features = ["full"] }
tokio-rustls = "0.26.4"
tokio-tungstenite = "0.28.0"
tokio-util = "0.7.18"
tower = "0.5.3"
tower-http = { version = "0.6.8", features = [
"compression-br",
"compression-deflate",
"compression-gzip",
"compression-zstd",
"fs",
"timeout",
] }
tower-service = "0.3.3"
tracing = "0.1.44"
tracing-error = "0.2.1"
tracing-subscriber = { version = "0.3.22", features = [
"env-filter",
"json",
"tracing-log",
] }
url = "2.5.8"
uuid = { version = "1.22.0", features = ["v4"] }
[profile.dev.package.backtrace]
opt-level = 3
[profile.release]
lto = true
debug = 2
[workspace.lints.rust]
ambiguous_negative_literals = "warn"
closure_returning_async_block = "warn"
macro_use_extern_crate = "deny"
# must_not_suspend = "deny", unstable see https://github.com/rust-lang/rust/issues/83310
non_ascii_idents = "deny"
redundant_imports = "warn"
semicolon_in_expressions_from_macros = "warn"
trivial_casts = "warn"
trivial_numeric_casts = "warn"
unit_bindings = "warn"
unreachable_pub = "warn"
unsafe_code = "deny"
unused_extern_crates = "warn"
unused_import_braces = "warn"
unused_lifetimes = "warn"
unused_macro_rules = "warn"
unused_qualifications = "warn"
[workspace.lints.rustdoc]
unescaped_backticks = "warn"
[workspace.lints.clippy]
### enable all lints
cargo = { priority = -1, level = "warn" }
complexity = { priority = -1, level = "warn" }
correctness = { priority = -1, level = "warn" }
nursery = { priority = -1, level = "warn" }
pedantic = { priority = -1, level = "warn" }
perf = { priority = -1, level = "warn" }
# Those are too restrictive and disabled by default, however we enable some below
# restriction = { priority = -1, level = "warn" }
style = { priority = -1, level = "warn" }
suspicious = { priority = -1, level = "warn" }
### and disable the ones we don't want
### cargo group
multiple_crate_versions = "allow"
### pedantic group
redundant_closure_for_method_calls = "allow"
struct_field_names = "allow"
too_many_lines = "allow"
### nursery
missing_const_for_fn = "allow"
redundant_pub_crate = "allow"
option_if_let_else = "allow"
### restriction group
allow_attributes = "warn"
allow_attributes_without_reason = "warn"
as_conversions = "warn"
as_pointer_underscore = "warn"
as_underscore = "warn"
assertions_on_result_states = "warn"
clone_on_ref_ptr = "warn"
create_dir = "warn"
dbg_macro = "warn"
default_numeric_fallback = "warn"
disallowed_script_idents = "warn"
empty_drop = "warn"
empty_enum_variants_with_brackets = "warn"
empty_structs_with_brackets = "warn"
error_impl_error = "warn"
exit = "warn"
filetype_is_file = "warn"
float_cmp_const = "warn"
fn_to_numeric_cast_any = "warn"
get_unwrap = "warn"
if_then_some_else_none = "warn"
impl_trait_in_params = "warn"
infinite_loop = "warn"
lossy_float_literal = "warn"
map_with_unused_argument_over_ranges = "warn"
mem_forget = "warn"
missing_asserts_for_indexing = "warn"
missing_trait_methods = "warn"
mixed_read_write_in_expression = "warn"
mutex_atomic = "warn"
mutex_integer = "warn"
needless_raw_strings = "warn"
non_zero_suggestions = "warn"
panic_in_result_fn = "warn"
pathbuf_init_then_push = "warn"
print_stdout = "warn"
rc_buffer = "warn"
redundant_test_prefix = "warn"
redundant_type_annotations = "warn"
ref_patterns = "warn"
renamed_function_params = "warn"
rest_pat_in_fully_bound_structs = "warn"
return_and_then = "warn"
same_name_method = "warn"
semicolon_inside_block = "warn"
str_to_string = "warn"
string_add = "warn"
suspicious_xor_used_as_pow = "warn"
tests_outside_test_module = "warn"
todo = "warn"
try_err = "warn"
undocumented_unsafe_blocks = "warn"
unimplemented = "warn"
unnecessary_safety_comment = "warn"
unnecessary_safety_doc = "warn"
unnecessary_self_imports = "warn"
unneeded_field_pattern = "warn"
unseparated_literal_suffix = "warn"
unused_result_ok = "warn"
unused_trait_names = "warn"
unwrap_in_result = "warn"
unwrap_used = "warn"
verbose_file_reads = "warn"
[package]
name = "authentik"
version = "2026.5.0-rc1"
authors.workspace = true
edition.workspace = true
readme.workspace = true
homepage.workspace = true
repository.workspace = true
license-file.workspace = true
publish.workspace = true
[features]
default = ["core", "proxy"]
proxy = []
core = ["proxy", "dep:sqlx", "dep:pyo3"]
[dependencies]
arc-swap.workspace = true
argh.workspace = true
async-trait.workspace = true
aws-lc-rs.workspace = true
axum-server.workspace = true
axum.workspace = true
client-ip.workspace = true
color-eyre.workspace = true
config.workspace = true
console-subscriber.workspace = true
durstr.workspace = true
eyre.workspace = true
forwarded-header-value.workspace = true
futures.workspace = true
glob.workspace = true
http-body-util.workspace = true
hyper-unix-socket.workspace = true
hyper-util.workspace = true
hyper.workspace = true
ipnet.workspace = true
json-subscriber.workspace = true
jsonwebtoken.workspace = true
metrics.workspace = true
metrics-exporter-prometheus.workspace = true
nix.workspace = true
notify.workspace = true
pem.workspace = true
percent-encoding.workspace = true
pin-project-lite.workspace = true
pyo3 = { workspace = true, optional = true }
rcgen.workspace = true
rustls.workspace = true
sentry.workspace = true
serde.workspace = true
serde_json.workspace = true
sqlx = { workspace = true, optional = true }
thiserror.workspace = true
time.workspace = true
tokio-rustls.workspace = true
tokio-tungstenite.workspace = true
tokio-util.workspace = true
tokio.workspace = true
tower-http.workspace = true
tower.workspace = true
tracing-error.workspace = true
tracing-subscriber.workspace = true
tracing.workspace = true
url.workspace = true
uuid.workspace = true
[lints]
workspace = true

View File

@@ -23,7 +23,6 @@ BREW_LDFLAGS :=
BREW_CPPFLAGS :=
BREW_PKG_CONFIG_PATH :=
CARGO := cargo
UV := uv
# For macOS users, add the libxml2 installed from brew libxmlsec1 to the build path
@@ -70,26 +69,22 @@ help: ## Show this help
sort
@echo ""
go-test: ## Run the golang tests
go-test:
go test -timeout 0 -v -race -cover ./...
rust-test: ## Run the Rust tests
$(CARGO) nextest run --workspace
test: ## Run the server tests and produce a coverage report (locally)
$(UV) run coverage run manage.py test --keepdb $(or $(filter-out $@,$(MAKECMDGOALS)),authentik)
$(UV) run coverage html
$(UV) run coverage report
lint-fix: ## Lint and automatically fix errors in the python source code. Reports spelling errors.
lint-fix: lint-codespell ## Lint and automatically fix errors in the python source code. Reports spelling errors.
$(UV) run black $(PY_SOURCES)
$(UV) run ruff check --fix $(PY_SOURCES)
$(CARGO) +nightly fmt --all -- --config-path .cargo/rustfmt.toml
lint-spellcheck: ## Reports spelling errors.
npm run lint:spellcheck
lint-codespell: ## Reports spelling errors.
$(UV) run codespell -w
lint: ci-lint-bandit ci-lint-mypy ci-lint-cargo-deny ci-lint-cargo-machete ## Lint the python and golang sources
lint: ci-bandit ci-mypy ## Lint the python and golang sources
golangci-lint run -v
core-install:
@@ -110,24 +105,12 @@ i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that requir
aws-cfn:
cd lifecycle/aws && npm i && $(UV) run npm run aws-cfn
run: ## Run the authentik server and worker, without auto reloading
$(UV) run ak allinone
run-watch: ## Run the authentik server and worker, with auto reloading
$(UV) run watchexec --on-busy-update=restart --stop-signal=SIGINT --exts py,rs --no-meta --notify -- ak allinone
run-server: ## Run the authentik server, without auto reloading
run-server: ## Run the main authentik server process
$(UV) run ak server
run-server-watch: ## Run the authentik server, with auto reloading
$(UV) run watchexec --on-busy-update=restart --stop-signal=SIGINT --exts py,rs --no-meta --notify -- ak server
run-worker: ## Run the authentik worker, without auto reloading
run-worker: ## Run the main authentik worker process
$(UV) run ak worker
run-worker-watch: ## Run the authentik worker, with auto reloading
$(UV) run watchexec --on-busy-update=restart --stop-signal=SIGINT --exts py,rs --no-meta --notify -- ak worker
core-i18n-extract:
$(UV) run ak makemessages \
--add-location file \
@@ -165,11 +148,11 @@ bump: ## Bump authentik version. Usage: make bump version=20xx.xx.xx
ifndef version
$(error Usage: make bump version=20xx.xx.xx )
endif
$(eval current_version := $(shell cat ${PWD}/internal/constants/VERSION))
$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' ${PWD}/pyproject.toml ${PWD}/Cargo.toml
$(SED_INPLACE) 's/^VERSION = ".*"/VERSION = "$(version)"/' ${PWD}/authentik/__init__.py
$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' pyproject.toml
$(SED_INPLACE) 's/^VERSION = ".*"/VERSION = "$(version)"/' authentik/__init__.py
$(MAKE) gen-build gen-compose aws-cfn
$(SED_INPLACE) "s/\"${current_version}\"/\"$(version)\"/" ${PWD}/package.json ${PWD}/package-lock.json ${PWD}/web/package.json ${PWD}/web/package-lock.json
npm version --no-git-tag-version --allow-same-version $(version)
cd ${PWD}/web && npm version --no-git-tag-version --allow-same-version $(version)
echo -n $(version) > ${PWD}/internal/constants/VERSION
#########################
@@ -185,22 +168,12 @@ gen-build: ## Extract the schema from the database
gen-compose:
$(UV) run scripts/generate_compose.py
gen-changelog: ## (Release) generate the changelog based from the commits since the last version
# These are best-effort guesses based on commit messages
$(eval last_version := $(shell git tag --list 'version/*' --sort 'version:refname' | grep -vE 'rc\d+$$' | tail -1))
$(eval current_commit := $(shell git rev-parse HEAD))
git log --pretty=format:"- %s" $(shell git merge-base ${last_version} ${current_commit})...${current_commit} > merged_to_current
git log --pretty=format:"- %s" $(shell git merge-base ${last_version} ${current_commit})...${last_version} > merged_to_last
grep -Eo 'cherry-pick (#\d+)' merged_to_last | cut -d ' ' -f 2 | sed 's/.*/(&)$$/' > cherry_picked_to_last
grep -vf cherry_picked_to_last merged_to_current | sort > changelog.md
rm merged_to_current
rm merged_to_last
rm cherry_picked_to_last
gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
npx prettier --write changelog.md
gen-diff: ## (Release) generate the changelog diff between the current schema and the last version
$(eval last_version := $(shell git tag --list 'version/*' --sort 'version:refname' | grep -vE 'rc\d+$$' | tail -1))
git show ${last_version}:schema.yml > schema-old.yml
gen-diff: ## (Release) generate the changelog diff between the current schema and the last tag
git show $(shell git describe --tags $(shell git rev-list --tags --max-count=1)):schema.yml > schema-old.yml
docker compose -f scripts/api/compose.yml run --rm --user "${UID}:${GID}" diff \
--markdown \
/local/diff.md \
@@ -303,7 +276,7 @@ docs: docs-lint-fix docs-build ## Automatically fix formatting issues in the Au
docs-install:
npm ci --prefix website
docs-lint-fix: lint-spellcheck
docs-lint-fix: lint-codespell
npm run --prefix website prettier
docs-build:
@@ -348,40 +321,27 @@ test-docker:
# which makes the YAML File a lot smaller
ci--meta-debug:
$(UV) run python -V || echo "No python installed"
$(CARGO) --version || echo "No rust installed"
node --version || echo "No node installed"
$(UV) run python -V
node --version
ci-lint-mypy: ci--meta-debug
ci-mypy: ci--meta-debug
$(UV) run mypy --strict $(PY_SOURCES)
ci-lint-black: ci--meta-debug
ci-black: ci--meta-debug
$(UV) run black --check $(PY_SOURCES)
ci-lint-ruff: ci--meta-debug
ci-ruff: ci--meta-debug
$(UV) run ruff check $(PY_SOURCES)
ci-lint-spellcheck: ci--meta-debug
npm run lint:spellcheck
ci-codespell: ci--meta-debug
$(UV) run codespell -s
ci-lint-bandit: ci--meta-debug
ci-bandit: ci--meta-debug
$(UV) run bandit -c pyproject.toml -r $(PY_SOURCES) -iii
ci-lint-pending-migrations: ci--meta-debug
ci-pending-migrations: ci--meta-debug
$(UV) run ak makemigrations --check
ci-lint-cargo-deny: ci--meta-debug
$(CARGO) deny --locked --workspace check --config .cargo/deny.toml
ci-lint-cargo-machete: ci--meta-debug
$(CARGO) machete
ci-lint-rustfmt: ci--meta-debug
$(CARGO) +nightly fmt --all --check -- --config-path .cargo/rustfmt.toml
ci-lint-clippy: ci--meta-debug
$(CARGO) clippy -- -D warnings
ci-test: ci--meta-debug
$(UV) run coverage run manage.py test --keepdb authentik
$(UV) run coverage report

View File

@@ -20,8 +20,8 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
| Version | Supported |
| ---------- | ---------- |
| 2025.10.x | ✅ |
| 2025.12.x | ✅ |
| 2026.2.x | ✅ |
## Reporting a Vulnerability

View File

@@ -3,7 +3,7 @@
from functools import lru_cache
from os import environ
VERSION = "2026.5.0-rc1"
VERSION = "2026.2.0-rc1"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"

View File

@@ -18,6 +18,7 @@ from rest_framework.views import APIView
from authentik import authentik_full_version
from authentik.core.api.utils import PassiveSerializer
from authentik.enterprise.license import LicenseKey
from authentik.lib.config import CONFIG
from authentik.lib.utils.reflection import get_env
from authentik.outposts.apps import MANAGED_OUTPOST
@@ -25,15 +26,6 @@ from authentik.outposts.models import Outpost
from authentik.rbac.permissions import HasPermission
def fips_enabled():
try:
from authentik.enterprise.license import LicenseKey
return backend._fips_enabled if LicenseKey.get_total().status().is_valid else None
except ModuleNotFoundError:
return None
class RuntimeDict(TypedDict):
"""Runtime information"""
@@ -88,7 +80,9 @@ class SystemInfoSerializer(PassiveSerializer):
"architecture": platform.machine(),
"authentik_version": authentik_full_version(),
"environment": get_env(),
"openssl_fips_enabled": fips_enabled(),
"openssl_fips_enabled": (
backend._fips_enabled if LicenseKey.get_total().status().is_valid else None
),
"openssl_version": OPENSSL_VERSION,
"platform": platform.platform(),
"python_version": python_version,

View File

@@ -94,7 +94,7 @@ class Backend:
Args:
file_path: Relative file path
request: Optional Django HttpRequest for fully qualified URL building
request: Optional Django HttpRequest for fully qualifed URL building
use_cache: whether to retrieve the URL from cache
Returns:

View File

@@ -92,7 +92,6 @@ class FileBackend(ManageableBackend):
"nbf": now() - timedelta(seconds=15),
},
key=sha256(f"{settings.SECRET_KEY}:{self.usage}".encode()).hexdigest(),
# Must match crates/authentik-server/src/static.rs
algorithm="HS256",
)
url = f"{prefix}/files/{path}?token={token}"

View File

@@ -100,25 +100,13 @@ class S3Backend(ManageableBackend):
f"storage.{self.usage.value}.{self.name}.addressing_style",
CONFIG.get(f"storage.{self.name}.addressing_style", "auto"),
)
signature_version = CONFIG.get(
f"storage.{self.usage.value}.{self.name}.signature_version",
CONFIG.get(f"storage.{self.name}.signature_version", "s3v4"),
)
# Keep signature_version pass-through and let boto3/botocore handle it.
# In boto3's S3 configuration docs, `s3v4` (default) and deprecated `s3`
# are the documented values:
# https://github.com/boto/boto3/blob/791a3e8f36d83664a47b4281a0586b3546cef3ec/docs/source/guide/configuration.rst?plain=1#L398-L407
# Botocore also supports additional signer names, so we intentionally do
# not enforce a restricted allowlist here.
return self.session.client(
"s3",
endpoint_url=endpoint_url,
use_ssl=use_ssl,
region_name=region_name,
config=Config(
signature_version=signature_version, s3={"addressing_style": addressing_style}
),
config=Config(signature_version="s3v4", s3={"addressing_style": addressing_style}),
)
@property

View File

@@ -1,6 +1,5 @@
from unittest import skipUnless
from botocore.exceptions import UnsupportedSignatureVersionError
from django.test import TestCase
from authentik.admin.files.tests.utils import FileTestS3BackendMixin, s3_test_server_available
@@ -82,27 +81,6 @@ class TestS3Backend(FileTestS3BackendMixin, TestCase):
self.assertIn("X-Amz-Signature=", url)
self.assertIn("test.png", url)
def test_client_signature_version_default_v4(self):
"""Test S3 client defaults to v4 signature when not configured."""
self.assertEqual(self.media_s3_backend.client.meta.config.signature_version, "s3v4")
@CONFIG.patch("storage.s3.signature_version", "s3")
def test_client_signature_version_global_override(self):
"""Test S3 client respects globally configured signature version."""
self.assertEqual(self.media_s3_backend.client.meta.config.signature_version, "s3")
@CONFIG.patch("storage.s3.signature_version", "s3v4")
@CONFIG.patch("storage.media.s3.signature_version", "s3")
def test_client_signature_version_media_override(self):
"""Test usage-specific signature version takes precedence over global."""
self.assertEqual(self.media_s3_backend.client.meta.config.signature_version, "s3")
@CONFIG.patch("storage.media.s3.signature_version", "not-a-real-signature")
def test_client_signature_version_unsupported(self):
"""Test unsupported signature version raises botocore error."""
with self.assertRaises(UnsupportedSignatureVersionError):
self.media_s3_backend.file_url("test.png", use_cache=False)
@CONFIG.patch("storage.s3.bucket_name", "test-bucket")
def test_file_exists_true(self):
"""Test file_exists returns True for existing file"""

View File

@@ -71,7 +71,7 @@ def postprocess_schema_responses(
def postprocess_schema_query_params(
result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
"""Optimize pagination parameters, instead of redeclaring parameters for each endpoint
"""Optimise pagination parameters, instead of redeclaring parameters for each endpoint
declare them globally and refer to them"""
LOGGER.debug("Deduplicating query parameters")
for path in result["paths"].values():

View File

@@ -1,8 +1,6 @@
"""Schema generation tests"""
from pathlib import Path
from tempfile import gettempdir
from uuid import uuid4
from django.core.management import call_command
from django.urls import reverse
@@ -31,14 +29,15 @@ class TestSchemaGeneration(APITestCase):
def test_build_schema(self):
"""Test schema build command"""
tmp = Path(gettempdir())
blueprint_file = tmp / f"{str(uuid4())}.json"
api_file = tmp / f"{str(uuid4())}.yml"
blueprint_file = Path("blueprints/schema.json")
api_file = Path("schema.yml")
blueprint_file.unlink()
api_file.unlink()
with (
CONFIG.patch("debug", True),
CONFIG.patch("tenants.enabled", True),
CONFIG.patch("outposts.disable_embedded_outpost", True),
):
call_command("build_schema", blueprint_file=blueprint_file, api_file=api_file)
call_command("build_schema")
self.assertTrue(blueprint_file.exists())
self.assertTrue(api_file.exists())

View File

@@ -1,5 +1,7 @@
"""Apply blueprint from commandline"""
from sys import exit as sys_exit
from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger
@@ -26,7 +28,7 @@ class Command(BaseCommand):
self.stderr.write("Blueprint invalid")
for log in logs:
self.stderr.write(f"\t{log.logger}: {log.event}: {log.attributes}")
raise RuntimeError("Blueprint invalid")
sys_exit(1)
importer.apply()
def add_arguments(self, parser):

View File

@@ -43,6 +43,8 @@ def get_attrs(obj: SerializerModel) -> dict[str, Any]:
continue
if _field.read_only:
data.pop(field_name, None)
if _field.get_initial() == data.get(field_name, None):
data.pop(field_name, None)
if field_name.endswith("_set"):
data.pop(field_name, None)
return data

View File

@@ -41,6 +41,7 @@ from authentik.core.models import (
UserSourceConnection,
)
from authentik.endpoints.models import Connector
from authentik.enterprise.license import LicenseKey
from authentik.events.logs import LogEvent, capture_logs
from authentik.events.utils import cleanse_dict
from authentik.flows.models import Stage
@@ -139,19 +140,10 @@ class Importer:
def default_context(self):
"""Default context"""
context = {
return {
"goauthentik.io/enterprise/licensed": LicenseKey.get_total().status().is_valid,
"goauthentik.io/rbac/models": rbac_models(),
"goauthentik.io/enterprise/licensed": False,
}
try:
from authentik.enterprise.license import LicenseKey
context["goauthentik.io/enterprise/licensed"] = (
LicenseKey.get_total().status().is_valid,
)
except ModuleNotFoundError:
pass
return context
@staticmethod
def from_string(yaml_input: str, context: dict | None = None) -> Importer:
@@ -272,7 +264,7 @@ class Importer:
and entry.state != BlueprintEntryDesiredState.MUST_CREATED
):
self.logger.debug(
"Initialize serializer with instance",
"Initialise serializer with instance",
model=model,
instance=model_instance,
pk=model_instance.pk,
@@ -290,7 +282,7 @@ class Importer:
)
else:
self.logger.debug(
"Initialized new serializer instance",
"Initialised new serializer instance",
model=model,
**cleanse_dict(updated_identifiers),
)

View File

@@ -3,7 +3,7 @@
from typing import Any
from django.db.models import Case, F, IntegerField, Q, Value, When
from django.db.models.functions import Concat, Length
from django.db.models.functions import Length
from django.http.request import HttpRequest
from django.utils.html import _json_script_escapes
from django.utils.safestring import mark_safe
@@ -26,8 +26,7 @@ def get_brand_for_request(request: HttpRequest) -> Brand:
domain_length=Length("domain"),
match_priority=Case(
When(
condition=Q(host_domain__iexact=F("domain"))
| Q(host_domain__iendswith=Concat(Value("."), F("domain"))),
condition=Q(host_domain__iendswith=F("domain")),
then=F("domain_length"),
),
default=Value(-1),

View File

@@ -28,8 +28,6 @@ SAML_ATTRIBUTES_GROUP = "http://schemas.xmlsoap.org/claims/Group"
SAML_BINDING_POST = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
SAML_BINDING_REDIRECT = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
SAML_STATUS_SUCCESS = "urn:oasis:names:tc:SAML:2.0:status:Success"
DSA_SHA1 = "http://www.w3.org/2000/09/xmldsig#dsa-sha1"
RSA_SHA1 = "http://www.w3.org/2000/09/xmldsig#rsa-sha1"
# https://datatracker.ietf.org/doc/html/rfc4051#section-2.3.2

View File

@@ -154,14 +154,14 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
return queryset
def _get_allowed_applications(
self, paginated_apps: Iterator[Application], user: User | None = None
self, pagined_apps: Iterator[Application], user: User | None = None
) -> list[Application]:
applications = []
request = self.request._request
if user:
request = copy(request)
request.user = user
for application in paginated_apps:
for application in pagined_apps:
engine = PolicyEngine(application, request.user, request)
engine.build()
if engine.passing:

View File

@@ -16,15 +16,11 @@ from rest_framework.viewsets import ViewSet
from authentik.api.validation import validate
from authentik.core.api.users import ParamUserSerializer
from authentik.core.api.utils import MetaNameSerializer
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
from authentik.stages.authenticator import device_classes, devices_for_user
from authentik.stages.authenticator.models import Device
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice
try:
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
except ModuleNotFoundError:
EndpointDevice = None
class DeviceSerializer(MetaNameSerializer):
"""Serializer for authenticator devices"""
@@ -47,7 +43,7 @@ class DeviceSerializer(MetaNameSerializer):
"""Get extra description"""
if isinstance(instance, WebAuthnDevice):
return instance.device_type.description if instance.device_type else None
if EndpointDevice and isinstance(instance, EndpointDevice):
if isinstance(instance, EndpointDevice):
return instance.data.get("deviceSignals", {}).get("deviceModel")
return None
@@ -55,7 +51,7 @@ class DeviceSerializer(MetaNameSerializer):
"""Get external Device ID"""
if isinstance(instance, WebAuthnDevice):
return instance.device_type.aaguid if instance.device_type else None
if EndpointDevice and isinstance(instance, EndpointDevice):
if isinstance(instance, EndpointDevice):
return instance.data.get("deviceSignals", {}).get("deviceModel")
return None

View File

@@ -10,6 +10,7 @@ from rest_framework.request import Request
from rest_framework.response import Response
from authentik.core.api.utils import PassiveSerializer
from authentik.enterprise.apps import EnterpriseConfig
from authentik.lib.models import DeprecatedMixin
from authentik.lib.utils.reflection import all_subclasses
@@ -60,25 +61,19 @@ class TypesMixin:
continue
instance = subclass()
try:
type_signature = {
"name": subclass._meta.verbose_name,
"description": subclass.__doc__,
"component": instance.component,
"model_name": subclass._meta.model_name,
"icon_url": getattr(instance, "icon_url", None),
"requires_enterprise": False,
"deprecated": isinstance(instance, DeprecatedMixin),
}
try:
from authentik.enterprise.apps import EnterpriseConfig
type_signature["requires_enterprise"] = isinstance(
subclass._meta.app_config, EnterpriseConfig
)
except ModuleNotFoundError:
pass
data.append(type_signature)
data.append(
{
"name": subclass._meta.verbose_name,
"description": subclass.__doc__,
"component": instance.component,
"model_name": subclass._meta.model_name,
"icon_url": getattr(instance, "icon_url", None),
"requires_enterprise": isinstance(
subclass._meta.app_config, EnterpriseConfig
),
"deprecated": isinstance(instance, DeprecatedMixin),
}
)
except NotImplementedError:
continue
if additional:

View File

@@ -72,7 +72,6 @@ from authentik.core.middleware import (
from authentik.core.models import (
USER_ATTRIBUTE_TOKEN_EXPIRING,
USER_PATH_SERVICE_ACCOUNT,
USERNAME_MAX_LENGTH,
Group,
Session,
Token,
@@ -145,7 +144,7 @@ class UserSerializer(ModelSerializer):
roles_obj = SerializerMethodField(allow_null=True)
uid = CharField(read_only=True)
username = CharField(
max_length=USERNAME_MAX_LENGTH,
max_length=150,
validators=[UniqueValidator(queryset=User.objects.all().order_by("username"))],
)

View File

@@ -1,7 +1,5 @@
"""authentik core models"""
import re
import traceback
from datetime import datetime, timedelta
from enum import StrEnum
from hashlib import sha256
@@ -17,6 +15,7 @@ from django.contrib.sessions.base_session import AbstractBaseSession
from django.core.validators import validate_slug
from django.db import models
from django.db.models import Q, QuerySet, options
from django.db.models.constants import LOOKUP_SEP
from django.http import HttpRequest
from django.utils.functional import cached_property
from django.utils.timezone import now
@@ -44,7 +43,6 @@ from authentik.lib.models import (
DomainlessFormattedURLValidator,
SerializerModel,
)
from authentik.lib.utils.inheritance import get_deepest_child
from authentik.lib.utils.time import timedelta_from_string
from authentik.policies.models import PolicyBindingModel
from authentik.rbac.models import Role
@@ -52,7 +50,6 @@ from authentik.tenants.models import DEFAULT_TOKEN_DURATION, DEFAULT_TOKEN_LENGT
from authentik.tenants.utils import get_current_tenant, get_unique_identifier
LOGGER = get_logger()
USERNAME_MAX_LENGTH = 150
USER_PATH_SYSTEM_PREFIX = "goauthentik.io"
_USER_ATTR_PREFIX = f"{USER_PATH_SYSTEM_PREFIX}/user"
USER_ATTRIBUTE_DEBUG = f"{_USER_ATTR_PREFIX}/debug"
@@ -530,35 +527,23 @@ class User(SerializerModel, AttributesMixin, AbstractUser):
"default: in 30 days). See authentik logs for every will invocation of this "
"deprecation."
)
stacktrace = traceback.format_stack()
# The last line is this function, the next-to-last line is its caller
cause = stacktrace[-2] if len(stacktrace) > 1 else "Unknown, see stacktrace in logs"
if search := re.search(r'"(.*?)"', cause):
cause = f"Property mapping or Expression policy named {search.group(1)}"
LOGGER.warning(
"deprecation used",
message=message_logger,
deprecation=deprecation,
replacement=replacement,
cause=cause,
stacktrace=stacktrace,
)
if not Event.filter_not_expired(
action=EventAction.CONFIGURATION_WARNING,
context__deprecation=deprecation,
context__cause=cause,
action=EventAction.CONFIGURATION_WARNING, context__deprecation=deprecation
).exists():
event = Event.new(
EventAction.CONFIGURATION_WARNING,
deprecation=deprecation,
replacement=replacement,
message=message_event,
cause=cause,
)
event.expires = datetime.now() + timedelta(days=30)
event.save()
return self.groups
def set_password(self, raw_password, signal=True, sender=None, request=None):
@@ -803,7 +788,25 @@ class Application(SerializerModel, PolicyBindingModel):
"""Get casted provider instance. Needs Application queryset with_provider"""
if not self.provider:
return None
return get_deepest_child(self.provider)
candidates = []
base_class = Provider
for subclass in base_class.objects.get_queryset()._get_subclasses_recurse(base_class):
parent = self.provider
for level in subclass.split(LOOKUP_SEP):
try:
parent = getattr(parent, level)
except AttributeError:
break
if parent in candidates:
continue
idx = subclass.count(LOOKUP_SEP)
if type(parent) is not base_class:
idx += 1
candidates.insert(idx, parent)
if not candidates:
return None
return candidates[-1]
def backchannel_provider_for[T: Provider](self, provider_type: type[T], **kwargs) -> T | None:
"""Get Backchannel provider for a specific type"""
@@ -1115,11 +1118,7 @@ class ExpiringModel(models.Model):
default the object is deleted. This is less efficient compared
to bulk deleting objects, but classes like Token() need to change
values instead of being deleted."""
try:
return self.delete(*args, **kwargs)
except self.DoesNotExist:
# Object has already been deleted, so this should be fine
return None
return self.delete(*args, **kwargs)
@classmethod
def filter_not_expired(cls, **kwargs) -> QuerySet[Self]:

View File

@@ -21,10 +21,6 @@
{% block head_before %}
{% endblock %}
{% block interface_stylesheet %}
<link rel="stylesheet" type="text/css" href="{% versioned_script 'dist/styles/interface-%v.css' %}" />
{% endblock %}
{% include "base/theme.html" %}
<style data-id="brand-css">{{ brand_css }}</style>

View File

@@ -1,6 +1,8 @@
{% load static %}
{% load authentik_core %}
<link rel="stylesheet" type="text/css" href="{% versioned_script 'dist/styles/interface-%v.css' %}" />
{% if ui_theme == "dark" %}
<meta name="color-scheme" content="dark" />
<meta name="theme-color" content="#18191a">

View File

@@ -44,24 +44,19 @@
{% endblock %}
</div>
</main>
<footer
name="site-footer"
aria-label="{% trans 'Site footer' %}"
class="pf-c-login__footer pf-m-dark">
<div name="flow-links" aria-label="{% trans 'Flow links' %}">
<ul class="pf-c-list pf-m-inline" part="list">
{% for link in footer_links %}
<li part="list-item">
<a part="list-item-link" href="{{ link.href }}">{{ link.name }}</a>
</li>
{% endfor %}
<li part="list-item">
<span>
{% trans 'Powered by authentik' %}
</span>
</li>
</ul>
</div>
<footer aria-label="Site footer" class="pf-c-login__footer pf-m-dark">
<ul class="pf-c-list pf-m-inline">
{% for link in footer_links %}
<li>
<a href="{{ link.href }}">{{ link.name }}</a>
</li>
{% endfor %}
<li>
<span>
{% trans 'Powered by authentik' %}
</span>
</li>
</ul>
</footer>
</div>
</div>

View File

@@ -1,101 +0,0 @@
"""Test interface view redirect behavior by user type"""
from django.test import TestCase
from django.urls import reverse
from authentik.brands.models import Brand
from authentik.core.models import Application, UserTypes
from authentik.core.tests.utils import create_test_brand, create_test_user
class TestInterfaceRedirects(TestCase):
"""Test RootRedirectView and BrandDefaultRedirectView redirect logic by user type"""
def setUp(self):
self.app = Application.objects.create(name="test-app", slug="test-app")
self.brand: Brand = create_test_brand(default_application=self.app)
def _assert_redirects_to_app(self, url_name: str, user_type: UserTypes):
user = create_test_user(type=user_type)
self.client.force_login(user)
response = self.client.get(reverse(f"authentik_core:{url_name}"))
self.assertRedirects(
response,
reverse(
"authentik_core:application-launch", kwargs={"application_slug": self.app.slug}
),
fetch_redirect_response=False,
)
def _assert_no_redirect(self, url_name: str, user_type: UserTypes):
"""Internal users should not be redirected away."""
user = create_test_user(type=user_type)
self.client.force_login(user)
response = self.client.get(reverse(f"authentik_core:{url_name}"))
# Internal users get a 200 (rendered template) or redirect to if-user, not to the app
app_url = reverse(
"authentik_core:application-launch", kwargs={"application_slug": self.app.slug}
)
self.assertNotEqual(response.get("Location"), app_url)
# --- RootRedirectView ---
def test_root_redirect_external_user(self):
"""External users are redirected to the default app from root"""
self._assert_redirects_to_app("root-redirect", UserTypes.EXTERNAL)
def test_root_redirect_service_account(self):
"""Service accounts are redirected to the default app from root"""
self._assert_redirects_to_app("root-redirect", UserTypes.SERVICE_ACCOUNT)
def test_root_redirect_internal_service_account(self):
"""Internal service accounts are redirected to the default app from root"""
self._assert_redirects_to_app("root-redirect", UserTypes.INTERNAL_SERVICE_ACCOUNT)
def test_root_redirect_internal_user(self):
"""Internal users are NOT redirected to the app from root"""
self._assert_no_redirect("root-redirect", UserTypes.INTERNAL)
# --- BrandDefaultRedirectView (if/user/) ---
def test_if_user_external_user(self):
"""External users are redirected to the default app from if/user/"""
self._assert_redirects_to_app("if-user", UserTypes.EXTERNAL)
def test_if_user_service_account(self):
"""Service accounts are redirected to the default app from if/user/"""
self._assert_redirects_to_app("if-user", UserTypes.SERVICE_ACCOUNT)
def test_if_user_internal_service_account(self):
"""Internal service accounts are redirected to the default app from if/user/"""
self._assert_redirects_to_app("if-user", UserTypes.INTERNAL_SERVICE_ACCOUNT)
def test_if_user_internal_user(self):
"""Internal users are NOT redirected to the app from if/user/"""
self._assert_no_redirect("if-user", UserTypes.INTERNAL)
# --- BrandDefaultRedirectView (if/admin/) ---
def test_if_admin_service_account(self):
"""Service accounts are redirected to the default app from if/admin/"""
self._assert_redirects_to_app("if-admin", UserTypes.SERVICE_ACCOUNT)
def test_if_admin_internal_service_account(self):
"""Internal service accounts are redirected to the default app from if/admin/"""
self._assert_redirects_to_app("if-admin", UserTypes.INTERNAL_SERVICE_ACCOUNT)
def test_if_admin_internal_user(self):
"""Internal users are NOT redirected to the app from if/admin/"""
self._assert_no_redirect("if-admin", UserTypes.INTERNAL)
# --- No default app set ---
def test_service_account_no_default_app_access_denied(self):
"""Service accounts get access denied when no default app is configured"""
self.brand.default_application = None
self.brand.save()
user = create_test_user(type=UserTypes.SERVICE_ACCOUNT)
self.client.force_login(user)
response = self.client.get(reverse("authentik_core:if-user"))
self.assertEqual(response.status_code, 200)
self.assertIn(b"Interface can only be accessed by internal users", response.content)

View File

@@ -63,7 +63,7 @@ class TestPropertyMappingAPI(APITestCase):
PropertyMappingSerializer().validate_expression("/")
def test_types(self):
"""Test PropertyMapping's types endpoint"""
"""Test PropertyMappigns's types endpoint"""
response = self.client.get(
reverse("authentik_api:propertymapping-types"),
)

View File

@@ -26,11 +26,7 @@ class RootRedirectView(RedirectView):
query_string = True
def redirect_to_app(self, request: HttpRequest):
if request.user.is_authenticated and request.user.type in (
UserTypes.EXTERNAL,
UserTypes.SERVICE_ACCOUNT,
UserTypes.INTERNAL_SERVICE_ACCOUNT,
):
if request.user.is_authenticated and request.user.type == UserTypes.EXTERNAL:
brand: Brand = request.brand
if brand.default_application:
return redirect(
@@ -66,11 +62,7 @@ class BrandDefaultRedirectView(InterfaceView):
"""By default redirect to default app"""
def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
if request.user.is_authenticated and request.user.type in (
UserTypes.EXTERNAL,
UserTypes.SERVICE_ACCOUNT,
UserTypes.INTERNAL_SERVICE_ACCOUNT,
):
if request.user.is_authenticated and request.user.type == UserTypes.EXTERNAL:
brand: Brand = request.brand
if brand.default_application:
return redirect(

View File

@@ -78,7 +78,7 @@ def generate_key_id_legacy(key_data: str) -> str:
"""Generate Key ID using MD5 (legacy format for backwards compatibility)."""
if not key_data:
return ""
return md5(key_data.encode("utf-8"), usedforsecurity=False).hexdigest() # nosec
return md5(key_data.encode("utf-8")).hexdigest() # nosec
class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel):

View File

@@ -1,11 +1,8 @@
from django.utils.translation import gettext_lazy as _
from rest_framework.exceptions import ValidationError
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.used_by import UsedByMixin
from authentik.endpoints.api.connectors import ConnectorSerializer
from authentik.endpoints.controller import Capabilities
from authentik.endpoints.models import Connector, EndpointStage
from authentik.endpoints.models import EndpointStage
from authentik.flows.api.stages import StageSerializer
@@ -14,13 +11,6 @@ class EndpointStageSerializer(StageSerializer):
connector_obj = ConnectorSerializer(source="connector", read_only=True)
def validate_connector(self, connector: Connector) -> Connector:
conn: Connector = Connector.objects.get_subclass(pk=connector.pk)
controller = conn.controller(conn)
if Capabilities.STAGE_ENDPOINTS not in controller.capabilities():
raise ValidationError(_("Selected connector is not compatible with this stage."))
return connector
class Meta:
model = EndpointStage
fields = StageSerializer.Meta.fields + [

View File

@@ -8,7 +8,7 @@ from rest_framework.fields import CharField
from authentik.core.api.utils import PassiveSerializer
from authentik.endpoints.connectors.agent.models import AgentConnector, EnrollmentToken
from authentik.endpoints.controller import BaseController, Capabilities
from authentik.endpoints.controller import BaseController
from authentik.endpoints.facts import OSFamily
@@ -48,8 +48,8 @@ class AgentConnectorController(BaseController[AgentConnector]):
def vendor_identifier() -> str:
return "goauthentik.io/platform"
def capabilities(self) -> list[Capabilities]:
return [Capabilities.STAGE_ENDPOINTS]
def supported_enrollment_methods(self):
return []
def generate_mdm_config(
self, target_platform: OSFamily, request: HttpRequest, token: EnrollmentToken

View File

@@ -1,6 +1,5 @@
from hashlib import sha256
from json import loads
from unittest.mock import PropertyMock, patch
from django.urls import reverse
from jwt import encode
@@ -233,43 +232,3 @@ class TestEndpointStage(FlowTestCase):
plan = plan()
self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
self.assertEqual(plan.context[PLAN_CONTEXT_DEVICE], self.device)
def test_endpoint_stage_connector_no_stage_optional(self):
flow = create_test_flow()
stage = EndpointStage.objects.create(connector=self.connector, mode=StageMode.OPTIONAL)
FlowStageBinding.objects.create(stage=stage, target=flow, order=0)
with patch(
"authentik.endpoints.connectors.agent.models.AgentConnector.stage",
PropertyMock(return_value=None),
):
with self.assertFlowFinishes() as plan:
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
)
self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
plan = plan()
self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
self.assertNotIn(PLAN_CONTEXT_DEVICE, plan.context)
def test_endpoint_stage_connector_no_stage_required(self):
flow = create_test_flow()
stage = EndpointStage.objects.create(connector=self.connector, mode=StageMode.REQUIRED)
FlowStageBinding.objects.create(stage=stage, target=flow, order=0)
with patch(
"authentik.endpoints.connectors.agent.models.AgentConnector.stage",
PropertyMock(return_value=None),
):
with self.assertFlowFinishes() as plan:
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
)
self.assertStageResponse(
res,
component="ak-stage-access-denied",
error_message="Invalid stage configuration",
)
plan = plan()
self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
self.assertNotIn(PLAN_CONTEXT_DEVICE, plan.context)

View File

@@ -8,15 +8,13 @@ from authentik.lib.sentry import SentryIgnoredException
MERGED_VENDOR = "goauthentik.io/@merged"
class Capabilities(models.TextChoices):
class EnrollmentMethods(models.TextChoices):
# Automatically enrolled through user action
ENROLL_AUTOMATIC_USER = "enroll_automatic_user"
AUTOMATIC_USER = "automatic_user"
# Automatically enrolled through connector integration
ENROLL_AUTOMATIC_API = "enroll_automatic_api"
AUTOMATIC_API = "automatic_api"
# Manually enrolled with user interaction (user scanning a QR code for example)
ENROLL_MANUAL_USER = "enroll_manual_user"
# Supported for use with Endpoints stage
STAGE_ENDPOINTS = "stage_endpoints"
MANUAL_USER = "manual_user"
class ConnectorSyncException(SentryIgnoredException):
@@ -36,7 +34,7 @@ class BaseController[T: "Connector"]:
def vendor_identifier() -> str:
raise NotImplementedError
def capabilities(self) -> list[Capabilities]:
def supported_enrollment_methods(self) -> list[EnrollmentMethods]:
return []
def stage_view_enrollment(self) -> StageView | None:
@@ -44,6 +42,3 @@ class BaseController[T: "Connector"]:
def stage_view_authentication(self) -> StageView | None:
return None
def sync_endpoints(self):
raise NotImplementedError

View File

@@ -63,7 +63,7 @@ class OperatingSystemSerializer(Serializer):
"Operating System version, must always be the version number but may contain build name"
),
)
arch = CharField(required=False)
arch = CharField(required=True)
class NetworkInterfaceSerializer(Serializer):

View File

@@ -162,11 +162,8 @@ class Connector(ScheduledModel, SerializerModel):
@property
def schedule_specs(self) -> list[ScheduleSpec]:
from authentik.endpoints.controller import Capabilities
from authentik.endpoints.tasks import endpoints_sync
if Capabilities.ENROLL_AUTOMATIC_API not in self.controller(self).capabilities():
return []
return [
ScheduleSpec(
actor=endpoints_sync,

View File

@@ -1,4 +1,4 @@
from authentik.endpoints.models import EndpointStage, StageMode
from authentik.endpoints.models import EndpointStage
from authentik.flows.stage import StageView
PLAN_CONTEXT_ENDPOINT_CONNECTOR = "endpoint_connector"
@@ -6,24 +6,15 @@ PLAN_CONTEXT_ENDPOINT_CONNECTOR = "endpoint_connector"
class EndpointStageView(StageView):
def _get_inner(self) -> StageView | None:
def _get_inner(self):
stage: EndpointStage = self.executor.current_stage
inner_stage: type[StageView] | None = stage.connector.stage
if not inner_stage:
return None
return self.executor.stage_ok()
return inner_stage(self.executor, request=self.request)
def dispatch(self, request, *args, **kwargs):
inner = self._get_inner()
if inner is None:
stage: EndpointStage = self.executor.current_stage
if stage.mode == StageMode.OPTIONAL:
return self.executor.stage_ok()
else:
return self.executor.stage_invalid("Invalid stage configuration")
return inner.dispatch(request, *args, **kwargs)
return self._get_inner().dispatch(request, *args, **kwargs)
def cleanup(self):
inner = self._get_inner()
if inner is not None:
return inner.cleanup()
return self._get_inner().cleanup()

View File

@@ -6,7 +6,7 @@ from django.utils.translation import gettext_lazy as _
from dramatiq.actor import actor
from structlog.stdlib import get_logger
from authentik.endpoints.controller import Capabilities
from authentik.endpoints.controller import EnrollmentMethods
from authentik.endpoints.models import Connector
LOGGER = get_logger()
@@ -21,7 +21,7 @@ def endpoints_sync(connector_pk: Any):
return
controller = connector.controller
ctrl = controller(connector)
if Capabilities.ENROLL_AUTOMATIC_API not in ctrl.capabilities():
if EnrollmentMethods.AUTOMATIC_API not in ctrl.supported_enrollment_methods():
return
LOGGER.info("Syncing connector", connector=connector.name)
ctrl.sync_endpoints()

View File

@@ -1,41 +0,0 @@
from django.urls import reverse
from rest_framework.test import APITestCase
from authentik.core.tests.utils import create_test_admin_user
from authentik.endpoints.connectors.agent.models import AgentConnector
from authentik.endpoints.models import StageMode
from authentik.enterprise.endpoints.connectors.fleet.models import FleetConnector
from authentik.lib.generators import generate_id
class TestAPI(APITestCase):
def setUp(self):
self.user = create_test_admin_user()
self.client.force_login(self.user)
def test_endpoint_stage_agent(self):
connector = AgentConnector.objects.create(name=generate_id())
res = self.client.post(
reverse("authentik_api:stages-endpoint-list"),
data={
"name": generate_id(),
"connector": str(connector.pk),
"mode": StageMode.REQUIRED,
},
)
self.assertEqual(res.status_code, 201)
def test_endpoint_stage_fleet(self):
connector = FleetConnector.objects.create(name=generate_id())
res = self.client.post(
reverse("authentik_api:stages-endpoint-list"),
data={
"name": generate_id(),
"connector": str(connector.pk),
"mode": StageMode.REQUIRED,
},
)
self.assertEqual(res.status_code, 400)
self.assertJSONEqual(
res.content, {"connector": ["Selected connector is not compatible with this stage."]}
)

View File

@@ -1,35 +0,0 @@
from unittest.mock import PropertyMock, patch
from rest_framework.test import APITestCase
from authentik.endpoints.controller import BaseController, Capabilities
from authentik.endpoints.models import Connector
from authentik.endpoints.tasks import endpoints_sync
from authentik.lib.generators import generate_id
class TestEndpointTasks(APITestCase):
def test_agent_sync(self):
class controller(BaseController):
def capabilities(self):
return [Capabilities.ENROLL_AUTOMATIC_API]
def sync_endpoints(self):
pass
with patch.object(Connector, "controller", PropertyMock(return_value=controller)):
connector = Connector.objects.create(name=generate_id())
self.assertEqual(len(connector.schedule_specs), 1)
endpoints_sync.send(connector.pk).get_result(block=True)
def test_agent_no_sync(self):
class controller(BaseController):
def capabilities(self):
return []
with patch.object(Connector, "controller", PropertyMock(return_value=controller)):
connector = Connector.objects.create(name=generate_id())
self.assertEqual(len(connector.schedule_specs), 0)
endpoints_sync.send(connector.pk).get_result(block=True)

View File

@@ -6,7 +6,7 @@ from requests import RequestException
from rest_framework.exceptions import ValidationError
from authentik.core.models import User
from authentik.endpoints.controller import BaseController, Capabilities, ConnectorSyncException
from authentik.endpoints.controller import BaseController, ConnectorSyncException, EnrollmentMethods
from authentik.endpoints.facts import (
DeviceFacts,
OSFamily,
@@ -43,8 +43,8 @@ class FleetController(BaseController[DBC]):
def vendor_identifier() -> str:
return "fleetdm.com"
def capabilities(self) -> list[Capabilities]:
return [Capabilities.ENROLL_AUTOMATIC_API]
def supported_enrollment_methods(self) -> list[EnrollmentMethods]:
return [EnrollmentMethods.AUTOMATIC_API]
def _url(self, path: str) -> str:
return f"{self.connector.url}{path}"

View File

@@ -1,42 +0,0 @@
"""GoogleChromeConnector API Views"""
from django.urls import reverse
from rest_framework.fields import SerializerMethodField
from rest_framework.request import Request
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.used_by import UsedByMixin
from authentik.endpoints.api.connectors import ConnectorSerializer
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.endpoints.connectors.google_chrome.models import GoogleChromeConnector
class GoogleChromeConnectorSerializer(EnterpriseRequiredMixin, ConnectorSerializer):
"""GoogleChromeConnector Serializer"""
chrome_url = SerializerMethodField()
def get_chrome_url(self, _: GoogleChromeConnector) -> str | None:
"""Full URL to be used in Google Workspace configuration"""
request: Request = self.context.get("request", None)
if not request:
return True
return request.build_absolute_uri(
reverse("authentik_endpoints_connectors_google_chrome:chrome")
)
class Meta:
model = GoogleChromeConnector
fields = ConnectorSerializer.Meta.fields + ["credentials", "chrome_url"]
class GoogleChromeConnectorViewSet(UsedByMixin, ModelViewSet):
"""GoogleChromeConnector Viewset"""
queryset = GoogleChromeConnector.objects.all()
serializer_class = GoogleChromeConnectorSerializer
filterset_fields = [
"name",
]
search_fields = ["name"]
ordering = ["name"]

View File

@@ -1,13 +0,0 @@
"""authentik Endpoint app config"""
from authentik.enterprise.apps import EnterpriseConfig
class AuthentikEndpointsConnectorGoogleChromeAppConfig(EnterpriseConfig):
"""authentik endpoint config"""
name = "authentik.enterprise.endpoints.connectors.google_chrome"
label = "authentik_endpoints_connectors_google_chrome"
verbose_name = "authentik Enterprise.Endpoints.Connectors.Google Chrome"
default = True
mountpoint = "endpoints/google/"

View File

@@ -1,116 +0,0 @@
from json import dumps, loads
from django.http import HttpRequest, HttpResponseRedirect
from django.urls import reverse
from googleapiclient.discovery import build
from authentik.endpoints.controller import BaseController, Capabilities
from authentik.endpoints.facts import DeviceFacts, OSFamily
from authentik.endpoints.models import Device, DeviceConnection
from authentik.enterprise.endpoints.connectors.google_chrome.google_schema import (
DeviceSignals,
VerifyChallengeResponseResult,
)
from authentik.enterprise.endpoints.connectors.google_chrome.models import GoogleChromeConnector
from authentik.policies.utils import delete_none_values
# Header sent by Chrome that initiates Verified Access
HEADER_DEVICE_TRUST = "X-Device-Trust"
# Header we send to the client carrying the generated challenge
HEADER_ACCESS_CHALLENGE = "X-Verified-Access-Challenge"
# Header we get back from the client that we verify with Google
HEADER_ACCESS_CHALLENGE_RESPONSE = "X-Verified-Access-Challenge-Response"
# Value of X-Device-Trust that initiates the flow
DEVICE_TRUST_VERIFIED_ACCESS = "VerifiedAccess"
class GoogleChromeController(BaseController[GoogleChromeConnector]):
    """Controller implementing Google Chrome Verified Access device trust."""

    def __init__(self, connector):
        super().__init__(connector)
        # Discovery caching is disabled; the client is rebuilt per controller.
        self.google_client = build(
            "verifiedaccess",
            "v2",
            cache_discovery=False,
            **connector.google_credentials(),
        )

    @staticmethod
    def vendor_identifier() -> str:
        """Stable key under which vendor-specific facts are stored."""
        return "chrome.google.com"

    def capabilities(self) -> list[Capabilities]:
        """This connector supports flow stages and automatic user enrollment."""
        return [Capabilities.STAGE_ENDPOINTS, Capabilities.ENROLL_AUTOMATIC_USER]

    def generate_challenge(self, request: HttpRequest) -> HttpResponseRedirect:
        """Fetch a challenge from Google and redirect back to this endpoint.

        The challenge is attached as a header so the browser's Verified Access
        machinery can sign it and call us back.
        """
        challenge = self.google_client.challenge().generate().execute()
        redirect_target = request.build_absolute_uri(
            reverse("authentik_endpoints_connectors_google_chrome:chrome")
        )
        redirect = HttpResponseRedirect(redirect_target)
        redirect[HEADER_ACCESS_CHALLENGE] = dumps(challenge)
        return redirect

    def validate_challenge(self, response: str) -> Device:
        """Verify a signed challenge response with Google and upsert the Device."""
        result = VerifyChallengeResponseResult(
            self.google_client.challenge().verify(body=loads(response)).execute()
        )
        # Remove deprecated string representation of deviceSignals
        result.pop("deviceSignal", None)
        signals = DeviceSignals(result["deviceSignals"])
        device, _ = Device.objects.update_or_create(
            identifier=signals["serialNumber"],
            defaults={"name": signals["hostname"]},
        )
        connection, _ = DeviceConnection.objects.update_or_create(
            device=device,
            connector=self.connector,
        )
        connection.create_snapshot(self.convert_data(signals))
        return device

    def convert_os_family(self, family) -> OSFamily:
        """Map Google's operatingSystem enum value to an authentik OSFamily."""
        known_families = {
            "CHROME_OS": OSFamily.linux,
            "CHROMIUM_OS": OSFamily.linux,
            "WINDOWS": OSFamily.windows,
            "MAC_OS_X": OSFamily.macOS,
            "LINUX": OSFamily.linux,
        }
        return known_families.get(family, OSFamily.other)

    def convert_data(self, raw_signals: DeviceSignals):
        """Convert raw Verified Access device signals into validated facts data.

        Raises a validation error if the converted payload does not match the
        DeviceFacts serializer.
        """
        os_section = delete_none_values(
            {
                "family": self.convert_os_family(raw_signals["operatingSystem"]),
                "version": raw_signals["osVersion"],
            }
        )
        network_section = delete_none_values(
            {
                "hostname": raw_signals["hostname"],
                "interfaces": [],
                "firewall_enabled": raw_signals["osFirewall"] == "OS_FIREWALL_ENABLED",
            },
        )
        hardware_section = delete_none_values(
            {
                "model": raw_signals["deviceModel"],
                "manufacturer": raw_signals["deviceManufacturer"],
                "serial": raw_signals["serialNumber"],
            }
        )
        payload = {
            "os": os_section,
            "disks": [],
            "network": network_section,
            "hardware": hardware_section,
            "vendor": {
                self.vendor_identifier(): {
                    "agent_version": raw_signals["browserVersion"],
                    # Keep the full raw signal payload for later inspection.
                    "raw": raw_signals,
                },
            },
        }
        facts = DeviceFacts(data=payload)
        facts.is_valid(raise_exception=True)
        return facts.validated_data

View File

@@ -1,129 +0,0 @@
from typing import Literal, TypedDict
# Based on https://github.com/henribru/google-api-python-client-stubs/blob/master/googleapiclient-stubs/_apis/verifiedaccess/v2/schemas.pyi
class Antivirus(TypedDict, total=False):
    """Antivirus state reported in the device signals."""

    state: Literal["STATE_UNSPECIFIED", "MISSING", "DISABLED", "ENABLED"]


class Challenge(TypedDict, total=False):
    """Opaque challenge issued by the Verified Access API."""

    challenge: str


class CrowdStrikeAgent(TypedDict, total=False):
    """CrowdStrike agent identifiers, if an agent is present on the device."""

    agentId: str
    customerId: str
class DeviceSignals(TypedDict, total=False):
    """Device signals returned inside a verified challenge response.

    All keys are optional (total=False); Google omits signals the device or
    browser did not report.
    """

    allowScreenLock: bool
    antivirus: Antivirus
    browserVersion: str
    builtInDnsClientEnabled: bool
    chromeRemoteDesktopAppBlocked: bool
    crowdStrikeAgent: CrowdStrikeAgent
    deviceAffiliationIds: list[str]
    deviceEnrollmentDomain: str
    deviceManufacturer: str
    deviceModel: str
    diskEncryption: Literal[
        "DISK_ENCRYPTION_UNSPECIFIED",
        "DISK_ENCRYPTION_UNKNOWN",
        "DISK_ENCRYPTION_DISABLED",
        "DISK_ENCRYPTION_ENCRYPTED",
    ]
    displayName: str
    hostname: str
    imei: list[str]
    macAddresses: list[str]
    meid: list[str]
    operatingSystem: Literal[
        "OPERATING_SYSTEM_UNSPECIFIED",
        "CHROME_OS",
        "CHROMIUM_OS",
        "WINDOWS",
        "MAC_OS_X",
        "LINUX",
    ]
    osFirewall: Literal[
        "OS_FIREWALL_UNSPECIFIED",
        "OS_FIREWALL_UNKNOWN",
        "OS_FIREWALL_DISABLED",
        "OS_FIREWALL_ENABLED",
    ]
    osVersion: str
    passwordProtectionWarningTrigger: Literal[
        "PASSWORD_PROTECTION_WARNING_TRIGGER_UNSPECIFIED",
        "POLICY_UNSET",
        "PASSWORD_PROTECTION_OFF",
        "PASSWORD_REUSE",
        "PHISHING_REUSE",
    ]
    profileAffiliationIds: list[str]
    profileEnrollmentDomain: str
    realtimeUrlCheckMode: Literal[
        "REALTIME_URL_CHECK_MODE_UNSPECIFIED",
        "REALTIME_URL_CHECK_MODE_DISABLED",
        "REALTIME_URL_CHECK_MODE_ENABLED_MAIN_FRAME",
    ]
    safeBrowsingProtectionLevel: Literal[
        "SAFE_BROWSING_PROTECTION_LEVEL_UNSPECIFIED", "INACTIVE", "STANDARD", "ENHANCED"
    ]
    screenLockSecured: Literal[
        "SCREEN_LOCK_SECURED_UNSPECIFIED",
        "SCREEN_LOCK_SECURED_UNKNOWN",
        "SCREEN_LOCK_SECURED_DISABLED",
        "SCREEN_LOCK_SECURED_ENABLED",
    ]
    serialNumber: str
    siteIsolationEnabled: bool
    systemDnsServers: list[str]
    thirdPartyBlockingEnabled: bool
    trigger: Literal["TRIGGER_UNSPECIFIED", "TRIGGER_BROWSER_NAVIGATION", "TRIGGER_LOGIN_SCREEN"]
    windowsMachineDomain: str
    windowsUserDomain: str
# Empty response body (mirrors google.protobuf.Empty).
class Empty(TypedDict, total=False): ...


class VerifyChallengeResponseRequest(TypedDict, total=False):
    """Request body for the Verified Access challenge:verify endpoint."""

    challengeResponse: str
    expectedIdentity: str
class VerifyChallengeResponseResult(TypedDict, total=False):
    """Result of the Verified Access challenge:verify call."""

    attestedDeviceId: str
    customerId: str
    deviceEnrollmentId: str
    devicePermanentId: str
    # Deprecated string representation of deviceSignals; callers drop it.
    deviceSignal: str
    deviceSignals: DeviceSignals
    keyTrustLevel: Literal[
        "KEY_TRUST_LEVEL_UNSPECIFIED",
        "CHROME_OS_VERIFIED_MODE",
        "CHROME_OS_DEVELOPER_MODE",
        "CHROME_BROWSER_HW_KEY",
        "CHROME_BROWSER_OS_KEY",
        "CHROME_BROWSER_NO_KEY",
    ]
    profileCustomerId: str
    profileKeyTrustLevel: Literal[
        "KEY_TRUST_LEVEL_UNSPECIFIED",
        "CHROME_OS_VERIFIED_MODE",
        "CHROME_OS_DEVELOPER_MODE",
        "CHROME_BROWSER_HW_KEY",
        "CHROME_BROWSER_OS_KEY",
        "CHROME_BROWSER_NO_KEY",
    ]
    profilePermanentId: str
    signedPublicKeyAndChallenge: str
    virtualDeviceId: str
    virtualProfileId: str

View File

@@ -1,38 +0,0 @@
# Generated by Django 5.2.11 on 2026-03-01 18:38
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    # First migration for the google_chrome connector app.
    initial = True

    dependencies = [
        ("authentik_endpoints", "0004_deviceaccessgroup_attributes"),
    ]

    operations = [
        migrations.CreateModel(
            name="GoogleChromeConnector",
            fields=[
                # Multi-table inheritance link to the base Connector model.
                (
                    "connector_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_endpoints.connector",
                    ),
                ),
                # Raw Google service-account credentials as a JSON blob.
                ("credentials", models.JSONField()),
            ],
            options={
                "verbose_name": "Google Device Trust Connector",
                "verbose_name_plural": "Google Device Trust Connectors",
            },
            bases=("authentik_endpoints.connector",),
        ),
    ]

View File

@@ -1,69 +0,0 @@
"""Endpoint stage"""
from typing import TYPE_CHECKING
from django.db import models
from django.templatetags.static import static
from django.utils.translation import gettext_lazy as _
from google.oauth2.service_account import Credentials
from rest_framework.serializers import BaseSerializer
from authentik.endpoints.models import Connector
from authentik.flows.stage import StageView
if TYPE_CHECKING:
from authentik.enterprise.endpoints.connectors.google_chrome.controller import (
GoogleChromeController,
)
class GoogleChromeConnector(Connector):
    """Verify Google Chrome Device Trust connection for the user's browser."""

    # Google service-account credentials, stored as raw JSON.
    credentials = models.JSONField()

    def google_credentials(self) -> dict:
        """Build kwargs for the googleapiclient with scoped service-account credentials."""
        return {
            "credentials": Credentials.from_service_account_info(
                self.credentials, scopes=["https://www.googleapis.com/auth/verifiedaccess"]
            ),
        }

    @property
    def icon_url(self):
        return static("authentik/sources/google.svg")

    @property
    def serializer(self) -> type[BaseSerializer]:
        # Imported lazily to avoid a circular import with the API module.
        from authentik.enterprise.endpoints.connectors.google_chrome.api import (
            GoogleChromeConnectorSerializer,
        )

        return GoogleChromeConnectorSerializer

    @property
    def stage(self) -> type[StageView] | None:
        # Imported lazily to avoid a circular import with the stage module.
        from authentik.enterprise.endpoints.connectors.google_chrome.stage import (
            GoogleChromeStageView,
        )

        return GoogleChromeStageView

    @property
    def controller(self) -> type[GoogleChromeController]:
        # Imported lazily to avoid a circular import with the controller module.
        from authentik.enterprise.endpoints.connectors.google_chrome.controller import (
            GoogleChromeController,
        )

        return GoogleChromeController

    @property
    def component(self) -> str:
        # Web component the admin UI renders as the connector's form.
        return "ak-endpoints-connector-gdtc-form"

    def __str__(self) -> str:
        return f"Google Device Trust Connector {self.name}"

    class Meta:
        verbose_name = _("Google Device Trust Connector")
        verbose_name_plural = _("Google Device Trust Connectors")

View File

@@ -1,32 +0,0 @@
from django.http import HttpResponse
from django.urls import reverse
from django.utils.translation import gettext as _
from authentik.flows.challenge import (
Challenge,
ChallengeResponse,
FrameChallenge,
FrameChallengeResponse,
)
from authentik.flows.stage import ChallengeStageView
class GoogleChromeStageView(ChallengeStageView):
    """Endpoint stage"""

    response_class = FrameChallengeResponse

    def get_challenge(self, *args, **kwargs) -> Challenge:
        """Render a frame that loads the DTC endpoint inside the flow executor."""
        frame_url = self.request.build_absolute_uri(
            reverse("authentik_endpoints_connectors_google_chrome:chrome")
        )
        return FrameChallenge(
            data={
                "component": "xak-flow-frame",
                "url": frame_url,
                "loading_overlay": True,
                "loading_text": _("Verifying your browser..."),
            }
        )

    def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:
        """The embedded frame submitted successfully; continue the flow."""
        return self.executor.stage_ok()

View File

@@ -1,36 +0,0 @@
{
"devicePermanentId": "6f30327d-e436-4f7a-9f89-c37a7b6bf408",
"keyTrustLevel": "CHROME_BROWSER_HW_KEY",
"virtualDeviceId": "Z5DDF07GK6",
"customerId": "qewrqer",
"deviceSignals": {
"deviceManufacturer": "Apple Inc.",
"deviceModel": "MacBookPro18,1",
"operatingSystem": "MAC_OS_X",
"osVersion": "26.2.0",
"displayName": "jens-mac-vm",
"diskEncryption": "DISK_ENCRYPTION_ENCRYPTED",
"serialNumber": "Z5DDF07GK6",
"osFirewall": "OS_FIREWALL_DISABLED",
"systemDnsServers": [
"10.120.20.250:53"
],
"hostname": "jens-mac-vm.lab.beryju.org",
"macAddresses": [
"f4:d4:88:79:07:0e"
],
"screenLockSecured": "SCREEN_LOCK_SECURED_ENABLED",
"deviceEnrollmentDomain": "beryju.org",
"browserVersion": "145.0.7632.76",
"deviceAffiliationIds": [
"qewrqer"
],
"builtInDnsClientEnabled": true,
"chromeRemoteDesktopAppBlocked": false,
"safeBrowsingProtectionLevel": "STANDARD",
"siteIsolationEnabled": true,
"passwordProtectionWarningTrigger": "POLICY_UNSET",
"realtimeUrlCheckMode": "REALTIME_URL_CHECK_MODE_DISABLED",
"trigger": "TRIGGER_BROWSER_NAVIGATION"
}
}

View File

@@ -1,67 +0,0 @@
from json import dumps
from unittest.mock import MagicMock, patch
from django.urls import reverse
from rest_framework.test import APITestCase
from authentik.core.tests.utils import RequestFactory
from authentik.endpoints.facts import OSFamily
from authentik.endpoints.models import Device
from authentik.enterprise.endpoints.connectors.google_chrome.controller import (
HEADER_ACCESS_CHALLENGE,
GoogleChromeController,
)
from authentik.enterprise.endpoints.connectors.google_chrome.models import GoogleChromeConnector
from authentik.enterprise.providers.google_workspace.clients.test_http import MockHTTP
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture
class TestGoogleChromeConnector(APITestCase):
    """Unit tests for GoogleChromeController's challenge handshake."""

    def setUp(self):
        self.connector = GoogleChromeConnector.objects.create(
            name=generate_id(),
            credentials={},
        )
        self.factory = RequestFactory()
        self.api_key = generate_id()

    def test_generate_challenge(self):
        """Challenge generation redirects back to the endpoint and attaches the challenge header."""
        req = self.factory.get("/")
        challenge = generate_id()
        http = MockHTTP()
        # Mock the Verified Access challenge:generate API call.
        http.add_response(
            f"https://verifiedaccess.googleapis.com/v2/challenge:generate?key={self.api_key}&alt=json",
            {"challenge": challenge},
            method="POST",
        )
        with patch(
            "authentik.enterprise.endpoints.connectors.google_chrome.models.GoogleChromeConnector.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            controller = GoogleChromeController(self.connector)
            res = controller.generate_challenge(req)
            self.assertEqual(
                res["Location"],
                req.build_absolute_uri(
                    reverse("authentik_endpoints_connectors_google_chrome:chrome")
                ),
            )
            self.assertEqual(res.headers[HEADER_ACCESS_CHALLENGE], dumps({"challenge": challenge}))

    def test_validate_challenge(self):
        """Challenge validation creates a Device from the fixture's signals."""
        http = MockHTTP()
        # Mock the Verified Access challenge:verify API call with fixture data.
        http.add_response(
            f"https://verifiedaccess.googleapis.com/v2/challenge:verify?key={self.api_key}&alt=json",
            load_fixture("fixtures/host_macos.json"),
            method="POST",
        )
        with patch(
            "authentik.enterprise.endpoints.connectors.google_chrome.models.GoogleChromeConnector.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            controller = GoogleChromeController(self.connector)
            controller.validate_challenge(dumps("{}"))
            # Serial number comes from fixtures/host_macos.json.
            device = Device.objects.get(identifier="Z5DDF07GK6")
            self.assertIsNotNone(device)
            self.assertEqual(device.cached_facts.data["os"]["family"], OSFamily.macOS)

View File

@@ -1,91 +0,0 @@
from json import dumps
from unittest.mock import MagicMock, patch
from django.urls import reverse
from authentik.core.tests.utils import RequestFactory, create_test_flow
from authentik.endpoints.models import Device, EndpointStage
from authentik.enterprise.endpoints.connectors.google_chrome.models import GoogleChromeConnector
from authentik.enterprise.providers.google_workspace.clients.test_http import MockHTTP
from authentik.flows.models import FlowStageBinding
from authentik.flows.planner import PLAN_CONTEXT_DEVICE
from authentik.flows.tests import FlowTestCase
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture
class TestChromeDTCView(FlowTestCase):
    """End-to-end flow test for the Chrome Device Trust Connector view."""

    def setUp(self):
        self.flow = create_test_flow()
        self.connector = GoogleChromeConnector.objects.create(
            name=generate_id(),
            credentials={},
        )
        self.factory = RequestFactory()
        self.api_key = generate_id()
        self.stage = EndpointStage.objects.create(
            name=generate_id(),
            connector=self.connector,
        )
        FlowStageBinding.objects.create(
            target=self.flow,
            stage=self.stage,
            order=0,
        )

    def test_dtc_generate_verify(self):
        """Full handshake: frame challenge, generate, verify, then flow completion."""
        # Starting the flow should return the frame challenge pointing at the DTC endpoint.
        res = self.client.get(
            reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
        )
        self.assertStageResponse(
            res,
            self.flow,
            component="xak-flow-frame",
            url="http://testserver/endpoints/google/chrome/",
        )
        challenge = generate_id()
        http = MockHTTP()
        http.add_response(
            f"https://verifiedaccess.googleapis.com/v2/challenge:generate?key={self.api_key}&alt=json",
            {"challenge": challenge},
            method="POST",
        )
        http.add_response(
            f"https://verifiedaccess.googleapis.com/v2/challenge:verify?key={self.api_key}&alt=json",
            load_fixture("fixtures/host_macos.json"),
            method="POST",
        )
        with patch(
            "authentik.enterprise.endpoints.connectors.google_chrome.models.GoogleChromeConnector.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            # Generate challenge
            res = self.client.get(
                reverse("authentik_endpoints_connectors_google_chrome:chrome"),
                HTTP_X_DEVICE_TRUST="VerifiedAccess",
            )
            self.assertEqual(res.status_code, 302)
            self.assertEqual(
                res.headers["X-Verified-Access-Challenge"],
                dumps({"challenge": challenge}),
            )
            # Validate challenge
            res = self.client.get(
                reverse("authentik_endpoints_connectors_google_chrome:chrome"),
                HTTP_X_VERIFIED_ACCESS_CHALLENGE_RESPONSE=dumps({}),
            )
            self.assertEqual(res.status_code, 200)
            # Serial number comes from fixtures/host_macos.json.
            device = Device.objects.get(identifier="Z5DDF07GK6")
            self.assertIsNotNone(device)
            # Continue flow
            with self.assertFlowFinishes() as plan:
                res = self.client.post(
                    reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
                )
                self.assertStageRedirects(res, "/")
            # The verified device must end up in the flow plan context.
            plan = plan()
            plan_device = plan.context[PLAN_CONTEXT_DEVICE]
            self.assertEqual(device.pk, plan_device.pk)

View File

@@ -1,16 +0,0 @@
"""API URLs"""
from django.urls import path
from authentik.enterprise.endpoints.connectors.google_chrome.api import GoogleChromeConnectorViewSet
from authentik.enterprise.endpoints.connectors.google_chrome.views.dtc import (
GoogleChromeDeviceTrustConnector,
)
urlpatterns = [
    # Endpoint the Chrome browser is redirected to during Verified Access.
    path("chrome/", GoogleChromeDeviceTrustConnector.as_view(), name="chrome"),
]

api_urlpatterns = [
    # REST API for managing GoogleChromeConnector objects.
    ("endpoints/google_chrome/connectors", GoogleChromeConnectorViewSet),
]

View File

@@ -1,46 +0,0 @@
from typing import Any
from django.http import HttpRequest, HttpResponse, HttpResponseBadRequest
from django.template.response import TemplateResponse
from django.utils.decorators import method_decorator
from django.views import View
from django.views.decorators.clickjacking import xframe_options_sameorigin
from authentik.endpoints.models import EndpointStage
from authentik.enterprise.endpoints.connectors.google_chrome.controller import (
HEADER_ACCESS_CHALLENGE_RESPONSE,
HEADER_DEVICE_TRUST,
GoogleChromeController,
)
from authentik.enterprise.endpoints.connectors.google_chrome.models import GoogleChromeConnector
from authentik.flows.planner import PLAN_CONTEXT_DEVICE, FlowPlan
from authentik.flows.views.executor import SESSION_KEY_PLAN
@method_decorator(xframe_options_sameorigin, name="dispatch")
class GoogleChromeDeviceTrustConnector(View):
    """Google Chrome Device-trust connector based endpoint authenticator"""

    # Set in setup(); None when the stage has no GoogleChromeConnector.
    controller: GoogleChromeController | None

    def get_flow_plan(self) -> FlowPlan:
        """Return the flow plan the executor stored in the session."""
        flow_plan: FlowPlan = self.request.session[SESSION_KEY_PLAN]
        return flow_plan

    def setup(self, request: HttpRequest, *args: Any, **kwargs: Any) -> None:
        super().setup(request, *args, **kwargs)
        # Django ignores setup()'s return value, so an error response returned
        # here (as the previous code did) was silently discarded and get()
        # then crashed with AttributeError. Flag the missing connector instead
        # and reject the request in dispatch().
        self.controller = None
        stage: EndpointStage = self.get_flow_plan().bindings[0].stage
        connector = GoogleChromeConnector.objects.filter(pk=stage.connector_id).first()
        if connector:
            self.controller = connector.controller(connector)

    def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
        """Reject requests whose stage is not backed by a GoogleChromeConnector."""
        if self.controller is None:
            return HttpResponseBadRequest()
        return super().dispatch(request, *args, **kwargs)

    def get(self, request: HttpRequest) -> HttpResponse:
        """Drive the Verified Access handshake with the Chrome browser.

        First request carries X-Device-Trust and gets a challenge; the second
        carries the signed challenge response, which is verified with Google
        and the resulting device is stored in the flow plan.
        """
        x_device_trust = request.headers.get(HEADER_DEVICE_TRUST)
        x_access_challenge_response = request.headers.get(HEADER_ACCESS_CHALLENGE_RESPONSE)
        if x_device_trust == "VerifiedAccess" and x_access_challenge_response is None:
            return self.controller.generate_challenge(request)
        if x_access_challenge_response:
            device = self.controller.validate_challenge(x_access_challenge_response)
            flow_plan = self.get_flow_plan()
            flow_plan.context[PLAN_CONTEXT_DEVICE] = device
            # Persist the updated plan so the executor sees the device.
            self.request.session[SESSION_KEY_PLAN] = flow_plan
        return TemplateResponse(request, "flows/frame-submit.html")

View File

@@ -15,7 +15,6 @@ from django.core.cache import cache
from django.db.models.query import QuerySet
from django.utils.timezone import now
from jwt import PyJWTError, decode, get_unverified_header
from jwt.algorithms import ECAlgorithm
from rest_framework.exceptions import ValidationError
from rest_framework.fields import (
ChoiceField,
@@ -110,20 +109,13 @@ class LicenseKey:
intermediate.verify_directly_issued_by(get_licensing_key())
except InvalidSignature, TypeError, ValueError, Error:
raise ValidationError("Unable to verify license") from None
_validate_curve_original = ECAlgorithm._validate_curve
try:
# authentik's license are generated with `algorithm="ES512"` and signed with
# a key of curve `secp384r1`. Starting with version 2.11.0, pyjwt enforces the spec, see
# https://github.com/jpadilla/pyjwt/commit/5b8622773358e56d3d3c0a9acf404809ff34433a
# authentik will change its license generation to `algorithm="ES384"` in 2026.
# TODO: remove this when the last incompatible license runs out.
ECAlgorithm._validate_curve = lambda *_: True
body = from_dict(
LicenseKey,
decode(
jwt,
our_cert.public_key(),
algorithms=["ES384", "ES512"],
algorithms=["ES512"],
audience=get_license_aud(),
options={"verify_exp": check_expiry, "verify_signature": check_expiry},
),
@@ -133,8 +125,6 @@ class LicenseKey:
if unverified["aud"] != get_license_aud():
raise ValidationError("Invalid Install ID in license") from None
raise ValidationError("Unable to verify license") from None
finally:
ECAlgorithm._validate_curve = _validate_curve_original
return body
@staticmethod

View File

@@ -1,150 +0,0 @@
from datetime import datetime
from django.db.models import BooleanField as ModelBooleanField
from django.db.models import Case, Q, Value, When
from django_filters.rest_framework import BooleanFilter, FilterSet
from drf_spectacular.utils import extend_schema
from rest_framework.decorators import action
from rest_framework.fields import IntegerField, SerializerMethodField
from rest_framework.mixins import CreateModelMixin
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from authentik.core.api.utils import ModelSerializer
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.lifecycle.api.reviews import ReviewSerializer
from authentik.enterprise.lifecycle.models import LifecycleIteration, ReviewState
from authentik.enterprise.lifecycle.utils import (
ContentTypeField,
ReviewerGroupSerializer,
ReviewerUserSerializer,
admin_link_for_model,
parse_content_type,
start_of_day,
)
from authentik.lib.utils.time import timedelta_from_string
class LifecycleIterationSerializer(EnterpriseRequiredMixin, ModelSerializer):
    """Read-only serializer for a lifecycle review iteration."""

    content_type = ContentTypeField()
    object_verbose = SerializerMethodField()
    object_admin_url = SerializerMethodField(read_only=True)
    grace_period_end = SerializerMethodField(read_only=True)
    reviews = ReviewSerializer(many=True, read_only=True, source="review_set.all")
    user_can_review = SerializerMethodField(read_only=True)
    # Reviewer configuration is exposed from the owning rule.
    reviewer_groups = ReviewerGroupSerializer(
        many=True, read_only=True, source="rule.reviewer_groups"
    )
    min_reviewers = IntegerField(read_only=True, source="rule.min_reviewers")
    reviewers = ReviewerUserSerializer(many=True, read_only=True, source="rule.reviewers")
    next_review_date = SerializerMethodField(read_only=True)

    class Meta:
        model = LifecycleIteration
        fields = [
            "id",
            "content_type",
            "object_id",
            "object_verbose",
            "object_admin_url",
            "state",
            "opened_on",
            "grace_period_end",
            "next_review_date",
            "reviews",
            "user_can_review",
            "reviewer_groups",
            "min_reviewers",
            "reviewers",
        ]
        # The whole serializer is read-only; iterations are created by tasks.
        read_only_fields = fields

    def get_object_verbose(self, iteration: LifecycleIteration) -> str:
        """Human-readable name of the object under review."""
        return str(iteration.object)

    def get_object_admin_url(self, iteration: LifecycleIteration) -> str:
        """Admin-interface link for the object under review."""
        return admin_link_for_model(iteration.object)

    def get_grace_period_end(self, iteration: LifecycleIteration) -> datetime:
        """End of the rule's grace period, normalized to the start of the day."""
        return start_of_day(
            iteration.opened_on + timedelta_from_string(iteration.rule.grace_period)
        )

    def get_next_review_date(self, iteration: LifecycleIteration) -> datetime:
        """When the next iteration is due, normalized to the start of the day."""
        return start_of_day(iteration.opened_on + timedelta_from_string(iteration.rule.interval))

    def get_user_can_review(self, iteration: LifecycleIteration) -> bool:
        """Whether the requesting user may submit a review for this iteration."""
        return iteration.user_can_review(self.context["request"].user)
class LifecycleIterationFilterSet(FilterSet):
    """Filters for iterations; matches the queryset's user_is_reviewer annotation."""

    user_is_reviewer = BooleanFilter(field_name="user_is_reviewer", lookup_expr="exact")
class IterationViewSet(EnterpriseRequiredMixin, CreateModelMixin, GenericViewSet):
    """API access to lifecycle review iterations."""

    queryset = LifecycleIteration.objects.all()
    serializer_class = LifecycleIterationSerializer
    ordering = ["-opened_on"]
    ordering_fields = ["state", "content_type__model", "opened_on", "grace_period_end"]
    filterset_class = LifecycleIterationFilterSet

    def get_queryset(self):
        """Annotate whether the requesting user is a reviewer for each iteration."""
        user = self.request.user
        return self.queryset.annotate(
            user_is_reviewer=Case(
                When(
                    # Directly assigned reviewer, or member of a reviewer group
                    # (including ancestor groups).
                    Q(rule__reviewers=user)
                    | Q(rule__reviewer_groups__in=user.groups.all().with_ancestors()),
                    then=Value(True),
                ),
                default=Value(False),
                output_field=ModelBooleanField(),
            )
            # The M2M joins above can duplicate rows; collapse them.
        ).distinct()

    @action(
        detail=False,
        methods=["get"],
        url_path=r"latest/(?P<content_type>[^/]+)/(?P<object_id>[^/]+)",
    )
    def latest_iteration(self, request: Request, content_type: str, object_id: str) -> Response:
        """Return the most recently opened iteration for a single object."""
        ct = parse_content_type(content_type)
        try:
            obj = (
                self.get_queryset()
                .filter(
                    content_type__app_label=ct["app_label"],
                    content_type__model=ct["model"],
                    object_id=object_id,
                )
                .latest("opened_on")
            )
        except LifecycleIteration.DoesNotExist:
            return Response(status=404)
        serializer = self.get_serializer(obj)
        return Response(serializer.data)

    @extend_schema(
        operation_id="lifecycle_iterations_list_open",
        responses={200: LifecycleIterationSerializer(many=True)},
    )
    @action(
        detail=False,
        methods=["get"],
        url_path=r"open",
    )
    def open_iterations(self, request: Request):
        """List iterations that still require reviews (pending or overdue), paginated."""
        iterations = self.get_queryset().filter(
            Q(state=ReviewState.PENDING) | Q(state=ReviewState.OVERDUE)
        )
        iterations = self.filter_queryset(iterations)
        page = self.paginate_queryset(iterations)
        if page is not None:
            serializer = self.get_serializer(page, many=True)
            return self.get_paginated_response(serializer.data)
        serializer = self.get_serializer(iterations, many=True)
        return Response(serializer.data)

View File

@@ -1,33 +0,0 @@
from django.utils.translation import gettext_lazy as _
from rest_framework.exceptions import ValidationError
from rest_framework.mixins import CreateModelMixin
from rest_framework.viewsets import GenericViewSet
from authentik.core.api.utils import ModelSerializer
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.lifecycle.models import LifecycleIteration, Review
from authentik.enterprise.lifecycle.utils import ReviewerUserSerializer
class ReviewSerializer(EnterpriseRequiredMixin, ModelSerializer):
    """Serializer for a single review submitted against an iteration."""

    reviewer = ReviewerUserSerializer(read_only=True)

    class Meta:
        model = Review
        fields = ["id", "iteration", "reviewer", "timestamp", "note"]
        # Reviewer is set server-side from the requesting user.
        read_only_fields = ["id", "timestamp", "reviewer"]

    def validate_iteration(self, iteration: LifecycleIteration) -> LifecycleIteration:
        """Reject reviews from users who are not configured reviewers."""
        user = self.context["request"].user
        if not iteration.user_can_review(user):
            raise ValidationError(_("You are not allowed to submit a review for this object."))
        return iteration
class ReviewViewSet(EnterpriseRequiredMixin, CreateModelMixin, GenericViewSet):
    """Create-only API for submitting reviews."""

    queryset = Review.objects.all()
    serializer_class = ReviewSerializer

    def perform_create(self, serializer: ReviewSerializer) -> None:
        """Attach the requesting user as reviewer and notify the iteration."""
        review = serializer.save(reviewer=self.request.user)
        # Let the iteration re-evaluate its state (e.g. enough reviews collected).
        review.iteration.on_review(self.request)

View File

@@ -1,113 +0,0 @@
from django.utils.translation import gettext as _
from rest_framework.exceptions import ValidationError
from rest_framework.fields import SerializerMethodField
from rest_framework.relations import SlugRelatedField
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.utils import ModelSerializer
from authentik.core.models import User
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.lifecycle.models import LifecycleRule
from authentik.enterprise.lifecycle.utils import (
ContentTypeField,
ReviewerGroupSerializer,
ReviewerUserSerializer,
)
from authentik.lib.utils.time import timedelta_from_string
class LifecycleRuleSerializer(EnterpriseRequiredMixin, ModelSerializer):
    """Serializer for lifecycle rules (per-object or type-wide review schedules)."""

    content_type = ContentTypeField()
    target_verbose = SerializerMethodField()
    reviewer_groups_obj = ReviewerGroupSerializer(
        many=True, read_only=True, source="reviewer_groups"
    )
    # Reviewers are referenced by user UUID on write; expanded objects on read.
    reviewers = SlugRelatedField(slug_field="uuid", many=True, queryset=User.objects.all())
    reviewers_obj = ReviewerUserSerializer(many=True, read_only=True, source="reviewers")

    class Meta:
        model = LifecycleRule
        fields = [
            "id",
            "name",
            "content_type",
            "object_id",
            "interval",
            "grace_period",
            "reviewer_groups",
            "reviewer_groups_obj",
            "min_reviewers",
            "min_reviewers_is_per_group",
            "reviewers",
            "reviewers_obj",
            "notification_transports",
            "target_verbose",
        ]
        read_only_fields = ["id", "reviewers_obj", "reviewer_groups_obj", "target_verbose"]

    def get_target_verbose(self, rule: LifecycleRule) -> str:
        """Human-readable target: the model's plural name for type-wide rules,
        otherwise 'model: object'."""
        if rule.object_id is None:
            return rule.content_type.model_class()._meta.verbose_name_plural
        else:
            return f"{rule.content_type.model_class()._meta.verbose_name}: {rule.object}"

    def validate_object_id(self, value: str) -> str | None:
        """Normalize an empty object_id to None (type-wide rule)."""
        if value == "":
            return None
        return value

    def validate(self, attrs: dict) -> dict:
        """Cross-field validation for target existence, reviewers and timing."""
        # A per-object rule must point at an existing object.
        if (
            attrs.get("object_id") is not None
            and not attrs["content_type"]
            .get_all_objects_for_this_type(pk=attrs["object_id"])
            .exists()
        ):
            raise ValidationError({"object_id": _("Object does not exist")})
        # At least one reviewer (user or group) must remain configured.
        if "reviewer_groups" in attrs or "reviewers" in attrs:
            reviewer_groups = attrs.get(
                "reviewer_groups", self.instance.reviewer_groups.all() if self.instance else []
            )
            reviewers = attrs.get(
                "reviewers", self.instance.reviewers.all() if self.instance else []
            )
            if len(reviewer_groups) == 0 and len(reviewers) == 0:
                raise ValidationError(_("Either a reviewer group or a reviewer must be set."))
        # The grace period may not exceed the review interval.
        if "grace_period" in attrs or "interval" in attrs:
            grace_period = attrs.get("grace_period", getattr(self.instance, "grace_period", None))
            interval = attrs.get("interval", getattr(self.instance, "interval", None))
            if (
                grace_period is not None
                and interval is not None
                and (timedelta_from_string(grace_period) > timedelta_from_string(interval))
            ):
                raise ValidationError(
                    {"grace_period": _("Grace period must be shorter than the interval.")}
                )
        # Only one type-wide (object_id is NULL) rule is allowed per model.
        if "content_type" in attrs or "object_id" in attrs:
            content_type = attrs.get("content_type", getattr(self.instance, "content_type", None))
            object_id = attrs.get("object_id", getattr(self.instance, "object_id", None))
            if content_type is not None and object_id is None:
                existing = LifecycleRule.objects.filter(
                    content_type=content_type, object_id__isnull=True
                )
                if self.instance:
                    existing = existing.exclude(pk=self.instance.pk)
                if existing.exists():
                    raise ValidationError(
                        {
                            "content_type": _(
                                "Only one type-wide rule for each object type is allowed."
                            )
                        }
                    )
        return attrs
class LifecycleRuleViewSet(ModelViewSet):
    """CRUD API for lifecycle rules."""

    queryset = LifecycleRule.objects.all()
    serializer_class = LifecycleRuleSerializer
    search_fields = ["content_type__model", "reviewer_groups__name", "reviewers__username"]
    ordering = ["name"]
    ordering_fields = ["name", "content_type__model"]
    filterset_fields = ["content_type__model"]

View File

@@ -1,22 +0,0 @@
from authentik.enterprise.apps import EnterpriseConfig
from authentik.lib.utils.time import fqdn_rand
from authentik.tasks.schedules.common import ScheduleSpec
# NOTE(review): the class name "ReportsConfig" looks copied from a reports
# app; the config actually belongs to authentik.enterprise.lifecycle — confirm
# and consider renaming in a follow-up (rename touches external references).
class ReportsConfig(EnterpriseConfig):
    """AppConfig for the enterprise lifecycle app."""

    name = "authentik.enterprise.lifecycle"
    label = "authentik_lifecycle"
    verbose_name = "authentik Enterprise.Lifecycle"
    default = True

    @property
    def tenant_schedule_specs(self) -> list[ScheduleSpec]:
        """Schedule the daily lifecycle-rule task at a host-derived random time."""
        from authentik.enterprise.lifecycle.tasks import apply_lifecycle_rules

        return [
            ScheduleSpec(
                actor=apply_lifecycle_rules,
                # fqdn_rand spreads minute/hour per host to avoid thundering herd.
                crontab=f"{fqdn_rand('lifecycle_apply_lifecycle_rules')} "
                f"{fqdn_rand('lifecycle_apply_lifecycle_rules', 24)} * * *",
            )
        ]

View File

@@ -1,154 +0,0 @@
# Generated by Django 5.2.11 on 2026-02-09 15:57
import authentik.lib.utils.time
import django.db.models.deletion
import uuid
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the enterprise lifecycle (access review) app.

    Creates LifecycleRule, LifecycleIteration and Review plus their
    indexes and uniqueness constraints.
    """

    initial = True

    dependencies = [
        ("authentik_core", "0057_remove_user_groups_remove_user_user_permissions_and_more"),
        ("authentik_events", "0016_alter_event_action"),
        ("contenttypes", "0002_remove_content_type_name"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Rule: defines review cadence and the reviewer quorum for either a
        # whole content type (object_id NULL) or a single object.
        migrations.CreateModel(
            name="LifecycleRule",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ("name", models.TextField(unique=True)),
                ("object_id", models.TextField(default=None, null=True)),
                (
                    "interval",
                    # Stored as a timedelta string (e.g. "days=60").
                    models.TextField(
                        default="days=60",
                        validators=[authentik.lib.utils.time.timedelta_string_validator],
                    ),
                ),
                (
                    "grace_period",
                    models.TextField(
                        default="days=30",
                        validators=[authentik.lib.utils.time.timedelta_string_validator],
                    ),
                ),
                ("min_reviewers", models.PositiveSmallIntegerField(default=1)),
                ("min_reviewers_is_per_group", models.BooleanField(default=False)),
                (
                    "content_type",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="contenttypes.contenttype"
                    ),
                ),
                (
                    "notification_transports",
                    models.ManyToManyField(
                        blank=True,
                        help_text="Select which transports should be used to notify the reviewers. If none are selected, the notification will only be shown in the authentik UI.",
                        to="authentik_events.notificationtransport",
                    ),
                ),
                ("reviewer_groups", models.ManyToManyField(blank=True, to="authentik_core.group")),
                ("reviewers", models.ManyToManyField(blank=True, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Iteration: one review round for one object, opened by a rule.
        migrations.CreateModel(
            name="LifecycleIteration",
            fields=[
                (
                    "managed",
                    # Inherited from ManagedModel (blueprint support).
                    models.TextField(
                        default=None,
                        help_text="Objects that are managed by authentik. These objects are created and updated automatically. This flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
                        null=True,
                        unique=True,
                        verbose_name="Managed by authentik",
                    ),
                ),
                ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ("object_id", models.TextField()),
                (
                    "state",
                    models.CharField(
                        choices=[
                            ("REVIEWED", "Reviewed"),
                            ("PENDING", "Pending"),
                            ("OVERDUE", "Overdue"),
                            ("CANCELED", "Canceled"),
                        ],
                        default="PENDING",
                        max_length=10,
                    ),
                ),
                # NOTE: widened to DateTimeField in migration 0002.
                ("opened_on", models.DateField(auto_now_add=True)),
                (
                    "content_type",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="contenttypes.contenttype"
                    ),
                ),
                (
                    "rule",
                    # SET_NULL: iterations outlive a deleted rule.
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        to="authentik_lifecycle.lifecyclerule",
                    ),
                ),
            ],
        ),
        # Review: one reviewer's sign-off on an iteration.
        migrations.CreateModel(
            name="Review",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ("timestamp", models.DateTimeField(auto_now_add=True)),
                ("note", models.TextField(null=True)),
                (
                    "iteration",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_lifecycle.lifecycleiteration",
                    ),
                ),
                (
                    "reviewer",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
        ),
        migrations.AddIndex(
            model_name="lifecyclerule",
            index=models.Index(fields=["content_type"], name="authentik_l_content_4e3a6a_idx"),
        ),
        # At most one type-wide rule (object_id IS NULL) per content type.
        migrations.AddConstraint(
            model_name="lifecyclerule",
            constraint=models.UniqueConstraint(
                condition=models.Q(("object_id__isnull", True)),
                fields=("content_type",),
                name="uniq_lifecycle_rule_ct_null_object",
            ),
        ),
        migrations.AlterUniqueTogether(
            name="lifecyclerule",
            unique_together={("content_type", "object_id")},
        ),
        migrations.AddIndex(
            model_name="lifecycleiteration",
            index=models.Index(
                fields=["content_type", "opened_on"], name="authentik_l_content_09c32a_idx"
            ),
        ),
        # A reviewer may review a given iteration at most once.
        migrations.AlterUniqueTogether(
            name="review",
            unique_together={("iteration", "reviewer")},
        ),
    ]

View File

@@ -1,18 +0,0 @@
# Generated by Django 5.2.11 on 2026-02-13 09:33
from django.db import migrations, models
class Migration(migrations.Migration):
    """Widen LifecycleIteration.opened_on from DateField to DateTimeField."""

    dependencies = [
        ("authentik_lifecycle", "0001_initial"),
    ]

    operations = [
        migrations.AlterField(
            model_name="lifecycleiteration",
            name="opened_on",
            # Was a DateField in 0001; time-of-day is now recorded as well.
            field=models.DateTimeField(auto_now_add=True),
        ),
    ]

View File

@@ -1,292 +0,0 @@
from datetime import timedelta
from uuid import uuid4
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.db.models import Q, QuerySet
from django.db.models.fields import Field
from django.db.models.functions import Cast
from django.http import HttpRequest
from django.utils import timezone
from django.utils.translation import gettext as _
from rest_framework.serializers import BaseSerializer
from authentik.blueprints.models import ManagedModel
from authentik.core.models import Group, User
from authentik.enterprise.lifecycle.utils import link_for_model, start_of_day
from authentik.events.models import Event, EventAction, NotificationSeverity, NotificationTransport
from authentik.lib.models import SerializerModel
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
class LifecycleRule(SerializerModel):
    """Defines when, and by whom, objects of a given type must be access-reviewed.

    A rule targets either every object of ``content_type`` (``object_id`` is
    None, a "type-wide" rule) or exactly one object (``object_id`` set). When
    both exist, the per-object rule takes precedence (see `get_objects`).
    """

    id = models.UUIDField(primary_key=True, default=uuid4)
    name = models.TextField(unique=True)
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.TextField(null=True, default=None)
    object = GenericForeignKey("content_type", "object_id")

    # How often a new review iteration is opened (timedelta string).
    interval = models.TextField(
        default="days=60",
        validators=[timedelta_string_validator],
    )
    # Grace period starts after a review is due
    grace_period = models.TextField(
        default="days=30",
        validators=[timedelta_string_validator],
    )

    # The review has to be conducted by `min_reviewers` members of `reviewer_groups`
    # (total or per group depending on `min_reviewers_is_per_group` flag) as well
    # as all of `reviewers`
    reviewer_groups = models.ManyToManyField("authentik_core.Group", blank=True)
    min_reviewers = models.PositiveSmallIntegerField(default=1)
    min_reviewers_is_per_group = models.BooleanField(default=False)
    reviewers = models.ManyToManyField("authentik_core.User", blank=True)

    notification_transports = models.ManyToManyField(
        NotificationTransport,
        help_text=_(
            "Select which transports should be used to notify the reviewers. If none are "
            "selected, the notification will only be shown in the authentik UI."
        ),
        blank=True,
    )

    class Meta:
        indexes = [models.Index(fields=["content_type"])]
        unique_together = [["content_type", "object_id"]]
        constraints = [
            # At most one type-wide (object_id IS NULL) rule per content type.
            models.UniqueConstraint(
                fields=["content_type"],
                condition=Q(object_id__isnull=True),
                name="uniq_lifecycle_rule_ct_null_object",
            )
        ]

    @property
    def serializer(self) -> type[BaseSerializer]:
        from authentik.enterprise.lifecycle.api.rules import LifecycleRuleSerializer

        return LifecycleRuleSerializer

    def _get_pk_field(self) -> Field:
        """Return a fresh field instance matching the target model's primary key.

        Follows ``target_field`` through FK-based primary keys so that `Cast`
        receives the concrete column type of the text-stored ``object_id``.
        """
        model = self.content_type.model_class()
        pk = model._meta.pk
        while hasattr(pk, "target_field"):
            pk = pk.target_field
        return pk.__class__()

    def get_objects(self) -> QuerySet:
        """Return all objects this rule applies to.

        For a type-wide rule, objects that have their own per-object rule are
        excluded, since the more specific rule takes precedence.
        """
        qs = self.content_type.get_all_objects_for_this_type()
        if self.object_id:
            qs = qs.filter(pk=self.object_id)
        else:
            qs = qs.exclude(
                pk__in=LifecycleRule.objects.filter(
                    content_type=self.content_type, object_id__isnull=False
                ).values_list(Cast("object_id", output_field=self._get_pk_field()), flat=True)
            )
        return qs

    # Forward references quoted: LifecycleIteration/ReviewState are defined
    # later in this module.
    def _get_stale_iterations(self) -> "QuerySet[LifecycleIteration]":
        """Open iterations of this rule that no longer match its target.

        Happens when the rule is re-pointed at another content type or object;
        such iterations are canceled by `apply`.
        """
        # Renamed local from `filter` to avoid shadowing the builtin.
        mismatch = ~Q(content_type=self.content_type)
        if self.object_id:
            mismatch = mismatch | ~Q(object_id=self.object_id)
        condition = Q(state__in=(ReviewState.PENDING, ReviewState.OVERDUE)) & mismatch
        return self.lifecycleiteration_set.filter(condition)

    def _get_newly_overdue_iterations(self) -> "QuerySet[LifecycleIteration]":
        """Pending iterations whose grace period has fully elapsed."""
        return self.lifecycleiteration_set.filter(
            opened_on__lt=start_of_day(
                timezone.now() + timedelta(days=1) - timedelta_from_string(self.grace_period)
            ),
            state=ReviewState.PENDING,
        )

    def _get_newly_due_objects(self) -> QuerySet:
        """Objects due for a new iteration (none opened within `interval`)."""
        recent_iteration_ids = LifecycleIteration.objects.filter(
            content_type=self.content_type,
            object_id__isnull=False,
            opened_on__gte=start_of_day(
                timezone.now() + timedelta(days=1) - timedelta_from_string(self.interval)
            ),
        ).values_list(Cast("object_id", output_field=self._get_pk_field()), flat=True)
        return self.get_objects().exclude(pk__in=recent_iteration_ids)

    def apply(self):
        """Reconcile this rule: cancel stale, mark overdue, open newly due."""
        self._get_stale_iterations().update(state=ReviewState.CANCELED)
        for iteration in self._get_newly_overdue_iterations():
            iteration.make_overdue()
        for obj in self._get_newly_due_objects():
            LifecycleIteration.start(content_type=self.content_type, object_id=obj.pk, rule=self)

    def is_satisfied_for_iteration(self, iteration: "LifecycleIteration") -> bool:
        """Whether `iteration` has collected all required reviews.

        Every explicitly-named reviewer must have reviewed, plus
        `min_reviewers` members of `reviewer_groups` — per group or in total,
        depending on `min_reviewers_is_per_group`.
        """
        reviewers = self.reviewers.all()
        # NOTE(review): .distinct("reviewer") is DISTINCT ON, PostgreSQL-only.
        if (
            iteration.review_set.filter(reviewer__in=reviewers).distinct("reviewer").count()
            < reviewers.count()
        ):
            return False
        if self.reviewer_groups.count() == 0:
            return True
        if self.min_reviewers_is_per_group:
            # Each group (including its descendant groups) must reach quorum.
            for group in self.reviewer_groups.all():
                if (
                    iteration.review_set.filter(
                        reviewer__groups__in=Group.objects.filter(pk=group.pk).with_descendants()
                    )
                    .distinct()
                    .count()
                    < self.min_reviewers
                ):
                    return False
            return True
        # Otherwise the quorum applies across all groups combined.
        return (
            iteration.review_set.filter(
                reviewer__groups__in=self.reviewer_groups.all().with_descendants()
            )
            .distinct()
            .count()
            >= self.min_reviewers
        )

    def get_reviewers(self) -> QuerySet[User]:
        """All users allowed to review: named reviewers plus group members."""
        return User.objects.filter(
            Q(id__in=self.reviewers.all().values_list("pk", flat=True))
            | Q(groups__in=self.reviewer_groups.all().with_descendants())
        ).distinct()

    def notify_reviewers(self, event: Event, severity: str):
        """Dispatch `event` to every reviewer over each configured transport.

        Transports flagged `send_once` are only used for the first reviewer.
        """
        from authentik.enterprise.lifecycle.tasks import send_notification

        for transport in self.notification_transports.all():
            for user in self.get_reviewers():
                send_notification.send_with_options(
                    args=(transport.pk, event.pk, user.pk, severity),
                    rel_obj=transport,
                )
                if transport.send_once:
                    break
class ReviewState(models.TextChoices):
    """Lifecycle of a review iteration; stored on LifecycleIteration.state."""

    REVIEWED = "REVIEWED", _("Reviewed")
    PENDING = "PENDING", _("Pending")
    OVERDUE = "OVERDUE", _("Overdue")
    CANCELED = "CANCELED", _("Canceled")
class LifecycleIteration(SerializerModel, ManagedModel):
    """A single review round for one object, opened by a LifecycleRule.

    Tracks which object is under review, which rule opened the round and the
    current state (pending → reviewed / overdue / canceled).
    """

    id = models.UUIDField(primary_key=True, default=uuid4)
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.TextField(null=False)
    object = GenericForeignKey("content_type", "object_id")
    # SET_NULL: the rule may be deleted while iterations still exist, so every
    # method dereferencing self.rule guards against None.
    rule = models.ForeignKey(LifecycleRule, null=True, on_delete=models.SET_NULL)
    state = models.CharField(max_length=10, choices=ReviewState, default=ReviewState.PENDING)
    opened_on = models.DateTimeField(auto_now_add=True)

    class Meta:
        indexes = [models.Index(fields=["content_type", "opened_on"])]

    @property
    def serializer(self) -> type[BaseSerializer]:
        from authentik.enterprise.lifecycle.api.iterations import LifecycleIterationSerializer

        return LifecycleIterationSerializer

    def _get_model_name(self) -> str:
        """Lower-cased human name of the reviewed object's model."""
        return self.content_type.name.lower()

    def _get_event_args(self) -> dict:
        """Common kwargs for events emitted about this iteration."""
        return {
            "target": self.object,
            "hyperlink": link_for_model(self.object),
            # Placeholder string instead of an f-string: an f-string is
            # formatted *before* gettext sees it, so it can never match a
            # translation catalog entry. English output is unchanged.
            "hyperlink_label": _("Go to {model}").format(model=self._get_model_name()),
            "lifecycle_iteration": self.id,
        }

    def initialize(self):
        """Emit the 'review initiated' event and notify the reviewers."""
        event = Event.new(
            EventAction.REVIEW_INITIATED,
            message=_("Access review is due for {type} {object}").format(
                type=self.content_type.name, object=self.object
            ),
            **self._get_event_args(),
        )
        event.save()
        if self.rule is not None:
            self.rule.notify_reviewers(event, NotificationSeverity.NOTICE)

    def make_overdue(self):
        """Transition to OVERDUE, emit the event and alert the reviewers."""
        self.state = ReviewState.OVERDUE
        event = Event.new(
            EventAction.REVIEW_OVERDUE,
            message=_("Access review is overdue for {type} {object}").format(
                type=self.content_type.name, object=self.object
            ),
            **self._get_event_args(),
        )
        event.save()
        if self.rule is not None:
            self.rule.notify_reviewers(event, NotificationSeverity.ALERT)
        self.save()

    @staticmethod
    def start(
        content_type: ContentType, object_id: str, rule: LifecycleRule
    ) -> "LifecycleIteration":
        """Open a new iteration for `object_id` and send initial notifications."""
        iteration = LifecycleIteration.objects.create(
            content_type=content_type, object_id=object_id, rule=rule
        )
        iteration.initialize()
        return iteration

    def make_reviewed(self, request: HttpRequest):
        """Transition to REVIEWED, emit the completion event and notify."""
        self.state = ReviewState.REVIEWED
        event = Event.new(
            EventAction.REVIEW_COMPLETED,
            message=_("Access review completed for {type} {object}").format(
                type=self.content_type.name, object=self.object
            ),
            **self._get_event_args(),
        ).from_http(request)
        event.save()
        if self.rule is not None:
            self.rule.notify_reviewers(event, NotificationSeverity.NOTICE)
        self.save()

    def on_review(self, request: HttpRequest):
        """Called after a Review is recorded; completes the iteration once the
        rule's quorum is satisfied.

        Raises:
            AssertionError: if the iteration is not open for review.
        """
        if self.state not in (ReviewState.PENDING, ReviewState.OVERDUE):
            raise AssertionError("Review is not pending or overdue")
        # With the rule deleted (SET_NULL) there is no quorum definition left;
        # the iteration simply stays open instead of crashing.
        if self.rule is not None and self.rule.is_satisfied_for_iteration(self):
            self.make_reviewed(request)

    def user_can_review(self, user: User) -> bool:
        """Whether `user` may still submit a review for this iteration."""
        if self.state not in (ReviewState.PENDING, ReviewState.OVERDUE):
            return False
        if self.review_set.filter(reviewer=user).exists():
            return False
        if self.rule is None:
            # Orphaned iteration (rule deleted): nobody is eligible.
            return False
        groups = self.rule.reviewer_groups.all()
        if groups:
            for group in groups:
                if group.is_member(user):
                    return True
            return False
        return user in self.rule.get_reviewers()
class Review(SerializerModel):
    """A single reviewer's sign-off on a LifecycleIteration."""

    id = models.UUIDField(primary_key=True, default=uuid4)
    iteration = models.ForeignKey(LifecycleIteration, on_delete=models.CASCADE)
    reviewer = models.ForeignKey("authentik_core.User", on_delete=models.CASCADE)
    timestamp = models.DateTimeField(auto_now_add=True)
    # Optional free-form note from the reviewer.
    note = models.TextField(null=True)

    class Meta:
        # Each reviewer may review a given iteration at most once.
        unique_together = [["iteration", "reviewer"]]

    @property
    def serializer(self) -> type[BaseSerializer]:
        from authentik.enterprise.lifecycle.api.reviews import ReviewSerializer

        return ReviewSerializer

View File

@@ -1,22 +0,0 @@
from django.db.models import Q
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from authentik.enterprise.lifecycle.models import LifecycleRule, ReviewState
@receiver(post_save, sender=LifecycleRule)
def post_rule_save(sender, instance: LifecycleRule, created: bool, **_):
    """Re-apply a rule asynchronously whenever it is created or updated."""
    from authentik.enterprise.lifecycle.tasks import apply_lifecycle_rule

    # Queue the task with the rule attached as related object.
    options = {"args": (instance.id,), "rel_obj": instance}
    apply_lifecycle_rule.send_with_options(**options)
@receiver(pre_delete, sender=LifecycleRule)
def pre_rule_delete(sender, instance: LifecycleRule, **_):
    """Cancel every still-open iteration before the rule disappears."""
    open_states = Q(state=ReviewState.PENDING) | Q(state=ReviewState.OVERDUE)
    instance.lifecycleiteration_set.filter(open_states).update(state=ReviewState.CANCELED)

Some files were not shown because too many files have changed in this diff Show More