Compare commits

..

6 Commits

Author SHA1 Message Date
Marc 'risson' Schmitt
3ee2e70f7b bigger pool
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-16 14:09:09 +01:00
Marc 'risson' Schmitt
3d1f28c6c0 fixup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-12 18:13:37 +01:00
Marc 'risson' Schmitt
66d8c04404 fixup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-12 17:58:09 +01:00
Marc 'risson' Schmitt
776f691c00 fixup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-12 17:56:33 +01:00
Marc 'risson' Schmitt
c5f9a25a84 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-12 17:47:30 +01:00
Marc 'risson' Schmitt
fa06af35e5 root: run tests with pgdog
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-12 17:40:01 +01:00
3898 changed files with 31161 additions and 860866 deletions

View File

@@ -1,5 +0,0 @@
[alias]
t = ["nextest", "run"]
[build]
rustflags = ["--cfg", "tokio_unstable"]

View File

@@ -1,46 +0,0 @@
[licenses]
allow = [
"Apache-2.0",
"BSD-3-Clause",
"CC0-1.0",
"CDLA-Permissive-2.0",
"ISC",
"MIT",
"MPL-2.0",
"OpenSSL",
"Unicode-3.0",
"Zlib",
]
[licenses.private]
ignore = true
[bans]
multiple-versions = "allow"
wildcards = "deny"
[bans.workspace-dependencies]
duplicates = "deny"
include-path-dependencies = true
unused = "deny"
# No non-FIPS compliant dependencies
[[bans.deny]]
name = "native-tls"
[[bans.deny]]
name = "openssl"
[[bans.deny]]
name = "openssl-sys"
[[bans.deny]]
name = "ring"
[[bans.features]]
allow = [
"alloc",
"aws-lc-sys",
"default",
"fips",
"prebuilt-nasm",
"ring-io",
"ring-sig-verify",
]
name = "aws-lc-rs"
exact = true

View File

@@ -1,15 +0,0 @@
comment_width = 100
format_code_in_doc_comments = true
format_strings = true
group_imports = "StdExternalCrate"
hex_literal_case = "Lower"
imports_granularity = "Crate"
max_width = 100
newline_style = "Unix"
normalize_comments = true
normalize_doc_attributes = true
reorder_impl_items = true
style_edition = "2024"
use_field_init_shorthand = true
use_try_shorthand = true
wrap_comments = true

View File

@@ -9,5 +9,7 @@ build_docs/**
**/*Dockerfile
blueprints/local
.git
!gen-ts-api/node_modules
!gen-ts-api/dist/**
!gen-go-api/
.venv
target

9
.gitattributes vendored
View File

@@ -1,9 +0,0 @@
packages/client-*/** linguist-generated
web/packages/lex/* linguist-vendored
web/packages/node-domexception/* linguist-vendored
web/packages/formdata-polyfill/* linguist-vendored
web/packages/sfe/vendored/* linguist-vendored
website/vendored/* linguist-vendored
website/docs/** linguist-documentation
website/integrations/** linguist-documentation
website/api/** linguist-documentation

View File

@@ -115,13 +115,20 @@ runs:
shell: bash
env:
GITHUB_TOKEN: ${{ inputs.token }}
PR_NUMBER: ${{ steps.should_run.outputs.pr_number }}
REASON: ${{ steps.should_run.outputs.reason }}
run: |
set -e -o pipefail
PR_NUMBER="${{ steps.should_run.outputs.pr_number }}"
# Get PR details
PR_DATA=$(gh api repos/${{ github.repository }}/pulls/$PR_NUMBER)
PR_TITLE=$(echo "$PR_DATA" | jq -r '.title')
PR_AUTHOR=$(echo "$PR_DATA" | jq -r '.user.login')
echo "pr_title=$PR_TITLE" >> $GITHUB_OUTPUT
echo "pr_author=$PR_AUTHOR" >> $GITHUB_OUTPUT
# Determine which labels to process
if [ "${REASON}" = "label_added_to_merged_pr" ]; then
if [ "${{ steps.should_run.outputs.reason }}" = "label_added_to_merged_pr" ]; then
# Only process the specific label that was just added
if [ "${{ github.event_name }}" = "issues" ]; then
LABEL_NAME="${{ github.event.label.name }}"
@@ -145,13 +152,13 @@ runs:
shell: bash
env:
GITHUB_TOKEN: ${{ inputs.token }}
PR_NUMBER: '${{ steps.should_run.outputs.pr_number }}'
COMMIT_SHA: '${{ steps.should_run.outputs.merge_commit_sha }}'
PR_TITLE: ${{ github.event.pull_request.title }}
PR_AUTHOR: ${{ github.event.pull_request.user.login }}
LABELS: '${{ steps.pr_details.outputs.labels }}'
run: |
set -e -o pipefail
PR_NUMBER='${{ steps.should_run.outputs.pr_number }}'
COMMIT_SHA='${{ steps.should_run.outputs.merge_commit_sha }}'
PR_TITLE='${{ steps.pr_details.outputs.pr_title }}'
PR_AUTHOR='${{ steps.pr_details.outputs.pr_author }}'
LABELS='${{ steps.pr_details.outputs.labels }}'
echo "Processing PR #$PR_NUMBER (reason: ${{ steps.should_run.outputs.reason }})"
echo "Found backport labels: $LABELS"

View File

@@ -54,6 +54,10 @@ outputs:
runs:
using: "composite"
steps:
- name: Setup authentik env
uses: ./.github/actions/setup
with:
dependencies: "python"
- name: Generate config
id: ev
shell: bash
@@ -64,4 +68,4 @@ runs:
PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
REF: ${{ github.ref }}
run: |
python3 ${{ github.action_path }}/push_vars.py
uv run python3 ${{ github.action_path }}/push_vars.py

View File

@@ -2,19 +2,10 @@
import os
from json import dumps
from pathlib import Path
from sys import exit as sysexit
from time import time
from typing import Any
def authentik_version() -> str:
init = Path(__file__).parent.parent.parent.parent / "authentik" / "__init__.py"
with open(init) as f:
content = f.read()
locals: dict[str, Any] = {}
exec(content, None, locals) # nosec
return str(locals["VERSION"])
from authentik import authentik_version
def must_or_fail(input: str | None, error: str) -> str:
@@ -106,7 +97,6 @@ if os.getenv("RELEASE", "false").lower() == "true":
image_build_args = [f"VERSION={os.getenv('REF')}"]
else:
image_build_args = [f"GIT_BUILD_HASH={sha}"]
image_build_args_str = "\n".join(image_build_args)
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print(f"shouldPush={str(should_push).lower()}", file=_output)
@@ -119,4 +109,4 @@ with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print(f"imageMainTag={image_main_tag}", file=_output)
print(f"imageMainName={image_tags[0]}", file=_output)
print(f"cacheTo={cache_to}", file=_output)
print(f"imageBuildArgs={image_build_args_str}", file=_output)
print(f"imageBuildArgs={"\n".join(image_build_args)}", file=_output)

View File

@@ -4,95 +4,61 @@ description: "Setup authentik testing environment"
inputs:
dependencies:
description: "List of dependencies to setup"
default: "system,python,rust,node,go,runtime"
default: "system,python,node,go,runtime"
postgresql_version:
description: "Optional postgresql image tag"
default: "16"
working-directory:
description: |
Optional working directory if this repo isn't in the root of the actions workspace.
When set, needs to contain a trailing slash
default: ""
runs:
using: "composite"
steps:
- name: Cleanup apt
if: ${{ contains(inputs.dependencies, 'system') || contains(inputs.dependencies, 'python') }}
shell: bash
run: sudo apt-get remove --purge man-db
- name: Install apt deps
if: ${{ contains(inputs.dependencies, 'system') || contains(inputs.dependencies, 'python') }}
uses: gerlero/apt-install@f4fa5265092af9e750549565d28c99aec7189639
with:
packages: libpq-dev openssl libxmlsec1-dev pkg-config gettext krb5-multidev libkrb5-dev heimdal-multidev libclang-dev krb5-kdc krb5-user krb5-admin-server
update: true
upgrade: false
install-recommends: false
- name: Make space on disk
- name: Install apt deps & cleanup
if: ${{ contains(inputs.dependencies, 'system') || contains(inputs.dependencies, 'python') }}
shell: bash
run: |
sudo mkdir -p /tmp/empty/
sudo rsync -a --delete /tmp/empty/ /usr/local/lib/android/
sudo apt-get remove --purge man-db
sudo apt-get update
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext krb5-multidev libkrb5-dev heimdal-multidev libclang-dev krb5-kdc krb5-user krb5-admin-server
sudo rm -rf /usr/local/lib/android
- name: Install uv
if: ${{ contains(inputs.dependencies, 'python') }}
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v5
uses: astral-sh/setup-uv@6ee6290f1cbc4156c0bdd66691b2c144ef8df19a # v5
with:
enable-cache: true
- name: Setup python
if: ${{ contains(inputs.dependencies, 'python') }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v5
with:
python-version-file: "${{ inputs.working-directory }}pyproject.toml"
python-version-file: "pyproject.toml"
- name: Install Python deps
if: ${{ contains(inputs.dependencies, 'python') }}
shell: bash
working-directory: ${{ inputs.working-directory }}
run: uv sync --all-extras --dev --frozen
- name: Setup rust (stable)
if: ${{ contains(inputs.dependencies, 'rust') && !contains(inputs.dependencies, 'rust-nightly') }}
uses: actions-rust-lang/setup-rust-toolchain@150fca883cd4034361b621bd4e6a9d34e5143606 # v1
with:
rustflags: ""
- name: Setup rust (nightly)
if: ${{ contains(inputs.dependencies, 'rust-nightly') }}
uses: actions-rust-lang/setup-rust-toolchain@150fca883cd4034361b621bd4e6a9d34e5143606 # v1
with:
toolchain: nightly
components: rustfmt
rustflags: ""
- name: Setup rust dependencies
if: ${{ contains(inputs.dependencies, 'rust') }}
uses: taiki-e/install-action@0abfcd587b70a713fdaa7fb502c885e2112acb15 # v2
with:
tool: cargo-deny cargo-machete cargo-llvm-cov nextest
- name: Setup node (web)
if: ${{ contains(inputs.dependencies, 'node') }}
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v4
with:
node-version-file: "${{ inputs.working-directory }}web/package.json"
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: "${{ inputs.working-directory }}web/package-lock.json"
cache-dependency-path: web/package-lock.json
registry-url: "https://registry.npmjs.org"
- name: Setup node (root)
if: ${{ contains(inputs.dependencies, 'node') }}
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v4
with:
node-version-file: "${{ inputs.working-directory }}package.json"
node-version-file: package.json
cache: "npm"
cache-dependency-path: "${{ inputs.working-directory }}package-lock.json"
cache-dependency-path: package-lock.json
registry-url: "https://registry.npmjs.org"
- name: Install Node deps
if: ${{ contains(inputs.dependencies, 'node') }}
shell: bash
working-directory: ${{ inputs.working-directory }}
run: npm ci
- name: Setup go
if: ${{ contains(inputs.dependencies, 'go') }}
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v5
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v5
with:
go-version-file: "${{ inputs.working-directory }}go.mod"
go-version-file: "go.mod"
- name: Setup docker cache
if: ${{ contains(inputs.dependencies, 'runtime') }}
uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7
@@ -101,7 +67,6 @@ runs:
- name: Setup dependencies
if: ${{ contains(inputs.dependencies, 'runtime') }}
shell: bash
working-directory: ${{ inputs.working-directory }}
run: |
export PSQL_TAG=${{ inputs.postgresql_version }}
docker compose -f .github/actions/setup/compose.yml up -d
@@ -109,7 +74,6 @@ runs:
- name: Generate config
if: ${{ contains(inputs.dependencies, 'python') }}
shell: uv run python {0}
working-directory: ${{ inputs.working-directory }}
run: |
from authentik.lib.generators import generate_id
from yaml import safe_dump

View File

@@ -11,6 +11,13 @@ services:
ports:
- 5432:5432
restart: always
pgdog:
image: ghcr.io/pgdogdev/pgdog:latest
volumes:
- ./pgdog.toml:/pgdog/pgdog.toml:ro
ports:
- 127.0.0.1:6432:6432
restart: always
s3:
container_name: s3
image: docker.io/zenko/cloudserver

18
.github/actions/setup/pgdog.toml vendored Normal file
View File

@@ -0,0 +1,18 @@
[general]
host = "[::]"
port = 6432
passthrough_auth = "enabled_plain"
prepared_statements = "extended_anonymous"
pub_sub_channel_size = 8192
[admin]
password = "admin"
[[databases]]
host = "postgresql"
name = "postgres"
[[databases]]
host = "postgresql"
name = "authentik"
[[databases]]
host = "postgresql"
name = "test_authentik"

View File

@@ -2,22 +2,18 @@ name: "Process test results"
description: Convert test results to JUnit, add them to GitHub Actions and codecov
inputs:
files:
description: Comma-separated explicit list of files to upload
flags:
description: Codecov flags
runs:
using: "composite"
steps:
- uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
- uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5
with:
files: ${{ inputs.files }}
flags: ${{ inputs.flags }}
use_oidc: true
- uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
- uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5
with:
files: ${{ inputs.files }}
flags: ${{ inputs.flags }}
use_oidc: true
report_type: test_results

3
.github/codecov.yml vendored
View File

@@ -8,6 +8,3 @@ coverage:
threshold: 1%
comment:
after_n_builds: 3
ignore:
- packages/client-rust
- packages/client-ts

View File

@@ -20,8 +20,6 @@ updates:
prefix: "ci:"
labels:
- dependencies
cooldown:
default-days: 3
#endregion
@@ -37,36 +35,11 @@ updates:
prefix: "core:"
labels:
- dependencies
cooldown:
default-days: 7
semver-major-days: 14
semver-patch-days: 3
exclude:
- "golang.org/x/crypto"
- "golang.org/x/net"
- "github.com/golang-jwt/jwt/*"
- "github.com/coreos/go-oidc/*"
- "github.com/go-ldap/ldap/*"
#endregion
#region Rust
- package-ecosystem: cargo
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies
cooldown:
default-days: 7
semver-major-days: 14
semver-patch-days: 3
- package-ecosystem: rust-toolchain
directory: "/"
schedule:
@@ -77,8 +50,6 @@ updates:
prefix: "core:"
labels:
- dependencies
cooldown:
default-days: 3
#endregion
@@ -97,10 +68,6 @@ updates:
open-pull-requests-limit: 10
commit-message:
prefix: "web:"
cooldown:
default-days: 7
semver-major-days: 14
semver-patch-days: 3
groups:
sentry:
patterns:
@@ -164,10 +131,6 @@ updates:
open-pull-requests-limit: 10
commit-message:
prefix: "core, web:"
cooldown:
default-days: 7
semver-major-days: 14
semver-patch-days: 3
groups:
sentry:
patterns:
@@ -226,10 +189,6 @@ updates:
prefix: "website:"
labels:
- dependencies
cooldown:
default-days: 7
semver-major-days: 14
semver-patch-days: 3
groups:
docusaurus:
patterns:
@@ -268,10 +227,6 @@ updates:
prefix: "lifecycle/aws:"
labels:
- dependencies
cooldown:
default-days: 7
semver-major-days: 14
semver-patch-days: 3
#endregion
@@ -287,18 +242,6 @@ updates:
prefix: "core:"
labels:
- dependencies
cooldown:
default-days: 7
semver-major-days: 14
semver-patch-days: 3
exclude:
- "django"
- "cryptography"
- "pyjwt"
- "xmlsec"
- "lxml"
- "psycopg"
- "pyopenssl"
#endregion
@@ -316,14 +259,10 @@ updates:
prefix: "core:"
labels:
- dependencies
cooldown:
default-days: 3
- package-ecosystem: docker-compose
directories:
- /packages/client-go
- /packages/client-rust
- /packages/client-ts
# - /scripts # Maybe
- /scripts/api
- /tests/e2e
schedule:
interval: daily
@@ -333,7 +272,5 @@ updates:
prefix: "core:"
labels:
- dependencies
cooldown:
default-days: 3
#endregion

View File

@@ -26,7 +26,7 @@ REPLACE ME
If an API change has been made
- [ ] The API schema and clients have been updated (`make gen`)
- [ ] The API schema has been updated (`make gen-build`)
If changes to the frontend have been made

View File

@@ -56,19 +56,31 @@ jobs:
release: ${{ inputs.release }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version-file: "go.mod"
- name: Generate API Clients
run: |
make gen-client-ts
make gen-client-go
- name: Build Docker Image
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
id: push
with:
context: .

View File

@@ -79,18 +79,18 @@ jobs:
image-name: ${{ inputs.image_name }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: int128/docker-manifest-create-action@44422a4b046d55dc036df622039ed3aec43c613c # v2
- uses: int128/docker-manifest-create-action@8aac06098a12365ccdf99372dcfb453ccce8a0b0 # v2
id: build
with:
tags: ${{ matrix.tag }}

66
.github/workflows/api-ts-publish.yml vendored Normal file
View File

@@ -0,0 +1,66 @@
---
name: API - Publish Typescript client
on:
push:
branches: [main]
paths:
- "schema.yml"
workflow_dispatch:
permissions:
# Required for NPM OIDC trusted publisher
id-token: write
contents: read
jobs:
build:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
token: ${{ steps.generate_token.outputs.token }}
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
with:
node-version-file: web/package.json
registry-url: "https://registry.npmjs.org"
- name: Generate API Client
run: make gen-client-ts
- name: Publish package
working-directory: gen-ts-api/
run: |
npm i
npm publish --tag generated
- name: Upgrade /web
working-directory: web
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- name: Upgrade /web/packages/sfe
working-directory: web/packages/sfe
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}
branch: update-web-api-client
commit-message: "web: bump API Client version"
title: "web: bump API Client version"
body: "web: bump API Client version"
delete-branch: true
signoff: true
# ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
labels: dependencies
- uses: peter-evans/enable-pull-request-automerge@a660677d5469627102a1c1e11409dd063606628d # v3
with:
token: ${{ steps.generate_token.outputs.token }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash

View File

@@ -41,7 +41,7 @@ jobs:
- working-directory: website/
name: Install Dependencies
run: npm ci
- uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v4
- uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v4
with:
path: |
${{ github.workspace }}/website/api/.docusaurus
@@ -55,7 +55,7 @@ jobs:
env:
NODE_ENV: production
run: npm run build -w api
- uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v4
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v4
with:
name: api-docs
path: website/api/build

View File

@@ -89,14 +89,14 @@ jobs:
image-name: ghcr.io/goauthentik/dev-docs
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
id: push
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: website/Dockerfile

View File

@@ -20,19 +20,13 @@ jobs:
version:
- docs
- version-2025-12
- version-2026-2
- version-2025-10
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- run: |
set -euo pipefail
current="$(pwd)"
dir="/tmp/authentik/${{ matrix.version }}"
# 2025.12 still serves the legacy docker-compose filename; newer sites use compose.yml.
compose_path="compose.yml"
if [ "${{ matrix.version }}" = "version-2025-12" ]; then
compose_path="docker-compose.yml"
fi
mkdir -p "${dir}/lifecycle/container"
cd "${dir}"
wget "https://${{ matrix.version }}.goauthentik.io/${compose_path}" -O "${dir}/lifecycle/container/compose.yml"
wget "https://${{ matrix.version }}.goauthentik.io/docker-compose.yml" -O "${dir}/lifecycle/container/compose.yml"
"${current}/scripts/test_docker.sh"

View File

@@ -28,52 +28,30 @@ jobs:
strategy:
fail-fast: false
matrix:
include:
- job: bandit
deps: python
- job: black
deps: python
- job: spellcheck
deps: node
- job: pending-migrations
deps: python,runtime
- job: ruff
deps: python
- job: mypy
deps: python
- job: cargo-deny
deps: rust
- job: cargo-machete
deps: rust
- job: clippy
deps: rust
- job: rustfmt
deps: rust-nightly
job:
- bandit
- black
- spellcheck
- pending-migrations
- ruff
- mypy
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
uses: ./.github/actions/setup
with:
dependencies: ${{ matrix.deps }}
- name: run job
run: make ci-lint-${{ matrix.job }}
test-gen:
run: uv run make ci-${{ matrix.job }}
test-gen-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Setup authentik env
uses: ./.github/actions/setup
with:
dependencies: "system,python,go,node,runtime,rust-nightly"
- name: generate schema
run: make migrate gen-build
- name: generate API clients
run: make gen-clients
- name: ensure schema is up-to-date
run: git diff --exit-code -- schema.yml blueprints/schema.json packages/client-go packages/client-rust packages/client-ts
run: git diff --exit-code -- schema.yml blueprints/schema.json
test-migrations:
runs-on: ubuntu-latest
steps:
@@ -150,7 +128,7 @@ jobs:
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
run: |
uv run make ci-test
make ci-test glob="authentik"
- uses: ./.github/actions/test-results
if: ${{ always() }}
with:
@@ -179,7 +157,7 @@ jobs:
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
run: |
uv run make ci-test
make ci-test glob="authentik"
- uses: ./.github/actions/test-results
if: ${{ always() }}
with:
@@ -195,9 +173,7 @@ jobs:
uses: helm/kind-action@ef37e7f390d99f746eb8b610417061a60e82a6cc # v1.14.0
- name: run integration
run: |
uv run coverage run manage.py test tests/integration
uv run coverage combine
uv run coverage xml
make ci-test glob="tests/integration"
- uses: ./.github/actions/test-results
if: ${{ always() }}
with:
@@ -212,61 +188,47 @@ jobs:
job:
- name: proxy
glob: tests/e2e/test_provider_proxy*
profiles: selenium
- name: oauth
glob: tests/e2e/test_provider_oauth2* tests/e2e/test_source_oauth*
profiles: selenium
- name: oauth-oidc
glob: tests/e2e/test_provider_oidc*
profiles: selenium
- name: saml
glob: tests/e2e/test_provider_saml* tests/e2e/test_source_saml*
profiles: selenium
- name: ldap
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
- name: rac
glob: tests/e2e/test_provider_rac*
profiles: selenium
- name: ws-fed
glob: tests/e2e/test_provider_ws_fed*
profiles: selenium
- name: radius
glob: tests/e2e/test_provider_radius*
- name: scim
glob: tests/e2e/test_source_scim*
- name: flows
glob: tests/e2e/test_flows*
profiles: selenium
- name: endpoints
glob: tests/e2e/test_endpoints_*
profiles: selenium
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Setup e2e env
env:
COMPOSE_PROFILES: ${{ matrix.job.profiles }}
- name: Setup e2e env (chrome, etc)
run: |
docker compose -f tests/e2e/compose.yml up -d --quiet-pull
- id: cache-web
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v4
if: contains(matrix.job.profiles, 'selenium')
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v4
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true' && contains(matrix.job.profiles, 'selenium')
if: steps.cache-web.outputs.cache-hit != 'true'
working-directory: web
run: |
npm ci
make -C .. gen-client-ts
npm run build
npm run build:sfe
- name: run e2e
run: |
uv run coverage run manage.py test ${{ matrix.job.glob }}
uv run coverage combine
uv run coverage xml
make ci-test glob="${{ matrix.job.glob }}"
- uses: ./.github/actions/test-results
if: ${{ always() }}
with:
@@ -288,15 +250,13 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc)
env:
COMPOSE_PROFILES: selenium
run: |
docker compose -f tests/e2e/compose.yml up -d --quiet-pull
- name: Setup conformance suite
run: |
docker compose -f tests/openid_conformance/compose.yml up -d --quiet-pull
- id: cache-web
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v4
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v4
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
@@ -305,50 +265,26 @@ jobs:
working-directory: web
run: |
npm ci
make -C .. gen-client-ts
npm run build
npm run build:sfe
- name: run conformance
run: |
uv run coverage run manage.py test ${{ matrix.job.glob }}
uv run coverage combine
uv run coverage xml
make ci-test glob="${{ matrix.job.glob }}"
- uses: ./.github/actions/test-results
if: ${{ always() }}
with:
flags: conformance
- if: ${{ !cancelled() }}
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: conformance-certification-${{ matrix.job.name }}
path: tests/openid_conformance/exports/
test-rust:
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
uses: ./.github/actions/setup
with:
dependencies: rust,runtime
- name: run tests
run: |
cargo llvm-cov --no-report nextest --workspace
cargo llvm-cov report --codecov --output-path target/llvm-cov-target/rust.json
- uses: ./.github/actions/test-results
if: ${{ always() }}
with:
files: target/llvm-cov-target/rust.json
flags: rust
- if: ${{ !cancelled() }}
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: test-rust
path: target/llvm-cov-target/rust.json
ci-core-mark:
if: always()
needs:
- lint
- test-gen
- test-gen-build
- test-migrations
- test-migrations-from-stable
- test-unittest

View File

@@ -22,7 +22,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version-file: "go.mod"
- name: Prepare and generate API
@@ -31,6 +31,8 @@ jobs:
mkdir -p web/dist
mkdir -p website/help
touch web/dist/test website/help/test
- name: Generate API
run: make gen-client-go
- name: golangci-lint
uses: golangci/golangci-lint-action@1e7e51e771db61008b38414a730f564565cf7c20 # v8
with:
@@ -41,11 +43,13 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version-file: "go.mod"
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Generate API
run: make gen-client-go
- name: prepare database
run: |
uv run make migrate
@@ -98,14 +102,16 @@ jobs:
image-name: ghcr.io/goauthentik/dev-${{ matrix.type }}
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate API
run: make gen-client-go
- name: Build Docker Image
id: push
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: lifecycle/container/${{ matrix.type }}.Dockerfile
@@ -142,7 +148,7 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
@@ -150,6 +156,8 @@ jobs:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Generate API
run: make gen-client-go
- name: Build web
working-directory: web/
run: |

View File

@@ -40,6 +40,8 @@ jobs:
- working-directory: ${{ matrix.project }}/
run: |
npm ci
- name: Generate API
run: make gen-client-ts
- name: Lint
working-directory: ${{ matrix.project }}/
run: npm run ${{ matrix.command }}
@@ -54,6 +56,8 @@ jobs:
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: build
working-directory: web/
run: npm run build
@@ -80,6 +84,8 @@ jobs:
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: test
working-directory: web/
run: npm run test || exit 0

View File

@@ -29,20 +29,20 @@ jobs:
github.event.pull_request.head.repo.full_name == github.repository)
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Compress images
id: compress
uses: calibreapp/image-actions@03c976c29803442fc4040a9de5509669e7759b81 # main
uses: calibreapp/image-actions@d9c8ee5c3dc52ae4622c82ead88d658f4b16b65f # main
with:
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
compressOnly: ${{ github.event_name != 'pull_request' }}
- uses: peter-evans/create-pull-request@5f6978faf089d4d20b00c7766989d076bb2fc7f1 # v7
- uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
id: cpr
with:

View File

@@ -16,17 +16,17 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Setup authentik env
uses: ./.github/actions/setup
- run: uv run ak update_webauthn_mds
- uses: peter-evans/create-pull-request@5f6978faf089d4d20b00c7766989d076bb2fc7f1 # v7
- uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}

View File

@@ -10,11 +10,11 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
if: ${{ env.GH_APP_ID != '' }}
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
env:
GH_APP_ID: ${{ secrets.GH_APP_ID }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5

View File

@@ -16,10 +16,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Delete 'dev' containers older than a week
uses: snok/container-retention-policy@3b0972b2276b171b212f8c4efbca59ebba26eceb # v3.0.1
with:

View File

@@ -29,10 +29,10 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Checkout main
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
@@ -57,10 +57,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Checkout main
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
@@ -73,7 +73,7 @@ jobs:
- name: Bump version
run: "make bump version=${{ inputs.next_version }}.0-rc1"
- name: Create pull request
uses: peter-evans/create-pull-request@5f6978faf089d4d20b00c7766989d076bb2fc7f1 # v7
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
with:
token: ${{ steps.generate_token.outputs.token }}
branch: release-bump-${{ inputs.next_version }}

View File

@@ -44,14 +44,14 @@ jobs:
with:
image-name: ghcr.io/goauthentik/docs
- name: Login to GitHub Container Registry
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
id: push
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: website/Dockerfile
@@ -84,7 +84,7 @@ jobs:
- rac
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
@@ -103,19 +103,23 @@ jobs:
DOCKER_USERNAME: ${{ secrets.DOCKER_CORP_USERNAME }}
with:
image-name: ghcr.io/goauthentik/${{ matrix.type }},authentik/${{ matrix.type }}
- name: Generate API Clients
run: |
make gen-client-ts
make gen-client-go
- name: Docker Login Registry
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
id: push
with:
push: true
@@ -148,7 +152,7 @@ jobs:
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
@@ -160,6 +164,10 @@ jobs:
working-directory: web/
run: |
npm ci
- name: Generate API Clients
run: |
make gen-client-ts
make gen-client-go
- name: Build web
working-directory: web/
run: |
@@ -172,7 +180,7 @@ jobs:
export CGO_ENABLED=0
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- name: Upload binaries to release
uses: svenstaro/upload-release-action@29e53e917877a24fad85510ded594ab3c9ca12de # v2
uses: svenstaro/upload-release-action@b98a3b12e86552593f3e4e577ca8a62aa2f3f22b # v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}
@@ -191,7 +199,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: aws-actions/configure-aws-credentials@ec61189d14ec14c8efccab744f656cffd0e33f37 # v6.1.0
- uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6.0.0
with:
role-to-assume: "arn:aws:iam::016170277896:role/github_goauthentik_authentik"
aws-region: ${{ env.AWS_REGION }}
@@ -236,7 +244,7 @@ jobs:
container=$(docker container create ${{ steps.ev.outputs.imageMainName }})
docker cp ${container}:web/ .
- name: Create a Sentry.io release
uses: getsentry/action-release@5657c9e888b4e2cc85f4d29143ea4131fde4a73a # v3
uses: getsentry/action-release@dab6548b3c03c4717878099e43782cf5be654289 # v3
continue-on-error: true
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}

View File

@@ -67,10 +67,10 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- id: get-user-id
name: Get GitHub app user ID
run: echo "user-id=$(gh api "/users/${{ steps.app-token.outputs.app-slug }}[bot]" --jq .id)" >> "$GITHUB_OUTPUT"
@@ -96,7 +96,7 @@ jobs:
git tag "version/${{ inputs.version }}" HEAD -m "version/${{ inputs.version }}"
git push --follow-tags
- name: Create Release
uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2.6.1
uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2.5.0
with:
token: "${{ steps.app-token.outputs.token }}"
tag_name: "version/${{ inputs.version }}"
@@ -115,10 +115,10 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
repositories: helm
- id: get-user-id
name: Get GitHub app user ID
@@ -137,7 +137,7 @@ jobs:
sed -E -i 's/[0-9]{4}\.[0-9]{1,2}\.[0-9]+$/${{ inputs.version }}/' charts/authentik/Chart.yaml
./scripts/helm-docs.sh
- name: Create pull request
uses: peter-evans/create-pull-request@5f6978faf089d4d20b00c7766989d076bb2fc7f1 # v7
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
with:
token: "${{ steps.app-token.outputs.token }}"
branch: bump-${{ inputs.version }}
@@ -157,10 +157,10 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
repositories: version
- id: get-user-id
name: Get GitHub app user ID
@@ -196,7 +196,7 @@ jobs:
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url | .stable.reason = $reason' version.json > version.new.json
mv version.new.json version.json
- name: Create pull request
uses: peter-evans/create-pull-request@5f6978faf089d4d20b00c7766989d076bb2fc7f1 # v7
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
with:
token: "${{ steps.app-token.outputs.token }}"
branch: bump-${{ inputs.version }}

View File

@@ -15,10 +15,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f # v10
with:
repo-token: ${{ steps.generate_token.outputs.token }}

View File

@@ -21,10 +21,10 @@ jobs:
steps:
- id: generate_token
if: ${{ github.event_name != 'pull_request' }}
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
if: ${{ github.event_name != 'pull_request' }}
with:
@@ -33,6 +33,8 @@ jobs:
if: ${{ github.event_name == 'pull_request' }}
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Generate API
run: make gen-client-ts
- name: run extract
run: |
uv run make i18n-extract
@@ -42,7 +44,7 @@ jobs:
make web-check-compile
- name: Create Pull Request
if: ${{ github.event_name != 'pull_request' }}
uses: peter-evans/create-pull-request@5f6978faf089d4d20b00c7766989d076bb2fc7f1 # v7
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
with:
token: ${{ steps.generate_token.outputs.token }}
branch: extract-compile-backend-translation

19
.gitignore vendored
View File

@@ -195,24 +195,6 @@ pyvenv.cfg
pip-selfcheck.json
# End of https://www.gitignore.io/api/python,django
# Created by https://www.toptal.com/developers/gitignore/api/rust
# Edit at https://www.toptal.com/developers/gitignore?templates=rust
### Rust ###
# Generated by Cargo
# will have compiled files and executables
debug/
target/
# These are backup files generated by rustfmt
**/*.rs.bk
# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
# End of https://www.toptal.com/developers/gitignore/api/rust
/static/
local.env.yml
@@ -220,6 +202,7 @@ media/
*mmdb
.idea/
/gen-*/
data/
# Local Netlify folder

View File

@@ -1,7 +1,6 @@
# Prettier Ignorefile
## Static Files
CODEOWNERS
**/LICENSE
authentik/stages/**/*

View File

@@ -17,6 +17,6 @@
"ms-python.vscode-pylance",
"redhat.vscode-yaml",
"Tobermory.es6-string-html",
"unifiedjs.vscode-mdx"
"unifiedjs.vscode-mdx",
]
}

18
.vscode/settings.json vendored
View File

@@ -38,10 +38,10 @@
"!AtIndex scalar",
"!ParseJSON scalar"
],
"js/ts.preferences.importModuleSpecifier": "non-relative",
"js/ts.preferences.importModuleSpecifierEnding": "index",
"js/ts.tsdk.path": "./node_modules/typescript/lib",
"js/ts.tsdk.promptToUseWorkspaceVersion": true,
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.importModuleSpecifierEnding": "index",
"typescript.tsdk": "./node_modules/typescript/lib",
"typescript.enablePromptUseWorkspaceTsdk": true,
"yaml.schemas": {
"./blueprints/schema.json": "blueprints/**/*.yaml"
},
@@ -57,13 +57,5 @@
"go.testEnvVars": {
"WORKSPACE_DIR": "${workspaceFolder}"
},
"github-actions.workflows.pinned.workflows": [".github/workflows/ci-main.yml"],
"search.exclude": {
"**/*.code-search": true,
"**/bower_components": true,
"**/node_modules": true,
"**/playwright-report/**": true,
"**/website/**/build": true,
"**/client-*": true
}
"github-actions.workflows.pinned.workflows": [".github/workflows/ci-main.yml"]
}

View File

@@ -3,7 +3,6 @@
# Backend
authentik/ @goauthentik/backend
blueprints/ @goauthentik/backend
src/ @goauthentik/backend
cmd/ @goauthentik/backend
internal/ @goauthentik/backend
lifecycle/ @goauthentik/backend
@@ -12,12 +11,8 @@ scripts/ @goauthentik/backend
tests/ @goauthentik/backend
pyproject.toml @goauthentik/backend
uv.lock @goauthentik/backend
Cargo.toml @goauthentik/backend
Cargo.lock @goauthentik/backend
go.mod @goauthentik/backend
go.sum @goauthentik/backend
.cargo/ @goauthentik/backend
rust-toolchain.toml @goauthentik/backend
# Infrastructure
.github/ @goauthentik/infrastructure
lifecycle/aws/ @goauthentik/infrastructure
@@ -27,18 +22,14 @@ Makefile @goauthentik/infrastructure
.editorconfig @goauthentik/infrastructure
CODEOWNERS @goauthentik/infrastructure
# Backend packages
packages/ak-* @goauthentik/backend
packages/client-rust @goauthentik/backend
packages/django-channels-postgres @goauthentik/backend
packages/django-postgres-cache @goauthentik/backend
packages/django-dramatiq-postgres @goauthentik/backend
# Web packages
tsconfig.json @goauthentik/frontend
package.json @goauthentik/frontend
package-lock.json @goauthentik/frontend
packages/package.json @goauthentik/frontend
packages/package-lock.json @goauthentik/frontend
packages/client-ts @goauthentik/frontend
packages/docusaurus-config @goauthentik/frontend
packages/esbuild-plugin-live-reload @goauthentik/frontend
packages/eslint-config @goauthentik/frontend

4858
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,229 +0,0 @@
[workspace]
members = [
"packages/ak-axum",
"packages/ak-common",
"packages/client-rust",
"website/scripts/docsmg",
]
resolver = "3"
[workspace.package]
version = "2026.5.0-rc1"
authors = ["authentik Team <hello@goauthentik.io>"]
description = "Making authentication simple."
edition = "2024"
readme = "README.md"
homepage = "https://goauthentik.io"
repository = "https://github.com/goauthentik/authentik.git"
license-file = "LICENSE"
publish = false
[workspace.dependencies]
arc-swap = "= 1.9.1"
axum-server = { version = "= 0.8.0", features = ["tls-rustls-no-provider"] }
aws-lc-rs = { version = "= 1.16.2", features = ["fips"] }
axum = { version = "= 0.8.8", features = ["http2", "macros", "ws"] }
clap = { version = "= 4.6.0", features = ["derive", "env"] }
client-ip = { version = "0.2.1", features = ["forwarded-header"] }
colored = "= 3.1.1"
config-rs = { package = "config", version = "= 0.15.22", default-features = false, features = [
"json",
"yaml",
] }
console-subscriber = "= 0.5.0"
dotenvy = "= 0.15.7"
durstr = "= 0.5.1"
eyre = "= 0.6.12"
forwarded-header-value = "= 0.1.1"
futures = "= 0.3.32"
glob = "= 0.3.3"
ipnet = { version = "= 2.12.0", features = ["serde"] }
json-subscriber = "= 0.2.8"
nix = { version = "= 0.31.2", features = ["signal"] }
notify = "= 8.2.0"
pin-project-lite = "= 0.2.17"
regex = "= 1.12.3"
reqwest = { version = "= 0.13.2", features = [
"form",
"json",
"multipart",
"query",
"rustls",
"stream",
] }
reqwest-middleware = { version = "= 0.5.1", features = [
"form",
"json",
"multipart",
"query",
"rustls",
] }
rustls = { version = "= 0.23.37", features = ["fips"] }
sentry = { version = "= 0.47.0", default-features = false, features = [
"backtrace",
"contexts",
"debug-images",
"panic",
"rustls",
"reqwest",
"tower",
"tracing",
] }
serde = { version = "= 1.0.228", features = ["derive"] }
serde_json = "= 1.0.149"
serde_repr = "= 0.1.20"
serde_with = { version = "= 3.18.0", default-features = false, features = [
"base64",
] }
sqlx = { version = "= 0.8.6", default-features = false, features = [
"runtime-tokio",
"tls-rustls-aws-lc-rs",
"postgres",
"derive",
"macros",
"uuid",
"chrono",
"ipnet",
"json",
] }
tempfile = "= 3.27.0"
thiserror = "= 2.0.18"
time = { version = "= 0.3.47", features = ["macros"] }
tokio = { version = "= 1.51.1", features = ["full", "tracing"] }
tokio-rustls = "= 0.26.4"
tokio-util = { version = "= 0.7.18", features = ["full"] }
tower = "= 0.5.3"
tower-http = { version = "= 0.6.8", features = ["timeout"] }
tracing = "= 0.1.44"
tracing-error = "= 0.2.1"
tracing-subscriber = { version = "= 0.3.23", features = [
"env-filter",
"json",
"local-time",
"tracing-log",
] }
url = "= 2.5.8"
uuid = { version = "= 1.23.0", features = ["serde", "v4"] }
ak-common = { package = "authentik-common", version = "2026.5.0-rc1", path = "./packages/ak-common", default-features = false }
[profile.dev.package.backtrace]
opt-level = 3
[profile.release]
lto = true
debug = 2
[workspace.lints.rust]
ambiguous_negative_literals = "warn"
closure_returning_async_block = "warn"
macro_use_extern_crate = "deny"
# must_not_suspend = "deny", unstable see https://github.com/rust-lang/rust/issues/83310
non_ascii_idents = "deny"
redundant_imports = "warn"
semicolon_in_expressions_from_macros = "warn"
trivial_casts = "warn"
trivial_numeric_casts = "warn"
unit_bindings = "warn"
unreachable_pub = "warn"
unsafe_code = "deny"
unused_extern_crates = "warn"
unused_import_braces = "warn"
unused_lifetimes = "warn"
unused_macro_rules = "warn"
unused_qualifications = "warn"
[workspace.lints.rustdoc]
unescaped_backticks = "warn"
[workspace.lints.clippy]
### enable all lints
cargo = { priority = -1, level = "warn" }
complexity = { priority = -1, level = "warn" }
correctness = { priority = -1, level = "warn" }
nursery = { priority = -1, level = "warn" }
pedantic = { priority = -1, level = "warn" }
perf = { priority = -1, level = "warn" }
# Those are too restrictive and disabled by default, however we enable some below
# restriction = { priority = -1, level = "warn" }
style = { priority = -1, level = "warn" }
suspicious = { priority = -1, level = "warn" }
### and disable the ones we don't want
### cargo group
multiple_crate_versions = "allow"
### pedantic group
missing_errors_doc = "allow"
missing_panics_doc = "allow"
must_use_candidate = "allow"
redundant_closure_for_method_calls = "allow"
struct_field_names = "allow"
too_many_lines = "allow"
### nursery
missing_const_for_fn = "allow"
option_if_let_else = "allow"
redundant_pub_crate = "allow"
significant_drop_tightening = "allow"
### restriction group
allow_attributes = "warn"
allow_attributes_without_reason = "warn"
as_conversions = "warn"
as_pointer_underscore = "warn"
as_underscore = "warn"
assertions_on_result_states = "warn"
clone_on_ref_ptr = "warn"
create_dir = "warn"
dbg_macro = "warn"
default_numeric_fallback = "warn"
disallowed_script_idents = "warn"
empty_drop = "warn"
empty_enum_variants_with_brackets = "warn"
empty_structs_with_brackets = "warn"
error_impl_error = "warn"
exit = "warn"
filetype_is_file = "warn"
float_cmp_const = "warn"
fn_to_numeric_cast_any = "warn"
get_unwrap = "warn"
if_then_some_else_none = "warn"
impl_trait_in_params = "warn"
infinite_loop = "warn"
lossy_float_literal = "warn"
map_with_unused_argument_over_ranges = "warn"
mem_forget = "warn"
missing_asserts_for_indexing = "warn"
missing_trait_methods = "warn"
mixed_read_write_in_expression = "warn"
mutex_atomic = "warn"
mutex_integer = "warn"
needless_raw_strings = "warn"
non_zero_suggestions = "warn"
panic_in_result_fn = "warn"
pathbuf_init_then_push = "warn"
print_stdout = "warn"
rc_buffer = "warn"
redundant_test_prefix = "warn"
redundant_type_annotations = "warn"
ref_patterns = "warn"
renamed_function_params = "warn"
rest_pat_in_fully_bound_structs = "warn"
return_and_then = "warn"
same_name_method = "warn"
semicolon_inside_block = "warn"
str_to_string = "warn"
string_add = "warn"
suspicious_xor_used_as_pow = "warn"
tests_outside_test_module = "warn"
todo = "warn"
try_err = "warn"
undocumented_unsafe_blocks = "warn"
unimplemented = "warn"
unnecessary_safety_comment = "warn"
unnecessary_safety_doc = "warn"
unnecessary_self_imports = "warn"
unneeded_field_pattern = "warn"
unseparated_literal_suffix = "warn"
unused_result_ok = "warn"
unused_trait_names = "warn"
unwrap_in_result = "warn"
unwrap_used = "warn"
verbose_file_reads = "warn"

131
Makefile
View File

@@ -15,11 +15,14 @@ else
SED_INPLACE = sed -i
endif
GEN_API_TS = gen-ts-api
GEN_API_PY = gen-py-api
GEN_API_GO = gen-go-api
BREW_LDFLAGS :=
BREW_CPPFLAGS :=
BREW_PKG_CONFIG_PATH :=
CARGO := cargo
UV := uv
# For macOS users, add the libxml2 installed from brew libxmlsec1 to the build path
@@ -66,29 +69,22 @@ help: ## Show this help
sort
@echo ""
go-test: ## Run the golang tests
go-test:
go test -timeout 0 -v -race -cover ./...
rust-test: ## Run the Rust tests
$(CARGO) nextest run --workspace
test: ## Run the server tests and produce a coverage report (locally)
$(UV) run coverage run manage.py test --keepdb $(or $(filter-out $@,$(MAKECMDGOALS)),authentik)
$(UV) run coverage combine
$(UV) run coverage html
$(UV) run coverage report
lint-fix-rust:
$(CARGO) +nightly fmt --all -- --config-path "${PWD}/.cargo/rustfmt.toml"
lint-fix: lint-fix-rust ## Lint and automatically fix errors in the python source code. Reports spelling errors.
lint-fix: lint-spellcheck ## Lint and automatically fix errors in the python source code. Reports spelling errors.
$(UV) run black $(PY_SOURCES)
$(UV) run ruff check --fix $(PY_SOURCES)
lint-spellcheck: ## Reports spelling errors.
npm run lint:spellcheck
lint: ci-lint-bandit ci-lint-mypy ci-lint-cargo-deny ci-lint-cargo-machete ## Lint the python and golang sources
lint: ci-bandit ci-mypy ## Lint the python and golang sources
golangci-lint run -v
core-install:
@@ -121,7 +117,8 @@ core-i18n-extract:
--no-obsolete \
--ignore web \
--ignore internal \
--ignore packages/client-ts \
--ignore ${GEN_API_TS} \
--ignore ${GEN_API_GO} \
--ignore website \
-l en
@@ -129,29 +126,28 @@ install: node-install docs-install core-install ## Install all requires depende
dev-drop-db:
$(eval pg_user := $(shell $(UV) run python -m authentik.lib.config postgresql.user 2>/dev/null))
$(eval pg_pass := $(shell $(UV) run python -m authentik.lib.config postgresql.password 2>/dev/null))
$(eval pg_host := $(shell $(UV) run python -m authentik.lib.config postgresql.host 2>/dev/null))
$(eval pg_name := $(shell $(UV) run python -m authentik.lib.config postgresql.name 2>/dev/null))
dropdb -U ${pg_user} -h ${pg_host} ${pg_name} || true
PGPASSWORD="${pg_pass}" psql -U ${pg_user} -h ${pg_host} postgres -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = '${pg_name}'"
PGPASSWORD="${pg_pass}" dropdb -U ${pg_user} -h ${pg_host} ${pg_name} || true
# Also remove the test-db if it exists
dropdb -U ${pg_user} -h ${pg_host} test_${pg_name} || true
PGPASSWORD="${pg_pass}" psql -U ${pg_user} -h ${pg_host} postgres -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = 'test_${pg_name}'"
PGPASSWORD="${pg_pass}" dropdb -U ${pg_user} -h ${pg_host} test_${pg_name} || true
dev-create-db:
$(eval pg_user := $(shell $(UV) run python -m authentik.lib.config postgresql.user 2>/dev/null))
$(eval pg_pass := $(shell $(UV) run python -m authentik.lib.config postgresql.password 2>/dev/null))
$(eval pg_host := $(shell $(UV) run python -m authentik.lib.config postgresql.host 2>/dev/null))
$(eval pg_name := $(shell $(UV) run python -m authentik.lib.config postgresql.name 2>/dev/null))
createdb -U ${pg_user} -h ${pg_host} ${pg_name}
PGPASSWORD="${pg_pass}" createdb -U ${pg_user} -h ${pg_host} ${pg_name} || true
PGPASSWORD="${pg_pass}" createdb -U ${pg_user} -h ${pg_host} test_${pg_name} || true
dev-reset: dev-drop-db dev-create-db migrate ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state.
update-test-mmdb: ## Update test GeoIP and ASN Databases
curl \
-L \
-o ${PWD}/tests/geoip/GeoLite2-ASN-Test.mmdb \
https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-ASN-Test.mmdb
curl \
-L \
-o ${PWD}/tests/geoip/GeoLite2-City-Test.mmdb \
https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-City-Test.mmdb
curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-ASN-Test.mmdb -o ${PWD}/tests/GeoLite2-ASN-Test.mmdb
curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-City-Test.mmdb -o ${PWD}/tests/GeoLite2-City-Test.mmdb
bump: ## Bump authentik version. Usage: make bump version=20xx.xx.xx
ifndef version
@@ -160,7 +156,6 @@ endif
$(eval current_version := $(shell cat ${PWD}/internal/constants/VERSION))
$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' ${PWD}/pyproject.toml
$(SED_INPLACE) 's/^VERSION = ".*"/VERSION = "$(version)"/' ${PWD}/authentik/__init__.py
$(SED_INPLACE) "s/version = \"${current_version}\"/version = \"$(version)\"" ${PWD}/Cargo.toml ${PWD}/Cargo.lock
$(MAKE) gen-build gen-compose aws-cfn
$(SED_INPLACE) "s/\"${current_version}\"/\"$(version)\"/" ${PWD}/package.json ${PWD}/package-lock.json ${PWD}/web/package.json ${PWD}/web/package-lock.json
echo -n $(version) > ${PWD}/internal/constants/VERSION
@@ -194,7 +189,7 @@ gen-changelog: ## (Release) generate the changelog based from the commits since
gen-diff: ## (Release) generate the changelog diff between the current schema and the last version
$(eval last_version := $(shell git tag --list 'version/*' --sort 'version:refname' | grep -vE 'rc\d+$$' | tail -1))
git show ${last_version}:schema.yml > schema-old.yml
docker compose -f scripts/compose.yml run --rm --user "${UID}:${GID}" diff \
docker compose -f scripts/api/compose.yml run --rm --user "${UID}:${GID}" diff \
--markdown \
/local/diff.md \
/local/schema-old.yml \
@@ -204,26 +199,51 @@ gen-diff: ## (Release) generate the changelog diff between the current schema a
$(SED_INPLACE) 's/}/&#125;/g' diff.md
npx prettier --write diff.md
gen-client-go: ## Build and install the authentik API for Golang
make -C "${PWD}/packages/client-go" build
gen-clean-ts: ## Remove generated API client for TypeScript
rm -rf ${PWD}/${GEN_API_TS}/
rm -rf ${PWD}/web/node_modules/@goauthentik/api/
gen-client-rust: ## Build and install the authentik API for Rust
make -C "${PWD}/packages/client-rust" build version=${NPM_VERSION}
make lint-fix-rust
gen-clean-py: ## Remove generated API client for Python
rm -rf ${PWD}/${GEN_API_PY}
gen-client-ts: ## Build and install the authentik API for Typescript into the authentik UI Application
make -C "${PWD}/packages/client-ts" build
npm --prefix web install
gen-clean-go: ## Remove generated API client for Go
rm -rf ${PWD}/${GEN_API_GO}
_gen-clients: gen-client-go gen-client-rust gen-client-ts
gen-clients: ## Build and install API clients used by authentik
$(MAKE) _gen-clients -j
gen-clean: gen-clean-ts gen-clean-go gen-clean-py ## Remove generated API clients
gen: gen-build gen-clients ## Build and install API schema and clients used by authentik
gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescript into the authentik UI Application
docker compose -f scripts/api/compose.yml run --rm --user "${UID}:${GID}" gen \
generate \
-i /local/schema.yml \
-g typescript-fetch \
-o /local/${GEN_API_TS} \
-c /local/scripts/api/ts-config.yaml \
--additional-properties=npmVersion=${NPM_VERSION} \
--git-repo-id authentik \
--git-user-id goauthentik
cd ${PWD}/${GEN_API_TS} && npm i
cd ${PWD}/${GEN_API_TS} && npm link
cd ${PWD}/web && npm link @goauthentik/api
gen-client-py: gen-clean-py ## Build and install the authentik API for Python
mkdir -p ${PWD}/${GEN_API_PY}
git clone --depth 1 https://github.com/goauthentik/client-python.git ${PWD}/${GEN_API_PY}
cp ${PWD}/schema.yml ${PWD}/${GEN_API_PY}
make -C ${PWD}/${GEN_API_PY} build version=${NPM_VERSION}
gen-client-go: gen-clean-go ## Build and install the authentik API for Golang
mkdir -p ${PWD}/${GEN_API_GO}
git clone --depth 1 https://github.com/goauthentik/client-go.git ${PWD}/${GEN_API_GO}
cp ${PWD}/schema.yml ${PWD}/${GEN_API_GO}
make -C ${PWD}/${GEN_API_GO} build version=${NPM_VERSION}
go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO}
gen-dev-config: ## Generate a local development config file
$(UV) run scripts/generate_config.py
gen: gen-build gen-client-ts
#########################
## Node.js
#########################
@@ -292,7 +312,7 @@ docs-api-build:
npm run --prefix website -w api build
docs-api-watch: ## Build and watch the API documentation
npm run --prefix website -w api generate
npm run --prefix website -w api build:api
npm run --prefix website -w api start
docs-api-clean: ## Clean generated API documentation
@@ -303,6 +323,7 @@ docs-api-clean: ## Clean generated API documentation
#########################
docker: ## Build a docker image of the current source tree
mkdir -p ${GEN_API_TS}
DOCKER_BUILDKIT=1 docker build . -f lifecycle/container/Dockerfile --progress plain --tag ${DOCKER_IMAGE}
test-docker:
@@ -315,42 +336,28 @@ test-docker:
# which makes the YAML File a lot smaller
ci--meta-debug:
$(UV) run python -V || echo "No python installed"
$(CARGO) --version || echo "No rust installed"
node --version || echo "No node installed"
$(UV) run python -V
node --version
ci-lint-mypy: ci--meta-debug
ci-mypy: ci--meta-debug
$(UV) run mypy --strict $(PY_SOURCES)
ci-lint-black: ci--meta-debug
ci-black: ci--meta-debug
$(UV) run black --check $(PY_SOURCES)
ci-lint-ruff: ci--meta-debug
ci-ruff: ci--meta-debug
$(UV) run ruff check $(PY_SOURCES)
ci-lint-spellcheck: ci--meta-debug
ci-spellcheck: ci--meta-debug
npm run lint:spellcheck
ci-lint-bandit: ci--meta-debug
ci-bandit: ci--meta-debug
$(UV) run bandit -c pyproject.toml -r $(PY_SOURCES) -iii
ci-lint-pending-migrations: ci--meta-debug
ci-pending-migrations: ci--meta-debug
$(UV) run ak makemigrations --check
ci-lint-cargo-deny: ci--meta-debug
$(CARGO) deny --locked --workspace check --config "${PWD}/.cargo/deny.toml"
ci-lint-cargo-machete: ci--meta-debug
$(CARGO) machete
ci-lint-rustfmt: ci--meta-debug
$(CARGO) +nightly fmt --all --check -- --config-path "${PWD}/.cargo/rustfmt.toml"
ci-lint-clippy: ci--meta-debug
$(CARGO) clippy --workspace -- -D warnings
ci-test: ci--meta-debug
$(UV) run coverage run manage.py test --keepdb --parallel auto authentik
$(UV) run coverage combine
ci-test: ci--meta-debug dev-create-db
$(UV) run coverage run manage.py test --keepdb $(glob)
$(UV) run coverage report
$(UV) run coverage xml

View File

@@ -18,10 +18,10 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
(.x being the latest patch release for each version)
| Version | Supported |
| --------- | --------- |
| 2025.12.x | ✅ |
| 2026.2.x | ✅ |
| Version | Supported |
| ---------- | ---------- |
| 2025.12.x | ✅ |
| 2026.2.x | ✅ |
## Reporting a Vulnerability
@@ -60,40 +60,6 @@ authentik reserves the right to reclassify CVSS as necessary. To determine sever
| 7.0 – 8.9 | High |
| 9.0 – 10.0 | Critical |
## Intended functionality
The following capabilities are part of intentional system design and should not be reported as security vulnerabilities:
- Expressions (property mappings/policies/prompts) can execute arbitrary Python code without safeguards.
This is expected behavior. Any user with permission to create or modify objects containing expression fields can write code that is executed within authentik. If a vulnerability allows a user without the required permissions to write or modify code and have it executed, that would be a valid security report.
However, the fact that expressions are executed as part of normal operations is not considered a privilege escalation or security vulnerability.
- Blueprints can access all files on the filesystem.
This access is intentional to allow legitimate configuration and deployment tasks. It does not represent a security problem by itself.
- Importing blueprints allows arbitrary modification of application objects.
This is intended functionality. This behavior reflects the privileged design of blueprint imports. It is "exploitable" when importing blueprints from untrusted sources without reviewing the blueprint beforehand. However, any method to create, modify or execute blueprints without the required permissions would be a valid security report.
- Flow imports may contain objects other than flows (such as policies, users, groups, etc.)
This is expected behavior as flow imports are blueprint files.
- Prompt HTML is not escaped.
Prompts intentionally allow raw HTML, including script tags, so they can be used to create interactive or customized user interface elements. Because of this, scripts within prompts may affect or interact with the surrounding page as designed.
- Open redirects that do not include tokens or other sensitive information are not considered a security vulnerability.
Redirects that only change navigation flow and do not expose session tokens, API keys, or other confidential data are considered acceptable and do not require reporting.
- Outgoing network requests are not filtered.
The destinations of outgoing network requests (HTTP, TCP, etc.) made by authentik to configurable endpoints through objects such as OAuth Sources, SSO Providers, and others are not validated. Depending on your threat model, these requests should be restricted at the network level using appropriate firewall or network policies.
## Disclosure process
1. Report from Github or Issue is reported via Email as listed above.

View File

@@ -8,8 +8,8 @@ from rest_framework.response import Response
from rest_framework.viewsets import ViewSet
from authentik.core.api.utils import PassiveSerializer
from authentik.lib.api import Models
from authentik.lib.utils.reflection import get_apps
from authentik.policies.event_matcher.models import model_choices
class AppSerializer(PassiveSerializer):
@@ -42,6 +42,6 @@ class ModelViewSet(ViewSet):
def list(self, request: Request) -> Response:
"""Read-only view list all installed models"""
data = []
for name, label in Models.choices:
for name, label in model_choices():
data.append({"name": name, "label": label})
return Response(AppSerializer(data, many=True).data)

View File

@@ -106,14 +106,14 @@ class TokenAuthentication(BaseAuthentication):
if not auth_credentials:
return None
# first, check traditional tokens
key_token = Token.objects.filter(
key_token = Token.filter_not_expired(
key=auth_credentials, intent=TokenIntents.INTENT_API
).first()
if key_token:
CTX_AUTH_VIA.set("api_token")
return key_token.user, key_token
# then try to auth via JWT
jwt_token = AccessToken.objects.filter(
jwt_token = AccessToken.filter_not_expired(
token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
).first()
if jwt_token:

View File

@@ -1,18 +1,10 @@
"""Pagination which includes total pages and current page"""
from typing import TYPE_CHECKING
from drf_spectacular.plumbing import build_object_type
from rest_framework import pagination
from rest_framework.response import Response
from authentik.api.search.ql import QLSearch
from authentik.api.v3.schema.pagination import PAGINATION
from authentik.api.v3.schema.search import AUTOCOMPLETE_SCHEMA
if TYPE_CHECKING:
from django.db.models import QuerySet
from rest_framework.request import Request
from authentik.api.v3.schema.response import PAGINATION
class Pagination(pagination.PageNumberPagination):
@@ -21,14 +13,14 @@ class Pagination(pagination.PageNumberPagination):
page_query_param = "page"
page_size_query_param = "page_size"
def get_page_size(self, request: Request) -> int:
def get_page_size(self, request):
if self.page_size_query_param in request.query_params:
page_size = super().get_page_size(request)
if page_size is not None:
return min(super().get_page_size(request), request.tenant.pagination_max_page_size)
return request.tenant.pagination_default_page_size
def get_paginated_response(self, data) -> Response:
def get_paginated_response(self, data):
previous_page_number = 0
if self.page.has_previous():
previous_page_number = self.page.previous_page_number()
@@ -47,33 +39,16 @@ class Pagination(pagination.PageNumberPagination):
"end_index": self.page.end_index(),
},
"results": data,
"autocomplete": self.get_autocomplete(),
}
)
def paginate_queryset(self, queryset: QuerySet, request: Request, view=None):
self.view = view
return super().paginate_queryset(queryset, request, view)
def get_autocomplete(self):
schema = QLSearch().get_schema(self.request, self.view)
introspections = {}
if hasattr(self.view, "get_ql_fields"):
from authentik.api.search.schema import AKQLSchemaSerializer
introspections = AKQLSchemaSerializer().serialize(
schema(self.page.paginator.object_list.model)
)
return introspections
def get_paginated_response_schema(self, schema):
return build_object_type(
properties={
"pagination": PAGINATION.ref,
"results": schema,
"autocomplete": AUTOCOMPLETE_SCHEMA.ref,
},
required=["pagination", "results", "autocomplete"],
required=["pagination", "results"],
)

103
authentik/api/schema.py Normal file
View File

@@ -0,0 +1,103 @@
"""Error Response schema, from https://github.com/axnsan12/drf-yasg/issues/224"""
from collections.abc import Callable
from typing import Any
from drf_spectacular.generators import SchemaGenerator
from drf_spectacular.plumbing import ResolvedComponent
from drf_spectacular.renderers import OpenApiJsonRenderer
from drf_spectacular.settings import spectacular_settings
from structlog.stdlib import get_logger
from authentik.api.apps import AuthentikAPIConfig
from authentik.api.v3.schema.query import QUERY_PARAMS
from authentik.api.v3.schema.response import (
GENERIC_ERROR,
GENERIC_ERROR_RESPONSE,
PAGINATION,
VALIDATION_ERROR,
VALIDATION_ERROR_RESPONSE,
)
LOGGER = get_logger()
def preprocess_schema_exclude_non_api(endpoints: list[tuple[str, Any, Any, Callable]], **kwargs):
    """Drop every endpoint that is not mounted under the API mountpoint.

    drf-spectacular preprocessing hook; receives the full endpoint list and
    returns only the entries whose path lives under /<mountpoint>.
    """
    api_prefix = "/" + AuthentikAPIConfig.mountpoint
    kept: list[tuple[str, Any, Any, Callable]] = []
    for endpoint in endpoints:
        path = endpoint[0]
        if path.startswith(api_prefix):
            kept.append(endpoint)
    return kept
def postprocess_schema_register(
    result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
    """Register custom schema components with the generator's registry."""
    LOGGER.debug("Registering custom schemas")
    register = generator.registry.register_on_missing
    # Fixed components first, in declaration order, then the shared
    # query-parameter components.
    for component in (
        PAGINATION,
        GENERIC_ERROR,
        GENERIC_ERROR_RESPONSE,
        VALIDATION_ERROR,
        VALIDATION_ERROR_RESPONSE,
    ):
        register(component)
    for query in QUERY_PARAMS.values():
        register(query)
    return result
def postprocess_schema_responses(
    result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
    """Attach default 400/403 error responses to every operation."""
    LOGGER.debug("Adding default error responses")
    default_responses = (
        ("400", VALIDATION_ERROR_RESPONSE.ref),
        ("403", GENERIC_ERROR_RESPONSE.ref),
    )
    for path_item in result["paths"].values():
        for operation in path_item.values():
            responses = operation["responses"]
            for status_code, error_ref in default_responses:
                responses.setdefault(status_code, error_ref)
    result["components"] = generator.registry.build(spectacular_settings.APPEND_COMPONENTS)
    # This is a workaround for authentik/stages/prompt/stage.py
    # since the serializer PromptChallengeResponse accepts dynamic keys
    schemas = result["components"]["schemas"]
    if "PromptChallengeResponseRequest" in schemas:
        schemas["PromptChallengeResponseRequest"]["additionalProperties"] = {}
    return result
def postprocess_schema_query_params(
    result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
    """Optimize pagination parameters, instead of redeclaring parameters for each endpoint
    declare them globally and refer to them"""
    LOGGER.debug("Deduplicating query parameters")
    for path_item in result["paths"].values():
        for operation in path_item.values():
            # `get` returns the live list when present, so index assignment
            # below mutates the operation in place.
            parameters = operation.get("parameters", [])
            for idx, param in enumerate(parameters):
                shared = QUERY_PARAMS.get(param["name"])
                if shared is not None:
                    parameters[idx] = shared.ref
    return result
def postprocess_schema_remove_unused(
    result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
    """Remove unused components.

    To check if a schema is used, render the document to JSON and substring
    check for its ref; less efficient than walking through the tree but a lot
    simpler and no possibility that we miss something.
    """
    raw = OpenApiJsonRenderer().render(result, renderer_context={}).decode()
    count = 0
    for key in result["components"][ResolvedComponent.SCHEMA].keys():
        # Match the full quoted JSON ref string. Without the surrounding
        # quotes, a component name that is a prefix of another (e.g. "Foo"
        # vs "FooBar") was counted as used and never removed.
        schema_usages = raw.count(f'"#/components/{ResolvedComponent.SCHEMA}/{key}"')
        if schema_usages >= 1:
            continue
        del generator.registry[(key, ResolvedComponent.SCHEMA)]
        count += 1
    LOGGER.debug("Removing unused components", count=count)
    result["components"] = generator.registry.build(spectacular_settings.APPEND_COMPONENTS)
    return result

View File

@@ -1,75 +0,0 @@
"""Error Response schema, from https://github.com/axnsan12/drf-yasg/issues/224"""
from collections.abc import Callable
from typing import Any
from drf_spectacular.contrib.django_filters import (
DjangoFilterExtension as BaseDjangoFilterExtension,
)
from drf_spectacular.generators import SchemaGenerator
from drf_spectacular.plumbing import (
ResolvedComponent,
follow_field_source,
)
from drf_spectacular.renderers import OpenApiJsonRenderer
from drf_spectacular.settings import spectacular_settings
from structlog.stdlib import get_logger
from authentik.api.apps import AuthentikAPIConfig
LOGGER = get_logger()
def preprocess_schema_exclude_non_api(endpoints: list[tuple[str, Any, Any, Callable]], **kwargs):
"""Filter out all API Views which are not mounted under /api"""
return [
(path, path_regex, method, callback)
for path, path_regex, method, callback in endpoints
if path.startswith("/" + AuthentikAPIConfig.mountpoint)
]
def postprocess_schema_remove_unused(
result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
"""Remove unused components"""
# To check if the schema is used, render it to JSON and then substring check that
# less efficient than walking through the tree but a lot simpler and no
# possibility that we miss something
raw = OpenApiJsonRenderer().render(result, renderer_context={}).decode()
count = 0
for key in result["components"][ResolvedComponent.SCHEMA].keys():
schema_usages = raw.count(f"#/components/{ResolvedComponent.SCHEMA}/{key}")
if schema_usages >= 1:
continue
del generator.registry[(key, ResolvedComponent.SCHEMA)]
count += 1
LOGGER.debug("Removing unused components", count=count)
result["components"] = generator.registry.build(spectacular_settings.APPEND_COMPONENTS)
return result
class DjangoFilterExtension(BaseDjangoFilterExtension):
    """
    From https://github.com/netbox-community/netbox/pull/21521:
    Overrides drf-spectacular's DjangoFilterExtension to fix a regression in v0.29.0 where
    _get_model_field() incorrectly double-appends to_field_name when field_name already ends
    with that value (e.g. field_name='tags__slug', to_field_name='slug' produces the invalid
    path ['tags', 'slug', 'slug']). This caused hundreds of spurious warnings during schema
    generation for filters such as TagFilter, TenancyFilterSet.tenant, and OwnerFilterMixin.owner.
    See: https://github.com/netbox-community/netbox/issues/20787
    https://github.com/tfranzel/drf-spectacular/issues/1475
    """

    # Must outrank the upstream extension so this override is selected.
    priority = 1

    def _get_model_field(self, filter_field, model):
        # Resolve the Django model field a filterset field points at by
        # following its `__`-separated lookup path.
        if not filter_field.field_name:
            return None
        path = filter_field.field_name.split("__")
        to_field_name = filter_field.extra.get("to_field_name")
        # Only append to_field_name when the path does not already end with
        # it — upstream appended unconditionally (the bug described above).
        if to_field_name is not None and path[-1] != to_field_name:
            path.append(to_field_name)
        return follow_field_source(model, path, emit_warnings=False)

View File

@@ -1,287 +0,0 @@
"""Error Response schema, from https://github.com/axnsan12/drf-yasg/issues/224"""
import functools
import inspect
import re
from collections import defaultdict
from enum import Enum
from django.db.models import Choices
from django.utils.translation import get_language
from drf_spectacular.drainage import error, warn
from drf_spectacular.hooks import postprocess_schema_enum_id_removal
from drf_spectacular.plumbing import (
ResolvedComponent,
deep_import_string,
list_hash,
safe_ref,
)
from drf_spectacular.settings import spectacular_settings
from inflection import camelize
from structlog.stdlib import get_logger
LOGGER = get_logger()
# See https://github.com/tfranzel/drf-spectacular/blob/master/drf_spectacular/hooks.py
# and https://github.com/tfranzel/drf-spectacular/issues/520
def postprocess_schema_enums(result, generator, **kwargs):  # noqa: PLR0912, PLR0915
    """
    simple replacement of Enum/Choices that globally share the same name and have
    the same choices. Aids client generation to not generate a separate enum for
    every occurrence. only takes effect when replacement is guaranteed to be correct.
    """

    def is_enum_prop(prop_schema):
        # True for a direct enum property, or an array whose items are an enum.
        return (
            "enum" in prop_schema
            or prop_schema.get("type") == "array"
            and "enum" in prop_schema.get("items", {})
        )

    def iter_field_schemas():
        # Yields (component_name, properties-dict) pairs for every place an
        # enum property can occur: component schemas and path parameters.
        def iter_prop_containers(schema, component_name=None):
            if not component_name:
                # Top level: walk all components; normalize Patched*/ *Request
                # names so split variants share the same enum naming.
                for _component_name, _schema in schema.items():
                    if spectacular_settings.COMPONENT_SPLIT_PATCH:
                        _component_name = re.sub("^Patched(.+)", r"\1", _component_name)
                    if spectacular_settings.COMPONENT_SPLIT_REQUEST:
                        _component_name = re.sub("(.+)Request$", r"\1", _component_name)
                    yield from iter_prop_containers(_schema, _component_name)
            elif isinstance(schema, list):
                for item in schema:
                    yield from iter_prop_containers(item, component_name)
            elif isinstance(schema, dict):
                if schema.get("properties"):
                    yield component_name, schema["properties"]
                yield from iter_prop_containers(schema.get("oneOf", []), component_name)
                yield from iter_prop_containers(schema.get("allOf", []), component_name)
                yield from iter_prop_containers(schema.get("anyOf", []), component_name)

        def iter_path_parameters():
            for path in result.get("paths", {}).values():
                for operation in path.values():
                    for parameter in operation.get("parameters", []):
                        parameter_schema = parameter.get("schema", {})
                        if is_enum_prop(parameter_schema):
                            # Move description into enum schema
                            if "description" in parameter:
                                parameter_schema["description"] = parameter.pop("description")
                            if "name" not in parameter:
                                continue
                            yield "", {parameter["name"]: parameter_schema}

        component_schemas = result.get("components", {}).get("schemas", {})
        yield from iter_prop_containers(component_schemas)
        yield from iter_path_parameters()

    def create_enum_component(name, schema):
        # Build and register a named enum component; returns it for ref use.
        component = ResolvedComponent(
            name=name,
            type=ResolvedComponent.SCHEMA,
            schema=schema,
            object=name,
        )
        generator.registry.register_on_missing(component)
        return component

    def extract_hash(schema):
        if "x-spec-enum-id" in schema:
            # try to use the injected enum hash first as it generated from (name, value) tuples,
            # which prevents collisions on choice sets only differing in labels not values.
            return schema["x-spec-enum-id"]
        else:
            # fall back to actual list hashing when we encounter enums not generated by us.
            # remove blank/null entry for hashing. will be reconstructed in the last step
            return list_hash([(i, i) for i in schema["enum"] if i not in ("", None)])

    overrides = load_enum_name_overrides()
    prop_hash_mapping = defaultdict(set)
    hash_name_mapping = defaultdict(set)
    # collect all enums, their names and choice sets
    for component_name, props in iter_field_schemas():
        for prop_name, prop_schema in props.items():
            _prop_schema = prop_schema
            if prop_schema.get("type") == "array":
                _prop_schema = prop_schema.get("items", {})
            if "enum" not in _prop_schema:
                continue
            prop_enum_cleaned_hash = extract_hash(_prop_schema)
            prop_hash_mapping[prop_name].add(prop_enum_cleaned_hash)
            hash_name_mapping[prop_enum_cleaned_hash].add((component_name, prop_name))
    # get the suffix to be used for enums from settings
    enum_suffix = spectacular_settings.ENUM_SUFFIX
    # traverse all enum properties and generate a name for the choice set. naming collisions
    # are resolved and a warning is emitted. giving a choice set multiple names is technically
    # correct but potentially unwanted. also emit a warning there to make the user aware.
    enum_name_mapping = {}
    for prop_name, prop_hash_set in prop_hash_mapping.items():
        for prop_hash in prop_hash_set:
            if prop_hash in overrides:
                enum_name = overrides[prop_hash]
            elif len(prop_hash_set) == 1:
                # prop_name has been used exclusively for one choice set (best case)
                enum_name = f"{camelize(prop_name)}{enum_suffix}"
            elif len(hash_name_mapping[prop_hash]) == 1:
                # prop_name has multiple choice sets, but each one limited to one component only
                component_name, _ = next(iter(hash_name_mapping[prop_hash]))
                enum_name = f"{camelize(component_name)}{camelize(prop_name)}{enum_suffix}"
            else:
                enum_name = f"{camelize(prop_name)}{prop_hash[:3].capitalize()}{enum_suffix}"
                warn(
                    f"enum naming encountered a non-optimally resolvable collision for fields "
                    f'named "{prop_name}". The same name has been used for multiple choice sets '
                    f'in multiple components. The collision was resolved with "{enum_name}". '
                    f"add an entry to ENUM_NAME_OVERRIDES to fix the naming."
                )
            if enum_name_mapping.get(prop_hash, enum_name) != enum_name:
                warn(
                    f"encountered multiple names for the same choice set ({enum_name}). This "
                    f"may be unwanted even though the generated schema is technically correct. "
                    f"Add an entry to ENUM_NAME_OVERRIDES to fix the naming."
                )
                del enum_name_mapping[prop_hash]
            else:
                enum_name_mapping[prop_hash] = enum_name
            enum_name_mapping[(prop_hash, prop_name)] = enum_name
    # replace all enum occurrences with a enum schema component. cut out the
    # enum, replace it with a reference and add a corresponding component.
    for _, props in iter_field_schemas():
        for prop_name, _prop_schema in props.items():
            prop_schema = _prop_schema
            is_array = prop_schema.get("type") == "array"
            if is_array:
                prop_schema = prop_schema.get("items", {})
            if "enum" not in prop_schema:
                continue
            prop_enum_original_list = prop_schema["enum"]
            prop_schema["enum"] = [i for i in prop_schema["enum"] if i not in ["", None]]
            prop_hash = extract_hash(prop_schema)
            # when choice sets are reused under multiple names, the generated name cannot be
            # resolved from the hash alone. fall back to prop_name and hash for resolution.
            enum_name = enum_name_mapping.get(prop_hash) or enum_name_mapping[prop_hash, prop_name]
            # split property into remaining property and enum component parts
            enum_schema = {k: v for k, v in prop_schema.items() if k in ["type", "enum"]}
            prop_schema = {
                k: v for k, v in prop_schema.items() if k not in ["type", "enum", "x-spec-enum-id"]
            }
            # separate actual description from name-value tuples
            if spectacular_settings.ENUM_GENERATE_CHOICE_DESCRIPTION:
                if prop_schema.get("description", "").startswith("*"):
                    enum_schema["description"] = prop_schema.pop("description")
                elif "\n\n*" in prop_schema.get("description", ""):
                    _, _, post = prop_schema["description"].partition("\n\n*")
                    enum_schema["description"] = "*" + post
            components = [create_enum_component(enum_name, schema=enum_schema)]
            if spectacular_settings.ENUM_ADD_EXPLICIT_BLANK_NULL_CHOICE:
                if "" in prop_enum_original_list:
                    components.append(
                        create_enum_component(f"Blank{enum_suffix}", schema={"enum": [""]})
                    )
                if None in prop_enum_original_list:
                    if spectacular_settings.OAS_VERSION.startswith("3.1"):
                        components.append(
                            create_enum_component(f"Null{enum_suffix}", schema={"type": "null"})
                        )
                    else:
                        components.append(
                            create_enum_component(f"Null{enum_suffix}", schema={"enum": [None]})
                        )
            # undo OAS 3.1 type list NULL construction as we cover
            # this in a separate component already
            if spectacular_settings.OAS_VERSION.startswith("3.1") and isinstance(
                enum_schema["type"], list
            ):
                enum_schema["type"] = [t for t in enum_schema["type"] if t != "null"][0]
            if len(components) == 1:
                prop_schema.update(components[0].ref)
            else:
                prop_schema.update({"oneOf": [c.ref for c in components]})
            patch_target = props[prop_name]  # noqa: PLR1733
            if is_array:
                patch_target = patch_target["items"]
            # Replace existing schema information with reference
            patch_target.clear()
            patch_target.update(safe_ref(prop_schema))
    # sort again with additional components
    result["components"] = generator.registry.build(spectacular_settings.APPEND_COMPONENTS)
    # remove remaining ids that were not part of this hook (operation parameters mainly)
    postprocess_schema_enum_id_removal(result, generator)
    return result
# Fixed version of `load_enum_name_overrides()` with a LRU cache based on language
# *and* enum overrides.
# Without this, API generation breaks if there is more than 1 API present (such as in split APIs)
# Original source: drf-spectacular/drf_spectacular/plumbing.py
def load_enum_name_overrides():
    """Return the override mapping, cached on active language + overrides."""
    parts = [get_language() or ""]
    for override_key, override_value in sorted(spectacular_settings.ENUM_NAME_OVERRIDES.items()):
        parts.append(f"{override_key}:{override_value}")
    return _load_enum_name_overrides(";".join(parts))
# Original source: drf-spectacular/drf_spectacular/plumbing.py
# Only change: cache_key argument instead of language.
@functools.lru_cache
def _load_enum_name_overrides(cache_key):
    """Build a mapping of choice-set hash -> override name from
    ENUM_NAME_OVERRIDES, memoized per cache_key (language + overrides)."""
    overrides = {}
    for name, _choices in spectacular_settings.ENUM_NAME_OVERRIDES.items():
        choices = _choices
        # Overrides may be given as a dotted import-path string.
        if isinstance(choices, str):
            choices = deep_import_string(choices)
        if not choices:
            warn(
                f"unable to load choice override for {name} from ENUM_NAME_OVERRIDES. "
                f"please check module path string."
            )
            continue
        # Accept Django Choices classes and plain Enum classes alike,
        # flattening each to a list of (value, label) tuples.
        if inspect.isclass(choices) and issubclass(choices, Choices):
            choices = choices.choices
        if inspect.isclass(choices) and issubclass(choices, Enum):
            choices = [(c.value, c.name) for c in choices]
        normalized_choices = []
        for choice in choices:
            # Allow None values in the simple values list case
            if isinstance(choice, str) or choice is None:
                # TODO warning
                normalized_choices.append((choice, choice))  # simple choice list
            elif isinstance(choice[1], (list, tuple)):
                normalized_choices.extend(choice[1])  # categorized nested choices
            else:
                normalized_choices.append(choice)  # normal 2-tuple form
        # Get all of choice values that should be used in the hash, blank and
        # None values get excluded in the post-processing hook for enum overrides,
        # so we do the same here to ensure the hashes match
        hashable_values = [
            (value, label) for value, label in normalized_choices if value not in ["", None]
        ]
        overrides[list_hash(hashable_values)] = name
    # Two choice sets hashing identically would silently overwrite each other
    # above — surface that as a hard error.
    if len(spectacular_settings.ENUM_NAME_OVERRIDES) != len(overrides):
        error(
            "ENUM_NAME_OVERRIDES has duplication issues. Encountered multiple names "
            "for the same choice set. Enum naming might be unexpected."
        )
    return overrides

View File

@@ -1,32 +0,0 @@
from drf_spectacular.plumbing import (
ResolvedComponent,
build_basic_type,
build_object_type,
)
from drf_spectacular.types import OpenApiTypes
# Reusable OpenAPI schema component describing the pagination metadata
# object (page pointers, counts, and slice indices); referenced from
# paginated list responses via PAGINATION.ref.
PAGINATION = ResolvedComponent(
    name="Pagination",
    type=ResolvedComponent.SCHEMA,
    object="Pagination",
    schema=build_object_type(
        properties={
            "next": build_basic_type(OpenApiTypes.NUMBER),
            "previous": build_basic_type(OpenApiTypes.NUMBER),
            "count": build_basic_type(OpenApiTypes.NUMBER),
            "current": build_basic_type(OpenApiTypes.NUMBER),
            "total_pages": build_basic_type(OpenApiTypes.NUMBER),
            "start_index": build_basic_type(OpenApiTypes.NUMBER),
            "end_index": build_basic_type(OpenApiTypes.NUMBER),
        },
        required=[
            "next",
            "previous",
            "count",
            "current",
            "total_pages",
            "start_index",
            "end_index",
        ],
    ),
)

View File

@@ -1,17 +1,10 @@
from typing import Any
from django.utils.translation import gettext_lazy as _
from drf_spectacular.generators import SchemaGenerator
from drf_spectacular.plumbing import (
ResolvedComponent,
build_basic_type,
build_parameter_type,
)
from drf_spectacular.types import OpenApiTypes
from structlog.stdlib import get_logger
LOGGER = get_logger()
QUERY_PARAMS = {
"ordering": ResolvedComponent(
@@ -70,18 +63,3 @@ QUERY_PARAMS = {
),
),
}
def postprocess_schema_query_params(
result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
"""Optimize pagination parameters, instead of redeclaring parameters for each endpoint
declare them globally and refer to them"""
LOGGER.debug("Deduplicating query parameters")
for path in result["paths"].values():
for method in path.values():
for idx, param in enumerate(method.get("parameters", [])):
if param["name"] not in QUERY_PARAMS:
continue
method["parameters"][idx] = QUERY_PARAMS[param["name"]].ref
return result

View File

@@ -1,22 +1,12 @@
from typing import Any
from django.utils.translation import gettext_lazy as _
from drf_spectacular.generators import SchemaGenerator
from drf_spectacular.plumbing import (
ResolvedComponent,
build_array_type,
build_basic_type,
build_object_type,
)
from drf_spectacular.settings import spectacular_settings
from drf_spectacular.types import OpenApiTypes
from rest_framework.settings import api_settings
from structlog.stdlib import get_logger
from authentik.api.v3.schema.pagination import PAGINATION
from authentik.api.v3.schema.query import QUERY_PARAMS
LOGGER = get_logger()
GENERIC_ERROR = ResolvedComponent(
name="GenericError",
@@ -67,40 +57,28 @@ VALIDATION_ERROR_RESPONSE = ResolvedComponent(
"description": "",
},
)
def postprocess_schema_register(
result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
"""Register custom schema components"""
LOGGER.debug("Registering custom schemas")
generator.registry.register_on_missing(PAGINATION)
generator.registry.register_on_missing(GENERIC_ERROR)
generator.registry.register_on_missing(GENERIC_ERROR_RESPONSE)
generator.registry.register_on_missing(VALIDATION_ERROR)
generator.registry.register_on_missing(VALIDATION_ERROR_RESPONSE)
for query in QUERY_PARAMS.values():
generator.registry.register_on_missing(query)
return result
def postprocess_schema_responses(
result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
"""Default error responses"""
LOGGER.debug("Adding default error responses")
for path in result["paths"].values():
for method in path.values():
method["responses"].setdefault("400", VALIDATION_ERROR_RESPONSE.ref)
method["responses"].setdefault("403", GENERIC_ERROR_RESPONSE.ref)
result["components"] = generator.registry.build(spectacular_settings.APPEND_COMPONENTS)
# This is a workaround for authentik/stages/prompt/stage.py
# since the serializer PromptChallengeResponse
# accepts dynamic keys
for component in result["components"]["schemas"]:
if component == "PromptChallengeResponseRequest":
comp = result["components"]["schemas"][component]
comp["additionalProperties"] = {}
return result
PAGINATION = ResolvedComponent(
name="Pagination",
type=ResolvedComponent.SCHEMA,
object="Pagination",
schema=build_object_type(
properties={
"next": build_basic_type(OpenApiTypes.NUMBER),
"previous": build_basic_type(OpenApiTypes.NUMBER),
"count": build_basic_type(OpenApiTypes.NUMBER),
"current": build_basic_type(OpenApiTypes.NUMBER),
"total_pages": build_basic_type(OpenApiTypes.NUMBER),
"start_index": build_basic_type(OpenApiTypes.NUMBER),
"end_index": build_basic_type(OpenApiTypes.NUMBER),
},
required=[
"next",
"previous",
"count",
"current",
"total_pages",
"start_index",
"end_index",
],
),
)

View File

@@ -1,20 +0,0 @@
from typing import TYPE_CHECKING
from drf_spectacular.plumbing import ResolvedComponent, build_object_type
if TYPE_CHECKING:
from drf_spectacular.generators import SchemaGenerator
# Free-form object component (arbitrary keys allowed via additionalProperties)
# for search-autocomplete data in API responses.
AUTOCOMPLETE_SCHEMA = ResolvedComponent(
    name="Autocomplete",
    object="Autocomplete",
    type=ResolvedComponent.SCHEMA,
    schema=build_object_type(additionalProperties={}),
)


def postprocess_schema_search_autocomplete(result, generator: SchemaGenerator, **kwargs):
    # drf-spectacular postprocessing hook: ensure the Autocomplete component
    # is present in the generated schema registry.
    generator.registry.register_on_missing(AUTOCOMPLETE_SCHEMA)
    return result

View File

@@ -1,60 +1,24 @@
"""Serializer mixin for managed models"""
from typing import cast
from django.conf import settings
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.exceptions import PermissionDenied, ValidationError
from rest_framework.fields import (
BooleanField,
CharField,
DateTimeField,
FileField,
)
from rest_framework.parsers import MultiPartParser
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, DateTimeField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.viewsets import ModelViewSet
from authentik.api.validation import validate
from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.v1.common import Blueprint
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer
from authentik.core.models import User
from authentik.events.logs import LogEventSerializer
from authentik.rbac.decorators import permission_required
def get_blueprints():
    """Discover available blueprint files.

    Outside of DEBUG the discovery task is dispatched to the task runner and
    this call blocks on its result; in DEBUG it runs inline for simplicity.
    """
    if not settings.DEBUG:
        return blueprints_find_dict.send().get_result(block=True)
    return blueprints_find_dict()
class BlueprintUploadSerializer(PassiveSerializer):
    """Serializer to upload file"""

    # Raw blueprint file upload; optional when a path is given instead.
    file = FileField(required=False)
    # Path of an already-discovered blueprint file; optional.
    path = CharField(required=False)

    def validate_path(self, path: str) -> str:
        """Ensure the path (if set) specified is retrievable"""
        if path == "":
            # Empty path is allowed — a file upload may be used instead.
            return path
        files: list[dict] = get_blueprints()
        if path not in [file["path"] for file in files]:
            raise ValidationError(_("Blueprint file does not exist"))
        return path
class ManagedSerializer:
"""Managed Serializer"""
@@ -75,7 +39,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
"""Ensure the path (if set) specified is retrievable"""
if path == "" or path.startswith(OCI_PREFIX):
return path
files: list[dict] = get_blueprints()
files: list[dict] = blueprints_find_dict.send().get_result(block=True)
if path not in [file["path"] for file in files]:
raise ValidationError(_("Blueprint file does not exist"))
return path
@@ -124,33 +88,6 @@ class BlueprintInstanceSerializer(ModelSerializer):
}
def check_blueprint_perms(blueprint: Blueprint, user: User, explicit_action: str | None = None):
    """Check for individual permissions for each model in a blueprint.

    For every entry, the user must hold add/change/delete permissions on the
    entry's model — or only ``<explicit_action>_<model>`` when an explicit
    action is given. Raises ``PermissionDenied`` keyed by the entry id on the
    first missing permission.
    """
    for entry in blueprint.entries:
        full_model = entry.get_model(blueprint)
        app, __, model = full_model.partition(".")
        if explicit_action:
            perms = [f"{app}.{explicit_action}_{model}"]
        else:
            perms = [
                f"{app}.add_{model}",
                f"{app}.change_{model}",
                f"{app}.delete_{model}",
            ]
        for perm in perms:
            if not user.has_perm(perm):
                # Translate the literal template first, then interpolate:
                # calling gettext on an already-formatted string can never
                # match the message catalog, so translations would be lost.
                raise PermissionDenied(
                    {
                        entry.id: _("User lacks permission to create {model}").format_map(
                            {
                                "model": full_model,
                            }
                        )
                    }
                )
class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
"""Blueprint instances"""
@@ -160,12 +97,6 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
filterset_fields = ["name", "path"]
ordering = ["name"]
class BlueprintImportResultSerializer(PassiveSerializer):
"""Logs of an attempted blueprint import"""
# Structured log events collected while validating/applying the blueprint
logs = LogEventSerializer(many=True, read_only=True)
# Whether validation (and, if attempted, application) succeeded
success = BooleanField(read_only=True)
@extend_schema(
responses={
200: ListSerializer(
@@ -184,7 +115,7 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
@action(detail=False, pagination_class=None, filter_backends=[])
def available(self, request: Request) -> Response:
"""Get blueprints"""
files: list[dict] = get_blueprints()
files: list[dict] = blueprints_find_dict.send().get_result(block=True)
return Response(files)
@permission_required("authentik_blueprints.view_blueprintinstance")
@@ -200,53 +131,3 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
blueprint = self.get_object()
apply_blueprint.send_with_options(args=(blueprint.pk,), rel_obj=blueprint)
return self.retrieve(request, *args, **kwargs)
@extend_schema(
request={"multipart/form-data": BlueprintUploadSerializer},
responses={
204: BlueprintImportResultSerializer,
400: BlueprintImportResultSerializer,
},
)
@action(url_path="import", detail=False, methods=["POST"], parser_classes=(MultiPartParser,))
@validate(
BlueprintUploadSerializer,
)
def import_(self, request: Request, body: BlueprintUploadSerializer) -> Response:
"""Import blueprint from .yaml file and apply it once, without creating an instance"""
# Source is either an uploaded file or a path to a known server-side blueprint
string_contents = ""
if body.validated_data.get("file"):
file = cast(InMemoryUploadedFile, body.validated_data["file"])
string_contents = file.read().decode()
elif body.validated_data.get("path"):
string_contents = BlueprintInstance(
path=body.validated_data.get("path")
).retrieve_file()
else:
raise ValidationError("Either path or file must be set")
importer = Importer.from_string(string_contents)
# Caller must hold add/change/delete permissions on every model in the blueprint
check_blueprint_perms(importer.blueprint, request.user)
valid, logs = importer.validate()
# NOTE(review): the serializer is validated empty first, then initial_data is
# mutated in place and re-validated; every outcome is reported as HTTP 200
# with the success flag carrying the actual result.
import_response = self.BlueprintImportResultSerializer(
data={
"logs": [],
"success": False,
}
)
import_response.is_valid(raise_exception=True)
import_response.initial_data["logs"] = [LogEventSerializer(log).data for log in logs]
import_response.initial_data["success"] = valid
import_response.is_valid()
if not valid:
return Response(data=import_response.initial_data, status=200)
# Validation passed; apply the blueprint once and report the outcome
successful = importer.apply()
import_response.initial_data["success"] = successful
import_response.is_valid()
if not successful:
return Response(data=import_response.initial_data, status=200)
return Response(data=import_response.initial_data, status=200)

View File

@@ -21,9 +21,6 @@ PROMPT_CONSENT = "consent"
PROMPT_LOGIN = "login"
PLAN_CONTEXT_OIDC_LOGOUT_IFRAME_SESSIONS = "goauthentik.io/providers/oauth2/iframe_sessions"
PLAN_CONTEXT_POST_LOGOUT_REDIRECT_URI = "goauthentik.io/providers/oauth2/post_logout_redirect_uri"
OAUTH2_BINDING = "redirect"
SCOPE_OPENID = "openid"
SCOPE_OPENID_PROFILE = "profile"
@@ -40,9 +37,6 @@ TOKEN_TYPE = "Bearer" # nosec
SCOPE_AUTHENTIK_API = "goauthentik.io/api"
# URI schemes that are forbidden for redirect URIs
FORBIDDEN_URI_SCHEMES = {"javascript", "data", "vbscript"}
# Read/write full user (including email)
SCOPE_GITHUB_USER = "user"
# Read user (without email)

View File

@@ -25,7 +25,6 @@ from authentik.core.api.providers import ProviderSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.users import UserSerializer
from authentik.core.api.utils import ModelSerializer, ThemedUrlsSerializer
from authentik.core.apps import AppAccessWithoutBindings
from authentik.core.models import Application, User
from authentik.events.logs import LogEventSerializer, capture_logs
from authentik.policies.api.exec import PolicyTestResultSerializer
@@ -48,12 +47,7 @@ class ApplicationSerializer(ModelSerializer):
"""Application Serializer"""
launch_url = SerializerMethodField()
provider_obj = ProviderSerializer(
source="get_provider",
required=False,
read_only=True,
allow_null=True,
)
provider_obj = ProviderSerializer(source="get_provider", required=False, read_only=True)
backchannel_providers_obj = ProviderSerializer(
source="backchannel_providers", required=False, read_only=True, many=True
)
@@ -169,7 +163,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
request.user = user
for application in paginated_apps:
engine = PolicyEngine(application, request.user, request)
engine.empty_result = AppAccessWithoutBindings.get()
engine.build()
if engine.passing:
applications.append(application)
@@ -227,7 +220,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
if not for_user:
raise ValidationError({"for_user": "User not found"})
engine = PolicyEngine(application, for_user, request)
engine.empty_result = AppAccessWithoutBindings.get()
engine.use_cache = False
with capture_logs() as logs:
engine.build()

View File

@@ -7,7 +7,6 @@ from django.http import Http404
from django.utils.translation import gettext as _
from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
from django_filters.filterset import FilterSet
from djangoql.schema import BoolField, StrField
from drf_spectacular.utils import (
OpenApiParameter,
OpenApiResponse,
@@ -26,9 +25,6 @@ from rest_framework.serializers import ListSerializer, ValidationError
from rest_framework.viewsets import ModelViewSet
from authentik.api.authentication import TokenAuthentication
from authentik.api.search.fields import (
JSONSearchField,
)
from authentik.api.validation import validate
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer
@@ -269,6 +265,12 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
]
def get_ql_fields(self):
from djangoql.schema import BoolField, StrField
from authentik.enterprise.search.fields import (
JSONSearchField,
)
return [
StrField(Group, "name"),
BoolField(Group, "is_superuser", nullable=True),

View File

@@ -1,6 +1,5 @@
"""Tokens API Viewset"""
from datetime import timedelta
from typing import Any
from django.utils.timezone import now
@@ -19,15 +18,12 @@ from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.users import UserSerializer
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
from authentik.core.models import (
USER_ATTRIBUTE_AGENT_OWNER_PK,
USER_ATTRIBUTE_TOKEN_EXPIRING,
USER_ATTRIBUTE_TOKEN_MAXIMUM_LIFETIME,
Token,
TokenIntents,
User,
UserTypes,
default_token_duration,
default_token_key,
)
from authentik.events.models import Event, EventAction
from authentik.events.utils import model_to_dict
@@ -128,7 +124,7 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
"""Token Viewset"""
lookup_field = "identifier"
queryset = Token.objects.including_expired().all()
queryset = Token.objects.all()
serializer_class = TokenSerializer
search_fields = [
"identifier",
@@ -175,40 +171,6 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
Event.new(EventAction.SECRET_VIEW, secret=token).from_http(request) # noqa # nosec
return Response(TokenViewSerializer({"key": token.key}).data)
@extend_schema(
request=None,
responses={
200: TokenViewSerializer(many=False),
403: OpenApiResponse(description="Not the token owner, agent owner, or superuser"),
},
)
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
def rotate(self, request: Request, identifier: str) -> Response:
"""Rotate the token key and reset the expiry to 24 hours. Only callable by the token
owner, the owning agent's human owner, or a superuser."""
# Look up including expired tokens, so an already-expired token can still be
# rotated (the expiry is reset below).
token = (
Token.objects.including_expired()
.select_related("user")
.filter(identifier=identifier)
.first()
)
if not token:
return Response(status=404)
# Authorization: superusers always pass; otherwise the caller must own the
# token, or be the human owner of the agent user the token belongs to.
if not request.user.is_superuser:
is_token_owner = token.user_id == request.user.pk
is_agent_owner = token.user.type == UserTypes.AGENT and str(
request.user.pk
) == token.user.attributes.get(USER_ATTRIBUTE_AGENT_OWNER_PK)
if not is_token_owner and not is_agent_owner:
return Response(status=403)
# Replace the secret and reset the expiry window
token.key = default_token_key()
token.expires = now() + timedelta(hours=24)
token.save()
Event.new(EventAction.SECRET_ROTATE, secret=token).from_http(request)  # noqa # nosec
return Response(TokenViewSerializer({"key": token.key}).data)
@permission_required("authentik_core.set_token_key")
@extend_schema(
request=TokenSetKeySerializer(),

View File

@@ -2,8 +2,9 @@
from django.apps import apps
from django.db.models import Model
from django.utils.translation import gettext as _
from drf_spectacular.utils import PolymorphicProxySerializer, extend_schema, extend_schema_field
from rest_framework.exceptions import ValidationError
from rest_framework.exceptions import PermissionDenied, ValidationError
from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, ListField
from rest_framework.permissions import IsAuthenticated
from rest_framework.request import Request
@@ -12,7 +13,6 @@ from rest_framework.views import APIView
from yaml import ScalarNode
from authentik.api.validation import validate
from authentik.blueprints.api import check_blueprint_perms
from authentik.blueprints.v1.common import (
Blueprint,
BlueprintEntry,
@@ -165,7 +165,21 @@ class TransactionalApplicationView(APIView):
def put(self, request: Request, body: TransactionApplicationSerializer) -> Response:
"""Convert data into a blueprint, validate it and apply it"""
blueprint: Blueprint = body.validated_data
check_blueprint_perms(blueprint, request.user, explicit_action="add")
for entry in blueprint.entries:
full_model = entry.get_model(blueprint)
app, __, model = full_model.partition(".")
if not request.user.has_perm(f"{app}.add_{model}"):
raise PermissionDenied(
{
entry.id: _(
"User lacks permission to create {model}".format_map(
{
"model": full_model,
}
)
)
}
)
importer = Importer(blueprint, {})
applied = importer.apply()
response = {"applied": False, "logs": []}

View File

@@ -22,7 +22,6 @@ from django_filters.filters import (
UUIDFilter,
)
from django_filters.filterset import FilterSet
from djangoql.schema import BoolField, StrField
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import (
OpenApiParameter,
@@ -56,10 +55,6 @@ from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger
from authentik.api.authentication import TokenAuthentication
from authentik.api.search.fields import (
ChoiceSearchField,
JSONSearchField,
)
from authentik.api.validation import validate
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.brands.models import Brand
@@ -75,13 +70,9 @@ from authentik.core.middleware import (
SESSION_KEY_IMPERSONATE_USER,
)
from authentik.core.models import (
USER_ATTRIBUTE_AGENT_ALLOWED_APPS,
USER_ATTRIBUTE_AGENT_OWNER_PK,
USER_ATTRIBUTE_TOKEN_EXPIRING,
USER_PATH_AGENT,
USER_PATH_SERVICE_ACCOUNT,
USERNAME_MAX_LENGTH,
Application,
Group,
Session,
Token,
@@ -92,7 +83,6 @@ from authentik.core.models import (
)
from authentik.endpoints.connectors.agent.auth import AgentAuth
from authentik.events.models import Event, EventAction
from authentik.events.utils import model_to_dict, sanitize_dict
from authentik.flows.exceptions import FlowNonApplicableException
from authentik.flows.models import FlowToken
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner
@@ -254,25 +244,8 @@ class UserSerializer(ModelSerializer):
raise ValidationError(_("Can't change internal service account to other user type."))
if not self.instance and user_type == UserTypes.INTERNAL_SERVICE_ACCOUNT.value:
raise ValidationError(_("Setting a user to internal service account is not allowed."))
if (
self.instance
and self.instance.type == UserTypes.AGENT
and user_type != UserTypes.AGENT.value
):
raise ValidationError(_("Can't change agent user type."))
return user_type
def validate_attributes(self, attrs: dict) -> dict:
"""Prevent changes to agent owner"""
if not self.instance:
return attrs
if self.instance.type == UserTypes.AGENT:
existing_owner = self.instance.attributes.get(USER_ATTRIBUTE_AGENT_OWNER_PK)
new_owner = attrs.get(USER_ATTRIBUTE_AGENT_OWNER_PK)
if existing_owner is not None and new_owner != existing_owner:
raise ValidationError(_("Can't change owner of agent user."))
return attrs
def validate(self, attrs: dict) -> dict:
if self.instance and self.instance.type == UserTypes.INTERNAL_SERVICE_ACCOUNT:
raise ValidationError(_("Can't modify internal service account users"))
@@ -427,26 +400,6 @@ class UserServiceAccountSerializer(PassiveSerializer):
)
class UserAgentSerializer(PassiveSerializer):
"""Payload to create an agent user"""
# Used as both username and display name of the new agent
name = CharField(max_length=150)
# Optional owner; defaults to the requesting user when omitted
owner = PrimaryKeyRelatedField(queryset=User.objects.all(), required=False, default=None)
class UserAgentAllowedAppsSerializer(PassiveSerializer):
"""Payload to replace an agent's allowed applications"""
# Full replacement list of application UUIDs
allowed_apps = ListField(child=UUIDField())
class UserAgentAllowedAppSerializer(PassiveSerializer):
"""Payload to add or remove a single allowed application"""
# UUID of the application to add/remove
app = UUIDField()
# Whether to add or remove the application from the allowed list
action = ChoiceField(choices=[("add", "Add"), ("remove", "Remove")])
class UserRecoveryLinkSerializer(PassiveSerializer):
"""Payload to create a recovery link"""
@@ -571,6 +524,13 @@ class UserViewSet(
]
def get_ql_fields(self):
from djangoql.schema import BoolField, StrField
from authentik.enterprise.search.fields import (
ChoiceSearchField,
JSONSearchField,
)
return [
StrField(User, "username"),
StrField(User, "name"),
@@ -733,260 +693,6 @@ class UserViewSet(
status=500,
)
@permission_required(
None,
["authentik_core.add_user", "authentik_core.add_token", "authentik_core.add_agent_user"],
)
@extend_schema(
request=UserAgentSerializer,
responses={
200: inline_serializer(
"UserAgentResponse",
{
"username": CharField(required=True),
"token": CharField(required=True),
"user_uid": CharField(required=True),
"user_pk": IntegerField(required=True),
},
)
},
)
@action(
detail=False,
methods=["POST"],
pagination_class=None,
filter_backends=[],
)
@validate(UserAgentSerializer)
def agent(self, request: Request, body: UserAgentSerializer) -> Response:
"""Create a new agent user. Enterprise only. Caller must be an internal user."""
# Imported locally; presumably to avoid importing enterprise code at module load
from authentik.enterprise.license import LicenseKey
if not LicenseKey.cached_summary().status.is_valid:
raise ValidationError(_("Enterprise is required to use this endpoint."))
if request.user.type != UserTypes.INTERNAL:
raise ValidationError(_("Only internal users can create agent users."))
# Non-superusers may only create agents owned by themselves
requested_owner = body.validated_data.get("owner")
if requested_owner and not request.user.is_superuser:
if requested_owner.pk != request.user.pk:
raise ValidationError(
_("Non-superusers can only create agents owned by themselves.")
)
owner = requested_owner or request.user
username = body.validated_data["name"]
# User + token + permission grants are created atomically
with atomic():
try:
user: User = User.objects.create(
username=username,
name=username,
type=UserTypes.AGENT,
attributes={
USER_ATTRIBUTE_AGENT_OWNER_PK: str(owner.pk),
USER_ATTRIBUTE_AGENT_ALLOWED_APPS: [],
},
path=USER_PATH_AGENT,
)
# Agents authenticate via token only, never via password
user.set_unusable_password()
user.save()
token = Token.objects.create(
identifier=slugify(f"agent-{username}-token"),
intent=TokenIntents.INTENT_API,
user=user,
expires=now() + timedelta(hours=24),
expiring=True,
)
# Agent can read its own token key; the owner gets management rights
# over the agent user
user.assign_perms_to_managed_role("authentik_core.view_token_key", token)
owner.assign_perms_to_managed_role("authentik_core.view_user", user)
owner.assign_perms_to_managed_role("authentik_core.change_user", user)
owner.assign_perms_to_managed_role("authentik_core.delete_user", user)
owner.assign_perms_to_managed_role("authentik_core.view_user_applications", user)
Event.new(
EventAction.MODEL_CREATED,
model=sanitize_dict(model_to_dict(user)),
agent_owner=sanitize_dict(model_to_dict(owner)),
).from_http(request)
return Response(
{
"username": user.username,
"user_uid": user.uid,
"user_pk": user.pk,
"token": token.key,
}
)
except IntegrityError as exc:
# Distinguish duplicate-username from other integrity failures by
# inspecting the database error message
error_msg = str(exc).lower()
if "unique" in error_msg:
return Response(
data={"non_field_errors": [_("A user with this username already exists")]},
status=400,
)
else:
LOGGER.warning("Agent user creation failed", exc=exc)
return Response(
data={"non_field_errors": [_("Unable to create user")]},
status=400,
)
except (ValueError, TypeError) as exc:
LOGGER.error("Unexpected error during agent user creation", exc=exc)
return Response(
data={"non_field_errors": [_("Unknown error occurred")]},
status=500,
)
@extend_schema(
request=UserAgentAllowedAppsSerializer,
responses={
200: UserAgentAllowedAppsSerializer,
400: OpenApiResponse(description="Invalid app UUIDs or owner lacks access"),
403: OpenApiResponse(description="Not the agent's owner or superuser"),
},
)
@action(
detail=True,
methods=["PUT"],
url_path="agent_allowed_apps",
url_name="agent-allowed-apps",
pagination_class=None,
filter_backends=[],
)
@validate(UserAgentAllowedAppsSerializer)
def agent_allowed_apps(
self, request: Request, pk: int, body: UserAgentAllowedAppsSerializer
) -> Response:
"""Replace the allowed application list for an agent user.
Caller must be the agent's owner or a superuser."""
# Imported locally; presumably to avoid circular imports — TODO confirm
from authentik.core.apps import AppAccessWithoutBindings
from authentik.policies.engine import PolicyEngine
agent, owner = self._get_agent_and_owner(request)
app_uuids = body.validated_data["allowed_apps"]
errors = []
# Every requested app must exist AND be accessible to the owner under the
# policy engine (fresh evaluation, cache disabled)
for app_uuid in app_uuids:
try:
app = Application.objects.get(pk=app_uuid)
except Application.DoesNotExist:
errors.append(str(app_uuid))
continue
engine = PolicyEngine(app, owner, request)
engine.empty_result = AppAccessWithoutBindings.get()
engine.use_cache = False
engine.build()
if not engine.passing:
errors.append(str(app_uuid))
if errors:
# All-or-nothing: any invalid/inaccessible UUID rejects the whole request
return Response(
data={
"allowed_apps": [
_(
"Owner does not have access to application %(uuid)s "
"or application does not exist."
)
% {"uuid": uuid}
for uuid in errors
]
},
status=400,
)
agent.attributes[USER_ATTRIBUTE_AGENT_ALLOWED_APPS] = [str(u) for u in app_uuids]
agent.save(update_fields=["attributes"])
return Response({"allowed_apps": [str(u) for u in app_uuids]})
@extend_schema(
request=UserAgentAllowedAppSerializer,
responses={
200: UserAgentAllowedAppsSerializer,
204: OpenApiResponse(description="Application removed"),
400: OpenApiResponse(description="Invalid app UUID or owner lacks access"),
403: OpenApiResponse(description="Not the agent's owner or superuser"),
},
)
@action(
detail=True,
methods=["PATCH"],
url_path="agent_allowed_app",
url_name="agent-allowed-app",
pagination_class=None,
filter_backends=[],
)
@validate(UserAgentAllowedAppSerializer)
def agent_allowed_app(
self, request: Request, pk: int, body: UserAgentAllowedAppSerializer
) -> Response:
"""Add or remove a single application from an agent's allowed list.
Caller must be the agent's owner or a superuser."""
# Imported locally; presumably to avoid circular imports — TODO confirm
from authentik.core.apps import AppAccessWithoutBindings
from authentik.policies.engine import PolicyEngine
agent, owner = self._get_agent_and_owner(request)
app_uuid = str(body.validated_data["app"])
action = body.validated_data["action"]
current = agent.attributes.get(USER_ATTRIBUTE_AGENT_ALLOWED_APPS, [])
if action == "add":
# Adding requires the app to exist and the owner to pass its policies
try:
app = Application.objects.get(pk=app_uuid)
except Application.DoesNotExist:
return Response(
data={"app": [_("Application does not exist.")]},
status=400,
)
engine = PolicyEngine(app, owner, request)
engine.empty_result = AppAccessWithoutBindings.get()
engine.use_cache = False
engine.build()
if not engine.passing:
return Response(
data={"app": [_("Owner does not have access to this application.")]},
status=400,
)
# Idempotent: adding an already-present UUID does not duplicate it
if app_uuid not in current:
current.append(app_uuid)
agent.attributes[USER_ATTRIBUTE_AGENT_ALLOWED_APPS] = current
agent.save(update_fields=["attributes"])
return Response({"allowed_apps": current})
if action == "remove":
# Removal is idempotent and performs no policy check
if app_uuid in current:
current.remove(app_uuid)
agent.attributes[USER_ATTRIBUTE_AGENT_ALLOWED_APPS] = current
agent.save(update_fields=["attributes"])
return Response(status=204)
# Unreachable with the serializer's ChoiceField, kept as a defensive fallback
return Response(
data={"action": [_("Invalid action.")]},
status=400,
)
def _get_agent_and_owner(self, request: Request) -> tuple[User, User]:
    """Validate that the target is an agent and the caller is authorized."""
    agent: User = self.get_object()
    if agent.type != UserTypes.AGENT:
        raise ValidationError(_("User is not an agent user."))
    owner_pk = agent.attributes.get(USER_ATTRIBUTE_AGENT_OWNER_PK)
    # The caller must either be the agent's recorded owner or a superuser.
    caller_is_owner = str(request.user.pk) == owner_pk
    if not (request.user.is_superuser or caller_is_owner):
        raise ValidationError(_("Not the agent's owner or superuser."))
    try:
        owner = User.objects.get(pk=owner_pk)
    except User.DoesNotExist as exc:
        raise ValidationError(_("Agent owner not found.")) from exc
    return agent, owner
@extend_schema(responses={200: SessionUserSerializer(many=False)})
@action(
url_path="me",

View File

@@ -1,20 +1,7 @@
"""authentik core app config"""
from django.utils.translation import gettext_lazy as _
from authentik.blueprints.apps import ManagedAppConfig
from authentik.tasks.schedules.common import ScheduleSpec
from authentik.tenants.flags import Flag
class AppAccessWithoutBindings(Flag[bool], key="core_default_app_access"):
"""Tenant flag controlling access to applications without any bindings."""
# Unbound applications are accessible by default
default = True
# Not surfaced in flag listings
visibility = "none"
description = _(
"Configure if applications without any policy/group/user bindings "
"should be accessible to any user."
)
class AuthentikCoreConfig(ManagedAppConfig):

View File

@@ -81,7 +81,7 @@ class TokenBackend(InbuiltBackend):
User().set_password(password, request=request)
return None
tokens = Token.objects.filter(
tokens = Token.filter_not_expired(
user=user, key=password, intent=TokenIntents.INTENT_APP_PASSWORD
)
if not tokens.exists():

View File

@@ -18,7 +18,6 @@ class Command(TenantCommand):
User.objects.exclude_anonymous()
.exclude(type=UserTypes.SERVICE_ACCOUNT)
.exclude(type=UserTypes.INTERNAL_SERVICE_ACCOUNT)
.exclude(type=UserTypes.AGENT)
)
if options["usernames"] and options["all"]:
self.stderr.write("--all and usernames specified, only one can be specified")

View File

@@ -1,41 +0,0 @@
# Generated by Django 5.2.13 on 2026-04-13 21:29
from django.db import migrations, models
class Migration(migrations.Migration):
"""Add the agent user type choice and the add_agent_user permission."""
dependencies = [
("authentik_core", "0057_remove_user_groups_remove_user_user_permissions_and_more"),
]
operations = [
# Register the custom "add_agent_user" permission on the User model
migrations.AlterModelOptions(
name="user",
options={
"permissions": [
("reset_user_password", "Reset Password"),
("impersonate", "Can impersonate other users"),
("preview_user", "Can preview user data sent to providers"),
("view_user_applications", "View applications the user has access to"),
("add_agent_user", "Can create agent users"),
],
"verbose_name": "User",
"verbose_name_plural": "Users",
},
),
# Extend the user-type choices with the new "agent" type
migrations.AlterField(
model_name="user",
name="type",
field=models.TextField(
choices=[
("internal", "Internal"),
("external", "External"),
("service_account", "Service Account"),
("internal_service_account", "Internal Service Account"),
("agent", "Agent"),
],
default="internal",
),
),
]

View File

@@ -2,7 +2,7 @@
import re
import traceback
from datetime import datetime
from datetime import datetime, timedelta
from enum import StrEnum
from hashlib import sha256
from typing import Any, Self
@@ -16,7 +16,7 @@ from django.contrib.auth.models import UserManager as DjangoUserManager
from django.contrib.sessions.base_session import AbstractBaseSession
from django.core.validators import validate_slug
from django.db import models
from django.db.models import Manager, Q, QuerySet, options
from django.db.models import Q, QuerySet, options
from django.http import HttpRequest
from django.utils.functional import cached_property
from django.utils.timezone import now
@@ -45,7 +45,6 @@ from authentik.lib.models import (
SerializerModel,
)
from authentik.lib.utils.inheritance import get_deepest_child
from authentik.lib.utils.reflection import class_to_path
from authentik.lib.utils.time import timedelta_from_string
from authentik.policies.models import PolicyBindingModel
from authentik.rbac.models import Role
@@ -67,10 +66,6 @@ USER_ATTRIBUTE_CHANGE_USERNAME = f"{_USER_ATTR_PREFIX}/can-change-username"
USER_ATTRIBUTE_CHANGE_NAME = f"{_USER_ATTR_PREFIX}/can-change-name"
USER_ATTRIBUTE_CHANGE_EMAIL = f"{_USER_ATTR_PREFIX}/can-change-email"
USER_PATH_SERVICE_ACCOUNT = f"{USER_PATH_SYSTEM_PREFIX}/service-accounts"
_USER_ATTR_AGENT_PREFIX = f"{USER_PATH_SYSTEM_PREFIX}/agent"
USER_ATTRIBUTE_AGENT_OWNER_PK = f"{_USER_ATTR_AGENT_PREFIX}/owner-pk"
USER_ATTRIBUTE_AGENT_ALLOWED_APPS = f"{_USER_ATTR_AGENT_PREFIX}/allowed-apps"
USER_PATH_AGENT = f"{USER_PATH_SYSTEM_PREFIX}/agents"
options.DEFAULT_NAMES = options.DEFAULT_NAMES + (
# used_by API that allows models to specify if they shadow an object
@@ -130,9 +125,6 @@ class UserTypes(models.TextChoices):
# accounts, such as outpost users
INTERNAL_SERVICE_ACCOUNT = "internal_service_account"
# Enterprise-gated agent users owned by an internal user
AGENT = "agent"
class AttributesMixin(models.Model):
"""Adds an attributes property to a model"""
@@ -392,7 +384,6 @@ class User(SerializerModel, AttributesMixin, AbstractUser):
("impersonate", _("Can impersonate other users")),
("preview_user", _("Can preview user data sent to providers")),
("view_user_applications", _("View applications the user has access to")),
("add_agent_user", _("Can create agent users")),
]
indexes = [
models.Index(fields=["last_login"]),
@@ -526,7 +517,7 @@ class User(SerializerModel, AttributesMixin, AbstractUser):
@property
def ak_groups(self):
"""This is a proxy for a renamed, deprecated field."""
from authentik.events.models import Event
from authentik.events.models import Event, EventAction
deprecation = "authentik.core.models.User.ak_groups"
replacement = "authentik.core.models.User.groups"
@@ -553,9 +544,21 @@ class User(SerializerModel, AttributesMixin, AbstractUser):
cause=cause,
stacktrace=stacktrace,
)
Event.log_deprecation(
deprecation, message=message_event, cause=cause, replacement=replacement
)
if not Event.filter_not_expired(
action=EventAction.CONFIGURATION_WARNING,
context__deprecation=deprecation,
context__cause=cause,
).exists():
event = Event.new(
EventAction.CONFIGURATION_WARNING,
deprecation=deprecation,
replacement=replacement,
message=message_event,
cause=cause,
)
event.expires = datetime.now() + timedelta(days=30)
event.save()
return self.groups
def set_password(self, raw_password, signal=True, sender=None, request=None):
@@ -804,11 +807,11 @@ class Application(SerializerModel, PolicyBindingModel):
def backchannel_provider_for[T: Provider](self, provider_type: type[T], **kwargs) -> T | None:
"""Get Backchannel provider for a specific type"""
provider: BackchannelProvider | None = self.backchannel_providers.filter(
providers = self.backchannel_providers.filter(
**{f"{provider_type._meta.model_name}__isnull": False},
**kwargs,
).first()
return getattr(provider, provider_type._meta.model_name) if provider else None
)
return getattr(providers.first(), provider_type._meta.model_name)
def __str__(self):
return str(self.name)
@@ -1093,24 +1096,12 @@ class GroupSourceConnection(SerializerModel, CreatedUpdatedModel):
unique_together = (("group", "source"),)
class ExpiringManager(Manager):
    """Manager for expiring objects which filters out expired objects by default"""

    def get_queryset(self):
        """Default queryset: rows whose expiry has passed are hidden."""
        base = QuerySet(self.model, using=self._db)
        return base.exclude(expires__lt=now(), expiring=True)

    def including_expired(self):
        """Unfiltered queryset, expired rows included."""
        return QuerySet(self.model, using=self._db)
class ExpiringModel(models.Model):
"""Base Model which can expire, and is automatically cleaned up."""
expires = models.DateTimeField(default=None, null=True)
expiring = models.BooleanField(default=True)
objects = ExpiringManager()
class Meta:
abstract = True
indexes = [
@@ -1124,33 +1115,13 @@ class ExpiringModel(models.Model):
default the object is deleted. This is less efficient compared
to bulk deleting objects, but classes like Token() need to change
values instead of being deleted."""
try:
return self.delete(*args, **kwargs)
except self.DoesNotExist:
# Object has already been deleted, so this should be fine
return None
return self.delete(*args, **kwargs)
@classmethod
def filter_not_expired(cls, **kwargs) -> QuerySet[Self]:
"""Filer for tokens which are not expired yet or are not expiring,
and match filters in `kwargs`"""
from authentik.events.models import Event
deprecation_id = f"{class_to_path(cls)}.filter_not_expired"
Event.log_deprecation(
deprecation_id,
message=(
".filter_not_expired() is deprecated as the default lookup now excludes "
"expired objects."
),
)
for obj in (
cls.objects.including_expired()
.filter(**kwargs)
.filter(Q(expires__lt=now(), expiring=True))
):
for obj in cls.objects.filter(**kwargs).filter(Q(expires__lt=now(), expiring=True)):
obj.delete()
return cls.objects.filter(**kwargs)

View File

@@ -72,7 +72,6 @@ class SessionStore(SessionBase):
# and their descriptors fail to initialize (e.g., missing storage)
# TypeError - can happen with incompatible pickled objects
# If any of these happen, just return an empty dictionary (an empty session)
LOGGER.warning("Failed to decode session data", exc_info=True)
pass
return {}

View File

@@ -11,14 +11,12 @@ from django.http.request import HttpRequest
from structlog.stdlib import get_logger
from authentik.core.models import (
USER_ATTRIBUTE_AGENT_OWNER_PK,
Application,
AuthenticatedSession,
BackchannelProvider,
ExpiringModel,
Session,
User,
UserTypes,
default_token_duration,
)
from authentik.flows.apps import RefreshOtherFlowsAfterAuthentication
@@ -26,8 +24,7 @@ from authentik.root.ws.consumer import build_device_group
# Arguments: user: User, password: str
password_changed = Signal()
# Arguments: credentials: dict[str, any], request: HttpRequest,
# stage: Stage, context: dict[str, any]
# Arguments: credentials: dict[str, any], request: HttpRequest, stage: Stage
login_failed = Signal()
LOGGER = get_logger()
@@ -71,35 +68,6 @@ def authenticated_session_delete(sender: type[Model], instance: AuthenticatedSes
Session.objects.filter(session_key=instance.pk).delete()
def _agent_qs_for_owner(owner_pk: int):
    """Return a queryset of agent users belonging to the given owner pk"""
    # Ownership is recorded as a string pk in the agent's attributes.
    owner_match = {USER_ATTRIBUTE_AGENT_OWNER_PK: str(owner_pk)}
    return User.objects.filter(type=UserTypes.AGENT, attributes__contains=owner_match)
@receiver(post_delete, sender=User)
def user_delete_cascade_agents(sender: type[Model], instance: User, **_):
"""Delete agent users when their owner is deleted"""
# Agents are tied to their owner via the owner-pk attribute; cascade manually
_agent_qs_for_owner(instance.pk).delete()
@receiver(post_save, sender=User)
def user_save_propagate_agent_active(
sender: type[Model], instance: User, update_fields: frozenset[str] | None = None, **_
):
"""Propagate is_active changes to owned agent users"""
# When the save listed explicit fields and is_active was not among them,
# nothing relevant changed
if update_fields is not None and "is_active" not in update_fields:
return
agents = _agent_qs_for_owner(instance.pk)
if not instance.is_active:
# Deactivating an owner also revokes live sessions of their currently
# active agents, before the agents themselves are deactivated below
Session.objects.filter(
authenticatedsession__user__in=agents.filter(is_active=True)
).delete()
agents.update(is_active=instance.is_active)
@receiver(pre_save)
def backchannel_provider_pre_save(sender: type[Model], instance: Model, **_):
"""Ensure backchannel providers have is_backchannel set to true"""

View File

@@ -27,10 +27,7 @@ def clean_expired_models():
for cls in ExpiringModel.__subclasses__():
cls: ExpiringModel
objects = (
cls.objects.including_expired()
.all()
.exclude(expiring=False)
.exclude(expiring=True, expires__gt=now())
cls.objects.all().exclude(expiring=False).exclude(expiring=True, expires__gt=now())
)
amount = objects.count()
for obj in chunked_queryset(objects):

View File

@@ -1,84 +0,0 @@
"""Test agent token-to-session exchange"""
from django.urls.base import reverse
from rest_framework.test import APITestCase
from authentik.core.models import (
USER_ATTRIBUTE_AGENT_OWNER_PK,
USER_PATH_AGENT,
Token,
TokenIntents,
User,
UserTypes,
)
from authentik.core.tests.utils import create_test_user
from authentik.lib.generators import generate_id
class TestAgentSession(APITestCase):
"""Test agent token-to-session exchange"""
def _create_agent_with_token(self):
owner = create_test_user()
agent = User.objects.create(
username=generate_id(),
type=UserTypes.AGENT,
attributes={USER_ATTRIBUTE_AGENT_OWNER_PK: str(owner.pk)},
path=USER_PATH_AGENT,
)
agent.set_unusable_password()
agent.save()
token = Token.objects.create(
identifier=generate_id(),
intent=TokenIntents.INTENT_API,
user=agent,
expiring=True,
)
return owner, agent, token
def test_session_exchange_success(self):
"""Valid agent token creates a session"""
_owner, _agent, token = self._create_agent_with_token()
response = self.client.post(
reverse("authentik_api:agent-session"),
data={"key": token.key},
content_type="application/json",
)
self.assertEqual(response.status_code, 204)
def test_session_exchange_invalid_token(self):
"""Invalid token key is rejected"""
response = self.client.post(
reverse("authentik_api:agent-session"),
data={"key": "nonexistent-key"},
content_type="application/json",
)
self.assertEqual(response.status_code, 400)
def test_session_exchange_non_agent(self):
"""Token belonging to a non-agent user is rejected"""
user = create_test_user()
token = Token.objects.create(
identifier=generate_id(),
intent=TokenIntents.INTENT_API,
user=user,
expiring=True,
)
response = self.client.post(
reverse("authentik_api:agent-session"),
data={"key": token.key},
content_type="application/json",
)
self.assertEqual(response.status_code, 400)
def test_session_exchange_inactive_agent(self):
"""Inactive agent is rejected"""
_owner, agent, token = self._create_agent_with_token()
agent.is_active = False
agent.save(update_fields=["is_active"])
response = self.client.post(
reverse("authentik_api:agent-session"),
data={"key": token.key},
content_type="application/json",
)
self.assertEqual(response.status_code, 403)

View File

@@ -1,101 +0,0 @@
"""Test interface view redirect behavior by user type"""
from django.test import TestCase
from django.urls import reverse
from authentik.brands.models import Brand
from authentik.core.models import Application, UserTypes
from authentik.core.tests.utils import create_test_brand, create_test_user
class TestInterfaceRedirects(TestCase):
"""Test RootRedirectView and BrandDefaultRedirectView redirect logic by user type"""
def setUp(self):
self.app = Application.objects.create(name="test-app", slug="test-app")
self.brand: Brand = create_test_brand(default_application=self.app)
def _assert_redirects_to_app(self, url_name: str, user_type: UserTypes):
user = create_test_user(type=user_type)
self.client.force_login(user)
response = self.client.get(reverse(f"authentik_core:{url_name}"))
self.assertRedirects(
response,
reverse(
"authentik_core:application-launch", kwargs={"application_slug": self.app.slug}
),
fetch_redirect_response=False,
)
def _assert_no_redirect(self, url_name: str, user_type: UserTypes):
"""Internal users should not be redirected away."""
user = create_test_user(type=user_type)
self.client.force_login(user)
response = self.client.get(reverse(f"authentik_core:{url_name}"))
# Internal users get a 200 (rendered template) or redirect to if-user, not to the app
app_url = reverse(
"authentik_core:application-launch", kwargs={"application_slug": self.app.slug}
)
self.assertNotEqual(response.get("Location"), app_url)
# --- RootRedirectView ---
def test_root_redirect_external_user(self):
"""External users are redirected to the default app from root"""
self._assert_redirects_to_app("root-redirect", UserTypes.EXTERNAL)
def test_root_redirect_service_account(self):
"""Service accounts are redirected to the default app from root"""
self._assert_redirects_to_app("root-redirect", UserTypes.SERVICE_ACCOUNT)
def test_root_redirect_internal_service_account(self):
"""Internal service accounts are redirected to the default app from root"""
self._assert_redirects_to_app("root-redirect", UserTypes.INTERNAL_SERVICE_ACCOUNT)
def test_root_redirect_internal_user(self):
"""Internal users are NOT redirected to the app from root"""
self._assert_no_redirect("root-redirect", UserTypes.INTERNAL)
# --- BrandDefaultRedirectView (if/user/) ---
def test_if_user_external_user(self):
"""External users are redirected to the default app from if/user/"""
self._assert_redirects_to_app("if-user", UserTypes.EXTERNAL)
def test_if_user_service_account(self):
"""Service accounts are redirected to the default app from if/user/"""
self._assert_redirects_to_app("if-user", UserTypes.SERVICE_ACCOUNT)
def test_if_user_internal_service_account(self):
"""Internal service accounts are redirected to the default app from if/user/"""
self._assert_redirects_to_app("if-user", UserTypes.INTERNAL_SERVICE_ACCOUNT)
def test_if_user_internal_user(self):
"""Internal users are NOT redirected to the app from if/user/"""
self._assert_no_redirect("if-user", UserTypes.INTERNAL)
# --- BrandDefaultRedirectView (if/admin/) ---
def test_if_admin_service_account(self):
"""Service accounts are redirected to the default app from if/admin/"""
self._assert_redirects_to_app("if-admin", UserTypes.SERVICE_ACCOUNT)
def test_if_admin_internal_service_account(self):
"""Internal service accounts are redirected to the default app from if/admin/"""
self._assert_redirects_to_app("if-admin", UserTypes.INTERNAL_SERVICE_ACCOUNT)
def test_if_admin_internal_user(self):
"""Internal users are NOT redirected to the app from if/admin/"""
self._assert_no_redirect("if-admin", UserTypes.INTERNAL)
# --- No default app set ---
def test_service_account_no_default_app_access_denied(self):
"""Service accounts get access denied when no default app is configured"""
self.brand.default_application = None
self.brand.save()
user = create_test_user(type=UserTypes.SERVICE_ACCOUNT)
self.client.force_login(user)
response = self.client.get(reverse("authentik_core:if-user"))
self.assertEqual(response.status_code, 200)
self.assertIn(b"Interface can only be accessed by internal users", response.content)

View File

@@ -9,8 +9,6 @@ from freezegun import freeze_time
from guardian.shortcuts import get_anonymous_user
from authentik.core.models import Provider, Source, Token
from authentik.events.models import Event, EventAction
from authentik.lib.generators import generate_id
from authentik.lib.utils.reflection import all_subclasses
@@ -31,22 +29,6 @@ class TestModels(TestCase):
freeze.tick(timedelta(seconds=1))
self.assertFalse(token.is_expired)
def test_filter_not_expired_warning(self):
"""Test filter_not_expired's deprecation message"""
id = generate_id()
Token.objects.create(
expires=now() - timedelta(hours=1),
expiring=True,
user=get_anonymous_user(),
identifier=id,
)
self.assertFalse(Token.filter_not_expired(identifier=id).exists())
event = Event.objects.filter(action=EventAction.CONFIGURATION_WARNING).first()
self.assertIsNotNone(event)
self.assertEqual(
event.context["deprecation"], "authentik.core.models.Token.filter_not_expired"
)
def source_tester_factory(test_model: type[Source]) -> Callable:
"""Test source"""

View File

@@ -173,7 +173,7 @@ class TestTokenAPI(APITestCase):
def test_list(self):
"""Test Token List (Test normal authentication)"""
Token.objects.including_expired().all().delete()
Token.objects.all().delete()
token_should: Token = Token.objects.create(
identifier="test", expiring=False, user=self.user
)
@@ -185,7 +185,7 @@ class TestTokenAPI(APITestCase):
def test_list_with_permission(self):
"""Test Token List (Test with `view_token` permission)"""
Token.objects.including_expired().all().delete()
Token.objects.all().delete()
token_should: Token = Token.objects.create(
identifier="test", expiring=False, user=self.user
)
@@ -199,50 +199,6 @@ class TestTokenAPI(APITestCase):
self.assertEqual(body["results"][0]["identifier"], token_should.identifier)
self.assertEqual(body["results"][1]["identifier"], token_should_not.identifier)
def test_token_rotate_by_owner(self):
"""Token owner can rotate their own token"""
token = Token.objects.create(
identifier=generate_id(),
intent=TokenIntents.INTENT_API,
user=self.user,
expiring=True,
)
original_key = token.key
response = self.client.post(
reverse("authentik_api:token-rotate", kwargs={"identifier": token.identifier}),
)
self.assertEqual(response.status_code, 200)
token.refresh_from_db()
self.assertNotEqual(token.key, original_key)
self.assertEqual(token.key, loads(response.content)["key"])
def test_token_rotate_by_superuser(self):
"""Superuser can rotate any token"""
token = Token.objects.create(
identifier=generate_id(),
intent=TokenIntents.INTENT_API,
user=self.user,
expiring=True,
)
self.client.force_login(self.admin)
response = self.client.post(
reverse("authentik_api:token-rotate", kwargs={"identifier": token.identifier}),
)
self.assertEqual(response.status_code, 200)
def test_token_rotate_unauthorized(self):
"""Non-owner cannot rotate another user's token"""
token = Token.objects.create(
identifier=generate_id(),
intent=TokenIntents.INTENT_API,
user=self.admin,
expiring=True,
)
response = self.client.post(
reverse("authentik_api:token-rotate", kwargs={"identifier": token.identifier}),
)
self.assertEqual(response.status_code, 403)
def test_serializer_no_request(self):
"""Test serializer without request"""
self.assertTrue(

View File

@@ -1,9 +1,6 @@
"""Test token auth"""
from datetime import timedelta
from django.test import TestCase
from django.utils.timezone import now
from authentik.core.auth import TokenBackend
from authentik.core.models import Token, TokenIntents, User
@@ -31,15 +28,6 @@ class TestTokenAuth(TestCase):
TokenBackend().authenticate(self.request, "test-user", self.token.key), self.user
)
def test_token_auth_expired(self):
"""Test auth with token"""
self.token.expiring = True
self.token.expires = now() - timedelta(hours=1)
self.token.save()
self.assertEqual(
TokenBackend().authenticate(self.request, "test-user", self.token.key), None
)
def test_token_auth_none(self):
"""Test auth with token (non-existent user)"""
self.assertIsNone(

View File

@@ -2,14 +2,7 @@
from django.test.testcases import TestCase
from authentik.core.models import (
USER_ATTRIBUTE_AGENT_OWNER_PK,
USER_PATH_AGENT,
AuthenticatedSession,
Session,
User,
UserTypes,
)
from authentik.core.models import User
from authentik.events.models import Event
from authentik.lib.generators import generate_id
@@ -40,93 +33,3 @@ class TestUsers(TestCase):
self.assertEqual(Event.objects.count(), 1)
user.ak_groups.all()
self.assertEqual(Event.objects.count(), 1)
class TestAgentUserSignals(TestCase):
"""Test signals related to agent user lifecycle"""
def _create_owner(self):
owner = User.objects.create(username=generate_id())
owner.set_unusable_password()
owner.save()
return owner
def _create_agent(self, owner):
agent = User.objects.create(
username=generate_id(),
type=UserTypes.AGENT,
attributes={USER_ATTRIBUTE_AGENT_OWNER_PK: str(owner.pk)},
path=USER_PATH_AGENT,
)
agent.set_unusable_password()
agent.save()
return agent
def test_delete_owner_cascades_to_agents(self):
"""Deleting an owner also deletes all their agent users"""
owner = self._create_owner()
agent1 = self._create_agent(owner)
agent2 = self._create_agent(owner)
other_owner = self._create_owner()
other_agent = self._create_agent(other_owner)
owner.delete()
self.assertFalse(User.objects.filter(pk=agent1.pk).exists())
self.assertFalse(User.objects.filter(pk=agent2.pk).exists())
self.assertTrue(User.objects.filter(pk=other_agent.pk).exists())
def test_deactivate_owner_deactivates_agents(self):
"""Setting an owner inactive also marks all their agents inactive"""
owner = self._create_owner()
agent = self._create_agent(owner)
owner.is_active = False
owner.save(update_fields=["is_active"])
agent.refresh_from_db()
self.assertFalse(agent.is_active)
def test_reactivate_owner_reactivates_agents(self):
"""Setting an owner active again also re-activates their agents"""
owner = self._create_owner()
owner.is_active = False
owner.save(update_fields=["is_active"])
agent = self._create_agent(owner)
agent.is_active = False
agent.save(update_fields=["is_active"])
owner.is_active = True
owner.save(update_fields=["is_active"])
agent.refresh_from_db()
self.assertTrue(agent.is_active)
def test_unrelated_owner_save_does_not_affect_agents(self):
"""Saving an owner without changing is_active does not touch agents"""
owner = self._create_owner()
agent = self._create_agent(owner)
agent.is_active = False
agent.save(update_fields=["is_active"])
owner.name = generate_id()
owner.save(update_fields=["name"])
agent.refresh_from_db()
self.assertFalse(agent.is_active)
def test_deactivate_owner_clears_agent_sessions(self):
"""Deactivating an owner removes authenticated sessions for their agents"""
owner = self._create_owner()
agent = self._create_agent(owner)
session = Session.objects.create(
session_key=generate_id(),
last_ip="255.255.255.255",
last_user_agent="",
)
AuthenticatedSession.objects.create(user=agent, session=session)
owner.is_active = False
owner.save(update_fields=["is_active"])
self.assertFalse(Session.objects.filter(pk=session.pk).exists())

View File

@@ -2,7 +2,6 @@
from datetime import datetime, timedelta
from json import loads
from unittest.mock import MagicMock, patch
from django.urls.base import reverse
from django.utils.timezone import now
@@ -10,15 +9,10 @@ from rest_framework.test import APITestCase
from authentik.brands.models import Brand
from authentik.core.models import (
USER_ATTRIBUTE_AGENT_ALLOWED_APPS,
USER_ATTRIBUTE_AGENT_OWNER_PK,
USER_ATTRIBUTE_TOKEN_EXPIRING,
USER_PATH_AGENT,
Application,
AuthenticatedSession,
Session,
Token,
TokenIntents,
User,
UserTypes,
)
@@ -884,249 +878,3 @@ class TestUsersAPI(APITestCase):
self.assertIn(user2.pk, pks)
# Verify user2 comes before user1 in descending order
self.assertLess(pks.index(user2.pk), pks.index(user1.pk))
class TestAgentUserAPI(APITestCase):
"""Test agent user API"""
def setUp(self) -> None:
self.admin = create_test_admin_user()
self.user = create_test_user()
self.owner = create_test_user()
self.owner.assign_perms_to_managed_role("authentik_core.add_agent_user")
self.owner.assign_perms_to_managed_role("authentik_core.add_user")
self.owner.assign_perms_to_managed_role("authentik_core.add_token")
def _create_agent(self, name="test-agent", owner=None):
owner = owner or self.admin
agent = User.objects.create(
username=name,
name=name,
type=UserTypes.AGENT,
attributes={
USER_ATTRIBUTE_AGENT_OWNER_PK: str(owner.pk),
USER_ATTRIBUTE_AGENT_ALLOWED_APPS: [],
},
path=USER_PATH_AGENT,
)
agent.set_unusable_password()
agent.save()
return agent
def test_agent_create(self):
"""Non-admin owner with correct permissions can create an agent"""
self.client.force_login(self.owner)
with patch(
"authentik.enterprise.license.LicenseKey.cached_summary",
MagicMock(return_value=MagicMock(status=MagicMock(is_valid=True))),
):
response = self.client.post(
reverse("authentik_api:user-agent"),
data={"name": "test-agent"},
)
self.assertEqual(response.status_code, 200)
agent = User.objects.get(username="test-agent")
self.assertEqual(agent.type, UserTypes.AGENT)
self.assertEqual(agent.path, USER_PATH_AGENT)
self.assertEqual(agent.attributes.get(USER_ATTRIBUTE_AGENT_OWNER_PK), str(self.owner.pk))
self.assertEqual(agent.attributes.get(USER_ATTRIBUTE_AGENT_ALLOWED_APPS), [])
self.assertFalse(agent.has_usable_password())
token = Token.objects.filter(user=agent, intent=TokenIntents.INTENT_API).first()
self.assertIsNotNone(token)
self.assertTrue(token.expiring)
def test_agent_create_no_license(self):
"""Agent creation is rejected without a valid enterprise license"""
self.client.force_login(self.owner)
with patch(
"authentik.enterprise.license.LicenseKey.cached_summary",
MagicMock(return_value=MagicMock(status=MagicMock(is_valid=False))),
):
response = self.client.post(
reverse("authentik_api:user-agent"),
data={"name": "test-agent"},
)
self.assertEqual(response.status_code, 400)
def test_agent_create_non_internal_user(self):
"""Only internal users can create agent users"""
self.owner.type = UserTypes.EXTERNAL
self.owner.save(update_fields=["type"])
self.client.force_login(self.owner)
with patch(
"authentik.enterprise.license.LicenseKey.cached_summary",
MagicMock(return_value=MagicMock(status=MagicMock(is_valid=True))),
):
response = self.client.post(
reverse("authentik_api:user-agent"),
data={"name": "test-agent"},
)
self.assertEqual(response.status_code, 400)
def test_agent_create_no_permission(self):
"""User without add_agent_user permission is rejected"""
self.client.force_login(self.user)
with patch(
"authentik.enterprise.license.LicenseKey.cached_summary",
MagicMock(return_value=MagicMock(status=MagicMock(is_valid=True))),
):
response = self.client.post(
reverse("authentik_api:user-agent"),
data={"name": "test-agent"},
)
self.assertEqual(response.status_code, 403)
def test_agent_create_duplicate(self):
"""Duplicate agent username returns a user-friendly error"""
self._create_agent("test-agent-dup")
self.client.force_login(self.owner)
with patch(
"authentik.enterprise.license.LicenseKey.cached_summary",
MagicMock(return_value=MagicMock(status=MagicMock(is_valid=True))),
):
response = self.client.post(
reverse("authentik_api:user-agent"),
data={"name": "test-agent-dup"},
)
self.assertEqual(response.status_code, 400)
def test_agent_type_cannot_be_changed(self):
"""Agent user type cannot be changed via the users API"""
agent = self._create_agent()
self.client.force_login(self.admin)
response = self.client.patch(
reverse("authentik_api:user-detail", kwargs={"pk": agent.pk}),
data={"type": UserTypes.INTERNAL},
content_type="application/json",
)
self.assertEqual(response.status_code, 400)
def test_agent_owner_cannot_be_changed(self):
"""Agent owner cannot be changed via the users API"""
agent = self._create_agent()
other = create_test_user()
self.client.force_login(self.admin)
new_attrs = dict(agent.attributes)
new_attrs[USER_ATTRIBUTE_AGENT_OWNER_PK] = str(other.pk)
response = self.client.patch(
reverse("authentik_api:user-detail", kwargs={"pk": agent.pk}),
data={"attributes": new_attrs},
content_type="application/json",
)
self.assertEqual(response.status_code, 400)
def test_agent_allowed_apps_update(self):
"""Owner can update the agent's allowed apps list"""
agent = self._create_agent(owner=self.admin)
app = Application.objects.create(name=generate_id(), slug=generate_id())
self.client.force_login(self.admin)
response = self.client.put(
reverse("authentik_api:user-agent-allowed-apps", kwargs={"pk": agent.pk}),
data={"allowed_apps": [str(app.pk)]},
content_type="application/json",
)
self.assertEqual(response.status_code, 200)
agent.refresh_from_db()
self.assertIn(str(app.pk), agent.attributes[USER_ATTRIBUTE_AGENT_ALLOWED_APPS])
def test_agent_allowed_apps_update_unauthorized(self):
"""Non-owner, non-superuser is rejected when updating allowed apps"""
other = create_test_user()
agent = self._create_agent(owner=other)
self.client.force_login(self.user)
response = self.client.put(
reverse("authentik_api:user-agent-allowed-apps", kwargs={"pk": agent.pk}),
data={"allowed_apps": []},
content_type="application/json",
)
self.assertEqual(response.status_code, 403)
def test_agent_allowed_apps_update_non_agent(self):
"""Endpoint rejects non-agent users"""
self.client.force_login(self.admin)
response = self.client.put(
reverse("authentik_api:user-agent-allowed-apps", kwargs={"pk": self.user.pk}),
data={"allowed_apps": []},
content_type="application/json",
)
self.assertEqual(response.status_code, 400)
def test_agent_allowed_app_add(self):
"""PATCH add: owner can add a single app to agent's allowed list"""
agent = self._create_agent(owner=self.admin)
app = Application.objects.create(name=generate_id(), slug=generate_id())
self.client.force_login(self.admin)
response = self.client.patch(
reverse("authentik_api:user-agent-allowed-app", kwargs={"pk": agent.pk}),
data={"app": str(app.pk), "action": "add"},
content_type="application/json",
)
self.assertEqual(response.status_code, 200)
agent.refresh_from_db()
self.assertIn(str(app.pk), agent.attributes[USER_ATTRIBUTE_AGENT_ALLOWED_APPS])
def test_agent_allowed_app_add_duplicate(self):
"""PATCH add: adding an already-allowed app is idempotent"""
agent = self._create_agent(owner=self.admin)
app = Application.objects.create(name=generate_id(), slug=generate_id())
agent.attributes[USER_ATTRIBUTE_AGENT_ALLOWED_APPS] = [str(app.pk)]
agent.save(update_fields=["attributes"])
self.client.force_login(self.admin)
response = self.client.patch(
reverse("authentik_api:user-agent-allowed-app", kwargs={"pk": agent.pk}),
data={"app": str(app.pk), "action": "add"},
content_type="application/json",
)
self.assertEqual(response.status_code, 200)
agent.refresh_from_db()
self.assertEqual(agent.attributes[USER_ATTRIBUTE_AGENT_ALLOWED_APPS].count(str(app.pk)), 1)
def test_agent_allowed_app_remove(self):
"""PATCH remove: owner can remove a single app from agent's allowed list"""
agent = self._create_agent(owner=self.admin)
app = Application.objects.create(name=generate_id(), slug=generate_id())
agent.attributes[USER_ATTRIBUTE_AGENT_ALLOWED_APPS] = [str(app.pk)]
agent.save(update_fields=["attributes"])
self.client.force_login(self.admin)
response = self.client.patch(
reverse("authentik_api:user-agent-allowed-app", kwargs={"pk": agent.pk}),
data={"app": str(app.pk), "action": "remove"},
content_type="application/json",
)
self.assertEqual(response.status_code, 204)
agent.refresh_from_db()
self.assertNotIn(str(app.pk), agent.attributes[USER_ATTRIBUTE_AGENT_ALLOWED_APPS])
def test_agent_allowed_app_add_nonexistent(self):
"""PATCH add: nonexistent app UUID is rejected"""
agent = self._create_agent(owner=self.admin)
self.client.force_login(self.admin)
response = self.client.patch(
reverse("authentik_api:user-agent-allowed-app", kwargs={"pk": agent.pk}),
data={"app": "00000000-0000-0000-0000-000000000000", "action": "add"},
content_type="application/json",
)
self.assertEqual(response.status_code, 400)
def test_token_rotate_by_agent_owner(self):
"""Non-admin owner can rotate the agent's token"""
self.client.force_login(self.owner)
with patch(
"authentik.enterprise.license.LicenseKey.cached_summary",
MagicMock(return_value=MagicMock(status=MagicMock(is_valid=True))),
):
response = self.client.post(
reverse("authentik_api:user-agent"),
data={"name": "rotate-test-agent"},
)
self.assertEqual(response.status_code, 200)
token = Token.objects.get(
user__username="rotate-test-agent", intent=TokenIntents.INTENT_API
)
original_key = token.key
response = self.client.post(
reverse("authentik_api:token-rotate", kwargs={"identifier": token.identifier}),
)
self.assertEqual(response.status_code, 200)
token.refresh_from_db()
self.assertNotEqual(token.key, original_key)

View File

@@ -19,7 +19,6 @@ from authentik.core.api.sources import (
from authentik.core.api.tokens import TokenViewSet
from authentik.core.api.transactional_applications import TransactionalApplicationView
from authentik.core.api.users import UserViewSet
from authentik.core.views.agent_session import AgentSessionView
from authentik.core.views.apps import RedirectToAppLaunch
from authentik.core.views.debug import AccessDeniedView
from authentik.core.views.interface import (
@@ -80,11 +79,6 @@ api_urlpatterns = [
TransactionalApplicationView.as_view(),
name="core-transactional-application",
),
path(
"core/agent/session/",
AgentSessionView.as_view(),
name="agent-session",
),
("core/groups", GroupViewSet),
("core/users", UserViewSet),
("core/tokens", TokenViewSet),

View File

@@ -1,50 +0,0 @@
"""Agent token-to-session exchange view"""
from django.contrib.auth import login
from rest_framework.authentication import BaseAuthentication
from rest_framework.permissions import AllowAny
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from authentik.core.models import AuthenticatedSession, Token, TokenIntents, UserTypes
from authentik.stages.password import BACKEND_INBUILT
class NoAuthentication(BaseAuthentication):
"""Explicitly skip DRF authentication; the view authenticates via the request body."""
def authenticate(self, request):
return None
class AgentSessionView(APIView):
"""Exchange an agent's API token for an authenticated session."""
authentication_classes = [NoAuthentication]
permission_classes = [AllowAny]
def post(self, request: Request) -> Response:
key = request.data.get("key")
if not key:
return Response({"detail": "Key is required."}, status=400)
token = (
Token.objects.filter(key=key, intent=TokenIntents.INTENT_API)
.select_related("user")
.first()
)
if not token:
return Response({"detail": "Invalid token."}, status=400)
if token.is_expired:
return Response({"detail": "Token has expired."}, status=403)
if token.user.type != UserTypes.AGENT:
return Response({"detail": "Token does not belong to an agent user."}, status=400)
if not token.user.is_active:
return Response({"detail": "Agent user is inactive."}, status=403)
login(request._request, token.user, backend=BACKEND_INBUILT)
session = AuthenticatedSession.from_request(request._request, token.user)
if session:
session.save()
return Response(status=204)

View File

@@ -26,12 +26,7 @@ class RootRedirectView(RedirectView):
query_string = True
def redirect_to_app(self, request: HttpRequest):
if request.user.is_authenticated and request.user.type in (
UserTypes.EXTERNAL,
UserTypes.SERVICE_ACCOUNT,
UserTypes.INTERNAL_SERVICE_ACCOUNT,
UserTypes.AGENT,
):
if request.user.is_authenticated and request.user.type == UserTypes.EXTERNAL:
brand: Brand = request.brand
if brand.default_application:
return redirect(
@@ -67,12 +62,7 @@ class BrandDefaultRedirectView(InterfaceView):
"""By default redirect to default app"""
def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
if request.user.is_authenticated and request.user.type in (
UserTypes.EXTERNAL,
UserTypes.SERVICE_ACCOUNT,
UserTypes.INTERNAL_SERVICE_ACCOUNT,
UserTypes.AGENT,
):
if request.user.is_authenticated and request.user.type == UserTypes.EXTERNAL:
brand: Brand = request.brand
if brand.default_application:
return redirect(

View File

@@ -114,16 +114,15 @@ def certificate_discovery():
discovered = 0
for file in glob(CONFIG.get("cert_discovery_dir") + "/**", recursive=True):
path = Path(file)
if not path.exists() or path.is_dir():
if not path.exists():
continue
if path.is_dir():
continue
# For certbot setups, we want to ignore archive.
if "archive" in file:
continue
# Handle additionalOutputFormats from cert-manager gracefully
if path.name in ["ca.crt", "tls-combined.pem", "key.der"]:
continue
# Support certbot & kubernetes.io/tls directory structure
if path.name in ["fullchain.pem", "privkey.pem", "tls.crt", "tls.key"]:
# Support certbot's directory structure
if path.name in ["fullchain.pem", "privkey.pem"]:
cert_name = path.parent.name
else:
cert_name = path.name.replace(path.suffix, "")

View File

@@ -355,16 +355,6 @@ class TestCrypto(APITestCase):
subject_alt_names=[],
validity_days=3,
)
name3 = generate_id()
builder3 = CertificateBuilder(name3)
with self.assertRaises(ValueError):
builder3.save()
builder3.build(
subject_alt_names=[],
validity_days=3,
)
with TemporaryDirectory() as temp_dir:
with open(f"{temp_dir}/foo.pem", "w+", encoding="utf-8") as _cert:
_cert.write(builder.certificate)
@@ -375,8 +365,6 @@ class TestCrypto(APITestCase):
_cert.write(builder2.certificate)
with open(f"{temp_dir}/foo.bar/privkey.pem", "w+", encoding="utf-8") as _key:
_key.write(builder2.private_key)
with open(f"{temp_dir}/tls-combined.pem", "w+", encoding="utf-8") as _cert:
_cert.write(builder3.certificate)
with CONFIG.patch("cert_discovery_dir", temp_dir):
certificate_discovery.send()
keypair: CertificateKeyPair = CertificateKeyPair.objects.filter(
@@ -388,9 +376,6 @@ class TestCrypto(APITestCase):
self.assertTrue(
CertificateKeyPair.objects.filter(managed=MANAGED_DISCOVERED % "foo.bar").exists()
)
self.assertFalse(
CertificateKeyPair.objects.filter(managed=MANAGED_DISCOVERED % "tls-combined").exists()
)
def test_discovery_updating_same_private_key(self):
"""Test certificate discovery updating certs with matching private keys"""

View File

@@ -97,7 +97,7 @@ class DeviceViewSet(
def summary(self, request: Request) -> Response:
delta = now() - timedelta(hours=24)
unreachable = (
Device.objects.all()
Device.filter_not_expired()
.annotate(
latest_snapshot=Subquery(
DeviceFactSnapshot.objects.filter(connection__device=OuterRef("pk"))
@@ -110,7 +110,7 @@ class DeviceViewSet(
.count()
)
data = {
"total_count": Device.objects.all().count(),
"total_count": Device.filter_not_expired().count(),
"unreachable_count": unreachable,
# Currently not supported
"outdated_agent_count": 0,

View File

@@ -65,9 +65,7 @@ class AgentConnectorSerializer(ConnectorSerializer):
class MDMConfigSerializer(PassiveSerializer):
platform = ChoiceField(choices=OSFamily.choices)
enrollment_token = PrimaryKeyRelatedField(
queryset=EnrollmentToken.objects.including_expired().all()
)
enrollment_token = PrimaryKeyRelatedField(queryset=EnrollmentToken.objects.all())
def validate_platform(self, platform: OSFamily) -> OSFamily:
if platform not in [OSFamily.iOS, OSFamily.macOS, OSFamily.windows]:
@@ -138,7 +136,7 @@ class AgentConnectorViewSet(
device=device,
connector=token.connector,
)
DeviceToken.objects.including_expired().filter(device=connection).delete()
DeviceToken.objects.filter(device=connection).delete()
token = DeviceToken.objects.create(device=connection, expiring=False)
return Response(
{

View File

@@ -18,10 +18,7 @@ from authentik.rbac.decorators import permission_required
class EnrollmentTokenSerializer(ModelSerializer):
device_group_obj = DeviceAccessGroupSerializer(
source="device_group",
read_only=True,
required=False,
allow_null=True,
source="device_group", read_only=True, required=False
)
def __init__(self, *args, **kwargs) -> None:

View File

@@ -34,11 +34,9 @@ class AgentEnrollmentAuth(BaseAuthentication):
def authenticate(self, request: Request) -> tuple[User, Any] | None:
auth = get_authorization_header(request)
key = validate_auth(auth)
token = EnrollmentToken.objects.filter(key=key).first()
token = EnrollmentToken.filter_not_expired(key=key).first()
if not token:
raise PermissionDenied()
if not token.connector.enabled:
raise PermissionDenied()
CTX_AUTH_VIA.set("endpoint_token_enrollment")
return (DeviceUser(), token)
@@ -50,11 +48,9 @@ class AgentAuth(BaseAuthentication):
key = validate_auth(auth, format="bearer+agent")
if not key:
return None
device_token = DeviceToken.objects.filter(key=key).first()
device_token = DeviceToken.filter_not_expired(key=key).first()
if not device_token:
raise PermissionDenied()
if not device_token.device.connector.enabled:
raise PermissionDenied()
if device_token.device.device.is_expired:
raise PermissionDenied()
CTX_AUTH_VIA.set("endpoint_token")
@@ -91,7 +87,7 @@ class DeviceAuthFedAuthentication(BaseAuthentication):
if not raw_token:
LOGGER.warning("Missing token")
return None
device = Device.objects.filter(name=request.query_params.get("device")).first()
device = Device.filter_not_expired(name=request.query_params.get("device")).first()
if not device:
LOGGER.warning("Couldn't find device")
return None

View File

@@ -53,11 +53,11 @@ class EndpointAgentChallengeResponse(ChallengeResponse):
except PyJWTError as exc:
self.stage.logger.warning("Could not parse response", exc=exc)
raise ValidationError("Invalid challenge response") from None
device = Device.objects.filter(identifier=raw["iss"]).first()
device = Device.filter_not_expired(identifier=raw["iss"]).first()
if not device:
self.stage.logger.warning("Could not find device for challenge")
raise ValidationError("Invalid challenge response")
for token in DeviceToken.objects.filter(
for token in DeviceToken.filter_not_expired(
device__device=device,
device__connector=self.stage.executor.current_stage.connector,
).values_list("key", flat=True):

View File

@@ -58,16 +58,6 @@ class TestAgentAPI(APITestCase):
)
self.assertEqual(response.status_code, 200)
def test_enroll_disabled(self):
self.connector.enabled = False
self.connector.save()
response = self.client.post(
reverse("authentik_api:agentconnector-enroll"),
data={"device_serial": generate_id(), "device_name": "bar"},
HTTP_AUTHORIZATION=f"Bearer {self.token.key}",
)
self.assertEqual(response.status_code, 403)
def test_enroll_token_delete(self):
response = self.client.post(
reverse("authentik_api:agentconnector-enroll"),
@@ -89,7 +79,7 @@ class TestAgentAPI(APITestCase):
HTTP_AUTHORIZATION=f"Bearer {self.token.key}",
)
self.assertEqual(response.status_code, 200)
device = Device.objects.filter(identifier=ident).first()
device = Device.filter_not_expired(identifier=ident).first()
self.assertIsNotNone(device)
self.assertEqual(device.access_group, device_group)
@@ -104,7 +94,7 @@ class TestAgentAPI(APITestCase):
HTTP_AUTHORIZATION=f"Bearer {self.token.key}",
)
self.assertEqual(response.status_code, 403)
self.assertFalse(Device.objects.filter(identifier=dev_id).exists())
self.assertFalse(Device.filter_not_expired(identifier=dev_id).exists())
@reconcile_app("authentik_crypto")
def test_config(self):
@@ -114,16 +104,6 @@ class TestAgentAPI(APITestCase):
)
self.assertEqual(response.status_code, 200)
@reconcile_app("authentik_crypto")
def test_config_disabled(self):
self.connector.enabled = False
self.connector.save()
response = self.client.get(
reverse("authentik_api:agentconnector-agent-config"),
HTTP_AUTHORIZATION=f"Bearer+agent {self.device_token.key}",
)
self.assertEqual(response.status_code, 403)
def test_check_in(self):
response = self.client.post(
reverse("authentik_api:agentconnector-check-in"),
@@ -132,16 +112,6 @@ class TestAgentAPI(APITestCase):
)
self.assertEqual(response.status_code, 204)
def test_check_in_disabled(self):
self.connector.enabled = False
self.connector.save()
response = self.client.post(
reverse("authentik_api:agentconnector-check-in"),
data=CHECK_IN_DATA_VALID,
HTTP_AUTHORIZATION=f"Bearer+agent {self.device_token.key}",
)
self.assertEqual(response.status_code, 403)
def test_check_in_token_expired(self):
self.device_token.expiring = True
self.device_token.expires = now() - timedelta(hours=1)

View File

@@ -44,6 +44,3 @@ class BaseController[T: "Connector"]:
def stage_view_authentication(self) -> StageView | None:
return None
def sync_endpoints(self):
raise NotImplementedError

View File

@@ -54,7 +54,7 @@ class Device(InternallyManagedMixin, ExpiringModel, AttributesMixin, PolicyBindi
def facts(self) -> DeviceFactSnapshot:
data = {}
last_updated = datetime.fromtimestamp(0, UTC)
for snapshot_data, snapshort_created in DeviceFactSnapshot.objects.filter(
for snapshot_data, snapshort_created in DeviceFactSnapshot.filter_not_expired(
snapshot_id__in=Subquery(
DeviceFactSnapshot.objects.filter(
connection__connector=OuterRef("connection__connector"), connection__device=self
@@ -162,11 +162,8 @@ class Connector(ScheduledModel, SerializerModel):
@property
def schedule_specs(self) -> list[ScheduleSpec]:
from authentik.endpoints.controller import Capabilities
from authentik.endpoints.tasks import endpoints_sync
if Capabilities.ENROLL_AUTOMATIC_API not in self.controller(self).capabilities():
return []
return [
ScheduleSpec(
actor=endpoints_sync,

View File

@@ -1,4 +1,4 @@
from authentik.endpoints.models import Connector, EndpointStage, StageMode
from authentik.endpoints.models import EndpointStage, StageMode
from authentik.flows.stage import StageView
PLAN_CONTEXT_ENDPOINT_CONNECTOR = "endpoint_connector"
@@ -8,10 +8,7 @@ class EndpointStageView(StageView):
def _get_inner(self) -> StageView | None:
stage: EndpointStage = self.executor.current_stage
connector: Connector = stage.connector
if not connector.enabled:
return None
inner_stage: type[StageView] | None = connector.stage
inner_stage: type[StageView] | None = stage.connector.stage
if not inner_stage:
return None
return inner_stage(self.executor, request=self.request)

View File

@@ -17,11 +17,11 @@ def endpoints_sync(connector_pk: Any):
connector: Connector | None = (
Connector.objects.filter(pk=connector_pk).select_subclasses().first()
)
if not connector or not connector.enabled:
if not connector:
return
controller = connector.controller
ctrl = controller(connector)
if Capabilities.ENROLL_AUTOMATIC_API not in ctrl.capabilities():
if Capabilities.AUTOMATIC_API not in ctrl.capabilities():
return
LOGGER.info("Syncing connector", connector=connector.name)
ctrl.sync_endpoints()

View File

@@ -1,35 +0,0 @@
from unittest.mock import PropertyMock, patch
from rest_framework.test import APITestCase
from authentik.endpoints.controller import BaseController, Capabilities
from authentik.endpoints.models import Connector
from authentik.endpoints.tasks import endpoints_sync
from authentik.lib.generators import generate_id
class TestEndpointTasks(APITestCase):
def test_agent_sync(self):
class controller(BaseController):
def capabilities(self):
return [Capabilities.ENROLL_AUTOMATIC_API]
def sync_endpoints(self):
pass
with patch.object(Connector, "controller", PropertyMock(return_value=controller)):
connector = Connector.objects.create(name=generate_id())
self.assertEqual(len(connector.schedule_specs), 1)
endpoints_sync.send(connector.pk).get_result(block=True)
def test_agent_no_sync(self):
class controller(BaseController):
def capabilities(self):
return []
with patch.object(Connector, "controller", PropertyMock(return_value=controller)):
connector = Connector.objects.create(name=generate_id())
self.assertEqual(len(connector.schedule_specs), 0)
endpoints_sync.send(connector.pk).get_result(block=True)

View File

@@ -1,7 +1,6 @@
"""Enterprise app config"""
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from authentik.enterprise.apps import EnterpriseConfig
from authentik.tenants.flags import Flag
@@ -10,9 +9,6 @@ from authentik.tenants.flags import Flag
class AuditIncludeExpandedDiff(Flag[bool], key="enterprise_audit_include_expanded_diff"):
default = False
visibility = "none"
description = _(
"Include additional information in audit logs, may incur a performance penalty."
)
class AuthentikEnterpriseAuditConfig(EnterpriseConfig):

View File

@@ -17,7 +17,7 @@ class NonceView(View):
def post(self, request: HttpRequest, *args, **kwargs):
raw_token = unquote(self.request.POST.get("x-ak-device-token"))
device_token = DeviceToken.objects.filter(key=raw_token).first()
device_token = DeviceToken.filter_not_expired(key=raw_token).first()
if not device_token:
return HttpResponseBadRequest()
nonce = AppleNonce.objects.create(

View File

@@ -106,7 +106,7 @@ class RegisterUserView(APIView):
def post(self, request: Request, body: AgentPSSOUserRegistration) -> Response:
device_token: DeviceToken = request.auth
conn: AgentDeviceConnection = device_token.device
user_token = DeviceAuthenticationToken.objects.filter(
user_token = DeviceAuthenticationToken.filter_not_expired(
device=conn.device,
token=body.validated_data["user_auth"],
device_token=device_token,

View File

@@ -96,7 +96,7 @@ class TokenView(View):
self.remote_nonce = decoded.get("nonce")
# Check that the nonce hasn't been used before
nonce = AppleNonce.objects.filter(nonce=decoded["request_nonce"]).first()
nonce = AppleNonce.filter_not_expired(nonce=decoded["request_nonce"]).first()
if not nonce:
raise ValidationError("Invalid nonce")
self.nonce = nonce

View File

@@ -3,7 +3,6 @@ from hmac import compare_digest
from django.http import Http404, HttpRequest, HttpResponse, HttpResponseBadRequest, QueryDict
from authentik.common.oauth.constants import QS_LOGIN_HINT
from authentik.endpoints.connectors.agent.auth import (
agent_auth_issue_token,
check_device_policies,
@@ -15,7 +14,7 @@ from authentik.enterprise.policy import EnterprisePolicyAccessView
from authentik.flows.exceptions import FlowNonApplicableException
from authentik.flows.models import in_memory_stage
from authentik.flows.planner import PLAN_CONTEXT_DEVICE, FlowPlanner
from authentik.flows.stage import PLAN_CONTEXT_PENDING_USER_IDENTIFIER, StageView
from authentik.flows.stage import StageView
from authentik.providers.oauth2.utils import HttpResponseRedirectScheme
QS_AGENT_IA_TOKEN = "ak-auth-ia-token" # nosec
@@ -30,7 +29,7 @@ class AgentInteractiveAuth(EnterprisePolicyAccessView):
def resolve_provider_application(self):
auth_token = (
DeviceAuthenticationToken.objects.filter(identifier=self.kwargs["token_uuid"])
DeviceAuthenticationToken.filter_not_expired(identifier=self.kwargs["token_uuid"])
.prefetch_related()
.first()
)
@@ -65,14 +64,14 @@ class AgentInteractiveAuth(EnterprisePolicyAccessView):
planner = FlowPlanner(self.connector.authorization_flow)
planner.allow_empty_flows = True
context = {
PLAN_CONTEXT_DEVICE: self.device,
PLAN_CONTEXT_DEVICE_AUTH_TOKEN: self.auth_token,
}
if QS_LOGIN_HINT in request.GET:
context[PLAN_CONTEXT_PENDING_USER_IDENTIFIER] = request.GET[QS_LOGIN_HINT]
try:
plan = planner.plan(self.request, context)
plan = planner.plan(
self.request,
{
PLAN_CONTEXT_DEVICE: self.device,
PLAN_CONTEXT_DEVICE_AUTH_TOKEN: self.auth_token,
},
)
except FlowNonApplicableException:
return self.handle_no_permission_authenticated()
plan.append_stage(in_memory_stage(AgentAuthFulfillmentStage))
@@ -85,6 +84,7 @@ class AgentInteractiveAuth(EnterprisePolicyAccessView):
class AgentAuthFulfillmentStage(StageView):
def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
device: Device = self.executor.plan.context.pop(PLAN_CONTEXT_DEVICE)
auth_token: DeviceAuthenticationToken = self.executor.plan.context.pop(

View File

@@ -141,10 +141,8 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
# according to the provider's settings
base = User.objects.all().exclude_anonymous().filter(**kwargs)
if self.exclude_users_service_account:
base = (
base.exclude(type=UserTypes.SERVICE_ACCOUNT)
.exclude(type=UserTypes.INTERNAL_SERVICE_ACCOUNT)
.exclude(type=UserTypes.AGENT)
base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude(
type=UserTypes.INTERNAL_SERVICE_ACCOUNT
)
if self.filter_group:
base = base.filter(groups__in=[self.filter_group])

View File

@@ -130,10 +130,8 @@ class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider):
# according to the provider's settings
base = User.objects.all().exclude_anonymous().filter(**kwargs)
if self.exclude_users_service_account:
base = (
base.exclude(type=UserTypes.SERVICE_ACCOUNT)
.exclude(type=UserTypes.INTERNAL_SERVICE_ACCOUNT)
.exclude(type=UserTypes.AGENT)
base = base.exclude(type=UserTypes.SERVICE_ACCOUNT).exclude(
type=UserTypes.INTERNAL_SERVICE_ACCOUNT
)
if self.filter_group:
base = base.filter(groups__in=[self.filter_group])

View File

@@ -18,10 +18,6 @@ class SSFProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
ssf_url = SerializerMethodField()
token_obj = TokenSerializer(source="token", required=False, read_only=True)
oidc_auth_providers_obj = ProviderSerializer(
read_only=True, source="oidc_auth_providers", many=True
)
def get_ssf_url(self, instance: SSFProvider) -> str | None:
request: Request = self._context.get("request")
if not request:
@@ -49,10 +45,8 @@ class SSFProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
"signing_key",
"token_obj",
"oidc_auth_providers",
"oidc_auth_providers_obj",
"ssf_url",
"event_retention",
"push_verify_certificates",
]
extra_kwargs = {}
@@ -60,7 +54,7 @@ class SSFProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
class SSFProviderViewSet(UsedByMixin, ModelViewSet):
"""SSFProvider Viewset"""
queryset = SSFProvider.objects.all().prefetch_related("oidc_auth_providers")
queryset = SSFProvider.objects.all()
serializer_class = SSFProviderSerializer
filterset_fields = {
"application": ["isnull"],

View File

@@ -1,7 +1,6 @@
"""SSF Stream API Views"""
from rest_framework import mixins
from rest_framework.viewsets import GenericViewSet
from rest_framework.viewsets import ReadOnlyModelViewSet
from authentik.core.api.utils import ModelSerializer
from authentik.enterprise.providers.ssf.api.providers import SSFProviderSerializer
@@ -17,7 +16,6 @@ class SSFStreamSerializer(ModelSerializer):
model = Stream
fields = [
"pk",
"status",
"provider",
"provider_obj",
"delivery_method",
@@ -29,12 +27,7 @@ class SSFStreamSerializer(ModelSerializer):
]
class SSFStreamViewSet(
mixins.RetrieveModelMixin,
mixins.DestroyModelMixin,
mixins.ListModelMixin,
GenericViewSet,
):
class SSFStreamViewSet(ReadOnlyModelViewSet):
"""SSFStream Viewset"""
queryset = Stream.objects.all()

View File

@@ -1,43 +0,0 @@
# Generated by Django 5.2.12 on 2026-04-04 16:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_providers_ssf", "0001_initial"),
]
operations = [
migrations.AddField(
model_name="ssfprovider",
name="push_verify_certificates",
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name="stream",
name="authorization_header",
field=models.TextField(default=None, null=True),
),
migrations.AddField(
model_name="stream",
name="status",
field=models.TextField(
choices=[("enabled", "Enabled"), ("paused", "Paused"), ("disabled", "Disabled")],
default="enabled",
),
),
migrations.AlterField(
model_name="stream",
name="delivery_method",
field=models.TextField(
choices=[
("https://schemas.openid.net/secevent/risc/delivery-method/push", "Risc Push"),
("https://schemas.openid.net/secevent/risc/delivery-method/poll", "Risc Poll"),
("urn:ietf:rfc:8935", "SSF RFC Push"),
("urn:ietf:rfc:8936", "SSF RFC Pull"),
]
),
),
]

Some files were not shown because too many files have changed in this diff Show More