mirror of
https://github.com/goauthentik/authentik
synced 2026-04-26 01:25:02 +02:00
Compare commits
293 Commits
web/flow/t
...
version-20
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9b64d05e35 | ||
|
|
99a93fa8a2 | ||
|
|
bd2a0e1d7d | ||
|
|
c4d455dd3a | ||
|
|
508dba6a04 | ||
|
|
aa921dcdca | ||
|
|
e5d873c129 | ||
|
|
f0a14d380f | ||
|
|
1da15a549e | ||
|
|
eaf1c45ea6 | ||
|
|
f0f42668c4 | ||
|
|
123fbd26bb | ||
|
|
b94d93b6c4 | ||
|
|
d0b25bf648 | ||
|
|
d4db4e50b4 | ||
|
|
c5e726d7eb | ||
|
|
203a7e0c61 | ||
|
|
2feaeff5db | ||
|
|
8fcc47e047 | ||
|
|
7a6408cc67 | ||
|
|
2da88028da | ||
|
|
fa91404895 | ||
|
|
460fce7279 | ||
|
|
995128955c | ||
|
|
85536abbcf | ||
|
|
5249546862 | ||
|
|
bf91348c05 | ||
|
|
63136f0180 | ||
|
|
faffabf938 | ||
|
|
0b180b15a2 | ||
|
|
07af6de74f | ||
|
|
ddfef91ea5 | ||
|
|
cefbf5e6ae | ||
|
|
e53d3d2486 | ||
|
|
32a3eed521 | ||
|
|
f05cc6e75a | ||
|
|
c68c36fdeb | ||
|
|
888f969fc7 | ||
|
|
82535e4671 | ||
|
|
ed2957e4e6 | ||
|
|
a5abe85148 | ||
|
|
8d2c31fa25 | ||
|
|
2637ce2474 | ||
|
|
319008dec8 | ||
|
|
8beb2fac18 | ||
|
|
ac7b28d0b0 | ||
|
|
073acf92c2 | ||
|
|
ad107c19af | ||
|
|
d285fcd8a7 | ||
|
|
84066cab48 | ||
|
|
e623d93ff5 | ||
|
|
1d0628dfbe | ||
|
|
996645105c | ||
|
|
63d7ca6ef0 | ||
|
|
5b24f4ad80 | ||
|
|
ed2e6cfb9c | ||
|
|
a1431ea48e | ||
|
|
b30e77b363 | ||
|
|
2f50cdd9fe | ||
|
|
494bdcaa09 | ||
|
|
e36ce1789e | ||
|
|
5a72ed83e0 | ||
|
|
f72d257e43 | ||
|
|
cbedb16cc4 | ||
|
|
6fc1b5ce90 | ||
|
|
57b0fa48c1 | ||
|
|
84a344ed87 | ||
|
|
f864cb56ab | ||
|
|
692735f9e1 | ||
|
|
e24fb300b1 | ||
|
|
f0e90d6873 | ||
|
|
0cf45835a0 | ||
|
|
69d35c1d26 | ||
|
|
ac803b210d | ||
|
|
c9728b4607 | ||
|
|
6e45584563 | ||
|
|
59a2e84b35 | ||
|
|
6025dbb9c9 | ||
|
|
d07bcd5025 | ||
|
|
e80655d285 | ||
|
|
e0d3d4d38c | ||
|
|
62112404ee | ||
|
|
1c9e12fcd9 | ||
|
|
42c6c257ec | ||
|
|
41bd9d7913 | ||
|
|
2c84935732 | ||
|
|
819c13a9bc | ||
|
|
0d8f366af8 | ||
|
|
093e60c753 | ||
|
|
af646f32d2 | ||
|
|
de4afc7322 | ||
|
|
bc1983106f | ||
|
|
8c2c1474f1 | ||
|
|
0dccbd4193 | ||
|
|
6a70894e01 | ||
|
|
2f5eb9b2e4 | ||
|
|
12aedb3a9e | ||
|
|
303dc93514 | ||
|
|
fbb217db57 | ||
|
|
4de253653f | ||
|
|
4154c06831 | ||
|
|
4750ed5e2a | ||
|
|
361017127d | ||
|
|
0ca5a54307 | ||
|
|
ef1aad5dbb | ||
|
|
29d880920e | ||
|
|
fc6f8374e6 | ||
|
|
a8668bbac4 | ||
|
|
d686932166 | ||
|
|
feceb220b1 | ||
|
|
937df6e07f | ||
|
|
48e6b968a6 | ||
|
|
cd89c45e75 | ||
|
|
e53995e2c1 | ||
|
|
33d5f11f0e | ||
|
|
565e16eca7 | ||
|
|
9a0164b722 | ||
|
|
8af491630b | ||
|
|
8e25e7a213 | ||
|
|
4d183657da | ||
|
|
be89b6052d | ||
|
|
ad5d2bb611 | ||
|
|
8d30fb3d25 | ||
|
|
cea3fbfa9b | ||
|
|
151d889ff4 | ||
|
|
58ca3ecbd5 | ||
|
|
1a6c7082a3 | ||
|
|
1dc60276f9 | ||
|
|
de045c6d7b | ||
|
|
850728e9bb | ||
|
|
84a605a4ba | ||
|
|
1780bb0cf0 | ||
|
|
cd75fe235d | ||
|
|
e6e62e9de1 | ||
|
|
ac7a4f8a22 | ||
|
|
0290ed3342 | ||
|
|
e367525794 | ||
|
|
93c319baee | ||
|
|
1d02ee7d74 | ||
|
|
93439b5742 | ||
|
|
6682a6664e | ||
|
|
0b5bac74e9 | ||
|
|
062823f1b2 | ||
|
|
a17fe58971 | ||
|
|
422ea893b1 | ||
|
|
15c9f93851 | ||
|
|
e2202d498b | ||
|
|
9ea9a86ad3 | ||
|
|
4bac1edd61 | ||
|
|
24726be3c9 | ||
|
|
411f06756f | ||
|
|
4bdcab48c3 | ||
|
|
00dbd377a7 | ||
|
|
a01c0575db | ||
|
|
6e51d044bb | ||
|
|
6d1b168dc4 | ||
|
|
43675c2b22 | ||
|
|
8645273eaf | ||
|
|
eb6f4712fe | ||
|
|
7b9505242e | ||
|
|
3dda20ebc7 | ||
|
|
dfd2bc5c3c | ||
|
|
06a270913c | ||
|
|
430507fc72 | ||
|
|
847af7f9ea | ||
|
|
8f1cb636e8 | ||
|
|
e61c876002 | ||
|
|
33c0d3df0a | ||
|
|
3a03e1ebfd | ||
|
|
1e41b77761 | ||
|
|
6c1662f99f | ||
|
|
bb5bc5c8da | ||
|
|
30670c9070 | ||
|
|
fdbf9ffedc | ||
|
|
2ec433d724 | ||
|
|
55297b9e6a | ||
|
|
f9dda6582c | ||
|
|
3394c17bfd | ||
|
|
a37d101b10 | ||
|
|
4774b4db87 | ||
|
|
fdb52c9394 | ||
|
|
9bcf9cd7d4 | ||
|
|
ae5c0cf209 | ||
|
|
55d2143fb7 | ||
|
|
6a6ede737b | ||
|
|
6e48258f11 | ||
|
|
6e0a1dba7b | ||
|
|
8426655ee3 | ||
|
|
4309d4df56 | ||
|
|
a334dbd342 | ||
|
|
d7577ad7b3 | ||
|
|
d1f3473c62 | ||
|
|
2f2ed996c6 | ||
|
|
ab8dc0f73e | ||
|
|
7f50035b54 | ||
|
|
5987b37455 | ||
|
|
a060d4d373 | ||
|
|
656a720d68 | ||
|
|
fd2c964479 | ||
|
|
92812a565e | ||
|
|
a016313d7b | ||
|
|
2f2488b326 | ||
|
|
233377e86c | ||
|
|
868dd307bc | ||
|
|
db96590b53 | ||
|
|
02fa9d1b70 | ||
|
|
256800fd55 | ||
|
|
c1a82241ec | ||
|
|
b16dd8ad0e | ||
|
|
54900857a5 | ||
|
|
9693eed19e | ||
|
|
8fb9daff71 | ||
|
|
4ce4a0e410 | ||
|
|
f1dcdd8d11 | ||
|
|
668fd8278f | ||
|
|
1fabd6c0d6 | ||
|
|
7a4cb0a387 | ||
|
|
2c351ea9f3 | ||
|
|
79b343ff5a | ||
|
|
f3cbd94f0b | ||
|
|
4b82ded894 | ||
|
|
89be2a6682 | ||
|
|
dc6ed688d3 | ||
|
|
84efee29a2 | ||
|
|
abdc171919 | ||
|
|
2664ea7d2d | ||
|
|
af831304c6 | ||
|
|
ab16661a61 | ||
|
|
9457982376 | ||
|
|
41462b580a | ||
|
|
300f88aa0a | ||
|
|
0dab65179d | ||
|
|
1793ddf772 | ||
|
|
062eeab4b6 | ||
|
|
a40db06ddc | ||
|
|
f9154487d2 | ||
|
|
1e97a22171 | ||
|
|
6d2f014b9f | ||
|
|
7e92840852 | ||
|
|
c99b92ff2c | ||
|
|
2ea3be7227 | ||
|
|
087935636c | ||
|
|
77bab7611f | ||
|
|
518eecc2a5 | ||
|
|
17293735db | ||
|
|
032da33369 | ||
|
|
a677137c9a | ||
|
|
da48a2dd12 | ||
|
|
ef74ca01a2 | ||
|
|
fd778b18ad | ||
|
|
d8f6a97875 | ||
|
|
8610ec2d52 | ||
|
|
524ab27df6 | ||
|
|
49a9cbf4cf | ||
|
|
56361c2fbf | ||
|
|
9721c4fc29 | ||
|
|
3f1a0f83ca | ||
|
|
4960b8eec4 | ||
|
|
611b3b72e6 | ||
|
|
b01833c143 | ||
|
|
95233dd9f8 | ||
|
|
a4559e568d | ||
|
|
259b353a87 | ||
|
|
dbdf2cb4d0 | ||
|
|
de97eac6e4 | ||
|
|
1e221ed52c | ||
|
|
b606bc37fb | ||
|
|
c04c7ab64d | ||
|
|
5ec14ee0ae | ||
|
|
995a710c10 | ||
|
|
efb709992c | ||
|
|
c6f3c715ba | ||
|
|
fe97c45d63 | ||
|
|
a7de5ed482 | ||
|
|
f18c3c23fe | ||
|
|
7e359a9a58 | ||
|
|
68c7037eea | ||
|
|
30d41ded81 | ||
|
|
3fd278e16d | ||
|
|
a90870437e | ||
|
|
8fe584b473 | ||
|
|
d6dc91fbe7 | ||
|
|
c8c0c79106 | ||
|
|
aea05b88c0 | ||
|
|
1d6e040974 | ||
|
|
097a1c94b1 | ||
|
|
fc5c8ecbc1 | ||
|
|
ab660c6f44 | ||
|
|
8caceee8fa | ||
|
|
154bc0c38c | ||
|
|
777c5b74e4 | ||
|
|
c52512892f | ||
|
|
1e354820fd |
23
.github/actions/cherry-pick/action.yml
vendored
23
.github/actions/cherry-pick/action.yml
vendored
@@ -115,20 +115,13 @@ runs:
|
||||
shell: bash
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ inputs.token }}
|
||||
PR_NUMBER: ${{ steps.should_run.outputs.pr_number }}
|
||||
REASON: ${{ steps.should_run.outputs.reason }}
|
||||
run: |
|
||||
set -e -o pipefail
|
||||
PR_NUMBER="${{ steps.should_run.outputs.pr_number }}"
|
||||
|
||||
# Get PR details
|
||||
PR_DATA=$(gh api repos/${{ github.repository }}/pulls/$PR_NUMBER)
|
||||
PR_TITLE=$(echo "$PR_DATA" | jq -r '.title')
|
||||
PR_AUTHOR=$(echo "$PR_DATA" | jq -r '.user.login')
|
||||
|
||||
echo "pr_title=$PR_TITLE" >> $GITHUB_OUTPUT
|
||||
echo "pr_author=$PR_AUTHOR" >> $GITHUB_OUTPUT
|
||||
|
||||
# Determine which labels to process
|
||||
if [ "${{ steps.should_run.outputs.reason }}" = "label_added_to_merged_pr" ]; then
|
||||
if [ "${REASON}" = "label_added_to_merged_pr" ]; then
|
||||
# Only process the specific label that was just added
|
||||
if [ "${{ github.event_name }}" = "issues" ]; then
|
||||
LABEL_NAME="${{ github.event.label.name }}"
|
||||
@@ -152,13 +145,13 @@ runs:
|
||||
shell: bash
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ inputs.token }}
|
||||
PR_NUMBER: '${{ steps.should_run.outputs.pr_number }}'
|
||||
COMMIT_SHA: '${{ steps.should_run.outputs.merge_commit_sha }}'
|
||||
PR_TITLE: ${{ github.event.pull_request.title }}
|
||||
PR_AUTHOR: ${{ github.event.pull_request.user.login }}
|
||||
LABELS: '${{ steps.pr_details.outputs.labels }}'
|
||||
run: |
|
||||
set -e -o pipefail
|
||||
PR_NUMBER='${{ steps.should_run.outputs.pr_number }}'
|
||||
COMMIT_SHA='${{ steps.should_run.outputs.merge_commit_sha }}'
|
||||
PR_TITLE='${{ steps.pr_details.outputs.pr_title }}'
|
||||
PR_AUTHOR='${{ steps.pr_details.outputs.pr_author }}'
|
||||
LABELS='${{ steps.pr_details.outputs.labels }}'
|
||||
|
||||
echo "Processing PR #$PR_NUMBER (reason: ${{ steps.should_run.outputs.reason }})"
|
||||
echo "Found backport labels: $LABELS"
|
||||
|
||||
@@ -89,6 +89,8 @@ if should_push:
|
||||
_cache_tag = "buildcache"
|
||||
if image_arch:
|
||||
_cache_tag += f"-{image_arch}"
|
||||
if is_release:
|
||||
_cache_tag += f"-{version_family}"
|
||||
cache_to = f"type=registry,ref={get_attest_image_names(image_tags)}:{_cache_tag},mode=max"
|
||||
|
||||
|
||||
|
||||
40
.github/actions/setup/action.yml
vendored
40
.github/actions/setup/action.yml
vendored
@@ -8,45 +8,61 @@ inputs:
|
||||
postgresql_version:
|
||||
description: "Optional postgresql image tag"
|
||||
default: "16"
|
||||
working-directory:
|
||||
description: |
|
||||
Optional working directory if this repo isn't in the root of the actions workspace.
|
||||
When set, needs to contain a trailing slash
|
||||
default: ""
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Install apt deps & cleanup
|
||||
- name: Cleanup apt
|
||||
if: ${{ contains(inputs.dependencies, 'system') || contains(inputs.dependencies, 'python') }}
|
||||
shell: bash
|
||||
run: sudo apt-get remove --purge man-db
|
||||
- name: Install apt deps
|
||||
if: ${{ contains(inputs.dependencies, 'system') || contains(inputs.dependencies, 'python') }}
|
||||
uses: gerlero/apt-install@f4fa5265092af9e750549565d28c99aec7189639
|
||||
with:
|
||||
packages: libpq-dev openssl libxmlsec1-dev pkg-config gettext krb5-multidev libkrb5-dev heimdal-multidev libclang-dev krb5-kdc krb5-user krb5-admin-server
|
||||
update: true
|
||||
upgrade: false
|
||||
install-recommends: false
|
||||
- name: Make space on disk
|
||||
if: ${{ contains(inputs.dependencies, 'system') || contains(inputs.dependencies, 'python') }}
|
||||
shell: bash
|
||||
run: |
|
||||
sudo apt-get remove --purge man-db
|
||||
sudo apt-get update
|
||||
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext krb5-multidev libkrb5-dev heimdal-multidev libclang-dev krb5-kdc krb5-user krb5-admin-server
|
||||
sudo rm -rf /usr/local/lib/android
|
||||
sudo mkdir -p /tmp/empty/
|
||||
sudo rsync -a --delete /tmp/empty/ /usr/local/lib/android/
|
||||
- name: Install uv
|
||||
if: ${{ contains(inputs.dependencies, 'python') }}
|
||||
uses: astral-sh/setup-uv@803947b9bd8e9f986429fa0c5a41c367cd732b41 # v5
|
||||
uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v5
|
||||
with:
|
||||
enable-cache: true
|
||||
- name: Setup python
|
||||
if: ${{ contains(inputs.dependencies, 'python') }}
|
||||
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v5
|
||||
with:
|
||||
python-version-file: "pyproject.toml"
|
||||
python-version-file: "${{ inputs.working-directory }}pyproject.toml"
|
||||
- name: Install Python deps
|
||||
if: ${{ contains(inputs.dependencies, 'python') }}
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
run: uv sync --all-extras --dev --frozen
|
||||
- name: Setup node
|
||||
if: ${{ contains(inputs.dependencies, 'node') }}
|
||||
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v4
|
||||
with:
|
||||
node-version-file: web/package.json
|
||||
node-version-file: ${{ inputs.working-directory }}web/package.json
|
||||
cache: "npm"
|
||||
cache-dependency-path: web/package-lock.json
|
||||
cache-dependency-path: ${{ inputs.working-directory }}web/package-lock.json
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
- name: Setup go
|
||||
if: ${{ contains(inputs.dependencies, 'go') }}
|
||||
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v5
|
||||
with:
|
||||
go-version-file: "go.mod"
|
||||
go-version-file: "${{ inputs.working-directory }}go.mod"
|
||||
- name: Setup docker cache
|
||||
if: ${{ contains(inputs.dependencies, 'runtime') }}
|
||||
uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7
|
||||
@@ -55,13 +71,15 @@ runs:
|
||||
- name: Setup dependencies
|
||||
if: ${{ contains(inputs.dependencies, 'runtime') }}
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
run: |
|
||||
export PSQL_TAG=${{ inputs.postgresql_version }}
|
||||
docker compose -f .github/actions/setup/compose.yml up -d
|
||||
cd web && npm i
|
||||
cd web && npm ci
|
||||
- name: Generate config
|
||||
if: ${{ contains(inputs.dependencies, 'python') }}
|
||||
shell: uv run python {0}
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
run: |
|
||||
from authentik.lib.generators import generate_id
|
||||
from yaml import safe_dump
|
||||
|
||||
2
.github/actions/setup/compose.yml
vendored
2
.github/actions/setup/compose.yml
vendored
@@ -2,7 +2,7 @@ services:
|
||||
postgresql:
|
||||
image: docker.io/library/postgres:${PSQL_TAG:-16}
|
||||
volumes:
|
||||
- db-data:/var/lib/postgresql/data
|
||||
- db-data:/var/lib/postgresql
|
||||
command: "-c log_statement=all"
|
||||
environment:
|
||||
POSTGRES_USER: authentik
|
||||
|
||||
2
.github/workflows/_reusable-docker-build.yml
vendored
2
.github/workflows/_reusable-docker-build.yml
vendored
@@ -90,7 +90,7 @@ jobs:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- uses: int128/docker-manifest-create-action@a39573caa37b6a8a03302d43b57c3f48635096e2 # v2
|
||||
- uses: int128/docker-manifest-create-action@1a059c021f1d5e9f2bd39de745d5dd3a0ef6df90 # v2
|
||||
id: build
|
||||
with:
|
||||
tags: ${{ matrix.tag }}
|
||||
|
||||
2
.github/workflows/api-ts-publish.yml
vendored
2
.github/workflows/api-ts-publish.yml
vendored
@@ -21,7 +21,7 @@ jobs:
|
||||
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
|
||||
with:
|
||||
app-id: ${{ secrets.GH_APP_ID }}
|
||||
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
|
||||
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
|
||||
with:
|
||||
token: ${{ steps.generate_token.outputs.token }}
|
||||
|
||||
5
.github/workflows/ci-main.yml
vendored
5
.github/workflows/ci-main.yml
vendored
@@ -95,7 +95,10 @@ jobs:
|
||||
with:
|
||||
postgresql_version: ${{ matrix.psql }}
|
||||
- name: run migrations to stable
|
||||
run: uv run python -m lifecycle.migrate
|
||||
run: |
|
||||
docker ps
|
||||
docker logs setup-postgresql-1
|
||||
uv run python -m lifecycle.migrate
|
||||
- name: checkout current code
|
||||
run: |
|
||||
set -x
|
||||
|
||||
4
.github/workflows/gen-image-compress.yml
vendored
4
.github/workflows/gen-image-compress.yml
vendored
@@ -32,13 +32,13 @@ jobs:
|
||||
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
|
||||
with:
|
||||
app-id: ${{ secrets.GH_APP_ID }}
|
||||
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
|
||||
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
|
||||
with:
|
||||
token: ${{ steps.generate_token.outputs.token }}
|
||||
- name: Compress images
|
||||
id: compress
|
||||
uses: calibreapp/image-actions@420075c115b26f8785e293c5bd5bef0911c506e5 # main
|
||||
uses: calibreapp/image-actions@d9c8ee5c3dc52ae4622c82ead88d658f4b16b65f # main
|
||||
with:
|
||||
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
|
||||
compressOnly: ${{ github.event_name != 'pull_request' }}
|
||||
|
||||
@@ -19,7 +19,7 @@ jobs:
|
||||
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
|
||||
with:
|
||||
app-id: ${{ secrets.GH_APP_ID }}
|
||||
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
|
||||
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
|
||||
with:
|
||||
token: ${{ steps.generate_token.outputs.token }}
|
||||
|
||||
2
.github/workflows/gh-cherry-pick.yml
vendored
2
.github/workflows/gh-cherry-pick.yml
vendored
@@ -14,7 +14,7 @@ jobs:
|
||||
if: ${{ env.GH_APP_ID != '' }}
|
||||
with:
|
||||
app-id: ${{ secrets.GH_APP_ID }}
|
||||
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
|
||||
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
|
||||
env:
|
||||
GH_APP_ID: ${{ secrets.GH_APP_ID }}
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
|
||||
|
||||
2
.github/workflows/gh-ghcr-retention.yml
vendored
2
.github/workflows/gh-ghcr-retention.yml
vendored
@@ -19,7 +19,7 @@ jobs:
|
||||
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
|
||||
with:
|
||||
app-id: ${{ secrets.GH_APP_ID }}
|
||||
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
|
||||
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
|
||||
- name: Delete 'dev' containers older than a week
|
||||
uses: snok/container-retention-policy@3b0972b2276b171b212f8c4efbca59ebba26eceb # v3.0.1
|
||||
with:
|
||||
|
||||
2
.github/workflows/packages-npm-publish.yml
vendored
2
.github/workflows/packages-npm-publish.yml
vendored
@@ -40,7 +40,7 @@ jobs:
|
||||
registry-url: "https://registry.npmjs.org"
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@e0021407031f5be11a464abee9a0776171c79891 # 24d32ffd492484c1d75e0c0b894501ddb9d30d62
|
||||
uses: tj-actions/changed-files@8cba46e29c11878d930bca7870bb54394d3e8b21 # 24d32ffd492484c1d75e0c0b894501ddb9d30d62
|
||||
with:
|
||||
files: |
|
||||
${{ matrix.package }}/package.json
|
||||
|
||||
4
.github/workflows/release-branch-off.yml
vendored
4
.github/workflows/release-branch-off.yml
vendored
@@ -32,7 +32,7 @@ jobs:
|
||||
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
|
||||
with:
|
||||
app-id: ${{ secrets.GH_APP_ID }}
|
||||
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
|
||||
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
|
||||
- name: Checkout main
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
|
||||
with:
|
||||
@@ -60,7 +60,7 @@ jobs:
|
||||
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
|
||||
with:
|
||||
app-id: ${{ secrets.GH_APP_ID }}
|
||||
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
|
||||
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
|
||||
- name: Checkout main
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
|
||||
with:
|
||||
|
||||
23
.github/workflows/release-publish.yml
vendored
23
.github/workflows/release-publish.yml
vendored
@@ -160,10 +160,17 @@ jobs:
|
||||
node-version-file: web/package.json
|
||||
cache: "npm"
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- name: Build web
|
||||
- name: Install web dependencies
|
||||
working-directory: web/
|
||||
run: |
|
||||
npm ci
|
||||
- name: Generate API Clients
|
||||
run: |
|
||||
make gen-client-ts
|
||||
make gen-client-go
|
||||
- name: Build web
|
||||
working-directory: web/
|
||||
run: |
|
||||
npm run build-proxy
|
||||
- name: Build outpost
|
||||
run: |
|
||||
@@ -192,7 +199,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
|
||||
- uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708 # v5
|
||||
- uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6.0.0
|
||||
with:
|
||||
role-to-assume: "arn:aws:iam::016170277896:role/github_goauthentik_authentik"
|
||||
aws-region: ${{ env.AWS_REGION }}
|
||||
@@ -210,12 +217,12 @@ jobs:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
|
||||
- name: Run test suite in final docker images
|
||||
run: |
|
||||
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
|
||||
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
|
||||
docker compose pull -q
|
||||
docker compose up --no-start
|
||||
docker compose start postgresql
|
||||
docker compose run -u root server test-all
|
||||
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> lifecycle/container/.env
|
||||
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> lifecycle/container/.env
|
||||
docker compose -f lifecycle/container/compose.yml pull -q
|
||||
docker compose -f lifecycle/container/compose.yml up --no-start
|
||||
docker compose -f lifecycle/container/compose.yml start postgresql
|
||||
docker compose -f lifecycle/container/compose.yml run -u root server test-all
|
||||
sentry-release:
|
||||
needs:
|
||||
- build-server
|
||||
|
||||
15
.github/workflows/release-tag.yml
vendored
15
.github/workflows/release-tag.yml
vendored
@@ -70,7 +70,7 @@ jobs:
|
||||
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
|
||||
with:
|
||||
app-id: ${{ secrets.GH_APP_ID }}
|
||||
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
|
||||
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
|
||||
- id: get-user-id
|
||||
name: Get GitHub app user ID
|
||||
run: echo "user-id=$(gh api "/users/${{ steps.app-token.outputs.app-slug }}[bot]" --jq .id)" >> "$GITHUB_OUTPUT"
|
||||
@@ -91,6 +91,7 @@ jobs:
|
||||
# ID from https://api.github.com/users/authentik-automation[bot]
|
||||
git config --global user.name '${{ steps.app-token.outputs.app-slug }}[bot]'
|
||||
git config --global user.email '${{ steps.get-user-id.outputs.user-id }}+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com'
|
||||
git pull
|
||||
git commit -a -m "release: ${{ inputs.version }}" --allow-empty
|
||||
git tag "version/${{ inputs.version }}" HEAD -m "version/${{ inputs.version }}"
|
||||
git push --follow-tags
|
||||
@@ -117,7 +118,7 @@ jobs:
|
||||
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
|
||||
with:
|
||||
app-id: ${{ secrets.GH_APP_ID }}
|
||||
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
|
||||
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
|
||||
repositories: helm
|
||||
- id: get-user-id
|
||||
name: Get GitHub app user ID
|
||||
@@ -159,7 +160,7 @@ jobs:
|
||||
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
|
||||
with:
|
||||
app-id: ${{ secrets.GH_APP_ID }}
|
||||
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
|
||||
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
|
||||
repositories: version
|
||||
- id: get-user-id
|
||||
name: Get GitHub app user ID
|
||||
@@ -174,21 +175,25 @@ jobs:
|
||||
if: "${{ inputs.release_reason == 'feature' }}"
|
||||
run: |
|
||||
changelog_url="https://docs.goauthentik.io/docs/releases/${{ needs.check-inputs.outputs.major_version }}"
|
||||
reason="${{ inputs.release_reason }}"
|
||||
jq \
|
||||
--arg version "${{ inputs.version }}" \
|
||||
--arg changelog "See ${changelog_url}" \
|
||||
--arg changelog_url "${changelog_url}" \
|
||||
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url' version.json > version.new.json
|
||||
--arg reason "${reason}" \
|
||||
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url | .stable.reason = $reason' version.json > version.new.json
|
||||
mv version.new.json version.json
|
||||
- name: Bump version
|
||||
if: "${{ inputs.release_reason != 'feature' }}"
|
||||
run: |
|
||||
changelog_url="https://docs.goauthentik.io/docs/releases/${{ needs.check-inputs.outputs.major_version }}#fixed-in-$(echo -n ${{ inputs.version}} | sed 's/\.//g')"
|
||||
reason="${{ inputs.release_reason }}"
|
||||
jq \
|
||||
--arg version "${{ inputs.version }}" \
|
||||
--arg changelog "See ${changelog_url}" \
|
||||
--arg changelog_url "${changelog_url}" \
|
||||
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url' version.json > version.new.json
|
||||
--arg reason "${reason}" \
|
||||
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url | .stable.reason = $reason' version.json > version.new.json
|
||||
mv version.new.json version.json
|
||||
- name: Create pull request
|
||||
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
|
||||
|
||||
2
.github/workflows/repo-stale.yml
vendored
2
.github/workflows/repo-stale.yml
vendored
@@ -18,7 +18,7 @@ jobs:
|
||||
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
|
||||
with:
|
||||
app-id: ${{ secrets.GH_APP_ID }}
|
||||
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
|
||||
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
|
||||
- uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10
|
||||
with:
|
||||
repo-token: ${{ steps.generate_token.outputs.token }}
|
||||
|
||||
@@ -24,7 +24,7 @@ jobs:
|
||||
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
|
||||
with:
|
||||
app-id: ${{ secrets.GH_APP_ID }}
|
||||
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
|
||||
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
with:
|
||||
|
||||
8
Makefile
8
Makefile
@@ -148,11 +148,11 @@ bump: ## Bump authentik version. Usage: make bump version=20xx.xx.xx
|
||||
ifndef version
|
||||
$(error Usage: make bump version=20xx.xx.xx )
|
||||
endif
|
||||
$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' pyproject.toml
|
||||
$(SED_INPLACE) 's/^VERSION = ".*"/VERSION = "$(version)"/' authentik/__init__.py
|
||||
$(eval current_version := $(shell cat ${PWD}/internal/constants/VERSION))
|
||||
$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' ${PWD}/pyproject.toml
|
||||
$(SED_INPLACE) 's/^VERSION = ".*"/VERSION = "$(version)"/' ${PWD}/authentik/__init__.py
|
||||
$(MAKE) gen-build gen-compose aws-cfn
|
||||
npm version --no-git-tag-version --allow-same-version $(version)
|
||||
cd ${PWD}/web && npm version --no-git-tag-version --allow-same-version $(version)
|
||||
$(SED_INPLACE) "s/\"${current_version}\"/\"$(version)\"/" ${PWD}/package.json ${PWD}/package-lock.json ${PWD}/web/package.json ${PWD}/web/package-lock.json
|
||||
echo -n $(version) > ${PWD}/internal/constants/VERSION
|
||||
|
||||
#########################
|
||||
|
||||
42
SECURITY.md
42
SECURITY.md
@@ -18,10 +18,10 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
|
||||
|
||||
(.x being the latest patch release for each version)
|
||||
|
||||
| Version | Supported |
|
||||
| ---------- | ---------- |
|
||||
| 2025.10.x | ✅ |
|
||||
| 2025.12.x | ✅ |
|
||||
| Version | Supported |
|
||||
| --------- | --------- |
|
||||
| 2025.12.x | ✅ |
|
||||
| 2026.2.x | ✅ |
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
@@ -60,6 +60,40 @@ authentik reserves the right to reclassify CVSS as necessary. To determine sever
|
||||
| 7.0 – 8.9 | High |
|
||||
| 9.0 – 10.0 | Critical |
|
||||
|
||||
## Intended functionality
|
||||
|
||||
The following capabilities are part of intentional system design and should not be reported as security vulnerabilities:
|
||||
|
||||
- Expressions (property mappings/policies/prompts) can execute arbitrary Python code without safeguards.
|
||||
|
||||
This is expected behavior. Any user with permission to create or modify objects containing expression fields can write code that is executed within authentik. If a vulnerability allows a user without the required permissions to write or modify code and have it executed, that would be a valid security report.
|
||||
|
||||
However, the fact that expressions are executed as part of normal operations is not considered a privilege escalation or security vulnerability.
|
||||
|
||||
- Blueprints can access all files on the filesystem.
|
||||
|
||||
This access is intentional to allow legitimate configuration and deployment tasks. It does not represent a security problem by itself.
|
||||
|
||||
- Importing blueprints allows arbitrary modification of application objects.
|
||||
|
||||
This is intended functionality. This behavior reflects the privileged design of blueprint imports. It is "exploitable" when importing blueprints from untrusted sources without reviewing the blueprint beforehand. However, any method to create, modify or execute blueprints without the required permissions would be a valid security report.
|
||||
|
||||
- Flow imports may contain objects other than flows (such as policies, users, groups, etc.)
|
||||
|
||||
This is expected behavior as flow imports are blueprint files.
|
||||
|
||||
- Prompt HTML is not escaped.
|
||||
|
||||
Prompts intentionally allow raw HTML, including script tags, so they can be used to create interactive or customized user interface elements. Because of this, scripts within prompts may affect or interact with the surrounding page as designed.
|
||||
|
||||
- Open redirects that do not include tokens or other sensitive information are not considered a security vulnerability.
|
||||
|
||||
Redirects that only change navigation flow and do not expose session tokens, API keys, or other confidential data are considered acceptable and do not require reporting.
|
||||
|
||||
- Outgoing network requests are not filtered.
|
||||
|
||||
The destinations of outgoing network requests (HTTP, TCP, etc.) made by authentik to configurable endpoints through objects such as OAuth Sources, SSO Providers, and others are not validated. Depending on your threat model, these requests should be restricted at the network level using appropriate firewall or network policies.
|
||||
|
||||
## Disclosure process
|
||||
|
||||
1. Report from Github or Issue is reported via Email as listed above.
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from functools import lru_cache
|
||||
from os import environ
|
||||
|
||||
VERSION = "2026.2.0-rc1"
|
||||
VERSION = "2026.2.3-rc1"
|
||||
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
"""Schema generation tests"""
|
||||
|
||||
from pathlib import Path
|
||||
from tempfile import gettempdir
|
||||
from uuid import uuid4
|
||||
|
||||
from django.core.management import call_command
|
||||
from django.urls import reverse
|
||||
@@ -29,15 +31,14 @@ class TestSchemaGeneration(APITestCase):
|
||||
|
||||
def test_build_schema(self):
|
||||
"""Test schema build command"""
|
||||
blueprint_file = Path("blueprints/schema.json")
|
||||
api_file = Path("schema.yml")
|
||||
blueprint_file.unlink()
|
||||
api_file.unlink()
|
||||
tmp = Path(gettempdir())
|
||||
blueprint_file = tmp / f"{str(uuid4())}.json"
|
||||
api_file = tmp / f"{str(uuid4())}.yml"
|
||||
with (
|
||||
CONFIG.patch("debug", True),
|
||||
CONFIG.patch("tenants.enabled", True),
|
||||
CONFIG.patch("outposts.disable_embedded_outpost", True),
|
||||
):
|
||||
call_command("build_schema")
|
||||
call_command("build_schema", blueprint_file=blueprint_file, api_file=api_file)
|
||||
self.assertTrue(blueprint_file.exists())
|
||||
self.assertTrue(api_file.exists())
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
import traceback
|
||||
from collections.abc import Callable
|
||||
from importlib import import_module
|
||||
from inspect import ismethod
|
||||
|
||||
from django.apps import AppConfig
|
||||
from django.conf import settings
|
||||
@@ -72,12 +71,19 @@ class ManagedAppConfig(AppConfig):
|
||||
|
||||
def _reconcile(self, prefix: str) -> None:
|
||||
for meth_name in dir(self):
|
||||
meth = getattr(self, meth_name)
|
||||
if not ismethod(meth):
|
||||
# Check the attribute on the class to avoid evaluating @property descriptors.
|
||||
# Using getattr(self, ...) on a @property would evaluate it, which can trigger
|
||||
# expensive side effects (e.g. tenant_schedule_specs iterating all providers
|
||||
# and running PolicyEngine queries for every user).
|
||||
class_attr = getattr(type(self), meth_name, None)
|
||||
if class_attr is None or isinstance(class_attr, property):
|
||||
continue
|
||||
category = getattr(meth, "_authentik_managed_reconcile", None)
|
||||
if not callable(class_attr):
|
||||
continue
|
||||
category = getattr(class_attr, "_authentik_managed_reconcile", None)
|
||||
if category != prefix:
|
||||
continue
|
||||
meth = getattr(self, meth_name)
|
||||
name = meth_name.replace(prefix, "")
|
||||
try:
|
||||
self.logger.debug("Starting reconciler", name=name)
|
||||
|
||||
@@ -43,8 +43,6 @@ def get_attrs(obj: SerializerModel) -> dict[str, Any]:
|
||||
continue
|
||||
if _field.read_only:
|
||||
data.pop(field_name, None)
|
||||
if _field.get_initial() == data.get(field_name, None):
|
||||
data.pop(field_name, None)
|
||||
if field_name.endswith("_set"):
|
||||
data.pop(field_name, None)
|
||||
return data
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from typing import Any
|
||||
|
||||
from django.db.models import Case, F, IntegerField, Q, Value, When
|
||||
from django.db.models.functions import Length
|
||||
from django.db.models.functions import Concat, Length
|
||||
from django.http.request import HttpRequest
|
||||
from django.utils.html import _json_script_escapes
|
||||
from django.utils.safestring import mark_safe
|
||||
@@ -26,7 +26,8 @@ def get_brand_for_request(request: HttpRequest) -> Brand:
|
||||
domain_length=Length("domain"),
|
||||
match_priority=Case(
|
||||
When(
|
||||
condition=Q(host_domain__iendswith=F("domain")),
|
||||
condition=Q(host_domain__iexact=F("domain"))
|
||||
| Q(host_domain__iendswith=Concat(Value("."), F("domain"))),
|
||||
then=F("domain_length"),
|
||||
),
|
||||
default=Value(-1),
|
||||
|
||||
@@ -28,6 +28,8 @@ SAML_ATTRIBUTES_GROUP = "http://schemas.xmlsoap.org/claims/Group"
|
||||
SAML_BINDING_POST = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
|
||||
SAML_BINDING_REDIRECT = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
|
||||
|
||||
SAML_STATUS_SUCCESS = "urn:oasis:names:tc:SAML:2.0:status:Success"
|
||||
|
||||
DSA_SHA1 = "http://www.w3.org/2000/09/xmldsig#dsa-sha1"
|
||||
RSA_SHA1 = "http://www.w3.org/2000/09/xmldsig#rsa-sha1"
|
||||
# https://datatracker.ietf.org/doc/html/rfc4051#section-2.3.2
|
||||
|
||||
@@ -47,7 +47,12 @@ class ApplicationSerializer(ModelSerializer):
|
||||
"""Application Serializer"""
|
||||
|
||||
launch_url = SerializerMethodField()
|
||||
provider_obj = ProviderSerializer(source="get_provider", required=False, read_only=True)
|
||||
provider_obj = ProviderSerializer(
|
||||
source="get_provider",
|
||||
required=False,
|
||||
read_only=True,
|
||||
allow_null=True,
|
||||
)
|
||||
backchannel_providers_obj = ProviderSerializer(
|
||||
source="backchannel_providers", required=False, read_only=True, many=True
|
||||
)
|
||||
|
||||
@@ -72,6 +72,7 @@ from authentik.core.middleware import (
|
||||
from authentik.core.models import (
|
||||
USER_ATTRIBUTE_TOKEN_EXPIRING,
|
||||
USER_PATH_SERVICE_ACCOUNT,
|
||||
USERNAME_MAX_LENGTH,
|
||||
Group,
|
||||
Session,
|
||||
Token,
|
||||
@@ -144,7 +145,7 @@ class UserSerializer(ModelSerializer):
|
||||
roles_obj = SerializerMethodField(allow_null=True)
|
||||
uid = CharField(read_only=True)
|
||||
username = CharField(
|
||||
max_length=150,
|
||||
max_length=USERNAME_MAX_LENGTH,
|
||||
validators=[UniqueValidator(queryset=User.objects.all().order_by("username"))],
|
||||
)
|
||||
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
"""authentik core models"""
|
||||
|
||||
import re
|
||||
import traceback
|
||||
from datetime import datetime, timedelta
|
||||
from enum import StrEnum
|
||||
from hashlib import sha256
|
||||
@@ -15,7 +17,6 @@ from django.contrib.sessions.base_session import AbstractBaseSession
|
||||
from django.core.validators import validate_slug
|
||||
from django.db import models
|
||||
from django.db.models import Q, QuerySet, options
|
||||
from django.db.models.constants import LOOKUP_SEP
|
||||
from django.http import HttpRequest
|
||||
from django.utils.functional import cached_property
|
||||
from django.utils.timezone import now
|
||||
@@ -43,6 +44,7 @@ from authentik.lib.models import (
|
||||
DomainlessFormattedURLValidator,
|
||||
SerializerModel,
|
||||
)
|
||||
from authentik.lib.utils.inheritance import get_deepest_child
|
||||
from authentik.lib.utils.time import timedelta_from_string
|
||||
from authentik.policies.models import PolicyBindingModel
|
||||
from authentik.rbac.models import Role
|
||||
@@ -50,6 +52,7 @@ from authentik.tenants.models import DEFAULT_TOKEN_DURATION, DEFAULT_TOKEN_LENGT
|
||||
from authentik.tenants.utils import get_current_tenant, get_unique_identifier
|
||||
|
||||
LOGGER = get_logger()
|
||||
USERNAME_MAX_LENGTH = 150
|
||||
USER_PATH_SYSTEM_PREFIX = "goauthentik.io"
|
||||
_USER_ATTR_PREFIX = f"{USER_PATH_SYSTEM_PREFIX}/user"
|
||||
USER_ATTRIBUTE_DEBUG = f"{_USER_ATTR_PREFIX}/debug"
|
||||
@@ -527,23 +530,35 @@ class User(SerializerModel, AttributesMixin, AbstractUser):
|
||||
"default: in 30 days). See authentik logs for every will invocation of this "
|
||||
"deprecation."
|
||||
)
|
||||
stacktrace = traceback.format_stack()
|
||||
# The last line is this function, the next-to-last line is its caller
|
||||
cause = stacktrace[-2] if len(stacktrace) > 1 else "Unknown, see stacktrace in logs"
|
||||
if search := re.search(r'"(.*?)"', cause):
|
||||
cause = f"Property mapping or Expression policy named {search.group(1)}"
|
||||
|
||||
LOGGER.warning(
|
||||
"deprecation used",
|
||||
message=message_logger,
|
||||
deprecation=deprecation,
|
||||
replacement=replacement,
|
||||
cause=cause,
|
||||
stacktrace=stacktrace,
|
||||
)
|
||||
if not Event.filter_not_expired(
|
||||
action=EventAction.CONFIGURATION_WARNING, context__deprecation=deprecation
|
||||
action=EventAction.CONFIGURATION_WARNING,
|
||||
context__deprecation=deprecation,
|
||||
context__cause=cause,
|
||||
).exists():
|
||||
event = Event.new(
|
||||
EventAction.CONFIGURATION_WARNING,
|
||||
deprecation=deprecation,
|
||||
replacement=replacement,
|
||||
message=message_event,
|
||||
cause=cause,
|
||||
)
|
||||
event.expires = datetime.now() + timedelta(days=30)
|
||||
event.save()
|
||||
|
||||
return self.groups
|
||||
|
||||
def set_password(self, raw_password, signal=True, sender=None, request=None):
|
||||
@@ -788,25 +803,7 @@ class Application(SerializerModel, PolicyBindingModel):
|
||||
"""Get casted provider instance. Needs Application queryset with_provider"""
|
||||
if not self.provider:
|
||||
return None
|
||||
|
||||
candidates = []
|
||||
base_class = Provider
|
||||
for subclass in base_class.objects.get_queryset()._get_subclasses_recurse(base_class):
|
||||
parent = self.provider
|
||||
for level in subclass.split(LOOKUP_SEP):
|
||||
try:
|
||||
parent = getattr(parent, level)
|
||||
except AttributeError:
|
||||
break
|
||||
if parent in candidates:
|
||||
continue
|
||||
idx = subclass.count(LOOKUP_SEP)
|
||||
if type(parent) is not base_class:
|
||||
idx += 1
|
||||
candidates.insert(idx, parent)
|
||||
if not candidates:
|
||||
return None
|
||||
return candidates[-1]
|
||||
return get_deepest_child(self.provider)
|
||||
|
||||
def backchannel_provider_for[T: Provider](self, provider_type: type[T], **kwargs) -> T | None:
|
||||
"""Get Backchannel provider for a specific type"""
|
||||
@@ -1118,7 +1115,11 @@ class ExpiringModel(models.Model):
|
||||
default the object is deleted. This is less efficient compared
|
||||
to bulk deleting objects, but classes like Token() need to change
|
||||
values instead of being deleted."""
|
||||
return self.delete(*args, **kwargs)
|
||||
try:
|
||||
return self.delete(*args, **kwargs)
|
||||
except self.DoesNotExist:
|
||||
# Object has already been deleted, so this should be fine
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def filter_not_expired(cls, **kwargs) -> QuerySet[Self]:
|
||||
|
||||
@@ -24,7 +24,8 @@ from authentik.root.ws.consumer import build_device_group
|
||||
|
||||
# Arguments: user: User, password: str
|
||||
password_changed = Signal()
|
||||
# Arguments: credentials: dict[str, any], request: HttpRequest, stage: Stage
|
||||
# Arguments: credentials: dict[str, any], request: HttpRequest,
|
||||
# stage: Stage, context: dict[str, any]
|
||||
login_failed = Signal()
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
@@ -44,19 +44,24 @@
|
||||
{% endblock %}
|
||||
</div>
|
||||
</main>
|
||||
<footer aria-label="Site footer" class="pf-c-login__footer pf-m-dark">
|
||||
<ul class="pf-c-list pf-m-inline">
|
||||
{% for link in footer_links %}
|
||||
<li>
|
||||
<a href="{{ link.href }}">{{ link.name }}</a>
|
||||
</li>
|
||||
{% endfor %}
|
||||
<li>
|
||||
<span>
|
||||
{% trans 'Powered by authentik' %}
|
||||
</span>
|
||||
</li>
|
||||
</ul>
|
||||
<footer
|
||||
name="site-footer"
|
||||
aria-label="{% trans 'Site footer' %}"
|
||||
class="pf-c-login__footer pf-m-dark">
|
||||
<div name="flow-links" aria-label="{% trans 'Flow links' %}">
|
||||
<ul class="pf-c-list pf-m-inline" part="list">
|
||||
{% for link in footer_links %}
|
||||
<li part="list-item">
|
||||
<a part="list-item-link" href="{{ link.href }}">{{ link.name }}</a>
|
||||
</li>
|
||||
{% endfor %}
|
||||
<li part="list-item">
|
||||
<span>
|
||||
{% trans 'Powered by authentik' %}
|
||||
</span>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</footer>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -78,7 +78,7 @@ def generate_key_id_legacy(key_data: str) -> str:
|
||||
"""Generate Key ID using MD5 (legacy format for backwards compatibility)."""
|
||||
if not key_data:
|
||||
return ""
|
||||
return md5(key_data.encode("utf-8")).hexdigest() # nosec
|
||||
return md5(key_data.encode("utf-8"), usedforsecurity=False).hexdigest() # nosec
|
||||
|
||||
|
||||
class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel):
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
from authentik.endpoints.api.connectors import ConnectorSerializer
|
||||
from authentik.endpoints.models import EndpointStage
|
||||
from authentik.endpoints.controller import Capabilities
|
||||
from authentik.endpoints.models import Connector, EndpointStage
|
||||
from authentik.flows.api.stages import StageSerializer
|
||||
|
||||
|
||||
@@ -11,6 +14,13 @@ class EndpointStageSerializer(StageSerializer):
|
||||
|
||||
connector_obj = ConnectorSerializer(source="connector", read_only=True)
|
||||
|
||||
def validate_connector(self, connector: Connector) -> Connector:
|
||||
conn: Connector = Connector.objects.get_subclass(pk=connector.pk)
|
||||
controller = conn.controller(conn)
|
||||
if Capabilities.STAGE_ENDPOINTS not in controller.capabilities():
|
||||
raise ValidationError(_("Selected connector is not compatible with this stage."))
|
||||
return connector
|
||||
|
||||
class Meta:
|
||||
model = EndpointStage
|
||||
fields = StageSerializer.Meta.fields + [
|
||||
|
||||
@@ -18,7 +18,10 @@ from authentik.rbac.decorators import permission_required
|
||||
class EnrollmentTokenSerializer(ModelSerializer):
|
||||
|
||||
device_group_obj = DeviceAccessGroupSerializer(
|
||||
source="device_group", read_only=True, required=False
|
||||
source="device_group",
|
||||
read_only=True,
|
||||
required=False,
|
||||
allow_null=True,
|
||||
)
|
||||
|
||||
def __init__(self, *args, **kwargs) -> None:
|
||||
|
||||
@@ -37,6 +37,8 @@ class AgentEnrollmentAuth(BaseAuthentication):
|
||||
token = EnrollmentToken.filter_not_expired(key=key).first()
|
||||
if not token:
|
||||
raise PermissionDenied()
|
||||
if not token.connector.enabled:
|
||||
raise PermissionDenied()
|
||||
CTX_AUTH_VIA.set("endpoint_token_enrollment")
|
||||
return (DeviceUser(), token)
|
||||
|
||||
@@ -51,6 +53,8 @@ class AgentAuth(BaseAuthentication):
|
||||
device_token = DeviceToken.filter_not_expired(key=key).first()
|
||||
if not device_token:
|
||||
raise PermissionDenied()
|
||||
if not device_token.device.connector.enabled:
|
||||
raise PermissionDenied()
|
||||
if device_token.device.device.is_expired:
|
||||
raise PermissionDenied()
|
||||
CTX_AUTH_VIA.set("endpoint_token")
|
||||
|
||||
@@ -8,7 +8,7 @@ from rest_framework.fields import CharField
|
||||
|
||||
from authentik.core.api.utils import PassiveSerializer
|
||||
from authentik.endpoints.connectors.agent.models import AgentConnector, EnrollmentToken
|
||||
from authentik.endpoints.controller import BaseController
|
||||
from authentik.endpoints.controller import BaseController, Capabilities
|
||||
from authentik.endpoints.facts import OSFamily
|
||||
|
||||
|
||||
@@ -48,8 +48,8 @@ class AgentConnectorController(BaseController[AgentConnector]):
|
||||
def vendor_identifier() -> str:
|
||||
return "goauthentik.io/platform"
|
||||
|
||||
def supported_enrollment_methods(self):
|
||||
return []
|
||||
def capabilities(self) -> list[Capabilities]:
|
||||
return [Capabilities.STAGE_ENDPOINTS]
|
||||
|
||||
def generate_mdm_config(
|
||||
self, target_platform: OSFamily, request: HttpRequest, token: EnrollmentToken
|
||||
|
||||
@@ -58,6 +58,16 @@ class TestAgentAPI(APITestCase):
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
def test_enroll_disabled(self):
|
||||
self.connector.enabled = False
|
||||
self.connector.save()
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:agentconnector-enroll"),
|
||||
data={"device_serial": generate_id(), "device_name": "bar"},
|
||||
HTTP_AUTHORIZATION=f"Bearer {self.token.key}",
|
||||
)
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
def test_enroll_token_delete(self):
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:agentconnector-enroll"),
|
||||
@@ -104,6 +114,16 @@ class TestAgentAPI(APITestCase):
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
@reconcile_app("authentik_crypto")
|
||||
def test_config_disabled(self):
|
||||
self.connector.enabled = False
|
||||
self.connector.save()
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:agentconnector-agent-config"),
|
||||
HTTP_AUTHORIZATION=f"Bearer+agent {self.device_token.key}",
|
||||
)
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
def test_check_in(self):
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:agentconnector-check-in"),
|
||||
@@ -112,6 +132,16 @@ class TestAgentAPI(APITestCase):
|
||||
)
|
||||
self.assertEqual(response.status_code, 204)
|
||||
|
||||
def test_check_in_disabled(self):
|
||||
self.connector.enabled = False
|
||||
self.connector.save()
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:agentconnector-check-in"),
|
||||
data=CHECK_IN_DATA_VALID,
|
||||
HTTP_AUTHORIZATION=f"Bearer+agent {self.device_token.key}",
|
||||
)
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
def test_check_in_token_expired(self):
|
||||
self.device_token.expiring = True
|
||||
self.device_token.expires = now() - timedelta(hours=1)
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from hashlib import sha256
|
||||
from json import loads
|
||||
from unittest.mock import PropertyMock, patch
|
||||
|
||||
from django.urls import reverse
|
||||
from jwt import encode
|
||||
@@ -232,3 +233,43 @@ class TestEndpointStage(FlowTestCase):
|
||||
plan = plan()
|
||||
self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
|
||||
self.assertEqual(plan.context[PLAN_CONTEXT_DEVICE], self.device)
|
||||
|
||||
def test_endpoint_stage_connector_no_stage_optional(self):
|
||||
flow = create_test_flow()
|
||||
stage = EndpointStage.objects.create(connector=self.connector, mode=StageMode.OPTIONAL)
|
||||
FlowStageBinding.objects.create(stage=stage, target=flow, order=0)
|
||||
|
||||
with patch(
|
||||
"authentik.endpoints.connectors.agent.models.AgentConnector.stage",
|
||||
PropertyMock(return_value=None),
|
||||
):
|
||||
with self.assertFlowFinishes() as plan:
|
||||
res = self.client.get(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
|
||||
)
|
||||
self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
|
||||
plan = plan()
|
||||
self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
|
||||
self.assertNotIn(PLAN_CONTEXT_DEVICE, plan.context)
|
||||
|
||||
def test_endpoint_stage_connector_no_stage_required(self):
|
||||
flow = create_test_flow()
|
||||
stage = EndpointStage.objects.create(connector=self.connector, mode=StageMode.REQUIRED)
|
||||
FlowStageBinding.objects.create(stage=stage, target=flow, order=0)
|
||||
|
||||
with patch(
|
||||
"authentik.endpoints.connectors.agent.models.AgentConnector.stage",
|
||||
PropertyMock(return_value=None),
|
||||
):
|
||||
with self.assertFlowFinishes() as plan:
|
||||
res = self.client.get(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
|
||||
)
|
||||
self.assertStageResponse(
|
||||
res,
|
||||
component="ak-stage-access-denied",
|
||||
error_message="Invalid stage configuration",
|
||||
)
|
||||
plan = plan()
|
||||
self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
|
||||
self.assertNotIn(PLAN_CONTEXT_DEVICE, plan.context)
|
||||
|
||||
@@ -8,13 +8,15 @@ from authentik.lib.sentry import SentryIgnoredException
|
||||
MERGED_VENDOR = "goauthentik.io/@merged"
|
||||
|
||||
|
||||
class EnrollmentMethods(models.TextChoices):
|
||||
class Capabilities(models.TextChoices):
|
||||
# Automatically enrolled through user action
|
||||
AUTOMATIC_USER = "automatic_user"
|
||||
ENROLL_AUTOMATIC_USER = "enroll_automatic_user"
|
||||
# Automatically enrolled through connector integration
|
||||
AUTOMATIC_API = "automatic_api"
|
||||
ENROLL_AUTOMATIC_API = "enroll_automatic_api"
|
||||
# Manually enrolled with user interaction (user scanning a QR code for example)
|
||||
MANUAL_USER = "manual_user"
|
||||
ENROLL_MANUAL_USER = "enroll_manual_user"
|
||||
# Supported for use with Endpoints stage
|
||||
STAGE_ENDPOINTS = "stage_endpoints"
|
||||
|
||||
|
||||
class ConnectorSyncException(SentryIgnoredException):
|
||||
@@ -34,7 +36,7 @@ class BaseController[T: "Connector"]:
|
||||
def vendor_identifier() -> str:
|
||||
raise NotImplementedError
|
||||
|
||||
def supported_enrollment_methods(self) -> list[EnrollmentMethods]:
|
||||
def capabilities(self) -> list[Capabilities]:
|
||||
return []
|
||||
|
||||
def stage_view_enrollment(self) -> StageView | None:
|
||||
@@ -42,3 +44,6 @@ class BaseController[T: "Connector"]:
|
||||
|
||||
def stage_view_authentication(self) -> StageView | None:
|
||||
return None
|
||||
|
||||
def sync_endpoints(self):
|
||||
raise NotImplementedError
|
||||
|
||||
@@ -162,8 +162,11 @@ class Connector(ScheduledModel, SerializerModel):
|
||||
|
||||
@property
|
||||
def schedule_specs(self) -> list[ScheduleSpec]:
|
||||
from authentik.endpoints.controller import Capabilities
|
||||
from authentik.endpoints.tasks import endpoints_sync
|
||||
|
||||
if Capabilities.ENROLL_AUTOMATIC_API not in self.controller(self).capabilities():
|
||||
return []
|
||||
return [
|
||||
ScheduleSpec(
|
||||
actor=endpoints_sync,
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from authentik.endpoints.models import EndpointStage
|
||||
from authentik.endpoints.models import Connector, EndpointStage, StageMode
|
||||
from authentik.flows.stage import StageView
|
||||
|
||||
PLAN_CONTEXT_ENDPOINT_CONNECTOR = "endpoint_connector"
|
||||
@@ -6,15 +6,27 @@ PLAN_CONTEXT_ENDPOINT_CONNECTOR = "endpoint_connector"
|
||||
|
||||
class EndpointStageView(StageView):
|
||||
|
||||
def _get_inner(self):
|
||||
def _get_inner(self) -> StageView | None:
|
||||
stage: EndpointStage = self.executor.current_stage
|
||||
inner_stage: type[StageView] | None = stage.connector.stage
|
||||
connector: Connector = stage.connector
|
||||
if not connector.enabled:
|
||||
return None
|
||||
inner_stage: type[StageView] | None = connector.stage
|
||||
if not inner_stage:
|
||||
return self.executor.stage_ok()
|
||||
return None
|
||||
return inner_stage(self.executor, request=self.request)
|
||||
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
return self._get_inner().dispatch(request, *args, **kwargs)
|
||||
inner = self._get_inner()
|
||||
if inner is None:
|
||||
stage: EndpointStage = self.executor.current_stage
|
||||
if stage.mode == StageMode.OPTIONAL:
|
||||
return self.executor.stage_ok()
|
||||
else:
|
||||
return self.executor.stage_invalid("Invalid stage configuration")
|
||||
return inner.dispatch(request, *args, **kwargs)
|
||||
|
||||
def cleanup(self):
|
||||
return self._get_inner().cleanup()
|
||||
inner = self._get_inner()
|
||||
if inner is not None:
|
||||
return inner.cleanup()
|
||||
|
||||
@@ -6,7 +6,7 @@ from django.utils.translation import gettext_lazy as _
|
||||
from dramatiq.actor import actor
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.endpoints.controller import EnrollmentMethods
|
||||
from authentik.endpoints.controller import Capabilities
|
||||
from authentik.endpoints.models import Connector
|
||||
|
||||
LOGGER = get_logger()
|
||||
@@ -17,11 +17,11 @@ def endpoints_sync(connector_pk: Any):
|
||||
connector: Connector | None = (
|
||||
Connector.objects.filter(pk=connector_pk).select_subclasses().first()
|
||||
)
|
||||
if not connector:
|
||||
if not connector or not connector.enabled:
|
||||
return
|
||||
controller = connector.controller
|
||||
ctrl = controller(connector)
|
||||
if EnrollmentMethods.AUTOMATIC_API not in ctrl.supported_enrollment_methods():
|
||||
if Capabilities.ENROLL_AUTOMATIC_API not in ctrl.capabilities():
|
||||
return
|
||||
LOGGER.info("Syncing connector", connector=connector.name)
|
||||
ctrl.sync_endpoints()
|
||||
|
||||
41
authentik/endpoints/tests/test_api.py
Normal file
41
authentik/endpoints/tests/test_api.py
Normal file
@@ -0,0 +1,41 @@
|
||||
from django.urls import reverse
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.tests.utils import create_test_admin_user
|
||||
from authentik.endpoints.connectors.agent.models import AgentConnector
|
||||
from authentik.endpoints.models import StageMode
|
||||
from authentik.enterprise.endpoints.connectors.fleet.models import FleetConnector
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
|
||||
class TestAPI(APITestCase):
|
||||
def setUp(self):
|
||||
self.user = create_test_admin_user()
|
||||
self.client.force_login(self.user)
|
||||
|
||||
def test_endpoint_stage_agent(self):
|
||||
connector = AgentConnector.objects.create(name=generate_id())
|
||||
res = self.client.post(
|
||||
reverse("authentik_api:stages-endpoint-list"),
|
||||
data={
|
||||
"name": generate_id(),
|
||||
"connector": str(connector.pk),
|
||||
"mode": StageMode.REQUIRED,
|
||||
},
|
||||
)
|
||||
self.assertEqual(res.status_code, 201)
|
||||
|
||||
def test_endpoint_stage_fleet(self):
|
||||
connector = FleetConnector.objects.create(name=generate_id())
|
||||
res = self.client.post(
|
||||
reverse("authentik_api:stages-endpoint-list"),
|
||||
data={
|
||||
"name": generate_id(),
|
||||
"connector": str(connector.pk),
|
||||
"mode": StageMode.REQUIRED,
|
||||
},
|
||||
)
|
||||
self.assertEqual(res.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
res.content, {"connector": ["Selected connector is not compatible with this stage."]}
|
||||
)
|
||||
35
authentik/endpoints/tests/test_tasks.py
Normal file
35
authentik/endpoints/tests/test_tasks.py
Normal file
@@ -0,0 +1,35 @@
|
||||
from unittest.mock import PropertyMock, patch
|
||||
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.endpoints.controller import BaseController, Capabilities
|
||||
from authentik.endpoints.models import Connector
|
||||
from authentik.endpoints.tasks import endpoints_sync
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
|
||||
class TestEndpointTasks(APITestCase):
|
||||
def test_agent_sync(self):
|
||||
class controller(BaseController):
|
||||
def capabilities(self):
|
||||
return [Capabilities.ENROLL_AUTOMATIC_API]
|
||||
|
||||
def sync_endpoints(self):
|
||||
pass
|
||||
|
||||
with patch.object(Connector, "controller", PropertyMock(return_value=controller)):
|
||||
connector = Connector.objects.create(name=generate_id())
|
||||
self.assertEqual(len(connector.schedule_specs), 1)
|
||||
|
||||
endpoints_sync.send(connector.pk).get_result(block=True)
|
||||
|
||||
def test_agent_no_sync(self):
|
||||
class controller(BaseController):
|
||||
def capabilities(self):
|
||||
return []
|
||||
|
||||
with patch.object(Connector, "controller", PropertyMock(return_value=controller)):
|
||||
connector = Connector.objects.create(name=generate_id())
|
||||
self.assertEqual(len(connector.schedule_specs), 0)
|
||||
|
||||
endpoints_sync.send(connector.pk).get_result(block=True)
|
||||
@@ -3,6 +3,7 @@ from hmac import compare_digest
|
||||
|
||||
from django.http import Http404, HttpRequest, HttpResponse, HttpResponseBadRequest, QueryDict
|
||||
|
||||
from authentik.common.oauth.constants import QS_LOGIN_HINT
|
||||
from authentik.endpoints.connectors.agent.auth import (
|
||||
agent_auth_issue_token,
|
||||
check_device_policies,
|
||||
@@ -14,7 +15,7 @@ from authentik.enterprise.policy import EnterprisePolicyAccessView
|
||||
from authentik.flows.exceptions import FlowNonApplicableException
|
||||
from authentik.flows.models import in_memory_stage
|
||||
from authentik.flows.planner import PLAN_CONTEXT_DEVICE, FlowPlanner
|
||||
from authentik.flows.stage import StageView
|
||||
from authentik.flows.stage import PLAN_CONTEXT_PENDING_USER_IDENTIFIER, StageView
|
||||
from authentik.providers.oauth2.utils import HttpResponseRedirectScheme
|
||||
|
||||
QS_AGENT_IA_TOKEN = "ak-auth-ia-token" # nosec
|
||||
@@ -64,14 +65,14 @@ class AgentInteractiveAuth(EnterprisePolicyAccessView):
|
||||
|
||||
planner = FlowPlanner(self.connector.authorization_flow)
|
||||
planner.allow_empty_flows = True
|
||||
context = {
|
||||
PLAN_CONTEXT_DEVICE: self.device,
|
||||
PLAN_CONTEXT_DEVICE_AUTH_TOKEN: self.auth_token,
|
||||
}
|
||||
if QS_LOGIN_HINT in request.GET:
|
||||
context[PLAN_CONTEXT_PENDING_USER_IDENTIFIER] = request.GET[QS_LOGIN_HINT]
|
||||
try:
|
||||
plan = planner.plan(
|
||||
self.request,
|
||||
{
|
||||
PLAN_CONTEXT_DEVICE: self.device,
|
||||
PLAN_CONTEXT_DEVICE_AUTH_TOKEN: self.auth_token,
|
||||
},
|
||||
)
|
||||
plan = planner.plan(self.request, context)
|
||||
except FlowNonApplicableException:
|
||||
return self.handle_no_permission_authenticated()
|
||||
plan.append_stage(in_memory_stage(AgentAuthFulfillmentStage))
|
||||
@@ -84,7 +85,6 @@ class AgentInteractiveAuth(EnterprisePolicyAccessView):
|
||||
|
||||
|
||||
class AgentAuthFulfillmentStage(StageView):
|
||||
|
||||
def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
||||
device: Device = self.executor.plan.context.pop(PLAN_CONTEXT_DEVICE)
|
||||
auth_token: DeviceAuthenticationToken = self.executor.plan.context.pop(
|
||||
|
||||
@@ -6,7 +6,7 @@ from requests import RequestException
|
||||
from rest_framework.exceptions import ValidationError
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.endpoints.controller import BaseController, ConnectorSyncException, EnrollmentMethods
|
||||
from authentik.endpoints.controller import BaseController, Capabilities, ConnectorSyncException
|
||||
from authentik.endpoints.facts import (
|
||||
DeviceFacts,
|
||||
OSFamily,
|
||||
@@ -43,8 +43,8 @@ class FleetController(BaseController[DBC]):
|
||||
def vendor_identifier() -> str:
|
||||
return "fleetdm.com"
|
||||
|
||||
def supported_enrollment_methods(self) -> list[EnrollmentMethods]:
|
||||
return [EnrollmentMethods.AUTOMATIC_API]
|
||||
def capabilities(self) -> list[Capabilities]:
|
||||
return [Capabilities.ENROLL_AUTOMATIC_API]
|
||||
|
||||
def _url(self, path: str) -> str:
|
||||
return f"{self.connector.url}{path}"
|
||||
|
||||
@@ -15,6 +15,7 @@ from django.core.cache import cache
|
||||
from django.db.models.query import QuerySet
|
||||
from django.utils.timezone import now
|
||||
from jwt import PyJWTError, decode, get_unverified_header
|
||||
from jwt.algorithms import ECAlgorithm
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.fields import (
|
||||
ChoiceField,
|
||||
@@ -109,13 +110,20 @@ class LicenseKey:
|
||||
intermediate.verify_directly_issued_by(get_licensing_key())
|
||||
except InvalidSignature, TypeError, ValueError, Error:
|
||||
raise ValidationError("Unable to verify license") from None
|
||||
_validate_curve_original = ECAlgorithm._validate_curve
|
||||
try:
|
||||
# authentik's license are generated with `algorithm="ES512"` and signed with
|
||||
# a key of curve `secp384r1`. Starting with version 2.11.0, pyjwt enforces the spec, see
|
||||
# https://github.com/jpadilla/pyjwt/commit/5b8622773358e56d3d3c0a9acf404809ff34433a
|
||||
# authentik will change its license generation to `algorithm="ES384"` in 2026.
|
||||
# TODO: remove this when the last incompatible license runs out.
|
||||
ECAlgorithm._validate_curve = lambda *_: True
|
||||
body = from_dict(
|
||||
LicenseKey,
|
||||
decode(
|
||||
jwt,
|
||||
our_cert.public_key(),
|
||||
algorithms=["ES512"],
|
||||
algorithms=["ES384", "ES512"],
|
||||
audience=get_license_aud(),
|
||||
options={"verify_exp": check_expiry, "verify_signature": check_expiry},
|
||||
),
|
||||
@@ -125,6 +133,8 @@ class LicenseKey:
|
||||
if unverified["aud"] != get_license_aud():
|
||||
raise ValidationError("Invalid Install ID in license") from None
|
||||
raise ValidationError("Unable to verify license") from None
|
||||
finally:
|
||||
ECAlgorithm._validate_curve = _validate_curve_original
|
||||
return body
|
||||
|
||||
@staticmethod
|
||||
|
||||
0
authentik/enterprise/lifecycle/__init__.py
Normal file
0
authentik/enterprise/lifecycle/__init__.py
Normal file
0
authentik/enterprise/lifecycle/api/__init__.py
Normal file
0
authentik/enterprise/lifecycle/api/__init__.py
Normal file
150
authentik/enterprise/lifecycle/api/iterations.py
Normal file
150
authentik/enterprise/lifecycle/api/iterations.py
Normal file
@@ -0,0 +1,150 @@
|
||||
from datetime import datetime
|
||||
|
||||
from django.db.models import BooleanField as ModelBooleanField
|
||||
from django.db.models import Case, Q, Value, When
|
||||
from django_filters.rest_framework import BooleanFilter, FilterSet
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.fields import IntegerField, SerializerMethodField
|
||||
from rest_framework.mixins import CreateModelMixin
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.viewsets import GenericViewSet
|
||||
|
||||
from authentik.core.api.utils import ModelSerializer
|
||||
from authentik.enterprise.api import EnterpriseRequiredMixin
|
||||
from authentik.enterprise.lifecycle.api.reviews import ReviewSerializer
|
||||
from authentik.enterprise.lifecycle.models import LifecycleIteration, ReviewState
|
||||
from authentik.enterprise.lifecycle.utils import (
|
||||
ContentTypeField,
|
||||
ReviewerGroupSerializer,
|
||||
ReviewerUserSerializer,
|
||||
admin_link_for_model,
|
||||
parse_content_type,
|
||||
start_of_day,
|
||||
)
|
||||
from authentik.lib.utils.time import timedelta_from_string
|
||||
|
||||
|
||||
class LifecycleIterationSerializer(EnterpriseRequiredMixin, ModelSerializer):
    """Read-only representation of a single lifecycle (access review) iteration.

    Combines the iteration itself with reviewer configuration denormalized
    from its rule and dates derived from the rule's interval/grace period.
    """

    # Content type of the reviewed object (see ContentTypeField for format).
    content_type = ContentTypeField()
    # Human-readable name of the reviewed object (str(object)).
    object_verbose = SerializerMethodField()
    object_admin_url = SerializerMethodField(read_only=True)
    grace_period_end = SerializerMethodField(read_only=True)
    reviews = ReviewSerializer(many=True, read_only=True, source="review_set.all")
    # Whether the requesting user may still submit a review for this iteration.
    user_can_review = SerializerMethodField(read_only=True)

    # Reviewer configuration lives on the rule, not on the iteration itself.
    reviewer_groups = ReviewerGroupSerializer(
        many=True, read_only=True, source="rule.reviewer_groups"
    )
    min_reviewers = IntegerField(read_only=True, source="rule.min_reviewers")
    reviewers = ReviewerUserSerializer(many=True, read_only=True, source="rule.reviewers")

    next_review_date = SerializerMethodField(read_only=True)

    class Meta:
        model = LifecycleIteration
        fields = [
            "id",
            "content_type",
            "object_id",
            "object_verbose",
            "object_admin_url",
            "state",
            "opened_on",
            "grace_period_end",
            "next_review_date",
            "reviews",
            "user_can_review",
            "reviewer_groups",
            "min_reviewers",
            "reviewers",
        ]
        # Iterations are managed by the lifecycle tasks; the API never edits them.
        read_only_fields = fields

    def get_object_verbose(self, iteration: LifecycleIteration) -> str:
        """Human-readable name of the reviewed object."""
        return str(iteration.object)

    def get_object_admin_url(self, iteration: LifecycleIteration) -> str:
        """Admin-interface link to the reviewed object."""
        return admin_link_for_model(iteration.object)

    def get_grace_period_end(self, iteration: LifecycleIteration) -> datetime:
        """Date after which the iteration becomes overdue (opened_on + grace period)."""
        return start_of_day(
            iteration.opened_on + timedelta_from_string(iteration.rule.grace_period)
        )

    def get_next_review_date(self, iteration: LifecycleIteration) -> datetime:
        """Date on which the next iteration will be opened (opened_on + interval)."""
        return start_of_day(iteration.opened_on + timedelta_from_string(iteration.rule.interval))

    def get_user_can_review(self, iteration: LifecycleIteration) -> bool:
        """Whether the requesting user is an eligible, not-yet-voted reviewer."""
        return iteration.user_can_review(self.context["request"].user)
|
||||
|
||||
|
||||
class LifecycleIterationFilterSet(FilterSet):
    """Filter iterations on the `user_is_reviewer` queryset annotation."""

    # Matches the boolean annotation added in IterationViewSet.get_queryset,
    # not a model field.
    user_is_reviewer = BooleanFilter(field_name="user_is_reviewer", lookup_expr="exact")
|
||||
|
||||
|
||||
class IterationViewSet(EnterpriseRequiredMixin, CreateModelMixin, GenericViewSet):
    """API endpoint for lifecycle (access review) iterations.

    Besides creation, exposes two custom read actions: the latest iteration
    for a given object, and all currently open (pending/overdue) iterations.
    """

    queryset = LifecycleIteration.objects.all()
    serializer_class = LifecycleIterationSerializer
    ordering = ["-opened_on"]
    ordering_fields = ["state", "content_type__model", "opened_on", "grace_period_end"]
    filterset_class = LifecycleIterationFilterSet

    def get_queryset(self):
        """Annotate iterations with `user_is_reviewer` for the requesting user.

        True when the user is listed directly on the rule or belongs to one of
        the rule's reviewer groups (expanded to ancestor groups, so membership
        in a child group counts).
        """
        user = self.request.user
        return self.queryset.annotate(
            user_is_reviewer=Case(
                When(
                    Q(rule__reviewers=user)
                    | Q(rule__reviewer_groups__in=user.groups.all().with_ancestors()),
                    then=Value(True),
                ),
                default=Value(False),
                output_field=ModelBooleanField(),
            )
        ).distinct()

    @extend_schema(
        # Document the 404 returned when no iteration exists for the object;
        # previously unlisted, leaving the generated schema incomplete.
        responses={200: LifecycleIterationSerializer, 404: None},
    )
    @action(
        detail=False,
        methods=["get"],
        url_path=r"latest/(?P<content_type>[^/]+)/(?P<object_id>[^/]+)",
    )
    def latest_iteration(self, request: Request, content_type: str, object_id: str) -> Response:
        """Return the most recently opened iteration for one object, or 404."""
        ct = parse_content_type(content_type)
        try:
            obj = (
                self.get_queryset()
                .filter(
                    content_type__app_label=ct["app_label"],
                    content_type__model=ct["model"],
                    object_id=object_id,
                )
                .latest("opened_on")
            )
        except LifecycleIteration.DoesNotExist:
            return Response(status=404)
        serializer = self.get_serializer(obj)
        return Response(serializer.data)

    @extend_schema(
        operation_id="lifecycle_iterations_list_open",
        responses={200: LifecycleIterationSerializer(many=True)},
    )
    @action(
        detail=False,
        methods=["get"],
        url_path=r"open",
    )
    def open_iterations(self, request: Request) -> Response:
        """List all iterations still awaiting review (pending or overdue)."""
        iterations = self.get_queryset().filter(
            state__in=(ReviewState.PENDING, ReviewState.OVERDUE)
        )
        iterations = self.filter_queryset(iterations)
        page = self.paginate_queryset(iterations)
        if page is not None:
            serializer = self.get_serializer(page, many=True)
            return self.get_paginated_response(serializer.data)

        serializer = self.get_serializer(iterations, many=True)
        return Response(serializer.data)
|
||||
33
authentik/enterprise/lifecycle/api/reviews.py
Normal file
33
authentik/enterprise/lifecycle/api/reviews.py
Normal file
@@ -0,0 +1,33 @@
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.mixins import CreateModelMixin
|
||||
from rest_framework.viewsets import GenericViewSet
|
||||
|
||||
from authentik.core.api.utils import ModelSerializer
|
||||
from authentik.enterprise.api import EnterpriseRequiredMixin
|
||||
from authentik.enterprise.lifecycle.models import LifecycleIteration, Review
|
||||
from authentik.enterprise.lifecycle.utils import ReviewerUserSerializer
|
||||
|
||||
|
||||
class ReviewSerializer(EnterpriseRequiredMixin, ModelSerializer):
    """Serializer for a single review vote on a lifecycle iteration."""

    # Reviewer is set server-side from the requesting user, never from input
    # (see ReviewViewSet.perform_create).
    reviewer = ReviewerUserSerializer(read_only=True)

    class Meta:
        model = Review
        fields = ["id", "iteration", "reviewer", "timestamp", "note"]
        read_only_fields = ["id", "timestamp", "reviewer"]

    def validate_iteration(self, iteration: LifecycleIteration) -> LifecycleIteration:
        """Reject reviews from users that are not eligible reviewers of the iteration."""
        user = self.context["request"].user
        if not iteration.user_can_review(user):
            raise ValidationError(_("You are not allowed to submit a review for this object."))
        return iteration
|
||||
|
||||
|
||||
class ReviewViewSet(EnterpriseRequiredMixin, CreateModelMixin, GenericViewSet):
    """Create-only API endpoint for submitting reviews."""

    queryset = Review.objects.all()
    serializer_class = ReviewSerializer

    def perform_create(self, serializer: ReviewSerializer) -> None:
        """Attach the requesting user as reviewer, then let the iteration
        re-evaluate its state (it may transition to REVIEWED)."""
        review = serializer.save(reviewer=self.request.user)
        review.iteration.on_review(self.request)
|
||||
113
authentik/enterprise/lifecycle/api/rules.py
Normal file
113
authentik/enterprise/lifecycle/api/rules.py
Normal file
@@ -0,0 +1,113 @@
|
||||
from django.utils.translation import gettext as _
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.fields import SerializerMethodField
|
||||
from rest_framework.relations import SlugRelatedField
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.core.api.utils import ModelSerializer
|
||||
from authentik.core.models import User
|
||||
from authentik.enterprise.api import EnterpriseRequiredMixin
|
||||
from authentik.enterprise.lifecycle.models import LifecycleRule
|
||||
from authentik.enterprise.lifecycle.utils import (
|
||||
ContentTypeField,
|
||||
ReviewerGroupSerializer,
|
||||
ReviewerUserSerializer,
|
||||
)
|
||||
from authentik.lib.utils.time import timedelta_from_string
|
||||
|
||||
|
||||
class LifecycleRuleSerializer(EnterpriseRequiredMixin, ModelSerializer):
    """Serializer for lifecycle rules, with cross-field validation of the
    target, reviewer configuration, and interval/grace period."""

    content_type = ContentTypeField()
    target_verbose = SerializerMethodField()
    reviewer_groups_obj = ReviewerGroupSerializer(
        many=True, read_only=True, source="reviewer_groups"
    )
    # Reviewers are addressed by user UUID rather than numeric pk.
    reviewers = SlugRelatedField(slug_field="uuid", many=True, queryset=User.objects.all())
    reviewers_obj = ReviewerUserSerializer(many=True, read_only=True, source="reviewers")

    class Meta:
        model = LifecycleRule
        fields = [
            "id",
            "name",
            "content_type",
            "object_id",
            "interval",
            "grace_period",
            "reviewer_groups",
            "reviewer_groups_obj",
            "min_reviewers",
            "min_reviewers_is_per_group",
            "reviewers",
            "reviewers_obj",
            "notification_transports",
            "target_verbose",
        ]
        read_only_fields = ["id", "reviewers_obj", "reviewer_groups_obj", "target_verbose"]

    def get_target_verbose(self, rule: LifecycleRule) -> str:
        """Human-readable target: the model's plural name for a type-wide rule,
        or "model: object" for a per-object rule."""
        if rule.object_id is None:
            return rule.content_type.model_class()._meta.verbose_name_plural
        return f"{rule.content_type.model_class()._meta.verbose_name}: {rule.object}"

    def validate_object_id(self, value: str) -> str | None:
        """Normalize an empty object ID to None (makes the rule type-wide)."""
        if value == "":
            return None
        return value

    def validate(self, attrs: dict) -> dict:
        """Cross-field validation, tolerant of partial (PATCH) payloads."""
        # Resolve values that may be absent from a partial update.
        content_type = attrs.get("content_type", getattr(self.instance, "content_type", None))
        resolved_object_id = attrs.get("object_id", getattr(self.instance, "object_id", None))
        # Bugfix: previously attrs["content_type"] was indexed directly, which
        # raised an unhandled KeyError (HTTP 500) on partial updates that set
        # object_id without content_type; fall back to the instance's value.
        if (
            attrs.get("object_id") is not None
            and content_type is not None
            and not content_type.get_all_objects_for_this_type(pk=attrs["object_id"]).exists()
        ):
            raise ValidationError({"object_id": _("Object does not exist")})
        if "reviewer_groups" in attrs or "reviewers" in attrs:
            reviewer_groups = attrs.get(
                "reviewer_groups", self.instance.reviewer_groups.all() if self.instance else []
            )
            reviewers = attrs.get(
                "reviewers", self.instance.reviewers.all() if self.instance else []
            )
            # At least one way for a review to happen must be configured.
            if len(reviewer_groups) == 0 and len(reviewers) == 0:
                raise ValidationError(_("Either a reviewer group or a reviewer must be set."))
        if "grace_period" in attrs or "interval" in attrs:
            grace_period = attrs.get("grace_period", getattr(self.instance, "grace_period", None))
            interval = attrs.get("interval", getattr(self.instance, "interval", None))
            if (
                grace_period is not None
                and interval is not None
                and (timedelta_from_string(grace_period) > timedelta_from_string(interval))
            ):
                raise ValidationError(
                    {"grace_period": _("Grace period must be shorter than the interval.")}
                )
        if "content_type" in attrs or "object_id" in attrs:
            # Enforce at most one type-wide rule (object_id NULL) per content
            # type, mirroring the DB constraint uniq_lifecycle_rule_ct_null_object.
            if content_type is not None and resolved_object_id is None:
                existing = LifecycleRule.objects.filter(
                    content_type=content_type, object_id__isnull=True
                )
                if self.instance:
                    existing = existing.exclude(pk=self.instance.pk)
                if existing.exists():
                    raise ValidationError(
                        {
                            "content_type": _(
                                "Only one type-wide rule for each object type is allowed."
                            )
                        }
                    )
        return attrs
|
||||
|
||||
|
||||
class LifecycleRuleViewSet(ModelViewSet):
    """Full CRUD API endpoint for lifecycle rules."""

    queryset = LifecycleRule.objects.all()
    serializer_class = LifecycleRuleSerializer
    search_fields = ["content_type__model", "reviewer_groups__name", "reviewers__username"]
    ordering = ["name"]
    ordering_fields = ["name", "content_type__model"]
    filterset_fields = ["content_type__model"]
|
||||
22
authentik/enterprise/lifecycle/apps.py
Normal file
22
authentik/enterprise/lifecycle/apps.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from authentik.enterprise.apps import EnterpriseConfig
|
||||
from authentik.lib.utils.time import fqdn_rand
|
||||
from authentik.tasks.schedules.common import ScheduleSpec
|
||||
|
||||
|
||||
class ReportsConfig(EnterpriseConfig):
    """App config for the enterprise lifecycle (access review) app.

    NOTE(review): the class is named ReportsConfig but configures the
    lifecycle app — likely copied from the reports app; consider renaming
    (rename touches any references to this config class).
    """

    name = "authentik.enterprise.lifecycle"
    label = "authentik_lifecycle"
    verbose_name = "authentik Enterprise.Lifecycle"
    default = True

    @property
    def tenant_schedule_specs(self) -> list[ScheduleSpec]:
        """Schedule the daily rule-application task.

        fqdn_rand derives the minute and hour from the host name, spreading
        the run time across installations.
        """
        from authentik.enterprise.lifecycle.tasks import apply_lifecycle_rules

        return [
            ScheduleSpec(
                actor=apply_lifecycle_rules,
                crontab=f"{fqdn_rand('lifecycle_apply_lifecycle_rules')} "
                f"{fqdn_rand('lifecycle_apply_lifecycle_rules', 24)} * * *",
            )
        ]
|
||||
154
authentik/enterprise/lifecycle/migrations/0001_initial.py
Normal file
154
authentik/enterprise/lifecycle/migrations/0001_initial.py
Normal file
@@ -0,0 +1,154 @@
|
||||
# Generated by Django 5.2.11 on 2026-02-09 15:57
|
||||
|
||||
import authentik.lib.utils.time
|
||||
import django.db.models.deletion
|
||||
import uuid
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    # Auto-generated initial migration for the lifecycle app
    # (LifecycleRule, LifecycleIteration, Review). Do not hand-edit applied
    # migrations; create a follow-up migration instead.

    initial = True

    dependencies = [
        ("authentik_core", "0057_remove_user_groups_remove_user_user_permissions_and_more"),
        ("authentik_events", "0016_alter_event_action"),
        ("contenttypes", "0002_remove_content_type_name"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name="LifecycleRule",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ("name", models.TextField(unique=True)),
                # NULL object_id = rule applies to every object of content_type.
                ("object_id", models.TextField(default=None, null=True)),
                (
                    "interval",
                    models.TextField(
                        default="days=60",
                        validators=[authentik.lib.utils.time.timedelta_string_validator],
                    ),
                ),
                (
                    "grace_period",
                    models.TextField(
                        default="days=30",
                        validators=[authentik.lib.utils.time.timedelta_string_validator],
                    ),
                ),
                ("min_reviewers", models.PositiveSmallIntegerField(default=1)),
                ("min_reviewers_is_per_group", models.BooleanField(default=False)),
                (
                    "content_type",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="contenttypes.contenttype"
                    ),
                ),
                (
                    "notification_transports",
                    models.ManyToManyField(
                        blank=True,
                        help_text="Select which transports should be used to notify the reviewers. If none are selected, the notification will only be shown in the authentik UI.",
                        to="authentik_events.notificationtransport",
                    ),
                ),
                ("reviewer_groups", models.ManyToManyField(blank=True, to="authentik_core.group")),
                ("reviewers", models.ManyToManyField(blank=True, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name="LifecycleIteration",
            fields=[
                # Inherited from ManagedModel (blueprint support).
                (
                    "managed",
                    models.TextField(
                        default=None,
                        help_text="Objects that are managed by authentik. These objects are created and updated automatically. This flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
                        null=True,
                        unique=True,
                        verbose_name="Managed by authentik",
                    ),
                ),
                ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ("object_id", models.TextField()),
                (
                    "state",
                    models.CharField(
                        choices=[
                            ("REVIEWED", "Reviewed"),
                            ("PENDING", "Pending"),
                            ("OVERDUE", "Overdue"),
                            ("CANCELED", "Canceled"),
                        ],
                        default="PENDING",
                        max_length=10,
                    ),
                ),
                # Widened to DateTimeField in migration 0002.
                ("opened_on", models.DateField(auto_now_add=True)),
                (
                    "content_type",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="contenttypes.contenttype"
                    ),
                ),
                # SET_NULL keeps historic iterations after rule deletion.
                (
                    "rule",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        to="authentik_lifecycle.lifecyclerule",
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="Review",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ("timestamp", models.DateTimeField(auto_now_add=True)),
                ("note", models.TextField(null=True)),
                (
                    "iteration",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_lifecycle.lifecycleiteration",
                    ),
                ),
                (
                    "reviewer",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
        ),
        migrations.AddIndex(
            model_name="lifecyclerule",
            index=models.Index(fields=["content_type"], name="authentik_l_content_4e3a6a_idx"),
        ),
        # At most one type-wide rule (object_id NULL) per content type.
        migrations.AddConstraint(
            model_name="lifecyclerule",
            constraint=models.UniqueConstraint(
                condition=models.Q(("object_id__isnull", True)),
                fields=("content_type",),
                name="uniq_lifecycle_rule_ct_null_object",
            ),
        ),
        migrations.AlterUniqueTogether(
            name="lifecyclerule",
            unique_together={("content_type", "object_id")},
        ),
        migrations.AddIndex(
            model_name="lifecycleiteration",
            index=models.Index(
                fields=["content_type", "opened_on"], name="authentik_l_content_09c32a_idx"
            ),
        ),
        # Each user may review a given iteration at most once.
        migrations.AlterUniqueTogether(
            name="review",
            unique_together={("iteration", "reviewer")},
        ),
    ]
|
||||
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 5.2.11 on 2026-02-13 09:33
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    # Auto-generated: widens LifecycleIteration.opened_on from DateField to
    # DateTimeField. Do not hand-edit applied migrations.

    dependencies = [
        ("authentik_lifecycle", "0001_initial"),
    ]

    operations = [
        migrations.AlterField(
            model_name="lifecycleiteration",
            name="opened_on",
            field=models.DateTimeField(auto_now_add=True),
        ),
    ]
|
||||
292
authentik/enterprise/lifecycle/models.py
Normal file
292
authentik/enterprise/lifecycle/models.py
Normal file
@@ -0,0 +1,292 @@
|
||||
from datetime import timedelta
|
||||
from uuid import uuid4
|
||||
|
||||
from django.contrib.contenttypes.fields import GenericForeignKey
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.db import models
|
||||
from django.db.models import Q, QuerySet
|
||||
from django.db.models.fields import Field
|
||||
from django.db.models.functions import Cast
|
||||
from django.http import HttpRequest
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext as _
|
||||
from rest_framework.serializers import BaseSerializer
|
||||
|
||||
from authentik.blueprints.models import ManagedModel
|
||||
from authentik.core.models import Group, User
|
||||
from authentik.enterprise.lifecycle.utils import link_for_model, start_of_day
|
||||
from authentik.events.models import Event, EventAction, NotificationSeverity, NotificationTransport
|
||||
from authentik.lib.models import SerializerModel
|
||||
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
|
||||
|
||||
|
||||
class LifecycleRule(SerializerModel):
    """Configuration for recurring access reviews of a model type or one object.

    A rule targets either every instance of `content_type` (object_id NULL —
    a "type-wide" rule) or a single instance (object_id set). `apply()`
    reconciles the rule's LifecycleIteration rows.
    """

    id = models.UUIDField(primary_key=True, default=uuid4)
    name = models.TextField(unique=True)
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    # None targets every object of the content type.
    object_id = models.TextField(null=True, default=None)
    object = GenericForeignKey("content_type", "object_id")

    # How often a new review iteration is opened (authentik timedelta string).
    interval = models.TextField(
        default="days=60",
        validators=[timedelta_string_validator],
    )
    # Grace period starts after a review is due
    grace_period = models.TextField(
        default="days=30",
        validators=[timedelta_string_validator],
    )

    # The review has to be conducted by `min_reviewers` members of `reviewer_groups`
    # (total or per group depending on `min_reviewers_is_per_group` flag) as well
    # as all of `reviewers`
    reviewer_groups = models.ManyToManyField("authentik_core.Group", blank=True)
    min_reviewers = models.PositiveSmallIntegerField(default=1)
    min_reviewers_is_per_group = models.BooleanField(default=False)
    reviewers = models.ManyToManyField("authentik_core.User", blank=True)

    notification_transports = models.ManyToManyField(
        NotificationTransport,
        help_text=_(
            "Select which transports should be used to notify the reviewers. If none are "
            "selected, the notification will only be shown in the authentik UI."
        ),
        blank=True,
    )

    class Meta:
        indexes = [models.Index(fields=["content_type"])]
        unique_together = [["content_type", "object_id"]]
        constraints = [
            # At most one type-wide rule per content type.
            models.UniqueConstraint(
                fields=["content_type"],
                condition=Q(object_id__isnull=True),
                name="uniq_lifecycle_rule_ct_null_object",
            )
        ]

    @property
    def serializer(self) -> type[BaseSerializer]:
        # Imported lazily to avoid a circular import with the API module.
        from authentik.enterprise.lifecycle.api.rules import LifecycleRuleSerializer

        return LifecycleRuleSerializer

    def _get_pk_field(self) -> Field:
        """Return a fresh instance of the target model's primary-key field class.

        Follows FK primary keys to their ultimate target field so that
        object_id (stored as text) can be Cast back for pk__in comparisons.
        """
        model = self.content_type.model_class()
        pk = model._meta.pk
        while hasattr(pk, "target_field"):
            pk = pk.target_field
        return pk.__class__()

    def get_objects(self) -> QuerySet:
        """All objects this rule applies to.

        For a type-wide rule, objects covered by a more specific per-object
        rule on the same content type are excluded.
        """
        qs = self.content_type.get_all_objects_for_this_type()
        if self.object_id:
            qs = qs.filter(pk=self.object_id)
        else:
            qs = qs.exclude(
                pk__in=LifecycleRule.objects.filter(
                    content_type=self.content_type, object_id__isnull=False
                ).values_list(Cast("object_id", output_field=self._get_pk_field()), flat=True)
            )
        return qs

    def _get_stale_iterations(self) -> QuerySet[LifecycleIteration]:
        """Open iterations of this rule that no longer match its target
        (e.g. after the rule's content type or object was changed)."""
        filter = ~Q(content_type=self.content_type)
        if self.object_id:
            filter = filter | ~Q(object_id=self.object_id)
        # Only still-open iterations are considered stale.
        filter = Q(state__in=(ReviewState.PENDING, ReviewState.OVERDUE)) & filter
        return self.lifecycleiteration_set.filter(filter)

    def _get_newly_overdue_iterations(self) -> QuerySet[LifecycleIteration]:
        """Pending iterations whose grace period has elapsed.

        NOTE(review): the cutoff adds one day to "now" before subtracting the
        grace period — presumably so iterations turn overdue at the start of
        the day the period ends; confirm against start_of_day semantics.
        """
        return self.lifecycleiteration_set.filter(
            opened_on__lt=start_of_day(
                timezone.now() + timedelta(days=1) - timedelta_from_string(self.grace_period)
            ),
            state=ReviewState.PENDING,
        )

    def _get_newly_due_objects(self) -> QuerySet:
        """Target objects with no iteration opened within the last interval."""
        recent_iteration_ids = LifecycleIteration.objects.filter(
            content_type=self.content_type,
            object_id__isnull=False,
            opened_on__gte=start_of_day(
                timezone.now() + timedelta(days=1) - timedelta_from_string(self.interval)
            ),
        ).values_list(Cast("object_id", output_field=self._get_pk_field()), flat=True)

        return self.get_objects().exclude(pk__in=recent_iteration_ids)

    def apply(self):
        """Reconcile iterations: cancel stale ones, mark expired ones overdue,
        open new iterations for objects whose review is due."""
        self._get_stale_iterations().update(state=ReviewState.CANCELED)

        for iteration in self._get_newly_overdue_iterations():
            iteration.make_overdue()

        for obj in self._get_newly_due_objects():
            LifecycleIteration.start(content_type=self.content_type, object_id=obj.pk, rule=self)

    def is_satisfied_for_iteration(self, iteration: LifecycleIteration) -> bool:
        """Whether an iteration has collected all required reviews.

        Every explicitly listed reviewer must have reviewed; additionally
        `min_reviewers` members of the reviewer groups must have reviewed —
        per group or in total, depending on `min_reviewers_is_per_group`.
        """
        reviewers = self.reviewers.all()
        if (
            iteration.review_set.filter(reviewer__in=reviewers).distinct("reviewer").count()
            < reviewers.count()
        ):
            return False
        # No groups configured: the explicit-reviewer check above suffices.
        if self.reviewer_groups.count() == 0:
            return True
        if self.min_reviewers_is_per_group:
            for g in self.reviewer_groups.all():
                # Group membership includes descendant groups.
                if (
                    iteration.review_set.filter(
                        reviewer__groups__in=Group.objects.filter(pk=g.pk).with_descendants()
                    )
                    .distinct()
                    .count()
                    < self.min_reviewers
                ):
                    return False
            return True
        else:
            return (
                iteration.review_set.filter(
                    reviewer__groups__in=self.reviewer_groups.all().with_descendants()
                )
                .distinct()
                .count()
                >= self.min_reviewers
            )

    def get_reviewers(self) -> QuerySet[User]:
        """All users allowed to review: explicit reviewers plus members of the
        reviewer groups (including descendant groups)."""
        return User.objects.filter(
            Q(id__in=self.reviewers.all().values_list("pk", flat=True))
            | Q(groups__in=self.reviewer_groups.all().with_descendants())
        ).distinct()

    def notify_reviewers(self, event: Event, severity: str):
        """Queue one notification task per reviewer per configured transport.

        Transports flagged `send_once` stop after the first reviewer.
        """
        from authentik.enterprise.lifecycle.tasks import send_notification

        for transport in self.notification_transports.all():
            for user in self.get_reviewers():
                send_notification.send_with_options(
                    args=(transport.pk, event.pk, user.pk, severity),
                    rel_obj=transport,
                )
                if transport.send_once:
                    break
|
||||
|
||||
|
||||
class ReviewState(models.TextChoices):
    """Lifecycle state of a review iteration."""

    REVIEWED = "REVIEWED", _("Reviewed")
    PENDING = "PENDING", _("Pending")
    OVERDUE = "OVERDUE", _("Overdue")
    CANCELED = "CANCELED", _("Canceled")
|
||||
|
||||
|
||||
class LifecycleIteration(SerializerModel, ManagedModel):
    """One review cycle for a single object under a lifecycle rule.

    Iterations are opened by LifecycleRule.apply(), transition to OVERDUE
    when the grace period elapses, to REVIEWED once the rule's reviewer
    requirements are satisfied, or to CANCELED when the rule changes or is
    deleted.
    """

    id = models.UUIDField(primary_key=True, default=uuid4)
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.TextField(null=False)
    object = GenericForeignKey("content_type", "object_id")

    # Historic iterations survive rule deletion (rule becomes NULL); open
    # iterations are cancelled by the pre_delete signal.
    rule = models.ForeignKey(LifecycleRule, null=True, on_delete=models.SET_NULL)

    state = models.CharField(max_length=10, choices=ReviewState, default=ReviewState.PENDING)
    opened_on = models.DateTimeField(auto_now_add=True)

    class Meta:
        indexes = [models.Index(fields=["content_type", "opened_on"])]

    @property
    def serializer(self) -> type[BaseSerializer]:
        # Imported lazily to avoid a circular import with the API module.
        from authentik.enterprise.lifecycle.api.iterations import LifecycleIterationSerializer

        return LifecycleIterationSerializer

    def _get_model_name(self) -> str:
        """Lower-cased human-readable name of the reviewed object's model."""
        return self.content_type.name.lower()

    def _get_event_args(self) -> dict:
        """Keyword arguments shared by all lifecycle events.

        Bugfix: uses printf-style interpolation instead of an f-string inside
        gettext — an f-string is interpolated *before* the catalog lookup, so
        the message could never match a translation entry.
        """
        return {
            "target": self.object,
            "hyperlink": link_for_model(self.object),
            "hyperlink_label": _("Go to %(model)s") % {"model": self._get_model_name()},
            "lifecycle_iteration": self.id,
        }

    def initialize(self):
        """Emit REVIEW_INITIATED and notify the rule's reviewers."""
        event = Event.new(
            EventAction.REVIEW_INITIATED,
            # Translatable template; runtime output is unchanged.
            message=_("Access review is due for %(type)s %(object)s")
            % {"type": self.content_type.name, "object": str(self.object)},
            **self._get_event_args(),
        )
        event.save()
        self.rule.notify_reviewers(event, NotificationSeverity.NOTICE)

    def make_overdue(self):
        """Transition to OVERDUE, emit REVIEW_OVERDUE, and alert reviewers."""
        self.state = ReviewState.OVERDUE

        event = Event.new(
            EventAction.REVIEW_OVERDUE,
            message=_("Access review is overdue for %(type)s %(object)s")
            % {"type": self.content_type.name, "object": str(self.object)},
            **self._get_event_args(),
        )
        event.save()
        self.rule.notify_reviewers(event, NotificationSeverity.ALERT)
        self.save()

    @staticmethod
    def start(content_type: ContentType, object_id: str, rule: LifecycleRule) -> LifecycleIteration:
        """Create a new iteration for an object and emit its initial event."""
        iteration = LifecycleIteration.objects.create(
            content_type=content_type, object_id=object_id, rule=rule
        )
        iteration.initialize()
        return iteration

    def make_reviewed(self, request: HttpRequest):
        """Transition to REVIEWED, emit REVIEW_COMPLETED, and notify reviewers."""
        self.state = ReviewState.REVIEWED
        event = Event.new(
            EventAction.REVIEW_COMPLETED,
            message=_("Access review completed for %(type)s %(object)s")
            % {"type": self.content_type.name, "object": str(self.object)},
            **self._get_event_args(),
        ).from_http(request)
        # NOTE(review): from_http likely persists the event already — the
        # explicit save() may be redundant; confirm against the Event API.
        event.save()
        self.rule.notify_reviewers(event, NotificationSeverity.NOTICE)
        self.save()

    def on_review(self, request: HttpRequest):
        """Called after a review was submitted; completes the iteration when
        the rule's reviewer requirements are now satisfied.

        Raises:
            AssertionError: if called on an iteration that is not open.
        """
        if self.state not in (ReviewState.PENDING, ReviewState.OVERDUE):
            raise AssertionError("Review is not pending or overdue")
        if self.rule.is_satisfied_for_iteration(self):
            self.make_reviewed(request)

    def user_can_review(self, user: User) -> bool:
        """Whether `user` may still submit a review for this iteration.

        False when the iteration is closed or the user already reviewed it;
        otherwise the user must be a direct reviewer or a member of one of
        the rule's reviewer groups.
        """
        if self.state not in (ReviewState.PENDING, ReviewState.OVERDUE):
            return False
        if self.review_set.filter(reviewer=user).exists():
            return False
        # Bugfix: direct reviewers were previously rejected whenever reviewer
        # groups were configured, yet is_satisfied_for_iteration requires ALL
        # direct reviewers to review — deadlocking iterations whose direct
        # reviewers are not group members. Accept either eligibility path.
        if self.rule.reviewers.filter(pk=user.pk).exists():
            return True
        return any(group.is_member(user) for group in self.rule.reviewer_groups.all())
|
||||
|
||||
|
||||
class Review(SerializerModel):
    """A single user's review (approval vote) on one lifecycle iteration."""

    id = models.UUIDField(primary_key=True, default=uuid4)
    iteration = models.ForeignKey(LifecycleIteration, on_delete=models.CASCADE)

    reviewer = models.ForeignKey("authentik_core.User", on_delete=models.CASCADE)
    timestamp = models.DateTimeField(auto_now_add=True)
    # Optional free-form comment attached to the review.
    note = models.TextField(null=True)

    class Meta:
        # Each user may review a given iteration at most once.
        unique_together = [["iteration", "reviewer"]]

    @property
    def serializer(self) -> type[BaseSerializer]:
        # Imported lazily to avoid a circular import with the API module.
        from authentik.enterprise.lifecycle.api.reviews import ReviewSerializer

        return ReviewSerializer
|
||||
22
authentik/enterprise/lifecycle/signals.py
Normal file
22
authentik/enterprise/lifecycle/signals.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from django.db.models import Q
|
||||
from django.db.models.signals import post_save, pre_delete
|
||||
from django.dispatch import receiver
|
||||
|
||||
from authentik.enterprise.lifecycle.models import LifecycleRule, ReviewState
|
||||
|
||||
|
||||
@receiver(post_save, sender=LifecycleRule)
def post_rule_save(sender, instance: LifecycleRule, created: bool, **_):
    """Re-apply a rule whenever it is created or updated.

    Dispatched as a background task so the save request does not block on
    opening/cancelling iterations.
    """
    # Imported lazily to avoid a circular import at module load.
    from authentik.enterprise.lifecycle.tasks import apply_lifecycle_rule

    apply_lifecycle_rule.send_with_options(
        args=(instance.id,),
        rel_obj=instance,
    )
|
||||
|
||||
|
||||
@receiver(pre_delete, sender=LifecycleRule)
def pre_rule_delete(sender, instance: LifecycleRule, **_):
    """Cancel all still-open iterations before their rule is deleted.

    The FK from iteration to rule is SET_NULL, so iterations outlive the
    rule; open ones must not stay pending without a rule.
    """
    instance.lifecycleiteration_set.filter(
        Q(state=ReviewState.PENDING) | Q(state=ReviewState.OVERDUE)
    ).update(state=ReviewState.CANCELED)
|
||||
45
authentik/enterprise/lifecycle/tasks.py
Normal file
45
authentik/enterprise/lifecycle/tasks.py
Normal file
@@ -0,0 +1,45 @@
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from dramatiq import actor
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.enterprise.lifecycle.models import LifecycleRule
|
||||
from authentik.events.models import Event, Notification, NotificationTransport
|
||||
|
||||
|
||||
@actor(description=_("Dispatch tasks to validate lifecycle rules."))
def apply_lifecycle_rules():
    """Fan out one apply_lifecycle_rule task per rule (run on a daily schedule)."""
    for rule in LifecycleRule.objects.all():
        apply_lifecycle_rule.send_with_options(
            args=(rule.id,),
            rel_obj=rule,
        )
|
||||
|
||||
|
||||
@actor(description=_("Apply lifecycle rule."))
def apply_lifecycle_rule(rule_id: str):
    """Apply a single lifecycle rule by primary key.

    Silently no-ops when the rule was deleted between enqueue and execution.
    """
    matched_rule = LifecycleRule.objects.filter(pk=rule_id).first()
    if matched_rule is None:
        return
    matched_rule.apply()
|
||||
|
||||
|
||||
@actor(description=_("Send lifecycle rule notification."))
def send_notification(transport_pk: int, event_pk: str, user_pk: int, severity: str):
    """Deliver one lifecycle notification to one user via one transport.

    Every referenced row is re-fetched; the task silently exits when any of
    them was deleted between enqueue and execution.
    """
    # Check all referenced objects up front — previously the Notification was
    # constructed before the transport's existence was verified.
    event = Event.objects.filter(pk=event_pk).first()
    if not event:
        return
    user = User.objects.filter(pk=user_pk).first()
    if not user:
        return
    transport = NotificationTransport.objects.filter(pk=transport_pk).first()
    if not transport:
        return

    notification = Notification(
        severity=severity,
        body=event.summary,
        event=event,
        user=user,
        hyperlink=event.hyperlink,
        hyperlink_label=event.hyperlink_label,
    )
    transport.send(notification)
|
||||
0
authentik/enterprise/lifecycle/tests/__init__.py
Normal file
0
authentik/enterprise/lifecycle/tests/__init__.py
Normal file
425
authentik/enterprise/lifecycle/tests/test_api.py
Normal file
425
authentik/enterprise/lifecycle/tests/test_api.py
Normal file
@@ -0,0 +1,425 @@
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.urls import reverse
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import Application, Group
|
||||
from authentik.core.tests.utils import create_test_admin_user, create_test_user
|
||||
from authentik.enterprise.lifecycle.models import LifecycleIteration, LifecycleRule, ReviewState
|
||||
from authentik.enterprise.reports.tests.utils import patch_license
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
|
||||
@patch_license
|
||||
class TestLifecycleRuleAPI(APITestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.user = create_test_admin_user()
|
||||
self.client.force_login(self.user)
|
||||
self.app = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
self.content_type = ContentType.objects.get_for_model(Application)
|
||||
self.reviewer_group = Group.objects.create(name=generate_id())
|
||||
|
||||
def test_list_rules(self):
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
)
|
||||
rule.reviewer_groups.add(self.reviewer_group)
|
||||
|
||||
response = self.client.get(reverse("authentik_api:lifecyclerule-list"))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertGreaterEqual(len(response.data["results"]), 1)
|
||||
|
||||
def test_create_rule_with_reviewer_group(self):
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:lifecyclerule-list"),
|
||||
{
|
||||
"name": generate_id(),
|
||||
"content_type": f"{self.content_type.app_label}.{self.content_type.model}",
|
||||
"object_id": str(self.app.pk),
|
||||
"interval": "days=30",
|
||||
"grace_period": "days=10",
|
||||
"reviewer_groups": [str(self.reviewer_group.pk)],
|
||||
"reviewers": [],
|
||||
"min_reviewers": 1,
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
self.assertEqual(response.data["object_id"], str(self.app.pk))
|
||||
self.assertEqual(response.data["interval"], "days=30")
|
||||
|
||||
def test_create_rule_with_explicit_reviewer(self):
|
||||
reviewer = create_test_user()
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:lifecyclerule-list"),
|
||||
{
|
||||
"name": generate_id(),
|
||||
"content_type": f"{self.content_type.app_label}.{self.content_type.model}",
|
||||
"object_id": str(self.app.pk),
|
||||
"interval": "days=60",
|
||||
"grace_period": "days=15",
|
||||
"reviewer_groups": [],
|
||||
"reviewers": [str(reviewer.uuid)],
|
||||
"min_reviewers": 1,
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
self.assertIn(reviewer.uuid, response.data["reviewers"])
|
||||
|
||||
def test_create_rule_type_level(self):
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:lifecyclerule-list"),
|
||||
{
|
||||
"name": generate_id(),
|
||||
"content_type": f"{self.content_type.app_label}.{self.content_type.model}",
|
||||
"object_id": None,
|
||||
"interval": "days=90",
|
||||
"grace_period": "days=30",
|
||||
"reviewer_groups": [str(self.reviewer_group.pk)],
|
||||
"reviewers": [],
|
||||
"min_reviewers": 1,
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
self.assertIsNone(response.data["object_id"])
|
||||
|
||||
def test_create_rule_fails_without_reviewers(self):
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:lifecyclerule-list"),
|
||||
{
|
||||
"name": generate_id(),
|
||||
"content_type": f"{self.content_type.app_label}.{self.content_type.model}",
|
||||
"object_id": str(self.app.pk),
|
||||
"interval": "days=30",
|
||||
"grace_period": "days=10",
|
||||
"reviewer_groups": [],
|
||||
"reviewers": [],
|
||||
"min_reviewers": 1,
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
|
||||
def test_create_rule_fails_grace_period_longer_than_interval(self):
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:lifecyclerule-list"),
|
||||
{
|
||||
"name": generate_id(),
|
||||
"content_type": f"{self.content_type.app_label}.{self.content_type.model}",
|
||||
"object_id": str(self.app.pk),
|
||||
"interval": "days=10",
|
||||
"grace_period": "days=30",
|
||||
"reviewer_groups": [str(self.reviewer_group.pk)],
|
||||
"reviewers": [],
|
||||
"min_reviewers": 1,
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertIn("grace_period", response.data)
|
||||
|
||||
def test_create_rule_fails_invalid_object_id(self):
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:lifecyclerule-list"),
|
||||
{
|
||||
"name": generate_id(),
|
||||
"content_type": f"{self.content_type.app_label}.{self.content_type.model}",
|
||||
"object_id": "00000000-0000-0000-0000-000000000000",
|
||||
"interval": "days=30",
|
||||
"grace_period": "days=10",
|
||||
"reviewer_groups": [str(self.reviewer_group.pk)],
|
||||
"reviewers": [],
|
||||
"min_reviewers": 1,
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertIn("object_id", response.data)
|
||||
|
||||
def test_retrieve_rule(self):
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
)
|
||||
rule.reviewer_groups.add(self.reviewer_group)
|
||||
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:lifecyclerule-detail", kwargs={"pk": rule.pk})
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertEqual(response.data["id"], str(rule.pk))
|
||||
|
||||
def test_update_rule(self):
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
interval="days=30",
|
||||
)
|
||||
rule.reviewer_groups.add(self.reviewer_group)
|
||||
|
||||
response = self.client.patch(
|
||||
reverse("authentik_api:lifecyclerule-detail", kwargs={"pk": rule.pk}),
|
||||
{"interval": "days=60"},
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertEqual(response.data["interval"], "days=60")
|
||||
|
||||
def test_delete_rule(self):
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
)
|
||||
rule.reviewer_groups.add(self.reviewer_group)
|
||||
|
||||
response = self.client.delete(
|
||||
reverse("authentik_api:lifecyclerule-detail", kwargs={"pk": rule.pk})
|
||||
)
|
||||
self.assertEqual(response.status_code, 204)
|
||||
self.assertFalse(LifecycleRule.objects.filter(pk=rule.pk).exists())
|
||||
|
||||
|
||||
@patch_license
|
||||
class TestIterationAPI(APITestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.user = create_test_admin_user()
|
||||
self.client.force_login(self.user)
|
||||
self.app = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
self.content_type = ContentType.objects.get_for_model(Application)
|
||||
self.reviewer_group = Group.objects.create(name=generate_id())
|
||||
self.reviewer_group.users.add(self.user)
|
||||
|
||||
def test_open_iterations(self):
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
)
|
||||
rule.reviewer_groups.add(self.reviewer_group)
|
||||
|
||||
response = self.client.get(reverse("authentik_api:lifecycleiteration-open-iterations"))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertGreaterEqual(len(response.data["results"]), 1)
|
||||
|
||||
for iteration in response.data["results"]:
|
||||
self.assertEqual(iteration["state"], ReviewState.PENDING)
|
||||
|
||||
def test_open_iterations_filter_user_is_reviewer(self):
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
)
|
||||
rule.reviewer_groups.add(self.reviewer_group)
|
||||
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:lifecycleiteration-open-iterations"),
|
||||
{"user_is_reviewer": "true"},
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
# User is in reviewer_group, so should see the iteration
|
||||
self.assertGreaterEqual(len(response.data["results"]), 1)
|
||||
|
||||
def test_latest_iteration(self):
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
)
|
||||
rule.reviewer_groups.add(self.reviewer_group)
|
||||
|
||||
response = self.client.get(
|
||||
reverse(
|
||||
"authentik_api:lifecycleiteration-latest-iteration",
|
||||
kwargs={
|
||||
"content_type": f"{self.content_type.app_label}.{self.content_type.model}",
|
||||
"object_id": str(self.app.pk),
|
||||
},
|
||||
)
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertEqual(response.data["object_id"], str(self.app.pk))
|
||||
|
||||
def test_latest_iteration_not_found(self):
|
||||
response = self.client.get(
|
||||
reverse(
|
||||
"authentik_api:lifecycleiteration-latest-iteration",
|
||||
kwargs={
|
||||
"content_type": f"{self.content_type.app_label}.{self.content_type.model}",
|
||||
"object_id": "00000000-0000-0000-0000-000000000000",
|
||||
},
|
||||
)
|
||||
)
|
||||
self.assertEqual(response.status_code, 404)
|
||||
|
||||
def test_iteration_includes_user_can_review(self):
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
)
|
||||
rule.reviewer_groups.add(self.reviewer_group)
|
||||
|
||||
response = self.client.get(reverse("authentik_api:lifecycleiteration-open-iterations"))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertGreaterEqual(len(response.data["results"]), 1)
|
||||
# user_can_review should be present
|
||||
self.assertIn("user_can_review", response.data["results"][0])
|
||||
|
||||
|
||||
@patch_license
|
||||
class TestReviewAPI(APITestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.user = create_test_admin_user()
|
||||
self.client.force_login(self.user)
|
||||
self.app = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
self.content_type = ContentType.objects.get_for_model(Application)
|
||||
self.reviewer_group = Group.objects.create(name=generate_id())
|
||||
self.reviewer_group.users.add(self.user)
|
||||
|
||||
def test_create_review(self):
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
min_reviewers=1,
|
||||
)
|
||||
rule.reviewer_groups.add(self.reviewer_group)
|
||||
|
||||
# Get the auto-created iteration
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=self.content_type, object_id=str(self.app.pk), rule=rule
|
||||
)
|
||||
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:review-list"),
|
||||
{
|
||||
"iteration": str(iteration.pk),
|
||||
"note": "Reviewed and approved",
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
self.assertEqual(response.data["iteration"], iteration.pk)
|
||||
self.assertEqual(response.data["note"], "Reviewed and approved")
|
||||
self.assertEqual(response.data["reviewer"]["pk"], self.user.pk)
|
||||
|
||||
def test_create_review_completes_iteration(self):
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
min_reviewers=1,
|
||||
)
|
||||
rule.reviewer_groups.add(self.reviewer_group)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=self.content_type, object_id=str(self.app.pk), rule=rule
|
||||
)
|
||||
self.assertEqual(iteration.state, ReviewState.PENDING)
|
||||
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:review-list"),
|
||||
{
|
||||
"iteration": str(iteration.pk),
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.REVIEWED)
|
||||
|
||||
def test_create_review_sets_reviewer_from_request(self):
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
min_reviewers=1,
|
||||
)
|
||||
rule.reviewer_groups.add(self.reviewer_group)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=self.content_type, object_id=str(self.app.pk), rule=rule
|
||||
)
|
||||
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:review-list"),
|
||||
{
|
||||
"iteration": str(iteration.pk),
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
# Reviewer should be the logged-in user
|
||||
self.assertEqual(response.data["reviewer"]["pk"], self.user.pk)
|
||||
|
||||
def test_non_reviewer_cannot_review(self):
|
||||
other_group = Group.objects.create(name=generate_id())
|
||||
other_user = create_test_user()
|
||||
other_group.users.add(other_user)
|
||||
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
min_reviewers=1,
|
||||
)
|
||||
rule.reviewer_groups.add(other_group)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=self.content_type, object_id=str(self.app.pk), rule=rule
|
||||
)
|
||||
|
||||
# Current user is not in the reviewer group
|
||||
self.assertFalse(iteration.user_can_review(self.user))
|
||||
|
||||
def test_non_reviewer_review_via_api_rejected(self):
|
||||
other_group = Group.objects.create(name=generate_id())
|
||||
other_user = create_test_user()
|
||||
other_group.users.add(other_user)
|
||||
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
min_reviewers=1,
|
||||
)
|
||||
rule.reviewer_groups.add(other_group)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=self.content_type, object_id=str(self.app.pk), rule=rule
|
||||
)
|
||||
|
||||
# Current user (self.user) is NOT in the reviewer group
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:review-list"),
|
||||
{"iteration": str(iteration.pk)},
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
|
||||
def test_duplicate_review_via_api_rejected(self):
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
min_reviewers=2,
|
||||
)
|
||||
rule.reviewer_groups.add(self.reviewer_group)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=self.content_type, object_id=str(self.app.pk), rule=rule
|
||||
)
|
||||
|
||||
# First review should succeed
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:review-list"),
|
||||
{"iteration": str(iteration.pk)},
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
|
||||
# Second review by same user should be rejected
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:review-list"),
|
||||
{"iteration": str(iteration.pk)},
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
845
authentik/enterprise/lifecycle/tests/test_models.py
Normal file
845
authentik/enterprise/lifecycle/tests/test_models.py
Normal file
@@ -0,0 +1,845 @@
|
||||
import datetime as dt
|
||||
from datetime import timedelta
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.test import RequestFactory, TestCase
|
||||
from django.utils import timezone
|
||||
|
||||
from authentik.core.models import Application, Group
|
||||
from authentik.core.tests.utils import create_test_user
|
||||
from authentik.enterprise.lifecycle.models import (
|
||||
LifecycleIteration,
|
||||
LifecycleRule,
|
||||
Review,
|
||||
ReviewState,
|
||||
)
|
||||
from authentik.events.models import (
|
||||
Event,
|
||||
EventAction,
|
||||
NotificationSeverity,
|
||||
NotificationTransport,
|
||||
)
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.rbac.models import Role
|
||||
|
||||
|
||||
class TestLifecycleModels(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.factory = RequestFactory()
|
||||
|
||||
def _get_request(self):
|
||||
return self.factory.get("/")
|
||||
|
||||
def _create_object(self, model):
|
||||
if model is Application:
|
||||
return Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
if model is Role:
|
||||
return Role.objects.create(name=generate_id())
|
||||
if model is Group:
|
||||
return Group.objects.create(name=generate_id())
|
||||
raise AssertionError(f"Unsupported model {model}")
|
||||
|
||||
def _create_rule_for_object(self, obj, **kwargs) -> LifecycleRule:
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
return LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=str(obj.pk),
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
def _create_rule_for_type(self, model, **kwargs) -> LifecycleRule:
|
||||
content_type = ContentType.objects.get_for_model(model)
|
||||
return LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=None,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
def test_iteration_start_supported_objects(self):
|
||||
"""Ensure iterations are automatically started for applications, roles, and groups."""
|
||||
for model in (Application, Role, Group):
|
||||
with self.subTest(model=model.__name__):
|
||||
obj = self._create_object(model)
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
before_events = Event.objects.filter(action=EventAction.REVIEW_INITIATED).count()
|
||||
|
||||
rule = self._create_rule_for_object(obj)
|
||||
|
||||
# Verify iteration was created automatically
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
self.assertEqual(iteration.state, ReviewState.PENDING)
|
||||
self.assertEqual(iteration.object, obj)
|
||||
self.assertEqual(iteration.rule, rule)
|
||||
self.assertEqual(
|
||||
Event.objects.filter(action=EventAction.REVIEW_INITIATED).count(),
|
||||
before_events + 1,
|
||||
)
|
||||
|
||||
def test_review_requires_all_explicit_reviewers(self):
|
||||
obj = Group.objects.create(name=generate_id())
|
||||
rule = self._create_rule_for_object(obj)
|
||||
reviewer_one = create_test_user()
|
||||
reviewer_two = create_test_user()
|
||||
rule.reviewers.add(reviewer_one, reviewer_two)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
request = self._get_request()
|
||||
|
||||
Review.objects.create(iteration=iteration, reviewer=reviewer_one)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.PENDING)
|
||||
|
||||
Review.objects.create(iteration=iteration, reviewer=reviewer_two)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.REVIEWED)
|
||||
self.assertTrue(Event.objects.filter(action=EventAction.REVIEW_COMPLETED).exists())
|
||||
|
||||
def test_review_min_reviewers_from_groups(self):
|
||||
"""Group-based reviews complete once the minimum number of reviewers review."""
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj, min_reviewers=2)
|
||||
|
||||
reviewer_group = Group.objects.create(name=generate_id())
|
||||
reviewer_one = create_test_user()
|
||||
reviewer_two = create_test_user()
|
||||
reviewer_group.users.add(reviewer_one, reviewer_two)
|
||||
rule.reviewer_groups.add(reviewer_group)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
request = self._get_request()
|
||||
|
||||
Review.objects.create(iteration=iteration, reviewer=reviewer_one)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.PENDING)
|
||||
|
||||
Review.objects.create(iteration=iteration, reviewer=reviewer_two)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.REVIEWED)
|
||||
|
||||
def test_review_explicit_and_group_reviewers(self):
|
||||
"""Reviews require both explicit reviewers AND min_reviewers from groups."""
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj, min_reviewers=1)
|
||||
|
||||
reviewer_group = Group.objects.create(name=generate_id())
|
||||
group_member = create_test_user()
|
||||
reviewer_group.users.add(group_member)
|
||||
rule.reviewer_groups.add(reviewer_group)
|
||||
|
||||
explicit_reviewer = create_test_user()
|
||||
rule.reviewers.add(explicit_reviewer)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
request = self._get_request()
|
||||
|
||||
# Only group member reviews - not satisfied (explicit reviewer missing)
|
||||
Review.objects.create(iteration=iteration, reviewer=group_member)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.PENDING)
|
||||
|
||||
# Explicit reviewer reviews - now satisfied
|
||||
Review.objects.create(iteration=iteration, reviewer=explicit_reviewer)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.REVIEWED)
|
||||
|
||||
def test_review_min_reviewers_per_group(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj, min_reviewers=1, min_reviewers_is_per_group=True)
|
||||
|
||||
group_one = Group.objects.create(name=generate_id())
|
||||
group_two = Group.objects.create(name=generate_id())
|
||||
member_group_one = create_test_user()
|
||||
member_group_two = create_test_user()
|
||||
group_one.users.add(member_group_one)
|
||||
group_two.users.add(member_group_two)
|
||||
rule.reviewer_groups.add(group_one, group_two)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
request = self._get_request()
|
||||
|
||||
# Only member from group_one reviews - not satisfied (need member from each group)
|
||||
Review.objects.create(iteration=iteration, reviewer=member_group_one)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.PENDING)
|
||||
|
||||
# Member from group_two reviews - now satisfied
|
||||
Review.objects.create(iteration=iteration, reviewer=member_group_two)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.REVIEWED)
|
||||
|
||||
def test_review_reviewers_from_child_groups(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj, min_reviewers=1)
|
||||
|
||||
parent_group = Group.objects.create(name=generate_id())
|
||||
child_group = Group.objects.create(name=generate_id())
|
||||
child_group.parents.add(parent_group)
|
||||
|
||||
child_member = create_test_user()
|
||||
child_group.users.add(child_member)
|
||||
|
||||
rule.reviewer_groups.add(parent_group)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
request = self._get_request()
|
||||
|
||||
# Child group member should be able to review
|
||||
self.assertTrue(iteration.user_can_review(child_member))
|
||||
|
||||
Review.objects.create(iteration=iteration, reviewer=child_member)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.REVIEWED)
|
||||
|
||||
def test_review_reviewers_from_nested_child_groups(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj, min_reviewers=2)
|
||||
|
||||
grandparent = Group.objects.create(name=generate_id())
|
||||
parent = Group.objects.create(name=generate_id())
|
||||
child = Group.objects.create(name=generate_id())
|
||||
parent.parents.add(grandparent)
|
||||
child.parents.add(parent)
|
||||
|
||||
parent_member = create_test_user()
|
||||
child_member = create_test_user()
|
||||
parent.users.add(parent_member)
|
||||
child.users.add(child_member)
|
||||
|
||||
rule.reviewer_groups.add(grandparent)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
request = self._get_request()
|
||||
|
||||
# Both nested members should be able to review
|
||||
self.assertTrue(iteration.user_can_review(parent_member))
|
||||
self.assertTrue(iteration.user_can_review(child_member))
|
||||
|
||||
Review.objects.create(iteration=iteration, reviewer=parent_member)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.PENDING)
|
||||
|
||||
Review.objects.create(iteration=iteration, reviewer=child_member)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.REVIEWED)
|
||||
|
||||
def test_notify_reviewers_send_once(self):
|
||||
obj = Group.objects.create(name=generate_id())
|
||||
rule = self._create_rule_for_object(obj)
|
||||
|
||||
reviewer_one = create_test_user()
|
||||
reviewer_two = create_test_user()
|
||||
rule.reviewers.add(reviewer_one, reviewer_two)
|
||||
|
||||
transport_once = NotificationTransport.objects.create(
|
||||
name=generate_id(),
|
||||
send_once=True,
|
||||
)
|
||||
transport_all = NotificationTransport.objects.create(
|
||||
name=generate_id(),
|
||||
send_once=False,
|
||||
)
|
||||
rule.notification_transports.add(transport_once, transport_all)
|
||||
|
||||
event = Event.new(EventAction.REVIEW_INITIATED, target=obj)
|
||||
event.save()
|
||||
|
||||
with patch(
|
||||
"authentik.enterprise.lifecycle.tasks.send_notification.send_with_options"
|
||||
) as send_with_options:
|
||||
rule.notify_reviewers(event, NotificationSeverity.NOTICE)
|
||||
|
||||
reviewer_pks = {reviewer_one.pk, reviewer_two.pk}
|
||||
self.assertEqual(send_with_options.call_count, len(reviewer_pks) + 1)
|
||||
|
||||
calls = [call.kwargs["args"] for call in send_with_options.call_args_list]
|
||||
once_calls = [args for args in calls if args[0] == transport_once.pk]
|
||||
all_calls = [args for args in calls if args[0] == transport_all.pk]
|
||||
|
||||
self.assertEqual(len(once_calls), 1)
|
||||
self.assertEqual(len(all_calls), len(reviewer_pks))
|
||||
self.assertIn(once_calls[0][2], reviewer_pks)
|
||||
self.assertEqual({args[2] for args in all_calls}, reviewer_pks)
|
||||
|
||||
def test_apply_marks_overdue_and_opens_due_reviews(self):
|
||||
app_one = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
app_two = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
content_type = ContentType.objects.get_for_model(Application)
|
||||
|
||||
rule_overdue = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=str(app_one.pk),
|
||||
interval="days=365",
|
||||
grace_period="days=10",
|
||||
)
|
||||
|
||||
# Get the automatically created iteration and backdate it past the grace period
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(app_one.pk), rule=rule_overdue
|
||||
)
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=(timezone.now() - timedelta(days=20))
|
||||
)
|
||||
|
||||
# Apply again to trigger overdue logic
|
||||
rule_overdue.apply()
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.OVERDUE)
|
||||
self.assertEqual(
|
||||
LifecycleIteration.objects.filter(
|
||||
content_type=content_type, object_id=str(app_one.pk)
|
||||
).count(),
|
||||
1,
|
||||
)
|
||||
|
||||
LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=str(app_two.pk),
|
||||
interval="days=30",
|
||||
grace_period="days=10",
|
||||
)
|
||||
self.assertEqual(
|
||||
LifecycleIteration.objects.filter(
|
||||
content_type=content_type, object_id=str(app_two.pk)
|
||||
).count(),
|
||||
1,
|
||||
)
|
||||
new_iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(app_two.pk)
|
||||
)
|
||||
self.assertEqual(new_iteration.state, ReviewState.PENDING)
|
||||
|
||||
def test_apply_idempotent(self):
|
||||
app_due = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
app_overdue = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
content_type = ContentType.objects.get_for_model(Application)
|
||||
|
||||
initiated_before = Event.objects.filter(action=EventAction.REVIEW_INITIATED).count()
|
||||
overdue_before = Event.objects.filter(action=EventAction.REVIEW_OVERDUE).count()
|
||||
|
||||
rule_due = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=str(app_due.pk),
|
||||
interval="days=30",
|
||||
grace_period="days=30",
|
||||
)
|
||||
reviewer = create_test_user()
|
||||
rule_due.reviewers.add(reviewer)
|
||||
transport = NotificationTransport.objects.create(name=generate_id())
|
||||
rule_due.notification_transports.add(transport)
|
||||
|
||||
rule_overdue = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=str(app_overdue.pk),
|
||||
interval="days=365",
|
||||
grace_period="days=10",
|
||||
)
|
||||
|
||||
overdue_iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(app_overdue.pk), rule=rule_overdue
|
||||
)
|
||||
LifecycleIteration.objects.filter(pk=overdue_iteration.pk).update(
|
||||
opened_on=(timezone.now() - timedelta(days=20))
|
||||
)
|
||||
|
||||
# Apply overdue rule to mark iteration as overdue
|
||||
rule_overdue.apply()
|
||||
|
||||
due_iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(app_due.pk)
|
||||
)
|
||||
overdue_iteration.refresh_from_db()
|
||||
self.assertEqual(due_iteration.state, ReviewState.PENDING)
|
||||
self.assertEqual(overdue_iteration.state, ReviewState.OVERDUE)
|
||||
|
||||
initiated_after_first = Event.objects.filter(action=EventAction.REVIEW_INITIATED).count()
|
||||
overdue_after_first = Event.objects.filter(action=EventAction.REVIEW_OVERDUE).count()
|
||||
# Both rules created iterations on save
|
||||
self.assertEqual(initiated_after_first, initiated_before + 2)
|
||||
self.assertEqual(overdue_after_first, overdue_before + 1)
|
||||
|
||||
# Apply again - should be idempotent
|
||||
rule_due.apply()
|
||||
rule_overdue.apply()
|
||||
|
||||
due_iteration.refresh_from_db()
|
||||
overdue_iteration.refresh_from_db()
|
||||
self.assertEqual(due_iteration.state, ReviewState.PENDING)
|
||||
self.assertEqual(overdue_iteration.state, ReviewState.OVERDUE)
|
||||
self.assertEqual(
|
||||
Event.objects.filter(action=EventAction.REVIEW_INITIATED).count(),
|
||||
initiated_after_first,
|
||||
)
|
||||
self.assertEqual(
|
||||
Event.objects.filter(action=EventAction.REVIEW_OVERDUE).count(),
|
||||
overdue_after_first,
|
||||
)
|
||||
|
||||
def test_rule_matches_entire_type(self):
|
||||
"""A rule with object_id=None matches all objects of that type."""
|
||||
app_one = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
app_two = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
content_type = ContentType.objects.get_for_model(Application)
|
||||
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=None,
|
||||
interval="days=30",
|
||||
grace_period="days=10",
|
||||
)
|
||||
|
||||
objects = list(rule.get_objects())
|
||||
self.assertIn(app_one, objects)
|
||||
self.assertIn(app_two, objects)
|
||||
|
||||
def test_rule_type_excludes_objects_with_specific_rules(self):
|
||||
app_with_rule = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
app_without_rule = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
content_type = ContentType.objects.get_for_model(Application)
|
||||
|
||||
# Create a specific rule for app_with_rule
|
||||
LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=str(app_with_rule.pk),
|
||||
interval="days=30",
|
||||
)
|
||||
|
||||
# Create a type-level rule
|
||||
type_rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=None,
|
||||
interval="days=60",
|
||||
)
|
||||
|
||||
objects = list(type_rule.get_objects())
|
||||
self.assertNotIn(app_with_rule, objects)
|
||||
self.assertIn(app_without_rule, objects)
|
||||
|
||||
def test_rule_type_apply_creates_iterations_for_all_objects(self):
|
||||
app_one = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
app_two = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
content_type = ContentType.objects.get_for_model(Application)
|
||||
|
||||
LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=None,
|
||||
interval="days=30",
|
||||
grace_period="days=10",
|
||||
)
|
||||
|
||||
self.assertTrue(
|
||||
LifecycleIteration.objects.filter(
|
||||
content_type=content_type, object_id=str(app_one.pk)
|
||||
).exists()
|
||||
)
|
||||
self.assertTrue(
|
||||
LifecycleIteration.objects.filter(
|
||||
content_type=content_type, object_id=str(app_two.pk)
|
||||
).exists()
|
||||
)
|
||||
|
||||
def test_delete_rule_cancels_open_iterations(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
|
||||
rule = self._create_rule_for_object(obj)
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
pending_iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
self.assertEqual(pending_iteration.state, ReviewState.PENDING)
|
||||
|
||||
overdue_iteration = LifecycleIteration.objects.create(
|
||||
content_type=content_type,
|
||||
object_id=str(obj.pk),
|
||||
rule=rule,
|
||||
state=ReviewState.OVERDUE,
|
||||
)
|
||||
reviewed_iteration = LifecycleIteration.objects.create(
|
||||
content_type=content_type,
|
||||
object_id=str(obj.pk),
|
||||
rule=rule,
|
||||
state=ReviewState.REVIEWED,
|
||||
)
|
||||
|
||||
rule.delete()
|
||||
|
||||
pending_iteration.refresh_from_db()
|
||||
overdue_iteration.refresh_from_db()
|
||||
reviewed_iteration.refresh_from_db()
|
||||
|
||||
self.assertEqual(pending_iteration.state, ReviewState.CANCELED)
|
||||
self.assertEqual(overdue_iteration.state, ReviewState.CANCELED)
|
||||
self.assertEqual(reviewed_iteration.state, ReviewState.REVIEWED) # Not affected
|
||||
|
||||
def test_update_rule_target_cancels_stale_iterations(self):
|
||||
app_one = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
app_two = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
content_type = ContentType.objects.get_for_model(Application)
|
||||
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=str(app_one.pk),
|
||||
interval="days=30",
|
||||
)
|
||||
|
||||
iteration_for_app_one = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(app_one.pk), rule=rule
|
||||
)
|
||||
self.assertEqual(iteration_for_app_one.state, ReviewState.PENDING)
|
||||
|
||||
# Change rule target to app_two - save() triggers apply() which cancels stale iterations
|
||||
rule.object_id = str(app_two.pk)
|
||||
rule.save()
|
||||
|
||||
iteration_for_app_one.refresh_from_db()
|
||||
self.assertEqual(iteration_for_app_one.state, ReviewState.CANCELED)
|
||||
|
||||
def test_update_rule_content_type_cancels_stale_iterations(self):
|
||||
app = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
group = Group.objects.create(name=generate_id())
|
||||
app_content_type = ContentType.objects.get_for_model(Application)
|
||||
group_content_type = ContentType.objects.get_for_model(Group)
|
||||
|
||||
# Creating rule triggers automatic apply() which creates a iteration for app
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=app_content_type,
|
||||
object_id=str(app.pk),
|
||||
interval="days=30",
|
||||
)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=app_content_type, object_id=str(app.pk), rule=rule
|
||||
)
|
||||
self.assertEqual(iteration.state, ReviewState.PENDING)
|
||||
|
||||
# Change content type to Group - save() triggers apply() which cancels stale iterations
|
||||
rule.content_type = group_content_type
|
||||
rule.object_id = str(group.pk)
|
||||
rule.save()
|
||||
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.CANCELED)
|
||||
|
||||
def test_user_can_review_checks_group_hierarchy(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj)
|
||||
|
||||
parent_group = Group.objects.create(name=generate_id())
|
||||
child_group = Group.objects.create(name=generate_id())
|
||||
child_group.parents.add(parent_group)
|
||||
|
||||
parent_member = create_test_user()
|
||||
child_member = create_test_user()
|
||||
non_member = create_test_user()
|
||||
parent_group.users.add(parent_member)
|
||||
child_group.users.add(child_member)
|
||||
|
||||
rule.reviewer_groups.add(parent_group)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
# iteration is created automatically when rule is saved
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
|
||||
self.assertTrue(iteration.user_can_review(parent_member))
|
||||
self.assertTrue(iteration.user_can_review(child_member))
|
||||
self.assertFalse(iteration.user_can_review(non_member))
|
||||
|
||||
def test_user_cannot_review_twice(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj)
|
||||
reviewer = create_test_user()
|
||||
rule.reviewers.add(reviewer)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
# iteration is created automatically when rule is saved
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
|
||||
self.assertTrue(iteration.user_can_review(reviewer))
|
||||
|
||||
Review.objects.create(iteration=iteration, reviewer=reviewer)
|
||||
|
||||
self.assertFalse(iteration.user_can_review(reviewer))
|
||||
|
||||
def test_user_cannot_review_completed_iteration(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj)
|
||||
reviewer = create_test_user()
|
||||
rule.reviewers.add(reviewer)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
# Get the automatically created pending iteration and test with different states
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
|
||||
for state in (ReviewState.REVIEWED, ReviewState.CANCELED):
|
||||
iteration.state = state
|
||||
iteration.save()
|
||||
self.assertFalse(iteration.user_can_review(reviewer))
|
||||
|
||||
def test_get_reviewers_includes_child_group_members(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj)
|
||||
|
||||
parent_group = Group.objects.create(name=generate_id())
|
||||
child_group = Group.objects.create(name=generate_id())
|
||||
child_group.parents.add(parent_group)
|
||||
|
||||
parent_member = create_test_user()
|
||||
child_member = create_test_user()
|
||||
parent_group.users.add(parent_member)
|
||||
child_group.users.add(child_member)
|
||||
|
||||
rule.reviewer_groups.add(parent_group)
|
||||
|
||||
reviewers = list(rule.get_reviewers())
|
||||
self.assertIn(parent_member, reviewers)
|
||||
self.assertIn(child_member, reviewers)
|
||||
|
||||
def test_get_reviewers_includes_explicit_reviewers(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj)
|
||||
|
||||
explicit_reviewer = create_test_user()
|
||||
rule.reviewers.add(explicit_reviewer)
|
||||
|
||||
group = Group.objects.create(name=generate_id())
|
||||
group_member = create_test_user()
|
||||
group.users.add(group_member)
|
||||
rule.reviewer_groups.add(group)
|
||||
|
||||
reviewers = list(rule.get_reviewers())
|
||||
self.assertIn(explicit_reviewer, reviewers)
|
||||
self.assertIn(group_member, reviewers)
|
||||
|
||||
|
||||
class TestLifecycleDateBoundaries(TestCase):
|
||||
"""Verify that start_of_day normalization ensures correct overdue/due
|
||||
detection regardless of exact task execution time within a day.
|
||||
|
||||
The daily task may run at any point during the day. The start_of_day
|
||||
normalization in _get_newly_overdue_iterations and _get_newly_due_objects
|
||||
ensures that the boundary is always at midnight, so millisecond variations
|
||||
in task execution time do not affect results."""
|
||||
|
||||
def _create_rule_and_iteration(self, grace_period="days=1", interval="days=365"):
|
||||
app = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
content_type = ContentType.objects.get_for_model(Application)
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=str(app.pk),
|
||||
interval=interval,
|
||||
grace_period=grace_period,
|
||||
)
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(app.pk), rule=rule
|
||||
)
|
||||
return app, rule, iteration
|
||||
|
||||
def test_overdue_iteration_opened_yesterday(self):
|
||||
"""grace_period=1 day: iteration opened yesterday at any time is overdue today."""
|
||||
_, rule, iteration = self._create_rule_and_iteration(grace_period="days=1")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
for opened_on in [
|
||||
dt.datetime(2025, 6, 14, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 14, 12, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 14, 23, 59, 59, 999999, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(opened_on=opened_on):
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=opened_on, state=ReviewState.PENDING
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertIn(iteration, list(rule._get_newly_overdue_iterations()))
|
||||
|
||||
def test_not_overdue_iteration_opened_today(self):
|
||||
"""grace_period=1 day: iteration opened today at any time is NOT overdue."""
|
||||
_, rule, iteration = self._create_rule_and_iteration(grace_period="days=1")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
for opened_on in [
|
||||
dt.datetime(2025, 6, 15, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 23, 59, 59, 999999, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(opened_on=opened_on):
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=opened_on, state=ReviewState.PENDING
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertNotIn(iteration, list(rule._get_newly_overdue_iterations()))
|
||||
|
||||
def test_overdue_independent_of_task_execution_time(self):
|
||||
"""Overdue detection gives the same result whether the task runs at 00:00:01 or 23:59:59."""
|
||||
_, rule, iteration = self._create_rule_and_iteration(grace_period="days=1")
|
||||
opened_on = dt.datetime(2025, 6, 14, 18, 0, 0, tzinfo=dt.UTC)
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=opened_on, state=ReviewState.PENDING
|
||||
)
|
||||
for task_time in [
|
||||
dt.datetime(2025, 6, 15, 0, 0, 1, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 12, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 23, 59, 59, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(task_time=task_time):
|
||||
with patch("django.utils.timezone.now", return_value=task_time):
|
||||
self.assertIn(iteration, list(rule._get_newly_overdue_iterations()))
|
||||
|
||||
def test_overdue_boundary_multi_day_grace_period(self):
|
||||
"""grace_period=30 days: overdue after 30 full days, not after 29."""
|
||||
_, rule, iteration = self._create_rule_and_iteration(grace_period="days=30")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
|
||||
# Opened 30 days ago (May 16), should go overdue
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=dt.datetime(2025, 5, 16, 12, 0, 0, tzinfo=dt.UTC),
|
||||
state=ReviewState.PENDING,
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertIn(iteration, list(rule._get_newly_overdue_iterations()))
|
||||
|
||||
# Opened 29 days ago (May 17), should NOT go overdue
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=dt.datetime(2025, 5, 17, 12, 0, 0, tzinfo=dt.UTC),
|
||||
state=ReviewState.PENDING,
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertNotIn(iteration, list(rule._get_newly_overdue_iterations()))
|
||||
|
||||
def test_due_object_iteration_opened_yesterday(self):
|
||||
"""interval=1 day: object with iteration opened yesterday is due for a new review."""
|
||||
app, rule, iteration = self._create_rule_and_iteration(interval="days=1")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
for opened_on in [
|
||||
dt.datetime(2025, 6, 14, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 14, 12, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 14, 23, 59, 59, 999999, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(opened_on=opened_on):
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(opened_on=opened_on)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertIn(app, list(rule._get_newly_due_objects()))
|
||||
|
||||
def test_not_due_object_iteration_opened_today(self):
|
||||
"""interval=1 day: object with iteration opened today is NOT due."""
|
||||
app, rule, iteration = self._create_rule_and_iteration(interval="days=1")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
for opened_on in [
|
||||
dt.datetime(2025, 6, 15, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 23, 59, 59, 999999, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(opened_on=opened_on):
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(opened_on=opened_on)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertNotIn(app, list(rule._get_newly_due_objects()))
|
||||
|
||||
def test_due_independent_of_task_execution_time(self):
|
||||
"""Due detection gives the same result whether the task runs at 00:00:01 or 23:59:59."""
|
||||
app, rule, iteration = self._create_rule_and_iteration(interval="days=1")
|
||||
opened_on = dt.datetime(2025, 6, 14, 18, 0, 0, tzinfo=dt.UTC)
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(opened_on=opened_on)
|
||||
for task_time in [
|
||||
dt.datetime(2025, 6, 15, 0, 0, 1, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 12, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 23, 59, 59, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(task_time=task_time):
|
||||
with patch("django.utils.timezone.now", return_value=task_time):
|
||||
self.assertIn(app, list(rule._get_newly_due_objects()))
|
||||
|
||||
def test_due_boundary_multi_day_interval(self):
|
||||
"""interval=30 days: due after 30 full days, not after 29."""
|
||||
app, rule, iteration = self._create_rule_and_iteration(interval="days=30")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
|
||||
# Previous review opened 30 days ago (May 16), review is due for the object
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=dt.datetime(2025, 5, 16, 12, 0, 0, tzinfo=dt.UTC)
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertIn(app, list(rule._get_newly_due_objects()))
|
||||
|
||||
# Previous review opened 29 days ago (May 17), new review is NOT due
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=dt.datetime(2025, 5, 17, 12, 0, 0, tzinfo=dt.UTC)
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertNotIn(app, list(rule._get_newly_due_objects()))
|
||||
|
||||
def test_apply_overdue_at_boundary(self):
|
||||
"""apply() marks iteration overdue when grace period just expired,
|
||||
regardless of what time the daily task runs."""
|
||||
_, rule, iteration = self._create_rule_and_iteration(
|
||||
grace_period="days=1", interval="days=365"
|
||||
)
|
||||
opened_on = dt.datetime(2025, 6, 14, 20, 0, 0, tzinfo=dt.UTC)
|
||||
for task_time in [
|
||||
dt.datetime(2025, 6, 15, 0, 0, 1, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 23, 59, 59, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(task_time=task_time):
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=opened_on, state=ReviewState.PENDING
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=task_time):
|
||||
rule.apply()
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.OVERDUE)
|
||||
11
authentik/enterprise/lifecycle/urls.py
Normal file
11
authentik/enterprise/lifecycle/urls.py
Normal file
@@ -0,0 +1,11 @@
|
||||
"""API URLs"""
|
||||
|
||||
from authentik.enterprise.lifecycle.api.iterations import IterationViewSet
|
||||
from authentik.enterprise.lifecycle.api.reviews import ReviewViewSet
|
||||
from authentik.enterprise.lifecycle.api.rules import LifecycleRuleViewSet
|
||||
|
||||
api_urlpatterns = [
|
||||
("lifecycle/iterations", IterationViewSet),
|
||||
("lifecycle/reviews", ReviewViewSet),
|
||||
("lifecycle/rules", LifecycleRuleViewSet),
|
||||
]
|
||||
75
authentik/enterprise/lifecycle/utils.py
Normal file
75
authentik/enterprise/lifecycle/utils.py
Normal file
@@ -0,0 +1,75 @@
|
||||
from datetime import datetime
|
||||
from urllib import parse
|
||||
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.db.models import Model
|
||||
from django.urls import reverse
|
||||
from rest_framework.serializers import ChoiceField, Serializer, UUIDField
|
||||
|
||||
from authentik.core.api.utils import ModelSerializer
|
||||
from authentik.core.models import Application, Group, User
|
||||
from authentik.rbac.models import Role
|
||||
|
||||
|
||||
def parse_content_type(value: str) -> dict:
|
||||
app_label, model = value.split(".")
|
||||
return {"app_label": app_label, "model": model}
|
||||
|
||||
|
||||
def model_choices() -> list[tuple[str, str]]:
|
||||
return [
|
||||
("authentik_core.application", "Application"),
|
||||
("authentik_core.group", "Group"),
|
||||
("authentik_rbac.role", "Role"),
|
||||
]
|
||||
|
||||
|
||||
def admin_link_for_model(model: Model) -> str:
|
||||
if isinstance(model, Application):
|
||||
url = f"/core/applications/{model.slug}"
|
||||
elif isinstance(model, Group):
|
||||
url = f"/identity/groups/{model.pk}"
|
||||
elif isinstance(model, Role):
|
||||
url = f"/identity/roles/{model.pk}"
|
||||
else:
|
||||
raise TypeError("Unsupported model")
|
||||
return url + ";" + parse.quote('{"page":"page-lifecycle"}')
|
||||
|
||||
|
||||
def link_for_model(model: Model) -> str:
|
||||
return f"{reverse("authentik_core:if-admin")}#{admin_link_for_model(model)}"
|
||||
|
||||
|
||||
def start_of_day(dt: datetime) -> datetime:
|
||||
return dt.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
|
||||
|
||||
class ContentTypeField(ChoiceField):
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(choices=model_choices(), **kwargs)
|
||||
|
||||
def to_representation(self, content_type: ContentType) -> str:
|
||||
return f"{content_type.app_label}.{content_type.model}"
|
||||
|
||||
def to_internal_value(self, data: str) -> ContentType:
|
||||
return ContentType.objects.get(**parse_content_type(data))
|
||||
|
||||
|
||||
class GenericForeignKeySerializer(Serializer):
|
||||
content_type = ContentTypeField()
|
||||
object_id = UUIDField()
|
||||
|
||||
|
||||
class ReviewerGroupSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Group
|
||||
fields = [
|
||||
"pk",
|
||||
"name",
|
||||
]
|
||||
|
||||
|
||||
class ReviewerUserSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = User
|
||||
fields = ["pk", "uuid", "username", "name"]
|
||||
@@ -331,7 +331,7 @@ class GoogleWorkspaceGroupTests(TestCase):
|
||||
).exists()
|
||||
)
|
||||
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||
self.assertEqual(len(http.requests()), 5)
|
||||
self.assertEqual(len(http.requests()), 7)
|
||||
|
||||
def test_sync_discover_multiple(self):
|
||||
"""Test group discovery"""
|
||||
@@ -372,7 +372,7 @@ class GoogleWorkspaceGroupTests(TestCase):
|
||||
).exists()
|
||||
)
|
||||
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||
self.assertEqual(len(http.requests()), 5)
|
||||
self.assertEqual(len(http.requests()), 7)
|
||||
# Change response to trigger update
|
||||
http.add_response(
|
||||
f"https://admin.googleapis.com/admin/directory/v1/groups?customer=my_customer&maxResults=500&orderBy=email&key={self.api_key}&alt=json",
|
||||
|
||||
@@ -309,7 +309,7 @@ class GoogleWorkspaceUserTests(TestCase):
|
||||
).exists()
|
||||
)
|
||||
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||
self.assertEqual(len(http.requests()), 5)
|
||||
self.assertEqual(len(http.requests()), 7)
|
||||
|
||||
def test_sync_discover_multiple(self):
|
||||
"""Test user discovery, running multiple times"""
|
||||
@@ -352,7 +352,7 @@ class GoogleWorkspaceUserTests(TestCase):
|
||||
).exists()
|
||||
)
|
||||
self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
|
||||
self.assertEqual(len(http.requests()), 5)
|
||||
self.assertEqual(len(http.requests()), 7)
|
||||
# Change response, which will trigger a discovery update
|
||||
http.add_response(
|
||||
f"https://admin.googleapis.com/admin/directory/v1/users?customer=my_customer&maxResults=500&orderBy=email&key={self.api_key}&alt=json",
|
||||
|
||||
@@ -78,7 +78,8 @@ class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProv
|
||||
def create(self, user: User):
|
||||
"""Create user from scratch and create a connection object"""
|
||||
microsoft_user = self.to_schema(user, None)
|
||||
self.check_email_valid(microsoft_user.user_principal_name)
|
||||
if microsoft_user.user_principal_name:
|
||||
self.check_email_valid(microsoft_user.user_principal_name)
|
||||
with transaction.atomic():
|
||||
try:
|
||||
response = self._request(self.client.users.post(microsoft_user))
|
||||
@@ -118,7 +119,8 @@ class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProv
|
||||
def update(self, user: User, connection: MicrosoftEntraProviderUser):
|
||||
"""Update existing user"""
|
||||
microsoft_user = self.to_schema(user, connection)
|
||||
self.check_email_valid(microsoft_user.user_principal_name)
|
||||
if microsoft_user.user_principal_name:
|
||||
self.check_email_valid(microsoft_user.user_principal_name)
|
||||
response = self._request(
|
||||
self.client.users.by_user_id(connection.microsoft_id).patch(microsoft_user)
|
||||
)
|
||||
|
||||
@@ -2,10 +2,10 @@
|
||||
|
||||
from django.http import HttpRequest
|
||||
from django.urls import reverse
|
||||
from rest_framework.fields import SerializerMethodField, URLField
|
||||
from rest_framework.fields import CharField, SerializerMethodField, URLField
|
||||
|
||||
from authentik.core.api.providers import ProviderSerializer
|
||||
from authentik.core.models import Application
|
||||
from authentik.core.models import Provider
|
||||
from authentik.enterprise.api import EnterpriseRequiredMixin
|
||||
from authentik.enterprise.providers.ws_federation.models import WSFederationProvider
|
||||
from authentik.enterprise.providers.ws_federation.processors.metadata import MetadataProcessor
|
||||
@@ -16,8 +16,31 @@ class WSFederationProviderSerializer(EnterpriseRequiredMixin, SAMLProviderSerial
|
||||
"""WSFederationProvider Serializer"""
|
||||
|
||||
reply_url = URLField(source="acs_url")
|
||||
wtrealm = CharField(source="audience")
|
||||
url_wsfed = SerializerMethodField()
|
||||
wtrealm = SerializerMethodField()
|
||||
|
||||
def get_url_download_metadata(self, instance: WSFederationProvider) -> str:
|
||||
"""Get metadata download URL"""
|
||||
if "request" not in self._context:
|
||||
return ""
|
||||
request: HttpRequest = self._context["request"]._request
|
||||
try:
|
||||
return request.build_absolute_uri(
|
||||
reverse(
|
||||
"authentik_providers_ws_federation:metadata-download",
|
||||
kwargs={"application_slug": instance.application.slug},
|
||||
)
|
||||
)
|
||||
except Provider.application.RelatedObjectDoesNotExist:
|
||||
return request.build_absolute_uri(
|
||||
reverse(
|
||||
"authentik_api:wsfederationprovider-metadata",
|
||||
kwargs={
|
||||
"pk": instance.pk,
|
||||
},
|
||||
)
|
||||
+ "?download"
|
||||
)
|
||||
|
||||
def get_url_wsfed(self, instance: WSFederationProvider) -> str:
|
||||
"""Get WS-Fed url"""
|
||||
@@ -26,16 +49,11 @@ class WSFederationProviderSerializer(EnterpriseRequiredMixin, SAMLProviderSerial
|
||||
request: HttpRequest = self._context["request"]._request
|
||||
return request.build_absolute_uri(reverse("authentik_providers_ws_federation:wsfed"))
|
||||
|
||||
def get_wtrealm(self, instance: WSFederationProvider) -> str:
|
||||
try:
|
||||
return f"goauthentik.io://app/{instance.application.slug}"
|
||||
except Application.DoesNotExist:
|
||||
return None
|
||||
|
||||
class Meta(SAMLProviderSerializer.Meta):
|
||||
model = WSFederationProvider
|
||||
fields = ProviderSerializer.Meta.fields + [
|
||||
"reply_url",
|
||||
"wtrealm",
|
||||
"assertion_valid_not_before",
|
||||
"assertion_valid_not_on_or_after",
|
||||
"session_valid_not_on_or_after",
|
||||
@@ -51,7 +69,6 @@ class WSFederationProviderSerializer(EnterpriseRequiredMixin, SAMLProviderSerial
|
||||
"default_name_id_policy",
|
||||
"url_download_metadata",
|
||||
"url_wsfed",
|
||||
"wtrealm",
|
||||
]
|
||||
extra_kwargs = ProviderSerializer.Meta.extra_kwargs
|
||||
|
||||
|
||||
@@ -8,6 +8,10 @@ from authentik.providers.saml.models import SAMLProvider
|
||||
class WSFederationProvider(SAMLProvider):
|
||||
"""WS-Federation for applications which support WS-Fed."""
|
||||
|
||||
# Alias'd fields:
|
||||
# - acs_url -> reply_url
|
||||
# - audience -> realm / wtrealm
|
||||
|
||||
@property
|
||||
def serializer(self) -> type[Serializer]:
|
||||
from authentik.enterprise.providers.ws_federation.api.providers import (
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
from dataclasses import dataclass
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from django.http import HttpRequest
|
||||
from django.shortcuts import get_object_or_404
|
||||
@@ -37,8 +36,6 @@ class SignInRequest:
|
||||
wreply: str
|
||||
wctx: str | None
|
||||
|
||||
app_slug: str
|
||||
|
||||
@staticmethod
|
||||
def parse(request: HttpRequest) -> SignInRequest:
|
||||
action = request.GET.get("wa")
|
||||
@@ -47,26 +44,26 @@ class SignInRequest:
|
||||
realm = request.GET.get("wtrealm")
|
||||
if not realm:
|
||||
raise ValueError("Missing Realm")
|
||||
parsed = urlparse(realm)
|
||||
|
||||
req = SignInRequest(
|
||||
wa=action,
|
||||
wtrealm=realm,
|
||||
wreply=request.GET.get("wreply"),
|
||||
wctx=request.GET.get("wctx", ""),
|
||||
app_slug=parsed.path[1:],
|
||||
)
|
||||
|
||||
_, provider = req.get_app_provider()
|
||||
if not req.wreply:
|
||||
req.wreply = provider.acs_url
|
||||
if not req.wreply.startswith(provider.acs_url):
|
||||
raise ValueError("Invalid wreply")
|
||||
return req
|
||||
|
||||
def get_app_provider(self):
|
||||
application = get_object_or_404(Application, slug=self.app_slug)
|
||||
provider: WSFederationProvider = get_object_or_404(
|
||||
WSFederationProvider, pk=application.provider_id
|
||||
WSFederationProvider, audience=self.wtrealm
|
||||
)
|
||||
application = get_object_or_404(Application, provider=provider)
|
||||
return application, provider
|
||||
|
||||
|
||||
@@ -84,6 +81,8 @@ class SignInProcessor:
|
||||
self.sign_in_request = sign_in_request
|
||||
self.saml_processor = AssertionProcessor(self.provider, self.request, AuthNRequest())
|
||||
self.saml_processor.provider.audience = self.sign_in_request.wtrealm
|
||||
if self.provider.signing_kp:
|
||||
self.saml_processor.provider.sign_assertion = True
|
||||
|
||||
def create_response_token(self):
|
||||
root = Element(f"{{{NS_WS_FED_TRUST}}}RequestSecurityTokenResponse", nsmap=NS_MAP)
|
||||
@@ -151,7 +150,8 @@ class SignInProcessor:
|
||||
def response(self) -> dict[str, str]:
|
||||
root = self.create_response_token()
|
||||
assertion = root.xpath("//saml:Assertion", namespaces=NS_MAP)[0]
|
||||
self.saml_processor._sign(assertion)
|
||||
if self.provider.signing_kp:
|
||||
self.saml_processor._sign(assertion)
|
||||
str_token = etree.tostring(root).decode("utf-8") # nosec
|
||||
return delete_none_values(
|
||||
{
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
from dataclasses import dataclass
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from django.http import HttpRequest
|
||||
from django.shortcuts import get_object_or_404
|
||||
@@ -15,8 +14,6 @@ class SignOutRequest:
|
||||
wtrealm: str
|
||||
wreply: str
|
||||
|
||||
app_slug: str
|
||||
|
||||
@staticmethod
|
||||
def parse(request: HttpRequest) -> SignOutRequest:
|
||||
action = request.GET.get("wa")
|
||||
@@ -25,23 +22,23 @@ class SignOutRequest:
|
||||
realm = request.GET.get("wtrealm")
|
||||
if not realm:
|
||||
raise ValueError("Missing Realm")
|
||||
parsed = urlparse(realm)
|
||||
|
||||
req = SignOutRequest(
|
||||
wa=action,
|
||||
wtrealm=realm,
|
||||
wreply=request.GET.get("wreply"),
|
||||
app_slug=parsed.path[1:],
|
||||
)
|
||||
|
||||
_, provider = req.get_app_provider()
|
||||
if not req.wreply:
|
||||
req.wreply = provider.acs_url
|
||||
if not req.wreply.startswith(provider.acs_url):
|
||||
raise ValueError("Invalid wreply")
|
||||
return req
|
||||
|
||||
def get_app_provider(self):
|
||||
application = get_object_or_404(Application, slug=self.app_slug)
|
||||
provider: WSFederationProvider = get_object_or_404(
|
||||
WSFederationProvider, pk=application.provider_id
|
||||
WSFederationProvider, audience=self.wtrealm
|
||||
)
|
||||
application = get_object_or_404(Application, provider=provider)
|
||||
return application, provider
|
||||
|
||||
@@ -43,7 +43,6 @@ class TestWSFedSignIn(TestCase):
|
||||
wtrealm="",
|
||||
wreply="",
|
||||
wctx=None,
|
||||
app_slug="",
|
||||
),
|
||||
)
|
||||
token = proc.response()[WS_FED_POST_KEY_RESULT]
|
||||
@@ -65,7 +64,6 @@ class TestWSFedSignIn(TestCase):
|
||||
wtrealm="",
|
||||
wreply="",
|
||||
wctx=None,
|
||||
app_slug="",
|
||||
),
|
||||
)
|
||||
token = proc.response()[WS_FED_POST_KEY_RESULT]
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from django.urls import path
|
||||
|
||||
from authentik.enterprise.providers.ws_federation.api.providers import WSFederationProviderViewSet
|
||||
from authentik.enterprise.providers.ws_federation.views import WSFedEntryView
|
||||
from authentik.enterprise.providers.ws_federation.views import MetadataDownload, WSFedEntryView
|
||||
|
||||
urlpatterns = [
|
||||
path(
|
||||
@@ -11,6 +11,12 @@ urlpatterns = [
|
||||
WSFedEntryView.as_view(),
|
||||
name="wsfed",
|
||||
),
|
||||
# Metadata
|
||||
path(
|
||||
"<slug:application_slug>/metadata/",
|
||||
MetadataDownload.as_view(),
|
||||
name="metadata-download",
|
||||
),
|
||||
]
|
||||
|
||||
api_urlpatterns = [
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
from django.http import Http404, HttpRequest, HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.shortcuts import get_object_or_404, redirect
|
||||
from django.urls import reverse
|
||||
from django.utils.translation import gettext as _
|
||||
from django.views import View
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.models import Application, AuthenticatedSession
|
||||
@@ -160,3 +162,24 @@ class WSFedFlowFinalView(ChallengeStageView):
|
||||
"attrs": response,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
class MetadataDownload(View):
|
||||
"""Redirect to metadata download"""
|
||||
|
||||
def dispatch(self, request: HttpRequest, application_slug: str) -> HttpResponse:
|
||||
app = Application.objects.filter(slug=application_slug).with_provider().first()
|
||||
if not app:
|
||||
raise Http404
|
||||
provider = app.get_provider()
|
||||
if not provider:
|
||||
raise Http404
|
||||
return redirect(
|
||||
reverse(
|
||||
"authentik_api:wsfederationprovider-metadata",
|
||||
kwargs={
|
||||
"pk": provider.pk,
|
||||
},
|
||||
)
|
||||
+ "?download"
|
||||
)
|
||||
|
||||
@@ -4,6 +4,7 @@ TENANT_APPS = [
|
||||
"authentik.enterprise.audit",
|
||||
"authentik.enterprise.endpoints.connectors.agent",
|
||||
"authentik.enterprise.endpoints.connectors.fleet",
|
||||
"authentik.enterprise.lifecycle",
|
||||
"authentik.enterprise.policies.unique_password",
|
||||
"authentik.enterprise.providers.google_workspace",
|
||||
"authentik.enterprise.providers.microsoft_entra",
|
||||
|
||||
54
authentik/events/migrations/0016_alter_event_action.py
Normal file
54
authentik/events/migrations/0016_alter_event_action.py
Normal file
@@ -0,0 +1,54 @@
|
||||
# Generated by Django 5.2.10 on 2026-02-03 09:52
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_events", "0015_alter_event_action_choices"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="event",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("secret_view", "Secret View"),
|
||||
("secret_rotate", "Secret Rotate"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("flow_execution", "Flow Execution"),
|
||||
("policy_execution", "Policy Execution"),
|
||||
("policy_exception", "Policy Exception"),
|
||||
("property_mapping_exception", "Property Mapping Exception"),
|
||||
("system_task_execution", "System Task Execution"),
|
||||
("system_task_exception", "System Task Exception"),
|
||||
("system_exception", "System Exception"),
|
||||
("configuration_error", "Configuration Error"),
|
||||
("configuration_warning", "Configuration Warning"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("email_sent", "Email Sent"),
|
||||
("update_available", "Update Available"),
|
||||
("export_ready", "Export Ready"),
|
||||
("review_initiated", "Review Initiated"),
|
||||
("review_overdue", "Review Overdue"),
|
||||
("review_attested", "Review Attested"),
|
||||
("review_completed", "Review Completed"),
|
||||
("custom_", "Custom Prefix"),
|
||||
]
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -123,6 +123,11 @@ class EventAction(models.TextChoices):
|
||||
|
||||
EXPORT_READY = "export_ready"
|
||||
|
||||
REVIEW_INITIATED = "review_initiated"
|
||||
REVIEW_OVERDUE = "review_overdue"
|
||||
REVIEW_ATTESTED = "review_attested"
|
||||
REVIEW_COMPLETED = "review_completed"
|
||||
|
||||
CUSTOM_PREFIX = "custom_"
|
||||
|
||||
|
||||
|
||||
@@ -93,11 +93,13 @@ def on_login_failed(
|
||||
credentials: dict[str, str],
|
||||
request: HttpRequest,
|
||||
stage: Stage | None = None,
|
||||
context: dict[str, Any] | None = None,
|
||||
**kwargs,
|
||||
):
|
||||
"""Failed Login, authentik custom event"""
|
||||
user = User.objects.filter(username=credentials.get("username")).first()
|
||||
Event.new(EventAction.LOGIN_FAILED, **credentials, stage=stage, **kwargs).from_http(
|
||||
context = context or {}
|
||||
Event.new(EventAction.LOGIN_FAILED, **credentials, stage=stage, **context).from_http(
|
||||
request, user
|
||||
)
|
||||
|
||||
|
||||
@@ -207,3 +207,9 @@ class TestEvents(TestCase):
|
||||
"username": user.username,
|
||||
},
|
||||
)
|
||||
|
||||
def test_invalid_string(self):
|
||||
"""Test creating an event with invalid unicode string data"""
|
||||
event = Event.new("unittest", foo="foo bar \u0000 baz")
|
||||
event.save()
|
||||
self.assertEqual(event.context["foo"], "foo bar baz")
|
||||
|
||||
@@ -36,6 +36,10 @@ ALLOWED_SPECIAL_KEYS = re.compile(
|
||||
)
|
||||
|
||||
|
||||
def cleanse_str(raw: Any) -> str:
|
||||
return str(raw).replace("\u0000", "")
|
||||
|
||||
|
||||
def cleanse_item(key: str, value: Any) -> Any:
|
||||
"""Cleanse a single item"""
|
||||
if isinstance(value, dict):
|
||||
@@ -66,7 +70,7 @@ def cleanse_dict(source: dict[Any, Any]) -> dict[Any, Any]:
|
||||
|
||||
def model_to_dict(model: Model) -> dict[str, Any]:
|
||||
"""Convert model to dict"""
|
||||
name = str(model)
|
||||
name = cleanse_str(model)
|
||||
if hasattr(model, "name"):
|
||||
name = model.name
|
||||
return {
|
||||
@@ -133,11 +137,11 @@ def sanitize_item(value: Any) -> Any: # noqa: PLR0911, PLR0912
|
||||
if isinstance(value, ASN):
|
||||
return ASN_CONTEXT_PROCESSOR.asn_to_dict(value)
|
||||
if isinstance(value, Path):
|
||||
return str(value)
|
||||
return cleanse_str(value)
|
||||
if isinstance(value, Exception):
|
||||
return str(value)
|
||||
return cleanse_str(value)
|
||||
if isinstance(value, YAMLTag):
|
||||
return str(value)
|
||||
return cleanse_str(value)
|
||||
if isinstance(value, Enum):
|
||||
return value.value
|
||||
if isinstance(value, type):
|
||||
@@ -161,7 +165,7 @@ def sanitize_item(value: Any) -> Any: # noqa: PLR0911, PLR0912
|
||||
raise ValueError("JSON can't represent timezone-aware times.")
|
||||
return value.isoformat()
|
||||
if isinstance(value, timedelta):
|
||||
return str(value.total_seconds())
|
||||
return cleanse_str(value.total_seconds())
|
||||
if callable(value):
|
||||
return {
|
||||
"type": "callable",
|
||||
@@ -174,8 +178,8 @@ def sanitize_item(value: Any) -> Any: # noqa: PLR0911, PLR0912
|
||||
try:
|
||||
return DjangoJSONEncoder().default(value)
|
||||
except TypeError:
|
||||
return str(value)
|
||||
return str(value)
|
||||
return cleanse_str(value)
|
||||
return cleanse_str(value)
|
||||
|
||||
|
||||
def sanitize_dict(source: dict[Any, Any]) -> dict[Any, Any]:
|
||||
|
||||
@@ -29,6 +29,12 @@ class RefreshOtherFlowsAfterAuthentication(Flag[bool], key="flows_refresh_others
|
||||
visibility = "public"
|
||||
|
||||
|
||||
class ContinuousLogin(Flag[bool], key="flows_continuous_login"):
|
||||
|
||||
default = False
|
||||
visibility = "public"
|
||||
|
||||
|
||||
class AuthentikFlowsConfig(ManagedAppConfig):
|
||||
"""authentik flows app config"""
|
||||
|
||||
|
||||
@@ -31,6 +31,9 @@ class FlowLayout(models.TextChoices):
|
||||
SIDEBAR_LEFT = "sidebar_left"
|
||||
SIDEBAR_RIGHT = "sidebar_right"
|
||||
|
||||
SIDEBAR_LEFT_FRAME_BACKGROUND = "sidebar_left_frame_background"
|
||||
SIDEBAR_RIGHT_FRAME_BACKGROUND = "sidebar_right_frame_background"
|
||||
|
||||
|
||||
class ErrorDetailSerializer(PassiveSerializer):
|
||||
"""Serializer for rest_framework's error messages"""
|
||||
|
||||
29
authentik/flows/migrations/0031_alter_flow_layout.py
Normal file
29
authentik/flows/migrations/0031_alter_flow_layout.py
Normal file
@@ -0,0 +1,29 @@
|
||||
# Generated by Django 5.2.10 on 2026-01-16 17:50
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_flows", "0030_alter_flow_background"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="flow",
|
||||
name="layout",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("stacked", "Stacked"),
|
||||
("content_left", "Content Left"),
|
||||
("content_right", "Content Right"),
|
||||
("sidebar_left", "Sidebar Left"),
|
||||
("sidebar_right", "Sidebar Right"),
|
||||
("sidebar_left_frame_background", "Sidebar Left Frame Background"),
|
||||
("sidebar_right_frame_background", "Sidebar Right Frame Background"),
|
||||
],
|
||||
default="stacked",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -9,7 +9,15 @@
|
||||
{{ block.super }}
|
||||
<link rel="prefetch" href="{{ flow_background_url }}" />
|
||||
{% if flow.compatibility_mode and not inspector %}
|
||||
<script data-id="shady-dom">ShadyDOM = { force: true };</script>
|
||||
{% comment %}
|
||||
@see {@link web/types/webcomponents.d.ts} for type definitions.
|
||||
{% endcomment %}
|
||||
<script data-id="shady-dom">
|
||||
"use strict";
|
||||
|
||||
window.ShadyDOM = window.ShadyDOM || {}
|
||||
window.ShadyDOM.force = true
|
||||
</script>
|
||||
{% endif %}
|
||||
{% include "base/header_js.html" %}
|
||||
<script data-id="flow-config">
|
||||
@@ -45,16 +53,11 @@
|
||||
slug="{{ flow.slug }}"
|
||||
class="pf-c-login"
|
||||
data-layout="{{ flow.layout|default:'stacked' }}"
|
||||
loading
|
||||
>
|
||||
{% include "base/placeholder.html" %}
|
||||
|
||||
<ak-brand-links
|
||||
slot="footer"
|
||||
exportparts="list:brand-links-list, list-item:brand-links-list-item"
|
||||
role="contentinfo"
|
||||
aria-label="{% trans 'Site footer' %}"
|
||||
class="pf-c-login__footer {% if flow.layout == 'stacked' %}pf-m-dark{% endif %}"
|
||||
></ak-brand-links>
|
||||
<ak-brand-links name="flow-links" slot="footer"></ak-brand-links>
|
||||
</ak-flow-executor>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -141,6 +141,10 @@ web:
|
||||
# workers: 2
|
||||
threads: 4
|
||||
path: /
|
||||
timeout_http_read_header: 5s
|
||||
timeout_http_read: 30s
|
||||
timeout_http_write: 60s
|
||||
timeout_http_idle: 120s
|
||||
|
||||
worker:
|
||||
processes: 1
|
||||
@@ -178,3 +182,5 @@ storage:
|
||||
# backend: file # or s3
|
||||
# file: {}
|
||||
# s3: {}
|
||||
|
||||
skip_migrations: false
|
||||
|
||||
@@ -42,7 +42,7 @@ ARG_SANITIZE = re.compile(r"[:.-]")
|
||||
|
||||
|
||||
def sanitize_arg(arg_name: str) -> str:
|
||||
return re.sub(ARG_SANITIZE, "_", arg_name)
|
||||
return re.sub(ARG_SANITIZE, "_", slugify(arg_name))
|
||||
|
||||
|
||||
class BaseEvaluator:
|
||||
@@ -311,7 +311,9 @@ class BaseEvaluator:
|
||||
|
||||
def wrap_expression(self, expression: str) -> str:
|
||||
"""Wrap expression in a function, call it, and save the result as `result`"""
|
||||
handler_signature = ",".join(sanitize_arg(x) for x in self._context.keys())
|
||||
handler_signature = ",".join(
|
||||
[x for x in [sanitize_arg(x) for x in self._context.keys()] if x]
|
||||
)
|
||||
full_expression = ""
|
||||
full_expression += f"def handler({handler_signature}):\n"
|
||||
full_expression += indent(expression, " ")
|
||||
|
||||
@@ -1,10 +1,16 @@
|
||||
"""Migration helpers"""
|
||||
|
||||
from collections.abc import Iterable
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from django.apps.registry import Apps
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
|
||||
from authentik.events.utils import cleanse_dict, sanitize_dict
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from authentik.events.models import EventAction
|
||||
|
||||
|
||||
def fallback_names(app: str, model: str, field: str):
|
||||
"""Factory function that checks all instances of `app`.`model` instance's `field`
|
||||
@@ -65,3 +71,12 @@ def progress_bar(iterable: Iterable):
|
||||
print_progress_bar(i + 1)
|
||||
# Print New Line on Complete
|
||||
print()
|
||||
|
||||
|
||||
def migration_event(
|
||||
apps: Apps, schema_editor: BaseDatabaseSchemaEditor, action: EventAction, **kwargs
|
||||
):
|
||||
db_alias = schema_editor.connection.alias
|
||||
Event = apps.get_model("authentik_events", "Event")
|
||||
event = Event(action=action, app="authentik", context=cleanse_dict(sanitize_dict(kwargs)))
|
||||
event.save(using=db_alias)
|
||||
|
||||
@@ -88,7 +88,7 @@ class DomainlessURLValidator(URLValidator):
|
||||
|
||||
def __call__(self, value: str):
|
||||
# Check if the scheme is valid.
|
||||
scheme = value.split("://")[0].lower()
|
||||
scheme = value.split("://", maxsplit=1)[0].lower()
|
||||
if scheme not in self.schemes:
|
||||
value = "default" + value
|
||||
super().__call__(value)
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import math
|
||||
from typing import Any, Self
|
||||
|
||||
import pglock
|
||||
@@ -68,7 +69,12 @@ class OutgoingSyncProvider(ScheduledModel, Model):
|
||||
return Paginator(self.get_object_qs(type), self.sync_page_size)
|
||||
|
||||
def get_object_sync_time_limit_ms[T: User | Group](self, type: type[T]) -> int:
|
||||
num_pages: int = self.get_paginator(type).num_pages
|
||||
# Use a simple COUNT(*) on the model instead of materializing get_object_qs(),
|
||||
# which for some providers (e.g. SCIM) runs PolicyEngine per-user and is
|
||||
# extremely expensive. The time limit is an upper-bound estimate, so using
|
||||
# the total count (without policy filtering) is a safe overestimate.
|
||||
total_count = type.objects.count()
|
||||
num_pages = math.ceil(total_count / self.sync_page_size) if total_count > 0 else 1
|
||||
page_timeout_ms = timedelta_from_string(self.sync_page_timeout).total_seconds() * 1000
|
||||
return int(num_pages * page_timeout_ms * 1.5)
|
||||
|
||||
|
||||
@@ -103,6 +103,7 @@ class SyncTasks:
|
||||
)
|
||||
users_tasks.run().wait(timeout=provider.get_object_sync_time_limit_ms(User))
|
||||
group_tasks.run().wait(timeout=provider.get_object_sync_time_limit_ms(Group))
|
||||
self._sync_cleanup(provider, task)
|
||||
except TransientSyncException as exc:
|
||||
self.logger.warning("transient sync exception", exc=exc)
|
||||
task.warning("Sync encountered a transient exception. Retrying", exc=exc)
|
||||
@@ -111,6 +112,35 @@ class SyncTasks:
|
||||
task.error(exc)
|
||||
return
|
||||
|
||||
def _sync_cleanup(self, provider: OutgoingSyncProvider, task: Task):
|
||||
"""Delete remote objects that are no longer in scope"""
|
||||
for object_type in (User, Group):
|
||||
try:
|
||||
client = provider.client_for_model(object_type)
|
||||
except TransientSyncException:
|
||||
continue
|
||||
in_scope_pks = set(provider.get_object_qs(object_type).values_list("pk", flat=True))
|
||||
stale = client.connection_type.objects.filter(provider=provider).exclude(
|
||||
**{f"{client.connection_type_query}__pk__in": in_scope_pks}
|
||||
)
|
||||
for connection in stale:
|
||||
try:
|
||||
client.delete(connection.scim_id)
|
||||
task.info(
|
||||
f"Deleted out-of-scope {object_type._meta.verbose_name}",
|
||||
scim_id=connection.scim_id,
|
||||
)
|
||||
except NotFoundSyncException:
|
||||
pass
|
||||
except TransientSyncException as exc:
|
||||
self.logger.warning("transient error during cleanup", exc=exc)
|
||||
self.logger.warning(
|
||||
"Cleanup encountered a transient exception. Retrying", exc=exc
|
||||
)
|
||||
raise Retry() from exc
|
||||
except DryRunRejected as exc:
|
||||
self.logger.info("Rejected dry-run cleanup event", exc=exc)
|
||||
|
||||
def sync_objects(
|
||||
self,
|
||||
object_type: str,
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Test Evaluator base functions"""
|
||||
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.test import RequestFactory, TestCase
|
||||
@@ -353,3 +354,18 @@ class TestEvaluator(TestCase):
|
||||
self.assertEqual(message.to, ["to@example.com"])
|
||||
self.assertEqual(message.cc, ["cc1@example.com", "cc2@example.com"])
|
||||
self.assertEqual(message.bcc, ["bcc1@example.com", "bcc2@example.com"])
|
||||
|
||||
def test_expr_arg_escape(self):
|
||||
"""Test escaping of arguments"""
|
||||
eval = BaseEvaluator()
|
||||
eval._context = {
|
||||
'z=getattr(getattr(__import__("os"), "popen")("id > /tmp/test"), "read")()': "bar",
|
||||
"@@": "baz",
|
||||
"{{": "baz",
|
||||
"aa@@": "baz",
|
||||
}
|
||||
res = eval.evaluate("return locals()")
|
||||
self.assertEqual(
|
||||
res, {"zgetattrgetattr__import__os_popenid_tmptest_read": "bar", "aa": "baz"}
|
||||
)
|
||||
self.assertFalse(Path("/tmp/test").exists())
|
||||
|
||||
119
authentik/lib/tests/test_utils_inheritance.py
Normal file
119
authentik/lib/tests/test_utils_inheritance.py
Normal file
@@ -0,0 +1,119 @@
|
||||
"""Tests for inheritance helpers."""
|
||||
|
||||
from contextlib import contextmanager
|
||||
|
||||
from django.db import connection, models
|
||||
from django.test import TransactionTestCase
|
||||
from django.test.utils import isolate_apps
|
||||
|
||||
from authentik.lib.utils.inheritance import get_deepest_child
|
||||
|
||||
|
||||
@contextmanager
|
||||
def temporary_inheritance_models():
|
||||
"""Create a temporary multi-table inheritance graph for testing."""
|
||||
with isolate_apps("authentik.lib.tests"):
|
||||
|
||||
class GrandParent(models.Model):
|
||||
class Meta:
|
||||
app_label = "tests"
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"GrandParent({self.pk})"
|
||||
|
||||
class Parent(GrandParent):
|
||||
class Meta:
|
||||
app_label = "tests"
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"Parent({self.pk})"
|
||||
|
||||
class Child(Parent):
|
||||
class Meta:
|
||||
app_label = "tests"
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"Child({self.pk})"
|
||||
|
||||
class GrandChild(Child):
|
||||
class Meta:
|
||||
app_label = "tests"
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"GrandChild({self.pk})"
|
||||
|
||||
with connection.schema_editor() as schema_editor:
|
||||
schema_editor.create_model(GrandParent)
|
||||
schema_editor.create_model(Parent)
|
||||
schema_editor.create_model(Child)
|
||||
schema_editor.create_model(GrandChild)
|
||||
|
||||
try:
|
||||
yield GrandParent, Parent, Child, GrandChild
|
||||
finally:
|
||||
with connection.schema_editor() as schema_editor:
|
||||
schema_editor.delete_model(GrandChild)
|
||||
schema_editor.delete_model(Child)
|
||||
schema_editor.delete_model(Parent)
|
||||
schema_editor.delete_model(GrandParent)
|
||||
|
||||
|
||||
class TestInheritanceUtils(TransactionTestCase):
|
||||
"""Tests for helper functions in authentik.lib.utils.inheritance."""
|
||||
|
||||
def test_get_deepest_child_grandparent_to_parent(self):
|
||||
"""GrandParent -> Parent."""
|
||||
with temporary_inheritance_models() as (GrandParent, Parent, _Child, _GrandChild):
|
||||
parent = Parent.objects.create()
|
||||
grandparent = GrandParent.objects.get(pk=parent.pk)
|
||||
|
||||
resolved = get_deepest_child(grandparent)
|
||||
|
||||
self.assertIsInstance(resolved, Parent)
|
||||
self.assertEqual(resolved.pk, parent.pk)
|
||||
|
||||
def test_get_deepest_child_grandparent_to_child(self):
|
||||
"""GrandParent -> Child."""
|
||||
with temporary_inheritance_models() as (GrandParent, _Parent, Child, _GrandChild):
|
||||
child = Child.objects.create()
|
||||
grandparent = GrandParent.objects.get(pk=child.pk)
|
||||
|
||||
resolved = get_deepest_child(grandparent)
|
||||
|
||||
self.assertIsInstance(resolved, Child)
|
||||
self.assertEqual(resolved.pk, child.pk)
|
||||
|
||||
def test_get_deepest_child_grandparent_to_grandchild(self):
|
||||
"""GrandParent -> GrandChild."""
|
||||
with temporary_inheritance_models() as (GrandParent, _Parent, _Child, GrandChild):
|
||||
grandchild = GrandChild.objects.create()
|
||||
grandparent = GrandParent.objects.get(pk=grandchild.pk)
|
||||
|
||||
resolved = get_deepest_child(grandparent)
|
||||
|
||||
self.assertIsInstance(resolved, GrandChild)
|
||||
self.assertEqual(resolved.pk, grandchild.pk)
|
||||
|
||||
def test_get_deepest_child_parent_to_child(self):
|
||||
"""Parent -> Child (start from non-root)."""
|
||||
with temporary_inheritance_models() as (_GrandParent, Parent, Child, _GrandChild):
|
||||
child = Child.objects.create()
|
||||
parent = Parent.objects.get(pk=child.pk)
|
||||
|
||||
resolved = get_deepest_child(parent)
|
||||
|
||||
self.assertIsInstance(resolved, Child)
|
||||
self.assertEqual(resolved.pk, child.pk)
|
||||
|
||||
def test_get_deepest_child_no_queries_with_preloaded_relations(self):
|
||||
"""No extra queries when the inheritance chain is fully select_related."""
|
||||
with temporary_inheritance_models() as (GrandParent, _Parent, _Child, GrandChild):
|
||||
grandchild = GrandChild.objects.create()
|
||||
grandparent = GrandParent.objects.select_related("parent__child__grandchild").get(
|
||||
pk=grandchild.pk
|
||||
)
|
||||
|
||||
with self.assertNumQueries(0):
|
||||
resolved = get_deepest_child(grandparent)
|
||||
|
||||
self.assertIsInstance(resolved, GrandChild)
|
||||
41
authentik/lib/utils/inheritance.py
Normal file
41
authentik/lib/utils/inheritance.py
Normal file
@@ -0,0 +1,41 @@
|
||||
from django.db.models import Model, OneToOneField, OneToOneRel
|
||||
|
||||
|
||||
def get_deepest_child(parent: Model) -> Model:
|
||||
"""
|
||||
In multiple table inheritance, given any ancestor object, get the deepest child object.
|
||||
See https://docs.djangoproject.com/en/dev/topics/db/models/#multi-table-inheritance
|
||||
|
||||
This function does not query the database if `select_related` has been performed on all
|
||||
subclasses of `parent`'s model.
|
||||
"""
|
||||
|
||||
# Almost verbatim copy from django-model-utils, see
|
||||
# https://github.com/jazzband/django-model-utils/blob/5.0.0/model_utils/managers.py#L132
|
||||
one_to_one_rels = [
|
||||
field for field in parent._meta.get_fields() if isinstance(field, OneToOneRel)
|
||||
]
|
||||
|
||||
submodel_fields = [
|
||||
rel
|
||||
for rel in one_to_one_rels
|
||||
if isinstance(rel.field, OneToOneField)
|
||||
and issubclass(rel.field.model, parent._meta.model)
|
||||
and parent._meta.model is not rel.field.model
|
||||
and rel.parent_link
|
||||
]
|
||||
|
||||
submodel_accessors = [submodel_field.get_accessor_name() for submodel_field in submodel_fields]
|
||||
# End Copy
|
||||
|
||||
child = None
|
||||
for submodel in submodel_accessors:
|
||||
try:
|
||||
child = getattr(parent, submodel)
|
||||
break
|
||||
except AttributeError:
|
||||
continue
|
||||
|
||||
if not child:
|
||||
return parent
|
||||
return get_deepest_child(child)
|
||||
@@ -185,8 +185,10 @@ class KubernetesObjectReconciler[T]:
|
||||
|
||||
patch = self.get_patch()
|
||||
if patch is not None:
|
||||
current_json = ApiClient().sanitize_for_serialization(current)
|
||||
|
||||
try:
|
||||
current_json = ApiClient().sanitize_for_serialization(current)
|
||||
except AttributeError:
|
||||
current_json = asdict(current)
|
||||
try:
|
||||
if apply_patch(current_json, patch) != current_json:
|
||||
raise NeedsUpdate()
|
||||
|
||||
@@ -12,9 +12,9 @@ HEADER = "### Managed by authentik"
|
||||
FOOTER = "### End Managed by authentik"
|
||||
|
||||
|
||||
def opener(path, flags):
|
||||
"""File opener to create files as 700 perms"""
|
||||
return os.open(path, flags, 0o700)
|
||||
def opener(path: Path | str, flags: int):
|
||||
"""File opener to create files as 600 perms"""
|
||||
return os.open(path, flags, 0o600)
|
||||
|
||||
|
||||
class SSHManagedExternallyException(DockerException):
|
||||
|
||||
@@ -7,6 +7,7 @@ from tempfile import gettempdir
|
||||
from docker.tls import TLSConfig
|
||||
|
||||
from authentik.crypto.models import CertificateKeyPair
|
||||
from authentik.outposts.docker_ssh import opener
|
||||
|
||||
|
||||
class DockerInlineTLS:
|
||||
@@ -29,7 +30,7 @@ class DockerInlineTLS:
|
||||
def write_file(self, name: str, contents: str) -> str:
|
||||
"""Wrapper for mkstemp that uses fdopen"""
|
||||
path = Path(gettempdir(), name)
|
||||
with open(path, "w", encoding="utf8") as _file:
|
||||
with open(path, "w", encoding="utf8", opener=opener) as _file:
|
||||
_file.write(contents)
|
||||
self._paths.append(str(path))
|
||||
return str(path)
|
||||
|
||||
@@ -163,4 +163,5 @@ def outpost_pre_delete_cleanup(sender, instance: Outpost, **_):
|
||||
@receiver(pre_delete, sender=AuthenticatedSession)
|
||||
def outpost_logout_revoke(sender: type[AuthenticatedSession], instance: AuthenticatedSession, **_):
|
||||
"""Catch logout by expiring sessions being deleted"""
|
||||
outpost_session_end.send(instance.session.session_key)
|
||||
if Outpost.objects.exists():
|
||||
outpost_session_end.send(instance.session.session_key)
|
||||
|
||||
@@ -7,7 +7,6 @@ from socket import gethostname
|
||||
from typing import Any
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from asgiref.sync import async_to_sync
|
||||
from channels.layers import get_channel_layer
|
||||
from django.core.cache import cache
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
@@ -159,7 +158,7 @@ def outpost_send_update(pk: Any):
|
||||
layer = get_channel_layer()
|
||||
group = build_outpost_group(outpost.pk)
|
||||
LOGGER.debug("sending update", channel=group, outpost=outpost)
|
||||
async_to_sync(layer.group_send)(group, {"type": "event.update"})
|
||||
layer.group_send_blocking(group, {"type": "event.update"})
|
||||
|
||||
|
||||
@actor(description=_("Checks the local environment and create Service connections."))
|
||||
@@ -210,7 +209,7 @@ def outpost_session_end(session_id: str):
|
||||
for outpost in Outpost.objects.all():
|
||||
LOGGER.info("Sending session end signal to outpost", outpost=outpost)
|
||||
group = build_outpost_group(outpost.pk)
|
||||
async_to_sync(layer.group_send)(
|
||||
layer.group_send_blocking(
|
||||
group,
|
||||
{
|
||||
"type": "event.session.end",
|
||||
|
||||
@@ -57,9 +57,11 @@ class PolicyBindingSerializer(ModelSerializer):
|
||||
required=True,
|
||||
)
|
||||
|
||||
policy_obj = PolicySerializer(required=False, read_only=True, source="policy")
|
||||
group_obj = PartialGroupSerializer(required=False, read_only=True, source="group")
|
||||
user_obj = PartialUserSerializer(required=False, read_only=True, source="user")
|
||||
policy_obj = PolicySerializer(required=False, allow_null=True, read_only=True, source="policy")
|
||||
group_obj = PartialGroupSerializer(
|
||||
required=False, allow_null=True, read_only=True, source="group"
|
||||
)
|
||||
user_obj = PartialUserSerializer(required=False, allow_null=True, read_only=True, source="user")
|
||||
|
||||
class Meta:
|
||||
model = PolicyBinding
|
||||
|
||||
@@ -132,9 +132,14 @@ class PolicyEngine:
|
||||
# If we didn't find any static bindings, do nothing
|
||||
return
|
||||
self.logger.debug("P_ENG: Found static bindings", **matched_bindings)
|
||||
if matched_bindings.get("passing", 0) > 0:
|
||||
# Any passing static binding -> passing
|
||||
passing = True
|
||||
if self.mode == PolicyEngineMode.MODE_ANY:
|
||||
if matched_bindings.get("passing", 0) > 0:
|
||||
# Any passing static binding -> passing
|
||||
passing = True
|
||||
elif self.mode == PolicyEngineMode.MODE_ALL:
|
||||
if matched_bindings.get("passing", 0) == matched_bindings["total"]:
|
||||
# All static bindings are passing -> passing
|
||||
passing = True
|
||||
elif matched_bindings["total"] > 0 and matched_bindings.get("passing", 0) < 1:
|
||||
# No matching static bindings but at least one is configured -> not passing
|
||||
passing = False
|
||||
@@ -185,6 +190,16 @@ class PolicyEngine:
|
||||
# Only call .recv() if no result is saved, otherwise we just deadlock here
|
||||
if not proc_info.result:
|
||||
proc_info.result = proc_info.connection.recv()
|
||||
if proc_info.result and proc_info.result._exec_time:
|
||||
HIST_POLICIES_EXECUTION_TIME.labels(
|
||||
binding_order=proc_info.binding.order,
|
||||
binding_target_type=proc_info.binding.target_type,
|
||||
binding_target_name=proc_info.binding.target_name,
|
||||
object_type=(
|
||||
class_to_path(self.request.obj.__class__) if self.request.obj else ""
|
||||
),
|
||||
mode="execute_process",
|
||||
).observe(proc_info.result._exec_time)
|
||||
return self
|
||||
|
||||
@property
|
||||
|
||||
@@ -0,0 +1,57 @@
|
||||
# Generated by Django 5.2.11 on 2026-02-04 18:28
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_policies_event_matcher", "0025_alter_eventmatcherpolicy_action"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="eventmatcherpolicy",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("secret_view", "Secret View"),
|
||||
("secret_rotate", "Secret Rotate"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("flow_execution", "Flow Execution"),
|
||||
("policy_execution", "Policy Execution"),
|
||||
("policy_exception", "Policy Exception"),
|
||||
("property_mapping_exception", "Property Mapping Exception"),
|
||||
("system_task_execution", "System Task Execution"),
|
||||
("system_task_exception", "System Task Exception"),
|
||||
("system_exception", "System Exception"),
|
||||
("configuration_error", "Configuration Error"),
|
||||
("configuration_warning", "Configuration Warning"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("email_sent", "Email Sent"),
|
||||
("update_available", "Update Available"),
|
||||
("export_ready", "Export Ready"),
|
||||
("review_initiated", "Review Initiated"),
|
||||
("review_overdue", "Review Overdue"),
|
||||
("review_attested", "Review Attested"),
|
||||
("review_completed", "Review Completed"),
|
||||
("custom_", "Custom Prefix"),
|
||||
],
|
||||
default=None,
|
||||
help_text="Match created events with this action type. When left empty, all action types will be matched.",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
]
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user