Mirror of https://github.com/goauthentik/authentik, synced 2026-05-11 09:26:22 +02:00

## Compare commits

156 commits between `website/in…` and `version/20…` (both ref names are truncated in the source view).
| SHA1 |
|---|
| 5249546862 |
| bf91348c05 |
| 63136f0180 |
| faffabf938 |
| 0b180b15a2 |
| 07af6de74f |
| ddfef91ea5 |
| cefbf5e6ae |
| e53d3d2486 |
| 32a3eed521 |
| f05cc6e75a |
| c68c36fdeb |
| 888f969fc7 |
| 82535e4671 |
| ed2957e4e6 |
| a5abe85148 |
| 8d2c31fa25 |
| 2637ce2474 |
| 319008dec8 |
| 8beb2fac18 |
| ac7b28d0b0 |
| 073acf92c2 |
| ad107c19af |
| d285fcd8a7 |
| 84066cab48 |
| e623d93ff5 |
| 1d0628dfbe |
| 996645105c |
| 63d7ca6ef0 |
| 5b24f4ad80 |
| ed2e6cfb9c |
| a1431ea48e |
| b30e77b363 |
| 2f50cdd9fe |
| 494bdcaa09 |
| e36ce1789e |
| 5a72ed83e0 |
| f72d257e43 |
| cbedb16cc4 |
| 6fc1b5ce90 |
| 57b0fa48c1 |
| 84a344ed87 |
| f864cb56ab |
| 692735f9e1 |
| e24fb300b1 |
| f0e90d6873 |
| 0cf45835a0 |
| 69d35c1d26 |
| ac803b210d |
| c9728b4607 |
| 6e45584563 |
| 59a2e84b35 |
| 6025dbb9c9 |
| d07bcd5025 |
| e80655d285 |
| e0d3d4d38c |
| 62112404ee |
| 1c9e12fcd9 |
| 42c6c257ec |
| 41bd9d7913 |
| 2c84935732 |
| 819c13a9bc |
| 0d8f366af8 |
| 093e60c753 |
| af646f32d2 |
| de4afc7322 |
| bc1983106f |
| 8c2c1474f1 |
| 0dccbd4193 |
| 6a70894e01 |
| 2f5eb9b2e4 |
| 12aedb3a9e |
| 303dc93514 |
| fbb217db57 |
| 4de253653f |
| 4154c06831 |
| 4750ed5e2a |
| 361017127d |
| 0ca5a54307 |
| ef1aad5dbb |
| 29d880920e |
| fc6f8374e6 |
| a8668bbac4 |
| d686932166 |
| feceb220b1 |
| 937df6e07f |
| 48e6b968a6 |
| cd89c45e75 |
| e53995e2c1 |
| 33d5f11f0e |
| 565e16eca7 |
| 9a0164b722 |
| 8af491630b |
| 8e25e7a213 |
| 4d183657da |
| be89b6052d |
| ad5d2bb611 |
| 8d30fb3d25 |
| cea3fbfa9b |
| 151d889ff4 |
| 58ca3ecbd5 |
| 1a6c7082a3 |
| 1dc60276f9 |
| de045c6d7b |
| 850728e9bb |
| 84a605a4ba |
| 1780bb0cf0 |
| cd75fe235d |
| e6e62e9de1 |
| ac7a4f8a22 |
| 0290ed3342 |
| e367525794 |
| 93c319baee |
| 1d02ee7d74 |
| 93439b5742 |
| 6682a6664e |
| 0b5bac74e9 |
| 062823f1b2 |
| a17fe58971 |
| 422ea893b1 |
| 15c9f93851 |
| e2202d498b |
| 9ea9a86ad3 |
| 4bac1edd61 |
| 24726be3c9 |
| 411f06756f |
| 4bdcab48c3 |
| 00dbd377a7 |
| a01c0575db |
| 6e51d044bb |
| 6d1b168dc4 |
| 43675c2b22 |
| 8645273eaf |
| eb6f4712fe |
| 7b9505242e |
| 3dda20ebc7 |
| dfd2bc5c3c |
| 06a270913c |
| 430507fc72 |
| 847af7f9ea |
| 8f1cb636e8 |
| e61c876002 |
| 33c0d3df0a |
| 3a03e1ebfd |
| 1e41b77761 |
| 6c1662f99f |
| bb5bc5c8da |
| 30670c9070 |
| fdbf9ffedc |
| 2ec433d724 |
| 55297b9e6a |
| f9dda6582c |
| 3394c17bfd |
| a37d101b10 |
| 4774b4db87 |
| fdb52c9394 |
#### .github/actions/cherry-pick/action.yml (vendored, 23 lines changed)

```diff
@@ -115,20 +115,13 @@ runs:
     shell: bash
     env:
       GITHUB_TOKEN: ${{ inputs.token }}
+      PR_NUMBER: ${{ steps.should_run.outputs.pr_number }}
+      REASON: ${{ steps.should_run.outputs.reason }}
     run: |
       set -e -o pipefail
-      PR_NUMBER="${{ steps.should_run.outputs.pr_number }}"

       # Get PR details
       PR_DATA=$(gh api repos/${{ github.repository }}/pulls/$PR_NUMBER)
       PR_TITLE=$(echo "$PR_DATA" | jq -r '.title')
       PR_AUTHOR=$(echo "$PR_DATA" | jq -r '.user.login')

       echo "pr_title=$PR_TITLE" >> $GITHUB_OUTPUT
       echo "pr_author=$PR_AUTHOR" >> $GITHUB_OUTPUT

       # Determine which labels to process
-      if [ "${{ steps.should_run.outputs.reason }}" = "label_added_to_merged_pr" ]; then
+      if [ "${REASON}" = "label_added_to_merged_pr" ]; then
         # Only process the specific label that was just added
         if [ "${{ github.event_name }}" = "issues" ]; then
           LABEL_NAME="${{ github.event.label.name }}"
@@ -152,13 +145,13 @@ runs:
     shell: bash
     env:
       GITHUB_TOKEN: ${{ inputs.token }}
       PR_NUMBER: '${{ steps.should_run.outputs.pr_number }}'
       COMMIT_SHA: '${{ steps.should_run.outputs.merge_commit_sha }}'
       PR_TITLE: ${{ github.event.pull_request.title }}
       PR_AUTHOR: ${{ github.event.pull_request.user.login }}
       LABELS: '${{ steps.pr_details.outputs.labels }}'
     run: |
       set -e -o pipefail
       PR_NUMBER='${{ steps.should_run.outputs.pr_number }}'
       COMMIT_SHA='${{ steps.should_run.outputs.merge_commit_sha }}'
       PR_TITLE='${{ steps.pr_details.outputs.pr_title }}'
       PR_AUTHOR='${{ steps.pr_details.outputs.pr_author }}'
       LABELS='${{ steps.pr_details.outputs.labels }}'

       echo "Processing PR #$PR_NUMBER (reason: ${{ steps.should_run.outputs.reason }})"
       echo "Found backport labels: $LABELS"
```
```diff
@@ -89,6 +89,8 @@ if should_push:
     _cache_tag = "buildcache"
     if image_arch:
        _cache_tag += f"-{image_arch}"
+    if is_release:
+        _cache_tag += f"-{version_family}"
    cache_to = f"type=registry,ref={get_attest_image_names(image_tags)}:{_cache_tag},mode=max"
```
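The hunk above gives release builds their own registry cache tag per version family, so a release build no longer competes with the rolling dev cache. A small sketch of the resulting tag composition (the standalone function and example values are mine, mirroring the names in the diff):

```python
def cache_tag(image_arch: str | None, is_release: bool, version_family: str) -> str:
    # Mirrors the _cache_tag composition shown in the hunk above.
    tag = "buildcache"
    if image_arch:
        tag += f"-{image_arch}"
    if is_release:
        tag += f"-{version_family}"
    return tag

assert cache_tag(None, is_release=False, version_family="") == "buildcache"
assert cache_tag("arm64", is_release=True, version_family="2026.2") == "buildcache-arm64-2026.2"
```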
#### .github/actions/setup/action.yml (vendored, 18 lines changed)

```diff
@@ -8,6 +8,11 @@ inputs:
   postgresql_version:
     description: "Optional postgresql image tag"
     default: "16"
+  working-directory:
+    description: |
+      Optional working directory if this repo isn't in the root of the actions workspace.
+      When set, needs to contain a trailing slash
+    default: ""

 runs:
   using: "composite"
@@ -29,24 +34,25 @@ runs:
     if: ${{ contains(inputs.dependencies, 'python') }}
     uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v5
     with:
-      python-version-file: "pyproject.toml"
+      python-version-file: "${{ inputs.working-directory }}pyproject.toml"
   - name: Install Python deps
     if: ${{ contains(inputs.dependencies, 'python') }}
     shell: bash
+    working-directory: ${{ inputs.working-directory }}
     run: uv sync --all-extras --dev --frozen
   - name: Setup node
     if: ${{ contains(inputs.dependencies, 'node') }}
     uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v4
     with:
-      node-version-file: web/package.json
+      node-version-file: ${{ inputs.working-directory }}web/package.json
       cache: "npm"
-      cache-dependency-path: web/package-lock.json
+      cache-dependency-path: ${{ inputs.working-directory }}web/package-lock.json
       registry-url: 'https://registry.npmjs.org'
   - name: Setup go
     if: ${{ contains(inputs.dependencies, 'go') }}
     uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v5
     with:
-      go-version-file: "go.mod"
+      go-version-file: "${{ inputs.working-directory }}go.mod"
   - name: Setup docker cache
     if: ${{ contains(inputs.dependencies, 'runtime') }}
     uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7
@@ -55,13 +61,15 @@ runs:
   - name: Setup dependencies
     if: ${{ contains(inputs.dependencies, 'runtime') }}
     shell: bash
+    working-directory: ${{ inputs.working-directory }}
     run: |
       export PSQL_TAG=${{ inputs.postgresql_version }}
       docker compose -f .github/actions/setup/compose.yml up -d
-      cd web && npm i
+      cd web && npm ci
   - name: Generate config
     if: ${{ contains(inputs.dependencies, 'python') }}
     shell: uv run python {0}
+    working-directory: ${{ inputs.working-directory }}
     run: |
       from authentik.lib.generators import generate_id
       from yaml import safe_dump
```
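The trailing-slash requirement on the new `working-directory` input exists because the action concatenates the input directly onto file names (`${{ inputs.working-directory }}pyproject.toml`) with no separator of its own. A tiny illustration of the failure mode (the helper and the `authentik/` subdirectory are hypothetical):

```python
def version_file(working_directory: str) -> str:
    # Mirrors "${{ inputs.working-directory }}pyproject.toml" from the hunk above.
    return f"{working_directory}pyproject.toml"

assert version_file("") == "pyproject.toml"                      # default: repo at workspace root
assert version_file("authentik/") == "authentik/pyproject.toml"  # with the required trailing slash
assert version_file("authentik") == "authentikpyproject.toml"    # missing slash: silently wrong path
```

The `npm i` → `npm ci` change in the same file is related hygiene: `npm ci` installs exactly what `package-lock.json` pins and fails on drift, instead of potentially mutating the lockfile.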
```diff
@@ -80,7 +80,7 @@ jobs:
           make gen-client-ts
           make gen-client-go
       - name: Build Docker Image
-        uses: docker/build-push-action@601a80b39c9405e50806ae38af30926f9d957c47 # v6
+        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
         id: push
         with:
           context: .
```
#### .github/workflows/api-ts-publish.yml (vendored, 2 lines changed)

```diff
@@ -21,7 +21,7 @@ jobs:
         uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
         with:
           app-id: ${{ secrets.GH_APP_ID }}
-          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
+          private-key: ${{ secrets.GH_APP_PRIV_KEY }}
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
        with:
          token: ${{ steps.generate_token.outputs.token }}
```
#### .github/workflows/ci-docs.yml (vendored, 2 lines changed)

```diff
@@ -96,7 +96,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Build Docker Image
         id: push
-        uses: docker/build-push-action@601a80b39c9405e50806ae38af30926f9d957c47 # v6
+        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
         with:
           tags: ${{ steps.ev.outputs.imageTags }}
           file: website/Dockerfile
```
#### .github/workflows/ci-main.yml (vendored, 11 lines changed)

```diff
@@ -42,16 +42,6 @@ jobs:
         uses: ./.github/actions/setup
       - name: run job
         run: uv run make ci-${{ matrix.job }}
-  test-gen-build:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
-      - name: Setup authentik env
-        uses: ./.github/actions/setup
-      - name: generate schema
-        run: make migrate gen-build
-      - name: ensure schema is up-to-date
-        run: git diff --exit-code -- schema.yml blueprints/schema.json
   test-migrations:
     runs-on: ubuntu-latest
     steps:
@@ -287,7 +277,6 @@ jobs:
     if: always()
     needs:
       - lint
-      - test-gen-build
       - test-migrations
       - test-migrations-from-stable
       - test-unittest
```
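The removed `test-gen-build` job guarded against generated-artifact drift: regenerate the schema, then fail the build if the working tree no longer matches what is committed. The core trick is `git diff --exit-code`, which exits non-zero when the named paths have uncommitted changes. A generic sketch of the same guard (paths follow the diff; the Python wrapper is mine):

```python
import subprocess
import sys

def ensure_up_to_date(*paths: str) -> None:
    """Fail if regeneration left uncommitted changes in the given paths."""
    # --exit-code makes `git diff` return 1 when differences exist.
    result = subprocess.run(["git", "diff", "--exit-code", "--", *paths])
    if result.returncode != 0:
        sys.exit("generated files are stale; regenerate and commit the result")

subprocess.run(["make", "migrate", "gen-build"], check=True)
ensure_up_to_date("schema.yml", "blueprints/schema.json")
```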
#### .github/workflows/ci-outpost.yml (vendored, 2 lines changed)

```diff
@@ -111,7 +111,7 @@ jobs:
         run: make gen-client-go
       - name: Build Docker Image
         id: push
-        uses: docker/build-push-action@601a80b39c9405e50806ae38af30926f9d957c47 # v6
+        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
         with:
           tags: ${{ steps.ev.outputs.imageTags }}
           file: lifecycle/container/${{ matrix.type }}.Dockerfile
```
#### .github/workflows/gen-image-compress.yml (vendored, 2 lines changed)

```diff
@@ -32,7 +32,7 @@ jobs:
         uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
         with:
           app-id: ${{ secrets.GH_APP_ID }}
-          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
+          private-key: ${{ secrets.GH_APP_PRIV_KEY }}
       - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
         with:
           token: ${{ steps.generate_token.outputs.token }}
```
```diff
@@ -19,7 +19,7 @@ jobs:
         uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
         with:
           app-id: ${{ secrets.GH_APP_ID }}
-          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
+          private-key: ${{ secrets.GH_APP_PRIV_KEY }}
       - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
         with:
           token: ${{ steps.generate_token.outputs.token }}
```
#### .github/workflows/gh-cherry-pick.yml (vendored, 2 lines changed)

```diff
@@ -14,7 +14,7 @@ jobs:
         if: ${{ env.GH_APP_ID != '' }}
         with:
           app-id: ${{ secrets.GH_APP_ID }}
-          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
+          private-key: ${{ secrets.GH_APP_PRIV_KEY }}
         env:
           GH_APP_ID: ${{ secrets.GH_APP_ID }}
       - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
```
#### .github/workflows/gh-ghcr-retention.yml (vendored, 2 lines changed)

```diff
@@ -19,7 +19,7 @@ jobs:
         uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
         with:
           app-id: ${{ secrets.GH_APP_ID }}
-          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
+          private-key: ${{ secrets.GH_APP_PRIV_KEY }}
       - name: Delete 'dev' containers older than a week
         uses: snok/container-retention-policy@3b0972b2276b171b212f8c4efbca59ebba26eceb # v3.0.1
         with:
```
#### .github/workflows/release-branch-off.yml (vendored, 4 lines changed)

```diff
@@ -32,7 +32,7 @@ jobs:
         uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
         with:
           app-id: ${{ secrets.GH_APP_ID }}
-          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
+          private-key: ${{ secrets.GH_APP_PRIV_KEY }}
       - name: Checkout main
         uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
         with:
@@ -60,7 +60,7 @@ jobs:
         uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
         with:
           app-id: ${{ secrets.GH_APP_ID }}
-          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
+          private-key: ${{ secrets.GH_APP_PRIV_KEY }}
       - name: Checkout main
         uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
         with:
```
#### .github/workflows/release-publish.yml (vendored, 4 lines changed)

```diff
@@ -51,7 +51,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Build Docker Image
         id: push
-        uses: docker/build-push-action@601a80b39c9405e50806ae38af30926f9d957c47 # v6
+        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
         with:
           tags: ${{ steps.ev.outputs.imageTags }}
           file: website/Dockerfile
@@ -119,7 +119,7 @@ jobs:
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Build Docker Image
-        uses: docker/build-push-action@601a80b39c9405e50806ae38af30926f9d957c47 # v6
+        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
         id: push
         with:
           push: true
```
#### .github/workflows/release-tag.yml (vendored, 15 lines changed)

```diff
@@ -70,7 +70,7 @@ jobs:
         uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
         with:
           app-id: ${{ secrets.GH_APP_ID }}
-          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
+          private-key: ${{ secrets.GH_APP_PRIV_KEY }}
       - id: get-user-id
         name: Get GitHub app user ID
         run: echo "user-id=$(gh api "/users/${{ steps.app-token.outputs.app-slug }}[bot]" --jq .id)" >> "$GITHUB_OUTPUT"
@@ -91,6 +91,7 @@ jobs:
           # ID from https://api.github.com/users/authentik-automation[bot]
           git config --global user.name '${{ steps.app-token.outputs.app-slug }}[bot]'
           git config --global user.email '${{ steps.get-user-id.outputs.user-id }}+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com'
+          git pull
           git commit -a -m "release: ${{ inputs.version }}" --allow-empty
           git tag "version/${{ inputs.version }}" HEAD -m "version/${{ inputs.version }}"
           git push --follow-tags
@@ -117,7 +118,7 @@ jobs:
         uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
         with:
           app-id: ${{ secrets.GH_APP_ID }}
-          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
+          private-key: ${{ secrets.GH_APP_PRIV_KEY }}
         repositories: helm
       - id: get-user-id
         name: Get GitHub app user ID
@@ -159,7 +160,7 @@ jobs:
         uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
         with:
           app-id: ${{ secrets.GH_APP_ID }}
-          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
+          private-key: ${{ secrets.GH_APP_PRIV_KEY }}
         repositories: version
       - id: get-user-id
         name: Get GitHub app user ID
@@ -174,21 +175,25 @@ jobs:
         if: "${{ inputs.release_reason == 'feature' }}"
         run: |
           changelog_url="https://docs.goauthentik.io/docs/releases/${{ needs.check-inputs.outputs.major_version }}"
+          reason="${{ inputs.release_reason }}"
           jq \
             --arg version "${{ inputs.version }}" \
             --arg changelog "See ${changelog_url}" \
             --arg changelog_url "${changelog_url}" \
-            '.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url' version.json > version.new.json
+            --arg reason "${reason}" \
+            '.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url | .stable.reason = $reason' version.json > version.new.json
           mv version.new.json version.json
       - name: Bump version
         if: "${{ inputs.release_reason != 'feature' }}"
         run: |
           changelog_url="https://docs.goauthentik.io/docs/releases/${{ needs.check-inputs.outputs.major_version }}#fixed-in-$(echo -n ${{ inputs.version}} | sed 's/\.//g')"
+          reason="${{ inputs.release_reason }}"
           jq \
             --arg version "${{ inputs.version }}" \
             --arg changelog "See ${changelog_url}" \
             --arg changelog_url "${changelog_url}" \
-            '.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url' version.json > version.new.json
+            --arg reason "${reason}" \
+            '.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url | .stable.reason = $reason' version.json > version.new.json
           mv version.new.json version.json
       - name: Create pull request
         uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
```
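Both `jq` invocations now also record why the release happened (`.stable.reason`). For readers who don't speak `jq`, the same update in plain Python — a sketch, with hypothetical argument values; only the file name and the touched keys come from the diff:

```python
import json

def bump_stable(path: str, version: str, changelog_url: str, reason: str) -> None:
    with open(path) as fh:
        data = json.load(fh)
    data["stable"] = {
        **data.get("stable", {}),
        "version": version,
        "changelog": f"See {changelog_url}",
        "changelog_url": changelog_url,
        "reason": reason,  # the field newly added by the diff above
    }
    with open(path, "w") as fh:
        json.dump(data, fh, indent=2)

bump_stable(
    "version.json",
    version="2026.2.2",
    changelog_url="https://docs.goauthentik.io/docs/releases/2026.2",
    reason="bugfix",  # hypothetical value of inputs.release_reason
)
```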
#### .github/workflows/repo-stale.yml (vendored, 2 lines changed)

```diff
@@ -18,7 +18,7 @@ jobs:
         uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
         with:
           app-id: ${{ secrets.GH_APP_ID }}
-          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
+          private-key: ${{ secrets.GH_APP_PRIV_KEY }}
       - uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10
         with:
           repo-token: ${{ steps.generate_token.outputs.token }}
```
```diff
@@ -24,7 +24,7 @@ jobs:
         uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
         with:
           app-id: ${{ secrets.GH_APP_ID }}
-          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
+          private-key: ${{ secrets.GH_APP_PRIV_KEY }}
       - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
         if: ${{ github.event_name != 'pull_request' }}
         with:
```
#### Makefile (8 lines changed)

```diff
@@ -148,11 +148,11 @@ bump: ## Bump authentik version. Usage: make bump version=20xx.xx.xx
 ifndef version
 	$(error Usage: make bump version=20xx.xx.xx )
 endif
-	$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' pyproject.toml
-	$(SED_INPLACE) 's/^VERSION = ".*"/VERSION = "$(version)"/' authentik/__init__.py
+	$(eval current_version := $(shell cat ${PWD}/internal/constants/VERSION))
+	$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' ${PWD}/pyproject.toml
+	$(SED_INPLACE) 's/^VERSION = ".*"/VERSION = "$(version)"/' ${PWD}/authentik/__init__.py
 	$(MAKE) gen-build gen-compose aws-cfn
-	npm version --no-git-tag-version --allow-same-version $(version)
+	cd ${PWD}/web && npm version --no-git-tag-version --allow-same-version $(version)
+	$(SED_INPLACE) "s/\"${current_version}\"/\"$(version)\"/" ${PWD}/package.json ${PWD}/package-lock.json ${PWD}/web/package.json ${PWD}/web/package-lock.json
 	echo -n $(version) > ${PWD}/internal/constants/VERSION

#########################
```
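The reworked `bump` target anchors every path at `${PWD}`, reads the current version from `internal/constants/VERSION`, and rewrites the old version string across all four `package*.json` files in one pass. A rough Python equivalent of that last substitution — a sketch, not the repository's tooling (note that `sed` without `/g` replaces only the first match per line, which the sketch mirrors):

```python
from pathlib import Path

def bump_package_files(current_version: str, new_version: str) -> None:
    """Python analogue of: $(SED_INPLACE) "s/\\"${current_version}\\"/\\"$(version)\\"/" <files>"""
    files = ("package.json", "package-lock.json", "web/package.json", "web/package-lock.json")
    for name in files:
        path = Path(name)
        lines = path.read_text().splitlines(keepends=True)
        # Like sed without /g: replace at most one occurrence per line.
        path.write_text(
            "".join(line.replace(f'"{current_version}"', f'"{new_version}"', 1) for line in lines)
        )

bump_package_files("2026.2.1", "2026.2.2")  # hypothetical version pair
```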
#### SECURITY.md (42 lines changed)

```diff
@@ -18,10 +18,10 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardening

 (.x being the latest patch release for each version)

-| Version | Supported |
-| ---------- | ---------- |
-| 2025.10.x | ✅ |
-| 2025.12.x | ✅ |
+| Version | Supported |
+| --------- | --------- |
+| 2025.12.x | ✅ |
+| 2026.2.x | ✅ |

 ## Reporting a Vulnerability
@@ -60,6 +60,40 @@ authentik reserves the right to reclassify CVSS as necessary. To determine severity
 | 7.0 – 8.9 | High |
 | 9.0 – 10.0 | Critical |

+## Intended functionality
+
+The following capabilities are part of intentional system design and should not be reported as security vulnerabilities:
+
+- Expressions (property mappings/policies/prompts) can execute arbitrary Python code without safeguards.
+
+  This is expected behavior. Any user with permission to create or modify objects containing expression fields can write code that is executed within authentik. If a vulnerability allows a user without the required permissions to write or modify code and have it executed, that would be a valid security report.
+
+  However, the fact that expressions are executed as part of normal operations is not considered a privilege escalation or security vulnerability.
+
+- Blueprints can access all files on the filesystem.
+
+  This access is intentional to allow legitimate configuration and deployment tasks. It does not represent a security problem by itself.
+
+- Importing blueprints allows arbitrary modification of application objects.
+
+  This is intended functionality. This behavior reflects the privileged design of blueprint imports. It is "exploitable" when importing blueprints from untrusted sources without reviewing the blueprint beforehand. However, any method to create, modify or execute blueprints without the required permissions would be a valid security report.
+
+- Flow imports may contain objects other than flows (such as policies, users, groups, etc.)
+
+  This is expected behavior as flow imports are blueprint files.
+
+- Prompt HTML is not escaped.
+
+  Prompts intentionally allow raw HTML, including script tags, so they can be used to create interactive or customized user interface elements. Because of this, scripts within prompts may affect or interact with the surrounding page as designed.
+
+- Open redirects that do not include tokens or other sensitive information are not considered a security vulnerability.
+
+  Redirects that only change navigation flow and do not expose session tokens, API keys, or other confidential data are considered acceptable and do not require reporting.
+
+- Outgoing network requests are not filtered.
+
+  The destinations of outgoing network requests (HTTP, TCP, etc.) made by authentik to configurable endpoints through objects such as OAuth Sources, SSO Providers, and others are not validated. Depending on your threat model, these requests should be restricted at the network level using appropriate firewall or network policies.
+
 ## Disclosure process

 1. Report from Github or Issue is reported via Email as listed above.
```
```diff
@@ -3,7 +3,7 @@
 from functools import lru_cache
 from os import environ

-VERSION = "2026.5.0-rc1"
+VERSION = "2026.2.2"
 ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
```
```diff
@@ -94,7 +94,7 @@ class Backend:

     Args:
         file_path: Relative file path
-        request: Optional Django HttpRequest for fully qualified URL building
+        request: Optional Django HttpRequest for fully qualifed URL building
         use_cache: whether to retrieve the URL from cache

     Returns:
```
```diff
@@ -71,7 +71,7 @@ def postprocess_schema_responses(
 def postprocess_schema_query_params(
     result: dict[str, Any], generator: SchemaGenerator, **kwargs
 ) -> dict[str, Any]:
-    """Optimize pagination parameters, instead of redeclaring parameters for each endpoint
+    """Optimise pagination parameters, instead of redeclaring parameters for each endpoint
     declare them globally and refer to them"""
     LOGGER.debug("Deduplicating query parameters")
     for path in result["paths"].values():
```
```diff
@@ -272,7 +272,7 @@ class Importer:
             and entry.state != BlueprintEntryDesiredState.MUST_CREATED
         ):
             self.logger.debug(
-                "Initialize serializer with instance",
+                "Initialise serializer with instance",
                 model=model,
                 instance=model_instance,
                 pk=model_instance.pk,
@@ -290,7 +290,7 @@ class Importer:
             )
         else:
             self.logger.debug(
-                "Initialized new serializer instance",
+                "Initialised new serializer instance",
                 model=model,
                 **cleanse_dict(updated_identifiers),
             )
```
```diff
@@ -47,7 +47,12 @@ class ApplicationSerializer(ModelSerializer):
     """Application Serializer"""

     launch_url = SerializerMethodField()
-    provider_obj = ProviderSerializer(source="get_provider", required=False, read_only=True)
+    provider_obj = ProviderSerializer(
+        source="get_provider",
+        required=False,
+        read_only=True,
+        allow_null=True,
+    )
     backchannel_providers_obj = ProviderSerializer(
         source="backchannel_providers", required=False, read_only=True, many=True
     )
@@ -154,14 +159,14 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
         return queryset

     def _get_allowed_applications(
-        self, paginated_apps: Iterator[Application], user: User | None = None
+        self, pagined_apps: Iterator[Application], user: User | None = None
     ) -> list[Application]:
         applications = []
         request = self.request._request
         if user:
             request = copy(request)
             request.user = user
-        for application in paginated_apps:
+        for application in pagined_apps:
             engine = PolicyEngine(application, request.user, request)
             engine.build()
             if engine.passing:
```
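Adding `allow_null=True` to a read-only nested serializer matters mostly for the generated API schema: `get_provider` can return `None`, and without the flag schema generators such as drf-spectacular tend to advertise the field as non-nullable, which breaks strictly typed clients on `null`. A condensed DRF sketch of the shape (models and field here are illustrative, not authentik's):

```python
from rest_framework import serializers

class ProviderSerializer(serializers.Serializer):  # stand-in, not authentik's
    name = serializers.CharField()

class ApplicationSerializer(serializers.Serializer):
    # allow_null=True signals that the nested object may legitimately
    # serialize to null when the source callable returns None.
    provider_obj = ProviderSerializer(
        source="get_provider",
        required=False,
        read_only=True,
        allow_null=True,
    )
```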
```diff
@@ -1,5 +1,7 @@
 """authentik core models"""

+import re
+import traceback
 from datetime import datetime, timedelta
 from enum import StrEnum
 from hashlib import sha256
@@ -15,7 +17,6 @@ from django.contrib.sessions.base_session import AbstractBaseSession
 from django.core.validators import validate_slug
 from django.db import models
 from django.db.models import Q, QuerySet, options
-from django.db.models.constants import LOOKUP_SEP
 from django.http import HttpRequest
 from django.utils.functional import cached_property
 from django.utils.timezone import now
@@ -43,6 +44,7 @@ from authentik.lib.models import (
     DomainlessFormattedURLValidator,
     SerializerModel,
 )
+from authentik.lib.utils.inheritance import get_deepest_child
 from authentik.lib.utils.time import timedelta_from_string
 from authentik.policies.models import PolicyBindingModel
 from authentik.rbac.models import Role
```
```diff
@@ -528,23 +530,35 @@ class User(SerializerModel, AttributesMixin, AbstractUser):
             "default: in 30 days). See authentik logs for every will invocation of this "
             "deprecation."
         )
+        stacktrace = traceback.format_stack()
+        # The last line is this function, the next-to-last line is its caller
+        cause = stacktrace[-2] if len(stacktrace) > 1 else "Unknown, see stacktrace in logs"
+        if search := re.search(r'"(.*?)"', cause):
+            cause = f"Property mapping or Expression policy named {search.group(1)}"
+
         LOGGER.warning(
             "deprecation used",
             message=message_logger,
             deprecation=deprecation,
             replacement=replacement,
+            cause=cause,
             stacktrace=stacktrace,
         )
         if not Event.filter_not_expired(
-            action=EventAction.CONFIGURATION_WARNING, context__deprecation=deprecation
+            action=EventAction.CONFIGURATION_WARNING,
+            context__deprecation=deprecation,
+            context__cause=cause,
         ).exists():
             event = Event.new(
                 EventAction.CONFIGURATION_WARNING,
                 deprecation=deprecation,
                 replacement=replacement,
                 message=message_event,
+                cause=cause,
             )
             event.expires = datetime.now() + timedelta(days=30)
             event.save()

         return self.groups

     def set_password(self, raw_password, signal=True, sender=None, request=None):
```
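The new `cause` detection leans on a CPython detail: `traceback.format_stack()` returns one formatted string per frame, ending with the current function, so index `-2` is the caller, and a quoted name inside that frame line identifies the code object that triggered the deprecation. A standalone demonstration of the same trick — names and the compiled snippet are hypothetical, not authentik's evaluator:

```python
import re
import traceback

def deprecated_helper() -> str:
    stack = traceback.format_stack()
    # stack[-1] is this function; stack[-2] is whoever called it.
    cause = stack[-2] if len(stack) > 1 else "Unknown"
    if match := re.search(r'"(.*?)"', cause):
        cause = f"expression named {match.group(1)}"
    return cause

# Expression code objects can carry the mapping/policy name as their
# "filename", which is exactly what the traceback frame line quotes.
code = compile("result = deprecated_helper()", "my-mapping", "exec")
scope = {"deprecated_helper": deprecated_helper}
exec(code, scope)  # demonstration only
print(scope["result"])  # -> expression named my-mapping
```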
```diff
@@ -789,25 +803,7 @@ class Application(SerializerModel, PolicyBindingModel):
         """Get casted provider instance. Needs Application queryset with_provider"""
         if not self.provider:
             return None
-
-        candidates = []
-        base_class = Provider
-        for subclass in base_class.objects.get_queryset()._get_subclasses_recurse(base_class):
-            parent = self.provider
-            for level in subclass.split(LOOKUP_SEP):
-                try:
-                    parent = getattr(parent, level)
-                except AttributeError:
-                    break
-            if parent in candidates:
-                continue
-            idx = subclass.count(LOOKUP_SEP)
-            if type(parent) is not base_class:
-                idx += 1
-            candidates.insert(idx, parent)
-        if not candidates:
-            return None
-        return candidates[-1]
+        return get_deepest_child(self.provider)

     def backchannel_provider_for[T: Provider](self, provider_type: type[T], **kwargs) -> T | None:
         """Get Backchannel provider for a specific type"""
```
```diff
@@ -1119,7 +1115,11 @@ class ExpiringModel(models.Model):
         default the object is deleted. This is less efficient compared
         to bulk deleting objects, but classes like Token() need to change
         values instead of being deleted."""
-        return self.delete(*args, **kwargs)
+        try:
+            return self.delete(*args, **kwargs)
+        except self.DoesNotExist:
+            # Object has already been deleted, so this should be fine
+            return None

     @classmethod
     def filter_not_expired(cls, **kwargs) -> QuerySet[Self]:
```
```diff
@@ -24,7 +24,8 @@ from authentik.root.ws.consumer import build_device_group

 # Arguments: user: User, password: str
 password_changed = Signal()
-# Arguments: credentials: dict[str, any], request: HttpRequest, stage: Stage
+# Arguments: credentials: dict[str, any], request: HttpRequest,
+# stage: Stage, context: dict[str, any]
 login_failed = Signal()

 LOGGER = get_logger()
```
```diff
@@ -44,19 +44,24 @@
         {% endblock %}
       </div>
     </main>
-    <footer aria-label="Site footer" class="pf-c-login__footer pf-m-dark">
-      <ul class="pf-c-list pf-m-inline">
-        {% for link in footer_links %}
-          <li>
-            <a href="{{ link.href }}">{{ link.name }}</a>
-          </li>
-        {% endfor %}
-        <li>
-          <span>
-            {% trans 'Powered by authentik' %}
-          </span>
-        </li>
-      </ul>
+    <footer
+      name="site-footer"
+      aria-label="{% trans 'Site footer' %}"
+      class="pf-c-login__footer pf-m-dark">
+      <div name="flow-links" aria-label="{% trans 'Flow links' %}">
+        <ul class="pf-c-list pf-m-inline" part="list">
+          {% for link in footer_links %}
+            <li part="list-item">
+              <a part="list-item-link" href="{{ link.href }}">{{ link.name }}</a>
+            </li>
+          {% endfor %}
+          <li part="list-item">
+            <span>
+              {% trans 'Powered by authentik' %}
+            </span>
+          </li>
+        </ul>
+      </div>
     </footer>
   </div>
 </div>
```
```diff
@@ -63,7 +63,7 @@ class TestPropertyMappingAPI(APITestCase):
         PropertyMappingSerializer().validate_expression("/")

     def test_types(self):
-        """Test PropertyMapping's types endpoint"""
+        """Test PropertyMappigns's types endpoint"""
         response = self.client.get(
             reverse("authentik_api:propertymapping-types"),
         )
```
```diff
@@ -78,7 +78,7 @@ def generate_key_id_legacy(key_data: str) -> str:
     """Generate Key ID using MD5 (legacy format for backwards compatibility)."""
     if not key_data:
         return ""
-    return md5(key_data.encode("utf-8")).hexdigest()  # nosec
+    return md5(key_data.encode("utf-8"), usedforsecurity=False).hexdigest()  # nosec


 class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel):
```
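The `usedforsecurity=False` flag (available since Python 3.9) declares that MD5 is used only as a stable fingerprint, not as a cryptographic primitive; on restricted-crypto builds of OpenSSL (such as FIPS mode) a plain `md5()` call can raise while the flagged call still works. A quick demonstration:

```python
from hashlib import md5

# Same digest either way; the flag only changes how restricted crypto
# policies treat the call.
digest = md5(b"example-key-data", usedforsecurity=False).hexdigest()
print(len(digest))  # 32 hex characters
```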
```diff
@@ -1,8 +1,11 @@
+from django.utils.translation import gettext_lazy as _
+from rest_framework.exceptions import ValidationError
 from rest_framework.viewsets import ModelViewSet

 from authentik.core.api.used_by import UsedByMixin
 from authentik.endpoints.api.connectors import ConnectorSerializer
-from authentik.endpoints.models import EndpointStage
+from authentik.endpoints.controller import Capabilities
+from authentik.endpoints.models import Connector, EndpointStage
 from authentik.flows.api.stages import StageSerializer


@@ -11,6 +14,13 @@ class EndpointStageSerializer(StageSerializer):

     connector_obj = ConnectorSerializer(source="connector", read_only=True)

+    def validate_connector(self, connector: Connector) -> Connector:
+        conn: Connector = Connector.objects.get_subclass(pk=connector.pk)
+        controller = conn.controller(conn)
+        if Capabilities.STAGE_ENDPOINTS not in controller.capabilities():
+            raise ValidationError(_("Selected connector is not compatible with this stage."))
+        return connector
+
     class Meta:
         model = EndpointStage
         fields = StageSerializer.Meta.fields + [
```
```diff
@@ -18,7 +18,10 @@ from authentik.rbac.decorators import permission_required
 class EnrollmentTokenSerializer(ModelSerializer):

     device_group_obj = DeviceAccessGroupSerializer(
-        source="device_group", read_only=True, required=False
+        source="device_group",
+        read_only=True,
+        required=False,
+        allow_null=True,
     )

     def __init__(self, *args, **kwargs) -> None:
```
```diff
@@ -37,6 +37,8 @@ class AgentEnrollmentAuth(BaseAuthentication):
         token = EnrollmentToken.filter_not_expired(key=key).first()
         if not token:
             raise PermissionDenied()
+        if not token.connector.enabled:
+            raise PermissionDenied()
         CTX_AUTH_VIA.set("endpoint_token_enrollment")
         return (DeviceUser(), token)

@@ -51,6 +53,8 @@ class AgentAuth(BaseAuthentication):
         device_token = DeviceToken.filter_not_expired(key=key).first()
         if not device_token:
             raise PermissionDenied()
+        if not device_token.device.connector.enabled:
+            raise PermissionDenied()
         if device_token.device.device.is_expired:
             raise PermissionDenied()
         CTX_AUTH_VIA.set("endpoint_token")
```
```diff
@@ -8,7 +8,7 @@ from rest_framework.fields import CharField

 from authentik.core.api.utils import PassiveSerializer
 from authentik.endpoints.connectors.agent.models import AgentConnector, EnrollmentToken
-from authentik.endpoints.controller import BaseController
+from authentik.endpoints.controller import BaseController, Capabilities
 from authentik.endpoints.facts import OSFamily


@@ -48,8 +48,8 @@ class AgentConnectorController(BaseController[AgentConnector]):
     def vendor_identifier() -> str:
         return "goauthentik.io/platform"

-    def supported_enrollment_methods(self):
-        return []
+    def capabilities(self) -> list[Capabilities]:
+        return [Capabilities.STAGE_ENDPOINTS]

     def generate_mdm_config(
         self, target_platform: OSFamily, request: HttpRequest, token: EnrollmentToken
```
```diff
@@ -58,6 +58,16 @@ class TestAgentAPI(APITestCase):
         )
         self.assertEqual(response.status_code, 200)

+    def test_enroll_disabled(self):
+        self.connector.enabled = False
+        self.connector.save()
+        response = self.client.post(
+            reverse("authentik_api:agentconnector-enroll"),
+            data={"device_serial": generate_id(), "device_name": "bar"},
+            HTTP_AUTHORIZATION=f"Bearer {self.token.key}",
+        )
+        self.assertEqual(response.status_code, 403)
+
     def test_enroll_token_delete(self):
         response = self.client.post(
             reverse("authentik_api:agentconnector-enroll"),
@@ -104,6 +114,16 @@ class TestAgentAPI(APITestCase):
         )
         self.assertEqual(response.status_code, 200)

+    @reconcile_app("authentik_crypto")
+    def test_config_disabled(self):
+        self.connector.enabled = False
+        self.connector.save()
+        response = self.client.get(
+            reverse("authentik_api:agentconnector-agent-config"),
+            HTTP_AUTHORIZATION=f"Bearer+agent {self.device_token.key}",
+        )
+        self.assertEqual(response.status_code, 403)
+
     def test_check_in(self):
         response = self.client.post(
             reverse("authentik_api:agentconnector-check-in"),
@@ -112,6 +132,16 @@ class TestAgentAPI(APITestCase):
         )
         self.assertEqual(response.status_code, 204)

+    def test_check_in_disabled(self):
+        self.connector.enabled = False
+        self.connector.save()
+        response = self.client.post(
+            reverse("authentik_api:agentconnector-check-in"),
+            data=CHECK_IN_DATA_VALID,
+            HTTP_AUTHORIZATION=f"Bearer+agent {self.device_token.key}",
+        )
+        self.assertEqual(response.status_code, 403)
+
     def test_check_in_token_expired(self):
         self.device_token.expiring = True
         self.device_token.expires = now() - timedelta(hours=1)
```
```diff
@@ -1,5 +1,6 @@
 from hashlib import sha256
 from json import loads
+from unittest.mock import PropertyMock, patch

 from django.urls import reverse
 from jwt import encode
@@ -232,3 +233,43 @@ class TestEndpointStage(FlowTestCase):
         plan = plan()
         self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
         self.assertEqual(plan.context[PLAN_CONTEXT_DEVICE], self.device)

+    def test_endpoint_stage_connector_no_stage_optional(self):
+        flow = create_test_flow()
+        stage = EndpointStage.objects.create(connector=self.connector, mode=StageMode.OPTIONAL)
+        FlowStageBinding.objects.create(stage=stage, target=flow, order=0)
+
+        with patch(
+            "authentik.endpoints.connectors.agent.models.AgentConnector.stage",
+            PropertyMock(return_value=None),
+        ):
+            with self.assertFlowFinishes() as plan:
+                res = self.client.get(
+                    reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
+                )
+                self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
+            plan = plan()
+            self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
+            self.assertNotIn(PLAN_CONTEXT_DEVICE, plan.context)
+
+    def test_endpoint_stage_connector_no_stage_required(self):
+        flow = create_test_flow()
+        stage = EndpointStage.objects.create(connector=self.connector, mode=StageMode.REQUIRED)
+        FlowStageBinding.objects.create(stage=stage, target=flow, order=0)
+
+        with patch(
+            "authentik.endpoints.connectors.agent.models.AgentConnector.stage",
+            PropertyMock(return_value=None),
+        ):
+            with self.assertFlowFinishes() as plan:
+                res = self.client.get(
+                    reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
+                )
+                self.assertStageResponse(
+                    res,
+                    component="ak-stage-access-denied",
+                    error_message="Invalid stage configuration",
+                )
+            plan = plan()
+            self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
+            self.assertNotIn(PLAN_CONTEXT_DEVICE, plan.context)
```
```diff
@@ -8,13 +8,15 @@ from authentik.lib.sentry import SentryIgnoredException
 MERGED_VENDOR = "goauthentik.io/@merged"


-class EnrollmentMethods(models.TextChoices):
+class Capabilities(models.TextChoices):
     # Automatically enrolled through user action
-    AUTOMATIC_USER = "automatic_user"
+    ENROLL_AUTOMATIC_USER = "enroll_automatic_user"
     # Automatically enrolled through connector integration
-    AUTOMATIC_API = "automatic_api"
+    ENROLL_AUTOMATIC_API = "enroll_automatic_api"
     # Manually enrolled with user interaction (user scanning a QR code for example)
-    MANUAL_USER = "manual_user"
+    ENROLL_MANUAL_USER = "enroll_manual_user"
+    # Supported for use with Endpoints stage
+    STAGE_ENDPOINTS = "stage_endpoints"


 class ConnectorSyncException(SentryIgnoredException):
@@ -34,7 +36,7 @@ class BaseController[T: "Connector"]:
     def vendor_identifier() -> str:
         raise NotImplementedError

-    def supported_enrollment_methods(self) -> list[EnrollmentMethods]:
+    def capabilities(self) -> list[Capabilities]:
         return []

     def stage_view_enrollment(self) -> StageView | None:
```
```diff
@@ -1,4 +1,4 @@
-from authentik.endpoints.models import EndpointStage
+from authentik.endpoints.models import Connector, EndpointStage, StageMode
 from authentik.flows.stage import StageView

 PLAN_CONTEXT_ENDPOINT_CONNECTOR = "endpoint_connector"
@@ -6,15 +6,27 @@ PLAN_CONTEXT_ENDPOINT_CONNECTOR = "endpoint_connector"

 class EndpointStageView(StageView):

-    def _get_inner(self):
+    def _get_inner(self) -> StageView | None:
         stage: EndpointStage = self.executor.current_stage
-        inner_stage: type[StageView] | None = stage.connector.stage
+        connector: Connector = stage.connector
+        if not connector.enabled:
+            return None
+        inner_stage: type[StageView] | None = connector.stage
         if not inner_stage:
-            return self.executor.stage_ok()
+            return None
         return inner_stage(self.executor, request=self.request)

     def dispatch(self, request, *args, **kwargs):
-        return self._get_inner().dispatch(request, *args, **kwargs)
+        inner = self._get_inner()
+        if inner is None:
+            stage: EndpointStage = self.executor.current_stage
+            if stage.mode == StageMode.OPTIONAL:
+                return self.executor.stage_ok()
+            else:
+                return self.executor.stage_invalid("Invalid stage configuration")
+        return inner.dispatch(request, *args, **kwargs)

     def cleanup(self):
-        return self._get_inner().cleanup()
+        inner = self._get_inner()
+        if inner is not None:
+            return inner.cleanup()
```
```diff
@@ -6,7 +6,7 @@ from django.utils.translation import gettext_lazy as _
 from dramatiq.actor import actor
 from structlog.stdlib import get_logger

-from authentik.endpoints.controller import EnrollmentMethods
+from authentik.endpoints.controller import Capabilities
 from authentik.endpoints.models import Connector

 LOGGER = get_logger()
@@ -17,11 +17,11 @@ def endpoints_sync(connector_pk: Any):
     connector: Connector | None = (
         Connector.objects.filter(pk=connector_pk).select_subclasses().first()
     )
-    if not connector:
+    if not connector or not connector.enabled:
         return
     controller = connector.controller
     ctrl = controller(connector)
-    if EnrollmentMethods.AUTOMATIC_API not in ctrl.supported_enrollment_methods():
+    if Capabilities.AUTOMATIC_API not in ctrl.capabilities():
         return
     LOGGER.info("Syncing connector", connector=connector.name)
     ctrl.sync_endpoints()
```
#### authentik/endpoints/tests/test_api.py (new file, 41 lines)

```diff
@@ -0,0 +1,41 @@
+from django.urls import reverse
+from rest_framework.test import APITestCase
+
+from authentik.core.tests.utils import create_test_admin_user
+from authentik.endpoints.connectors.agent.models import AgentConnector
+from authentik.endpoints.models import StageMode
+from authentik.enterprise.endpoints.connectors.fleet.models import FleetConnector
+from authentik.lib.generators import generate_id
+
+
+class TestAPI(APITestCase):
+    def setUp(self):
+        self.user = create_test_admin_user()
+        self.client.force_login(self.user)
+
+    def test_endpoint_stage_agent(self):
+        connector = AgentConnector.objects.create(name=generate_id())
+        res = self.client.post(
+            reverse("authentik_api:stages-endpoint-list"),
+            data={
+                "name": generate_id(),
+                "connector": str(connector.pk),
+                "mode": StageMode.REQUIRED,
+            },
+        )
+        self.assertEqual(res.status_code, 201)
+
+    def test_endpoint_stage_fleet(self):
+        connector = FleetConnector.objects.create(name=generate_id())
+        res = self.client.post(
+            reverse("authentik_api:stages-endpoint-list"),
+            data={
+                "name": generate_id(),
+                "connector": str(connector.pk),
+                "mode": StageMode.REQUIRED,
+            },
+        )
+        self.assertEqual(res.status_code, 400)
+        self.assertJSONEqual(
+            res.content, {"connector": ["Selected connector is not compatible with this stage."]}
+        )
```
```diff
@@ -3,6 +3,7 @@ from hmac import compare_digest

 from django.http import Http404, HttpRequest, HttpResponse, HttpResponseBadRequest, QueryDict

+from authentik.common.oauth.constants import QS_LOGIN_HINT
 from authentik.endpoints.connectors.agent.auth import (
     agent_auth_issue_token,
     check_device_policies,
@@ -14,7 +15,7 @@ from authentik.enterprise.policy import EnterprisePolicyAccessView
 from authentik.flows.exceptions import FlowNonApplicableException
 from authentik.flows.models import in_memory_stage
 from authentik.flows.planner import PLAN_CONTEXT_DEVICE, FlowPlanner
-from authentik.flows.stage import StageView
+from authentik.flows.stage import PLAN_CONTEXT_PENDING_USER_IDENTIFIER, StageView
 from authentik.providers.oauth2.utils import HttpResponseRedirectScheme

 QS_AGENT_IA_TOKEN = "ak-auth-ia-token"  # nosec
```
```diff
@@ -64,14 +65,14 @@ class AgentInteractiveAuth(EnterprisePolicyAccessView):

         planner = FlowPlanner(self.connector.authorization_flow)
         planner.allow_empty_flows = True
+        context = {
+            PLAN_CONTEXT_DEVICE: self.device,
+            PLAN_CONTEXT_DEVICE_AUTH_TOKEN: self.auth_token,
+        }
+        if QS_LOGIN_HINT in request.GET:
+            context[PLAN_CONTEXT_PENDING_USER_IDENTIFIER] = request.GET[QS_LOGIN_HINT]
         try:
-            plan = planner.plan(
-                self.request,
-                {
-                    PLAN_CONTEXT_DEVICE: self.device,
-                    PLAN_CONTEXT_DEVICE_AUTH_TOKEN: self.auth_token,
-                },
-            )
+            plan = planner.plan(self.request, context)
         except FlowNonApplicableException:
             return self.handle_no_permission_authenticated()
         plan.append_stage(in_memory_stage(AgentAuthFulfillmentStage))
@@ -84,7 +85,6 @@ class AgentInteractiveAuth(EnterprisePolicyAccessView):


 class AgentAuthFulfillmentStage(StageView):

     def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
         device: Device = self.executor.plan.context.pop(PLAN_CONTEXT_DEVICE)
         auth_token: DeviceAuthenticationToken = self.executor.plan.context.pop(
```
```diff
@@ -6,7 +6,7 @@ from requests import RequestException
 from rest_framework.exceptions import ValidationError

 from authentik.core.models import User
-from authentik.endpoints.controller import BaseController, ConnectorSyncException, EnrollmentMethods
+from authentik.endpoints.controller import BaseController, Capabilities, ConnectorSyncException
 from authentik.endpoints.facts import (
     DeviceFacts,
     OSFamily,
@@ -43,8 +43,8 @@ class FleetController(BaseController[DBC]):
     def vendor_identifier() -> str:
         return "fleetdm.com"

-    def supported_enrollment_methods(self) -> list[EnrollmentMethods]:
-        return [EnrollmentMethods.AUTOMATIC_API]
+    def capabilities(self) -> list[Capabilities]:
+        return [Capabilities.ENROLL_AUTOMATIC_API]

     def _url(self, path: str) -> str:
         return f"{self.connector.url}{path}"
```
```diff
@@ -15,6 +15,7 @@ from django.core.cache import cache
 from django.db.models.query import QuerySet
 from django.utils.timezone import now
 from jwt import PyJWTError, decode, get_unverified_header
+from jwt.algorithms import ECAlgorithm
 from rest_framework.exceptions import ValidationError
 from rest_framework.fields import (
     ChoiceField,
@@ -109,13 +110,20 @@ class LicenseKey:
             intermediate.verify_directly_issued_by(get_licensing_key())
         except (InvalidSignature, TypeError, ValueError, Error):
             raise ValidationError("Unable to verify license") from None
+        _validate_curve_original = ECAlgorithm._validate_curve
         try:
+            # authentik's license are generated with `algorithm="ES512"` and signed with
+            # a key of curve `secp384r1`. Starting with version 2.11.0, pyjwt enforces the spec, see
+            # https://github.com/jpadilla/pyjwt/commit/5b8622773358e56d3d3c0a9acf404809ff34433a
+            # authentik will change its license generation to `algorithm="ES384"` in 2026.
+            # TODO: remove this when the last incompatible license runs out.
+            ECAlgorithm._validate_curve = lambda *_: True
             body = from_dict(
                 LicenseKey,
                 decode(
                     jwt,
                     our_cert.public_key(),
-                    algorithms=["ES512"],
+                    algorithms=["ES384", "ES512"],
                     audience=get_license_aud(),
                     options={"verify_exp": check_expiry, "verify_signature": check_expiry},
                 ),
@@ -125,6 +133,8 @@ class LicenseKey:
             if unverified["aud"] != get_license_aud():
                 raise ValidationError("Invalid Install ID in license") from None
             raise ValidationError("Unable to verify license") from None
+        finally:
+            ECAlgorithm._validate_curve = _validate_curve_original
         return body

     @staticmethod
```
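The workaround is a classic patch-and-restore: temporarily replace a library's validation hook, do the call, and restore the original in `finally` so the relaxation cannot leak into unrelated JWT verification. The pattern reduced to its essentials — the patched class here is a stand-in, not pyjwt's:

```python
import contextlib
from collections.abc import Callable, Iterator

@contextlib.contextmanager
def patched(obj: object, attr: str, replacement: Callable) -> Iterator[None]:
    """Temporarily swap obj.attr, always restoring it afterwards."""
    original = getattr(obj, attr)
    setattr(obj, attr, replacement)
    try:
        yield
    finally:
        setattr(obj, attr, original)  # runs even if the body raises

class Validator:  # hypothetical stand-in for the library hook
    @staticmethod
    def check(value: str) -> bool:
        return value.startswith("strict:")

with patched(Validator, "check", staticmethod(lambda value: True)):
    assert Validator.check("legacy-token")       # relaxed inside the block
assert not Validator.check("legacy-token")      # strictness restored outside
```

In tests, `unittest.mock.patch.object` gives the same restore-on-exit guarantee.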
```diff
@@ -1,11 +1,11 @@
-from datetime import date
+from datetime import datetime

 from django.db.models import BooleanField as ModelBooleanField
 from django.db.models import Case, Q, Value, When
 from django_filters.rest_framework import BooleanFilter, FilterSet
-from drf_spectacular.utils import extend_schema, extend_schema_field
+from drf_spectacular.utils import extend_schema
 from rest_framework.decorators import action
-from rest_framework.fields import DateField, IntegerField, SerializerMethodField
+from rest_framework.fields import IntegerField, SerializerMethodField
 from rest_framework.mixins import CreateModelMixin
 from rest_framework.request import Request
 from rest_framework.response import Response
@@ -21,6 +21,7 @@ from authentik.enterprise.lifecycle.utils import (
     ReviewerUserSerializer,
     admin_link_for_model,
     parse_content_type,
+    start_of_day,
 )
 from authentik.lib.utils.time import timedelta_from_string
```
```diff
@@ -67,13 +68,13 @@ class LifecycleIterationSerializer(EnterpriseRequiredMixin, ModelSerializer):
     def get_object_admin_url(self, iteration: LifecycleIteration) -> str:
         return admin_link_for_model(iteration.object)

-    @extend_schema_field(DateField())
-    def get_grace_period_end(self, iteration: LifecycleIteration) -> date:
-        return iteration.opened_on + timedelta_from_string(iteration.rule.grace_period)
+    def get_grace_period_end(self, iteration: LifecycleIteration) -> datetime:
+        return start_of_day(
+            iteration.opened_on + timedelta_from_string(iteration.rule.grace_period)
+        )

-    @extend_schema_field(DateField())
-    def get_next_review_date(self, iteration: LifecycleIteration):
-        return iteration.opened_on + timedelta_from_string(iteration.rule.interval)
+    def get_next_review_date(self, iteration: LifecycleIteration) -> datetime:
+        return start_of_day(iteration.opened_on + timedelta_from_string(iteration.rule.interval))

     def get_user_can_review(self, iteration: LifecycleIteration) -> bool:
         return iteration.user_can_review(self.context["request"].user)
```
```diff
@@ -0,0 +1,18 @@
+# Generated by Django 5.2.11 on 2026-02-13 09:33
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("authentik_lifecycle", "0001_initial"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="lifecycleiteration",
+            name="opened_on",
+            field=models.DateTimeField(auto_now_add=True),
+        ),
+    ]
```
```diff
@@ -1,3 +1,4 @@
+from datetime import timedelta
 from uuid import uuid4

 from django.contrib.contenttypes.fields import GenericForeignKey
@@ -13,7 +14,7 @@ from rest_framework.serializers import BaseSerializer

 from authentik.blueprints.models import ManagedModel
 from authentik.core.models import Group, User
-from authentik.enterprise.lifecycle.utils import link_for_model
+from authentik.enterprise.lifecycle.utils import link_for_model, start_of_day
 from authentik.events.models import Event, EventAction, NotificationSeverity, NotificationTransport
 from authentik.lib.models import SerializerModel
 from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
```
```diff
@@ -98,7 +99,9 @@ class LifecycleRule(SerializerModel):

     def _get_newly_overdue_iterations(self) -> QuerySet[LifecycleIteration]:
         return self.lifecycleiteration_set.filter(
-            opened_on__lte=timezone.now() - timedelta_from_string(self.grace_period),
+            opened_on__lt=start_of_day(
+                timezone.now() + timedelta(days=1) - timedelta_from_string(self.grace_period)
+            ),
             state=ReviewState.PENDING,
         )

@@ -106,7 +109,9 @@ class LifecycleRule(SerializerModel):
         recent_iteration_ids = LifecycleIteration.objects.filter(
             content_type=self.content_type,
             object_id__isnull=False,
-            opened_on__gte=timezone.now() - timedelta_from_string(self.interval),
+            opened_on__gte=start_of_day(
+                timezone.now() + timedelta(days=1) - timedelta_from_string(self.interval)
+            ),
         ).values_list(Cast("object_id", output_field=self._get_pk_field()), flat=True)

         return self.get_objects().exclude(pk__in=recent_iteration_ids)
```
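Both queries now snap the cutoff to midnight before comparing, so a daily task produces the same answer whether it fires at 00:00:01 or 23:59:59 — only the calendar day of `opened_on` matters, not its time component. A sketch of a `start_of_day` helper consistent with that use (the repository's actual implementation in `authentik.enterprise.lifecycle.utils` may differ):

```python
from datetime import UTC, datetime, timedelta

def start_of_day(moment: datetime) -> datetime:
    """Truncate a datetime to midnight, keeping its timezone."""
    return moment.replace(hour=0, minute=0, second=0, microsecond=0)

now = datetime(2025, 6, 15, 14, 30, tzinfo=UTC)
grace = timedelta(days=1)
# Cutoff for "newly overdue": strictly before today's midnight, shifted by the grace period.
cutoff = start_of_day(now + timedelta(days=1) - grace)
assert cutoff == datetime(2025, 6, 15, 0, 0, tzinfo=UTC)
# An iteration opened any time yesterday is overdue; one opened today is not.
assert datetime(2025, 6, 14, 23, 59, tzinfo=UTC) < cutoff
assert not (datetime(2025, 6, 15, 0, 1, tzinfo=UTC) < cutoff)
```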
```diff
@@ -186,7 +191,7 @@ class LifecycleIteration(SerializerModel, ManagedModel):
     rule = models.ForeignKey(LifecycleRule, null=True, on_delete=models.SET_NULL)

     state = models.CharField(max_length=10, choices=ReviewState, default=ReviewState.PENDING)
-    opened_on = models.DateField(auto_now_add=True)
+    opened_on = models.DateTimeField(auto_now_add=True)

     class Meta:
         indexes = [models.Index(fields=["content_type", "opened_on"])]
```
```diff
@@ -1,3 +1,4 @@
+import datetime as dt
 from datetime import timedelta
 from unittest.mock import patch

@@ -319,7 +320,7 @@ class TestLifecycleModels(TestCase):
             content_type=content_type, object_id=str(app_one.pk), rule=rule_overdue
         )
         LifecycleIteration.objects.filter(pk=iteration.pk).update(
-            opened_on=(timezone.now().date() - timedelta(days=20))
+            opened_on=(timezone.now() - timedelta(days=20))
         )

         # Apply again to trigger overdue logic
@@ -383,7 +384,7 @@ class TestLifecycleModels(TestCase):
             content_type=content_type, object_id=str(app_overdue.pk), rule=rule_overdue
         )
         LifecycleIteration.objects.filter(pk=overdue_iteration.pk).update(
-            opened_on=(timezone.now().date() - timedelta(days=20))
+            opened_on=(timezone.now() - timedelta(days=20))
         )

         # Apply overdue rule to mark iteration as overdue
```
@@ -667,3 +668,178 @@ class TestLifecycleModels(TestCase):
|
||||
reviewers = list(rule.get_reviewers())
|
||||
self.assertIn(explicit_reviewer, reviewers)
|
||||
self.assertIn(group_member, reviewers)
|
||||
|
||||
|
||||
class TestLifecycleDateBoundaries(TestCase):
|
||||
"""Verify that start_of_day normalization ensures correct overdue/due
|
||||
detection regardless of exact task execution time within a day.
|
||||
|
||||
The daily task may run at any point during the day. The start_of_day
|
||||
normalization in _get_newly_overdue_iterations and _get_newly_due_objects
|
||||
ensures that the boundary is always at midnight, so millisecond variations
|
||||
in task execution time do not affect results."""
|
||||
|
||||
def _create_rule_and_iteration(self, grace_period="days=1", interval="days=365"):
|
||||
app = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
content_type = ContentType.objects.get_for_model(Application)
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=str(app.pk),
|
||||
interval=interval,
|
||||
grace_period=grace_period,
|
||||
)
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(app.pk), rule=rule
|
||||
)
|
||||
return app, rule, iteration
|
||||
|
||||
def test_overdue_iteration_opened_yesterday(self):
|
||||
"""grace_period=1 day: iteration opened yesterday at any time is overdue today."""
|
||||
_, rule, iteration = self._create_rule_and_iteration(grace_period="days=1")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
for opened_on in [
|
||||
dt.datetime(2025, 6, 14, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 14, 12, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 14, 23, 59, 59, 999999, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(opened_on=opened_on):
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=opened_on, state=ReviewState.PENDING
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertIn(iteration, list(rule._get_newly_overdue_iterations()))
|
||||
|
||||
def test_not_overdue_iteration_opened_today(self):
|
||||
"""grace_period=1 day: iteration opened today at any time is NOT overdue."""
|
||||
_, rule, iteration = self._create_rule_and_iteration(grace_period="days=1")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
for opened_on in [
|
||||
dt.datetime(2025, 6, 15, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 23, 59, 59, 999999, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(opened_on=opened_on):
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=opened_on, state=ReviewState.PENDING
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertNotIn(iteration, list(rule._get_newly_overdue_iterations()))
|
||||
|
||||
def test_overdue_independent_of_task_execution_time(self):
|
||||
"""Overdue detection gives the same result whether the task runs at 00:00:01 or 23:59:59."""
|
||||
_, rule, iteration = self._create_rule_and_iteration(grace_period="days=1")
|
||||
opened_on = dt.datetime(2025, 6, 14, 18, 0, 0, tzinfo=dt.UTC)
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=opened_on, state=ReviewState.PENDING
|
||||
)
|
||||
for task_time in [
|
||||
dt.datetime(2025, 6, 15, 0, 0, 1, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 12, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 23, 59, 59, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(task_time=task_time):
|
||||
with patch("django.utils.timezone.now", return_value=task_time):
|
||||
self.assertIn(iteration, list(rule._get_newly_overdue_iterations()))
|
||||
|
||||
def test_overdue_boundary_multi_day_grace_period(self):
|
||||
"""grace_period=30 days: overdue after 30 full days, not after 29."""
|
||||
_, rule, iteration = self._create_rule_and_iteration(grace_period="days=30")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
|
||||
# Opened 30 days ago (May 16), should go overdue
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=dt.datetime(2025, 5, 16, 12, 0, 0, tzinfo=dt.UTC),
|
||||
state=ReviewState.PENDING,
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertIn(iteration, list(rule._get_newly_overdue_iterations()))
|
||||
|
||||
# Opened 29 days ago (May 17), should NOT go overdue
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=dt.datetime(2025, 5, 17, 12, 0, 0, tzinfo=dt.UTC),
|
||||
state=ReviewState.PENDING,
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertNotIn(iteration, list(rule._get_newly_overdue_iterations()))
|
||||
|
||||
def test_due_object_iteration_opened_yesterday(self):
|
||||
"""interval=1 day: object with iteration opened yesterday is due for a new review."""
|
||||
app, rule, iteration = self._create_rule_and_iteration(interval="days=1")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
for opened_on in [
|
||||
dt.datetime(2025, 6, 14, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 14, 12, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 14, 23, 59, 59, 999999, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(opened_on=opened_on):
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(opened_on=opened_on)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertIn(app, list(rule._get_newly_due_objects()))
|
||||
|
||||
def test_not_due_object_iteration_opened_today(self):
|
||||
"""interval=1 day: object with iteration opened today is NOT due."""
|
||||
app, rule, iteration = self._create_rule_and_iteration(interval="days=1")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
for opened_on in [
|
||||
dt.datetime(2025, 6, 15, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 23, 59, 59, 999999, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(opened_on=opened_on):
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(opened_on=opened_on)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertNotIn(app, list(rule._get_newly_due_objects()))
|
||||
|
||||
def test_due_independent_of_task_execution_time(self):
|
||||
"""Due detection gives the same result whether the task runs at 00:00:01 or 23:59:59."""
|
||||
app, rule, iteration = self._create_rule_and_iteration(interval="days=1")
|
||||
opened_on = dt.datetime(2025, 6, 14, 18, 0, 0, tzinfo=dt.UTC)
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(opened_on=opened_on)
|
||||
for task_time in [
|
||||
dt.datetime(2025, 6, 15, 0, 0, 1, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 12, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 23, 59, 59, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(task_time=task_time):
|
||||
with patch("django.utils.timezone.now", return_value=task_time):
|
||||
self.assertIn(app, list(rule._get_newly_due_objects()))
|
||||
|
||||
def test_due_boundary_multi_day_interval(self):
|
||||
"""interval=30 days: due after 30 full days, not after 29."""
|
||||
app, rule, iteration = self._create_rule_and_iteration(interval="days=30")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
|
||||
# Previous review opened 30 days ago (May 16), review is due for the object
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=dt.datetime(2025, 5, 16, 12, 0, 0, tzinfo=dt.UTC)
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertIn(app, list(rule._get_newly_due_objects()))
|
||||
|
||||
# Previous review opened 29 days ago (May 17), new review is NOT due
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=dt.datetime(2025, 5, 17, 12, 0, 0, tzinfo=dt.UTC)
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertNotIn(app, list(rule._get_newly_due_objects()))
|
||||
|
||||
def test_apply_overdue_at_boundary(self):
|
||||
"""apply() marks iteration overdue when grace period just expired,
|
||||
regardless of what time the daily task runs."""
|
||||
_, rule, iteration = self._create_rule_and_iteration(
|
||||
grace_period="days=1", interval="days=365"
|
||||
)
|
||||
opened_on = dt.datetime(2025, 6, 14, 20, 0, 0, tzinfo=dt.UTC)
|
||||
for task_time in [
|
||||
dt.datetime(2025, 6, 15, 0, 0, 1, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 23, 59, 59, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(task_time=task_time):
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=opened_on, state=ReviewState.PENDING
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=task_time):
|
||||
rule.apply()
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.OVERDUE)
|
||||
|
||||
@@ -1,3 +1,4 @@
+from datetime import datetime
 from urllib import parse
 
 from django.contrib.contenttypes.models import ContentType
@@ -39,6 +40,10 @@ def link_for_model(model: Model) -> str:
     return f"{reverse("authentik_core:if-admin")}#{admin_link_for_model(model)}"
 
 
+def start_of_day(dt: datetime) -> datetime:
+    return dt.replace(hour=0, minute=0, second=0, microsecond=0)
+
+
 class ContentTypeField(ChoiceField):
     def __init__(self, **kwargs):
         super().__init__(choices=model_choices(), **kwargs)
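The boundary arithmetic used by the two lifecycle queries is easier to see outside the diff. Below is a minimal standalone sketch, not part of the change, with illustrative dates: once the comparison point is normalized to midnight via start_of_day, the overdue verdict no longer depends on what time of day the daily task happens to run.

from datetime import UTC, datetime, timedelta

def start_of_day(dt: datetime) -> datetime:
    return dt.replace(hour=0, minute=0, second=0, microsecond=0)

grace_period = timedelta(days=1)
opened_on = datetime(2025, 6, 14, 18, 0, tzinfo=UTC)  # opened yesterday evening

# Whether the daily task fires just after midnight or just before the next
# one, the midnight-normalized boundary is identical, so the verdict is too.
for task_time in (
    datetime(2025, 6, 15, 0, 0, 1, tzinfo=UTC),
    datetime(2025, 6, 15, 23, 59, 59, tzinfo=UTC),
):
    boundary = start_of_day(task_time + timedelta(days=1) - grace_period)
    assert boundary == datetime(2025, 6, 15, tzinfo=UTC)
    assert opened_on < boundary  # the iteration is overdue on both runs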
@@ -331,7 +331,7 @@ class GoogleWorkspaceGroupTests(TestCase):
             ).exists()
         )
         self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
-        self.assertEqual(len(http.requests()), 5)
+        self.assertEqual(len(http.requests()), 7)
 
     def test_sync_discover_multiple(self):
         """Test group discovery"""
@@ -372,7 +372,7 @@ class GoogleWorkspaceGroupTests(TestCase):
             ).exists()
         )
         self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
-        self.assertEqual(len(http.requests()), 5)
+        self.assertEqual(len(http.requests()), 7)
         # Change response to trigger update
         http.add_response(
             f"https://admin.googleapis.com/admin/directory/v1/groups?customer=my_customer&maxResults=500&orderBy=email&key={self.api_key}&alt=json",
@@ -309,7 +309,7 @@ class GoogleWorkspaceUserTests(TestCase):
             ).exists()
         )
         self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
-        self.assertEqual(len(http.requests()), 5)
+        self.assertEqual(len(http.requests()), 7)
 
     def test_sync_discover_multiple(self):
         """Test user discovery, running multiple times"""
@@ -352,7 +352,7 @@ class GoogleWorkspaceUserTests(TestCase):
             ).exists()
         )
         self.assertFalse(Event.objects.filter(action=EventAction.SYSTEM_EXCEPTION).exists())
-        self.assertEqual(len(http.requests()), 5)
+        self.assertEqual(len(http.requests()), 7)
         # Change response, which will trigger a discovery update
        http.add_response(
            f"https://admin.googleapis.com/admin/directory/v1/users?customer=my_customer&maxResults=500&orderBy=email&key={self.api_key}&alt=json",
@@ -78,7 +78,8 @@ class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProv
     def create(self, user: User):
         """Create user from scratch and create a connection object"""
         microsoft_user = self.to_schema(user, None)
-        self.check_email_valid(microsoft_user.user_principal_name)
+        if microsoft_user.user_principal_name:
+            self.check_email_valid(microsoft_user.user_principal_name)
         with transaction.atomic():
             try:
                 response = self._request(self.client.users.post(microsoft_user))
@@ -118,7 +119,8 @@ class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProv
     def update(self, user: User, connection: MicrosoftEntraProviderUser):
         """Update existing user"""
         microsoft_user = self.to_schema(user, connection)
-        self.check_email_valid(microsoft_user.user_principal_name)
+        if microsoft_user.user_principal_name:
+            self.check_email_valid(microsoft_user.user_principal_name)
         response = self._request(
             self.client.users.by_user_id(connection.microsoft_id).patch(microsoft_user)
         )
@@ -5,6 +5,7 @@ from django.urls import reverse
 from rest_framework.fields import CharField, SerializerMethodField, URLField
 
 from authentik.core.api.providers import ProviderSerializer
+from authentik.core.models import Provider
 from authentik.enterprise.api import EnterpriseRequiredMixin
 from authentik.enterprise.providers.ws_federation.models import WSFederationProvider
 from authentik.enterprise.providers.ws_federation.processors.metadata import MetadataProcessor
@@ -18,6 +19,29 @@ class WSFederationProviderSerializer(EnterpriseRequiredMixin, SAMLProviderSerial
     wtrealm = CharField(source="audience")
     url_wsfed = SerializerMethodField()
 
+    def get_url_download_metadata(self, instance: WSFederationProvider) -> str:
+        """Get metadata download URL"""
+        if "request" not in self._context:
+            return ""
+        request: HttpRequest = self._context["request"]._request
+        try:
+            return request.build_absolute_uri(
+                reverse(
+                    "authentik_providers_ws_federation:metadata-download",
+                    kwargs={"application_slug": instance.application.slug},
+                )
+            )
+        except Provider.application.RelatedObjectDoesNotExist:
+            return request.build_absolute_uri(
+                reverse(
+                    "authentik_api:wsfederationprovider-metadata",
+                    kwargs={
+                        "pk": instance.pk,
+                    },
+                )
+                + "?download"
+            )
+
     def get_url_wsfed(self, instance: WSFederationProvider) -> str:
         """Get WS-Fed url"""
         if "request" not in self._context:
@@ -81,6 +81,8 @@ class SignInProcessor:
         self.sign_in_request = sign_in_request
         self.saml_processor = AssertionProcessor(self.provider, self.request, AuthNRequest())
         self.saml_processor.provider.audience = self.sign_in_request.wtrealm
+        if self.provider.signing_kp:
+            self.saml_processor.provider.sign_assertion = True
 
     def create_response_token(self):
         root = Element(f"{{{NS_WS_FED_TRUST}}}RequestSecurityTokenResponse", nsmap=NS_MAP)
@@ -148,7 +150,8 @@ class SignInProcessor:
     def response(self) -> dict[str, str]:
         root = self.create_response_token()
         assertion = root.xpath("//saml:Assertion", namespaces=NS_MAP)[0]
-        self.saml_processor._sign(assertion)
+        if self.provider.signing_kp:
+            self.saml_processor._sign(assertion)
         str_token = etree.tostring(root).decode("utf-8")  # nosec
         return delete_none_values(
             {
@@ -3,7 +3,7 @@
 from django.urls import path
 
 from authentik.enterprise.providers.ws_federation.api.providers import WSFederationProviderViewSet
-from authentik.enterprise.providers.ws_federation.views import WSFedEntryView
+from authentik.enterprise.providers.ws_federation.views import MetadataDownload, WSFedEntryView
 
 urlpatterns = [
     path(
@@ -11,6 +11,12 @@ urlpatterns = [
         WSFedEntryView.as_view(),
         name="wsfed",
     ),
+    # Metadata
+    path(
+        "<slug:application_slug>/metadata/",
+        MetadataDownload.as_view(),
+        name="metadata-download",
+    ),
 ]
 
 api_urlpatterns = [
@@ -1,6 +1,8 @@
 from django.http import Http404, HttpRequest, HttpResponse
-from django.shortcuts import get_object_or_404
+from django.shortcuts import get_object_or_404, redirect
+from django.urls import reverse
 from django.utils.translation import gettext as _
 from django.views import View
 from structlog.stdlib import get_logger
 
 from authentik.core.models import Application, AuthenticatedSession
@@ -160,3 +162,24 @@ class WSFedFlowFinalView(ChallengeStageView):
                 "attrs": response,
             },
         )
+
+
+class MetadataDownload(View):
+    """Redirect to metadata download"""
+
+    def dispatch(self, request: HttpRequest, application_slug: str) -> HttpResponse:
+        app = Application.objects.filter(slug=application_slug).with_provider().first()
+        if not app:
+            raise Http404
+        provider = app.get_provider()
+        if not provider:
+            raise Http404
+        return redirect(
+            reverse(
+                "authentik_api:wsfederationprovider-metadata",
+                kwargs={
+                    "pk": provider.pk,
+                },
+            )
+            + "?download"
+        )
@@ -93,11 +93,13 @@ def on_login_failed(
     credentials: dict[str, str],
     request: HttpRequest,
     stage: Stage | None = None,
+    context: dict[str, Any] | None = None,
     **kwargs,
 ):
     """Failed Login, authentik custom event"""
     user = User.objects.filter(username=credentials.get("username")).first()
-    Event.new(EventAction.LOGIN_FAILED, **credentials, stage=stage, **kwargs).from_http(
+    context = context or {}
+    Event.new(EventAction.LOGIN_FAILED, **credentials, stage=stage, **context).from_http(
         request, user
     )
@@ -207,3 +207,9 @@ class TestEvents(TestCase):
                 "username": user.username,
             },
         )
+
+    def test_invalid_string(self):
+        """Test creating an event with invalid unicode string data"""
+        event = Event.new("unittest", foo="foo bar \u0000 baz")
+        event.save()
+        self.assertEqual(event.context["foo"], "foo bar  baz")
@@ -36,6 +36,10 @@ ALLOWED_SPECIAL_KEYS = re.compile(
 )
 
 
+def cleanse_str(raw: Any) -> str:
+    return str(raw).replace("\u0000", "")
+
+
 def cleanse_item(key: str, value: Any) -> Any:
     """Cleanse a single item"""
     if isinstance(value, dict):
@@ -66,7 +70,7 @@ def cleanse_dict(source: dict[Any, Any]) -> dict[Any, Any]:
 
 def model_to_dict(model: Model) -> dict[str, Any]:
     """Convert model to dict"""
-    name = str(model)
+    name = cleanse_str(model)
     if hasattr(model, "name"):
         name = model.name
     return {
@@ -133,11 +137,11 @@ def sanitize_item(value: Any) -> Any:  # noqa: PLR0911, PLR0912
     if isinstance(value, ASN):
         return ASN_CONTEXT_PROCESSOR.asn_to_dict(value)
     if isinstance(value, Path):
-        return str(value)
+        return cleanse_str(value)
     if isinstance(value, Exception):
-        return str(value)
+        return cleanse_str(value)
     if isinstance(value, YAMLTag):
-        return str(value)
+        return cleanse_str(value)
     if isinstance(value, Enum):
         return value.value
     if isinstance(value, type):
@@ -161,7 +165,7 @@ def sanitize_item(value: Any) -> Any:  # noqa: PLR0911, PLR0912
             raise ValueError("JSON can't represent timezone-aware times.")
         return value.isoformat()
     if isinstance(value, timedelta):
-        return str(value.total_seconds())
+        return cleanse_str(value.total_seconds())
     if callable(value):
         return {
             "type": "callable",
@@ -174,8 +178,8 @@ def sanitize_item(value: Any) -> Any:  # noqa: PLR0911, PLR0912
     try:
         return DjangoJSONEncoder().default(value)
     except TypeError:
-        return str(value)
-    return str(value)
+        return cleanse_str(value)
+    return cleanse_str(value)
 
 
 def sanitize_dict(source: dict[Any, Any]) -> dict[Any, Any]:
@@ -29,6 +29,12 @@ class RefreshOtherFlowsAfterAuthentication(Flag[bool], key="flows_refresh_others
     visibility = "public"
 
 
+class ContinuousLogin(Flag[bool], key="flows_continuous_login"):
+
+    default = False
+    visibility = "public"
+
+
 class AuthentikFlowsConfig(ManagedAppConfig):
     """authentik flows app config"""
@@ -9,7 +9,15 @@
     {{ block.super }}
     <link rel="prefetch" href="{{ flow_background_url }}" />
     {% if flow.compatibility_mode and not inspector %}
-    <script data-id="shady-dom">ShadyDOM = { force: true };</script>
+    {% comment %}
+    @see {@link web/types/webcomponents.d.ts} for type definitions.
+    {% endcomment %}
+    <script data-id="shady-dom">
+        "use strict";
+
+        window.ShadyDOM = window.ShadyDOM || {}
+        window.ShadyDOM.force = true
+    </script>
     {% endif %}
     {% include "base/header_js.html" %}
     <script data-id="flow-config">
@@ -45,16 +53,11 @@
             slug="{{ flow.slug }}"
             class="pf-c-login"
             data-layout="{{ flow.layout|default:'stacked' }}"
+            loading
         >
             {% include "base/placeholder.html" %}
 
-            <ak-brand-links
-                slot="footer"
-                exportparts="list:brand-links-list, list-item:brand-links-list-item"
-                role="contentinfo"
-                aria-label="{% trans 'Site footer' %}"
-                class="pf-c-login__footer {% if flow.layout == 'stacked' %}pf-m-dark{% endif %}"
-            ></ak-brand-links>
+            <ak-brand-links name="flow-links" slot="footer"></ak-brand-links>
         </ak-flow-executor>
     </div>
 </div>
@@ -141,6 +141,10 @@ web:
   # workers: 2
   threads: 4
   path: /
+  timeout_http_read_header: 5s
+  timeout_http_read: 30s
+  timeout_http_write: 60s
+  timeout_http_idle: 120s
 
 worker:
   processes: 1
@@ -178,3 +182,5 @@ storage:
     # backend: file # or s3
     # file: {}
     # s3: {}
+
+skip_migrations: false
@@ -103,6 +103,7 @@ class SyncTasks:
             )
             users_tasks.run().wait(timeout=provider.get_object_sync_time_limit_ms(User))
             group_tasks.run().wait(timeout=provider.get_object_sync_time_limit_ms(Group))
+            self._sync_cleanup(provider, task)
         except TransientSyncException as exc:
             self.logger.warning("transient sync exception", exc=exc)
             task.warning("Sync encountered a transient exception. Retrying", exc=exc)
@@ -111,6 +112,35 @@ class SyncTasks:
             task.error(exc)
             return
 
+    def _sync_cleanup(self, provider: OutgoingSyncProvider, task: Task):
+        """Delete remote objects that are no longer in scope"""
+        for object_type in (User, Group):
+            try:
+                client = provider.client_for_model(object_type)
+            except TransientSyncException:
+                continue
+            in_scope_pks = set(provider.get_object_qs(object_type).values_list("pk", flat=True))
+            stale = client.connection_type.objects.filter(provider=provider).exclude(
+                **{f"{client.connection_type_query}__pk__in": in_scope_pks}
+            )
+            for connection in stale:
+                try:
+                    client.delete(connection.scim_id)
+                    task.info(
+                        f"Deleted out-of-scope {object_type._meta.verbose_name}",
+                        scim_id=connection.scim_id,
+                    )
+                except NotFoundSyncException:
+                    pass
+                except TransientSyncException as exc:
-                    self.logger.warning("transient error during cleanup", exc=exc)
+                    self.logger.warning(
+                        "Cleanup encountered a transient exception. Retrying", exc=exc
+                    )
+                    raise Retry() from exc
+                except DryRunRejected as exc:
+                    self.logger.info("Rejected dry-run cleanup event", exc=exc)
+
     def sync_objects(
         self,
         object_type: str,
authentik/lib/tests/test_utils_inheritance.py (new file, 119 lines)
@@ -0,0 +1,119 @@
+"""Tests for inheritance helpers."""
+
+from contextlib import contextmanager
+
+from django.db import connection, models
+from django.test import TransactionTestCase
+from django.test.utils import isolate_apps
+
+from authentik.lib.utils.inheritance import get_deepest_child
+
+
+@contextmanager
+def temporary_inheritance_models():
+    """Create a temporary multi-table inheritance graph for testing."""
+    with isolate_apps("authentik.lib.tests"):
+
+        class GrandParent(models.Model):
+            class Meta:
+                app_label = "tests"
+
+            def __str__(self) -> str:
+                return f"GrandParent({self.pk})"
+
+        class Parent(GrandParent):
+            class Meta:
+                app_label = "tests"
+
+            def __str__(self) -> str:
+                return f"Parent({self.pk})"
+
+        class Child(Parent):
+            class Meta:
+                app_label = "tests"
+
+            def __str__(self) -> str:
+                return f"Child({self.pk})"
+
+        class GrandChild(Child):
+            class Meta:
+                app_label = "tests"
+
+            def __str__(self) -> str:
+                return f"GrandChild({self.pk})"
+
+        with connection.schema_editor() as schema_editor:
+            schema_editor.create_model(GrandParent)
+            schema_editor.create_model(Parent)
+            schema_editor.create_model(Child)
+            schema_editor.create_model(GrandChild)
+
+        try:
+            yield GrandParent, Parent, Child, GrandChild
+        finally:
+            with connection.schema_editor() as schema_editor:
+                schema_editor.delete_model(GrandChild)
+                schema_editor.delete_model(Child)
+                schema_editor.delete_model(Parent)
+                schema_editor.delete_model(GrandParent)
+
+
+class TestInheritanceUtils(TransactionTestCase):
+    """Tests for helper functions in authentik.lib.utils.inheritance."""
+
+    def test_get_deepest_child_grandparent_to_parent(self):
+        """GrandParent -> Parent."""
+        with temporary_inheritance_models() as (GrandParent, Parent, _Child, _GrandChild):
+            parent = Parent.objects.create()
+            grandparent = GrandParent.objects.get(pk=parent.pk)
+
+            resolved = get_deepest_child(grandparent)
+
+            self.assertIsInstance(resolved, Parent)
+            self.assertEqual(resolved.pk, parent.pk)
+
+    def test_get_deepest_child_grandparent_to_child(self):
+        """GrandParent -> Child."""
+        with temporary_inheritance_models() as (GrandParent, _Parent, Child, _GrandChild):
+            child = Child.objects.create()
+            grandparent = GrandParent.objects.get(pk=child.pk)
+
+            resolved = get_deepest_child(grandparent)
+
+            self.assertIsInstance(resolved, Child)
+            self.assertEqual(resolved.pk, child.pk)
+
+    def test_get_deepest_child_grandparent_to_grandchild(self):
+        """GrandParent -> GrandChild."""
+        with temporary_inheritance_models() as (GrandParent, _Parent, _Child, GrandChild):
+            grandchild = GrandChild.objects.create()
+            grandparent = GrandParent.objects.get(pk=grandchild.pk)
+
+            resolved = get_deepest_child(grandparent)
+
+            self.assertIsInstance(resolved, GrandChild)
+            self.assertEqual(resolved.pk, grandchild.pk)
+
+    def test_get_deepest_child_parent_to_child(self):
+        """Parent -> Child (start from non-root)."""
+        with temporary_inheritance_models() as (_GrandParent, Parent, Child, _GrandChild):
+            child = Child.objects.create()
+            parent = Parent.objects.get(pk=child.pk)
+
+            resolved = get_deepest_child(parent)
+
+            self.assertIsInstance(resolved, Child)
+            self.assertEqual(resolved.pk, child.pk)
+
+    def test_get_deepest_child_no_queries_with_preloaded_relations(self):
+        """No extra queries when the inheritance chain is fully select_related."""
+        with temporary_inheritance_models() as (GrandParent, _Parent, _Child, GrandChild):
+            grandchild = GrandChild.objects.create()
+            grandparent = GrandParent.objects.select_related("parent__child__grandchild").get(
+                pk=grandchild.pk
+            )
+
+            with self.assertNumQueries(0):
+                resolved = get_deepest_child(grandparent)
+
+            self.assertIsInstance(resolved, GrandChild)

authentik/lib/utils/inheritance.py (new file, 41 lines)
@@ -0,0 +1,41 @@
+from django.db.models import Model, OneToOneField, OneToOneRel
+
+
+def get_deepest_child(parent: Model) -> Model:
+    """
+    In multiple table inheritance, given any ancestor object, get the deepest child object.
+    See https://docs.djangoproject.com/en/dev/topics/db/models/#multi-table-inheritance
+
+    This function does not query the database if `select_related` has been performed on all
+    subclasses of `parent`'s model.
+    """
+
+    # Almost verbatim copy from django-model-utils, see
+    # https://github.com/jazzband/django-model-utils/blob/5.0.0/model_utils/managers.py#L132
+    one_to_one_rels = [
+        field for field in parent._meta.get_fields() if isinstance(field, OneToOneRel)
+    ]
+
+    submodel_fields = [
+        rel
+        for rel in one_to_one_rels
+        if isinstance(rel.field, OneToOneField)
+        and issubclass(rel.field.model, parent._meta.model)
+        and parent._meta.model is not rel.field.model
+        and rel.parent_link
+    ]
+
+    submodel_accessors = [submodel_field.get_accessor_name() for submodel_field in submodel_fields]
+    # End Copy
+
+    child = None
+    for submodel in submodel_accessors:
+        try:
+            child = getattr(parent, submodel)
+            break
+        except AttributeError:
+            continue
+
+    if not child:
+        return parent
+    return get_deepest_child(child)
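A short usage sketch, not part of the diff, reusing the temporary_inheritance_models() helper from the test file above: starting at a root-table row, get_deepest_child() follows the one-to-one parent links down to the most derived instance, and returns its input unchanged when no child row exists.

from authentik.lib.utils.inheritance import get_deepest_child

with temporary_inheritance_models() as (GrandParent, _Parent, _Child, GrandChild):
    grandchild = GrandChild.objects.create()
    root = GrandParent.objects.get(pk=grandchild.pk)
    # Walks the parent-link accessors (parent -> child -> grandchild) and
    # returns the most derived row; recursion stops when no child is found.
    assert isinstance(get_deepest_child(root), GrandChild)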
@@ -185,8 +185,10 @@ class KubernetesObjectReconciler[T]:
 
         patch = self.get_patch()
         if patch is not None:
-            current_json = ApiClient().sanitize_for_serialization(current)
-
+            try:
+                current_json = ApiClient().sanitize_for_serialization(current)
+            except AttributeError:
+                current_json = asdict(current)
             try:
                 if apply_patch(current_json, patch) != current_json:
                     raise NeedsUpdate()
@@ -163,4 +163,5 @@ def outpost_pre_delete_cleanup(sender, instance: Outpost, **_):
 @receiver(pre_delete, sender=AuthenticatedSession)
 def outpost_logout_revoke(sender: type[AuthenticatedSession], instance: AuthenticatedSession, **_):
     """Catch logout by expiring sessions being deleted"""
-    outpost_session_end.send(instance.session.session_key)
+    if Outpost.objects.exists():
+        outpost_session_end.send(instance.session.session_key)
@@ -7,7 +7,6 @@ from socket import gethostname
 from typing import Any
 from urllib.parse import urlparse
 
-from asgiref.sync import async_to_sync
 from channels.layers import get_channel_layer
 from django.core.cache import cache
 from django.utils.translation import gettext_lazy as _
@@ -159,7 +158,7 @@ def outpost_send_update(pk: Any):
     layer = get_channel_layer()
     group = build_outpost_group(outpost.pk)
     LOGGER.debug("sending update", channel=group, outpost=outpost)
-    async_to_sync(layer.group_send)(group, {"type": "event.update"})
+    layer.group_send_blocking(group, {"type": "event.update"})
 
 
 @actor(description=_("Checks the local environment and create Service connections."))
@@ -210,7 +209,7 @@ def outpost_session_end(session_id: str):
     for outpost in Outpost.objects.all():
         LOGGER.info("Sending session end signal to outpost", outpost=outpost)
         group = build_outpost_group(outpost.pk)
-        async_to_sync(layer.group_send)(
+        layer.group_send_blocking(
             group,
             {
                 "type": "event.session.end",
@@ -132,9 +132,14 @@ class PolicyEngine:
             # If we didn't find any static bindings, do nothing
             return
         self.logger.debug("P_ENG: Found static bindings", **matched_bindings)
-        if matched_bindings.get("passing", 0) > 0:
-            # Any passing static binding -> passing
-            passing = True
+        if self.mode == PolicyEngineMode.MODE_ANY:
+            if matched_bindings.get("passing", 0) > 0:
+                # Any passing static binding -> passing
+                passing = True
+        elif self.mode == PolicyEngineMode.MODE_ALL:
+            if matched_bindings.get("passing", 0) == matched_bindings["total"]:
+                # All static bindings are passing -> passing
+                passing = True
         elif matched_bindings["total"] > 0 and matched_bindings.get("passing", 0) < 1:
             # No matching static bindings but at least one is configured -> not passing
             passing = False
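Before this change the static-binding shortcut behaved like OR regardless of the configured engine mode; the rework makes it honor both modes. A minimal sketch, not part of the diff, of the two verdicts for the member/non-member setup exercised by the new static tests further down:

# `matched` stands in for the engine's matched_bindings counters: two static
# group bindings matched the request user, exactly one of them passing.
matched = {"passing": 1, "total": 2}

passing_any = matched["passing"] > 0                  # MODE_ANY: one pass suffices -> True
passing_all = matched["passing"] == matched["total"]  # MODE_ALL: all must pass -> False
assert passing_any is True
assert passing_all is False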
@@ -185,6 +190,16 @@ class PolicyEngine:
             # Only call .recv() if no result is saved, otherwise we just deadlock here
             if not proc_info.result:
                 proc_info.result = proc_info.connection.recv()
+            if proc_info.result and proc_info.result._exec_time:
+                HIST_POLICIES_EXECUTION_TIME.labels(
+                    binding_order=proc_info.binding.order,
+                    binding_target_type=proc_info.binding.target_type,
+                    binding_target_name=proc_info.binding.target_name,
+                    object_type=(
+                        class_to_path(self.request.obj.__class__) if self.request.obj else ""
+                    ),
+                    mode="execute_process",
+                ).observe(proc_info.result._exec_time)
         return self
 
     @property
@@ -2,6 +2,7 @@
 
 from multiprocessing import get_context
 from multiprocessing.connection import Connection
+from time import perf_counter
 
 from django.core.cache import cache
 from sentry_sdk import start_span
@@ -11,8 +12,6 @@ from structlog.stdlib import get_logger
 from authentik.events.models import Event, EventAction
 from authentik.lib.config import CONFIG
 from authentik.lib.utils.errors import exception_to_dict
-from authentik.lib.utils.reflection import class_to_path
-from authentik.policies.apps import HIST_POLICIES_EXECUTION_TIME
 from authentik.policies.exceptions import PolicyException
 from authentik.policies.models import PolicyBinding
 from authentik.policies.types import CACHE_PREFIX, PolicyRequest, PolicyResult
@@ -123,18 +122,9 @@ class PolicyProcess(PROCESS_CLASS):
 
     def profiling_wrapper(self):
         """Run with profiling enabled"""
-        with (
-            start_span(
-                op="authentik.policy.process.execute",
-            ) as span,
-            HIST_POLICIES_EXECUTION_TIME.labels(
-                binding_order=self.binding.order,
-                binding_target_type=self.binding.target_type,
-                binding_target_name=self.binding.target_name,
-                object_type=class_to_path(self.request.obj.__class__) if self.request.obj else "",
-                mode="execute_process",
-            ).time(),
-        ):
+        with start_span(
+            op="authentik.policy.process.execute",
+        ) as span:
             span: Span
             span.set_data("policy", self.binding.policy)
             span.set_data("request", self.request)
@@ -142,8 +132,14 @@ class PolicyProcess(PROCESS_CLASS):
 
     def run(self):  # pragma: no cover
         """Task wrapper to run policy checking"""
+        result = None
         try:
-            self.connection.send(self.profiling_wrapper())
+            start = perf_counter()
+            result = self.profiling_wrapper()
+            end = perf_counter()
+            result._exec_time = max((end - start), 0)
         except Exception as exc:  # noqa
             LOGGER.warning("Policy failed to run", exc=exc)
-            self.connection.send(PolicyResult(False, str(exc)))
+            result = PolicyResult(False, str(exc))
+        finally:
+            self.connection.send(result)
@@ -33,6 +33,9 @@ class TestPolicyEngine(TestCase):
         self.policy_raises = ExpressionPolicy.objects.create(
             name=generate_id(), expression="{{ 0/0 }}"
         )
+        self.group_member = Group.objects.create(name=generate_id())
+        self.user.groups.add(self.group_member)
+        self.group_non_member = Group.objects.create(name=generate_id())
 
     def test_engine_empty(self):
         """Ensure empty policy list passes"""
@@ -51,7 +54,7 @@
         self.assertEqual(result.passing, True)
         self.assertEqual(result.messages, ("dummy",))
 
-    def test_engine_mode_all(self):
+    def test_engine_mode_all_dyn(self):
         """Ensure all policies passes with AND mode (false and true -> false)"""
         pbm = PolicyBindingModel.objects.create(policy_engine_mode=PolicyEngineMode.MODE_ALL)
         PolicyBinding.objects.create(target=pbm, policy=self.policy_false, order=0)
@@ -67,7 +70,7 @@
             ),
         )
 
-    def test_engine_mode_any(self):
+    def test_engine_mode_any_dyn(self):
         """Ensure all policies passes with OR mode (false and true -> true)"""
         pbm = PolicyBindingModel.objects.create(policy_engine_mode=PolicyEngineMode.MODE_ANY)
         PolicyBinding.objects.create(target=pbm, policy=self.policy_false, order=0)
@@ -83,6 +86,26 @@
             ),
         )
 
+    def test_engine_mode_all_static(self):
+        """Ensure static bindings with AND mode (member and non-member -> false)"""
+        pbm = PolicyBindingModel.objects.create(policy_engine_mode=PolicyEngineMode.MODE_ALL)
+        PolicyBinding.objects.create(target=pbm, group=self.group_member, order=0)
+        PolicyBinding.objects.create(target=pbm, group=self.group_non_member, order=1)
+        engine = PolicyEngine(pbm, self.user)
+        result = engine.build().result
+        self.assertEqual(result.passing, False)
+        self.assertEqual(result.messages, ())
+
+    def test_engine_mode_any_static(self):
+        """Ensure static bindings with OR mode (member or non-member -> true)"""
+        pbm = PolicyBindingModel.objects.create(policy_engine_mode=PolicyEngineMode.MODE_ANY)
+        PolicyBinding.objects.create(target=pbm, group=self.group_member, order=0)
+        PolicyBinding.objects.create(target=pbm, group=self.group_non_member, order=1)
+        engine = PolicyEngine(pbm, self.user)
+        result = engine.build().result
+        self.assertEqual(result.passing, True)
+        self.assertEqual(result.messages, ())
+
     def test_engine_negate(self):
         """Test negate flag"""
         pbm = PolicyBindingModel.objects.create()
@@ -77,6 +77,8 @@ class PolicyResult:
 
     log_messages: list[LogEvent] | None
 
+    _exec_time: float | None
+
     def __init__(self, passing: bool, *messages: str):
         self.passing = passing
         self.messages = messages
@@ -84,6 +86,7 @@
         self.source_binding = None
         self.source_results = []
         self.log_messages = []
+        self._exec_time = None
 
     def __repr__(self):
         return self.__str__()
@@ -1,31 +1,19 @@
 """Shared logout stages for SAML and OIDC providers"""
 
 from django.http import HttpResponse
-from rest_framework.fields import CharField, ListField
+from rest_framework.fields import CharField, DictField, ListField
 
 from authentik.common.oauth.constants import PLAN_CONTEXT_OIDC_LOGOUT_IFRAME_SESSIONS
 from authentik.core.api.utils import PassiveSerializer
 from authentik.flows.challenge import Challenge, ChallengeResponse
 from authentik.flows.stage import ChallengeStageView
 from authentik.providers.saml.views.flows import PLAN_CONTEXT_SAML_LOGOUT_IFRAME_SESSIONS
 
 
-class LogoutURL(PassiveSerializer):
-    """Data for a single logout URL"""
-
-    url = CharField()
-    provider_name = CharField(required=False, allow_null=True)
-    binding = CharField(required=False, allow_null=True)
-    saml_request = CharField(required=False, allow_null=True)
-    saml_response = CharField(required=False, allow_null=True)
-    saml_relay_state = CharField(required=False, allow_null=True)
-
-
 class IframeLogoutChallenge(Challenge):
     """Challenge for iframe logout"""
 
     component = CharField(default="ak-provider-iframe-logout")
-    logout_urls = ListField(child=LogoutURL(), default=list)
+    logout_urls = ListField(child=DictField(), default=list)
 
 
 class IframeLogoutChallengeResponse(ChallengeResponse):
@@ -68,6 +68,8 @@ class IDToken:
     at_hash: str | None = None
     # Session ID, https://openid.net/specs/openid-connect-frontchannel-1_0.html#ClaimsContents
     sid: str | None = None
+    # JWT ID, https://www.rfc-editor.org/rfc/rfc7519.html#section-4.1.7
+    jti: str | None = None
 
     claims: dict[str, Any] = field(default_factory=dict)
@@ -81,6 +83,7 @@ class IDToken:
             (token.expires if token.expires is not None else default_token_duration()).timestamp()
         )
         id_token.iss = provider.get_issuer(request)
+        id_token.jti = generate_id()
         id_token.aud = provider.client_id
         id_token.claims = {}
@@ -5,6 +5,7 @@ from urllib.parse import parse_qs, urlparse
 
 from django.test import RequestFactory
 from django.urls import reverse
+from django.utils import translation
 from django.utils.timezone import now
 
 from authentik.blueprints.tests import apply_blueprint
@@ -690,18 +691,21 @@ class TestAuthorize(OAuthTestCase):
         Application.objects.create(name="app", slug="app", provider=provider)
         state = generate_id()
         self.client.logout()
-        response = self.client.get(
-            reverse("authentik_providers_oauth2:authorize"),
-            data={
-                "response_type": "code",
-                "client_id": "test",
-                "state": state,
-                "redirect_uri": "foo://localhost",
-                "ui_locales": "invalid fr",
-            },
-        )
-        parsed = parse_qs(urlparse(response.url).query)
-        self.assertEqual(parsed["locale"], ["fr"])
+        try:
+            response = self.client.get(
+                reverse("authentik_providers_oauth2:authorize"),
+                data={
+                    "response_type": "code",
+                    "client_id": "test",
+                    "state": state,
+                    "redirect_uri": "foo://localhost",
+                    "ui_locales": "invalid fr",
+                },
+            )
+            parsed = parse_qs(urlparse(response.url).query)
+            self.assertEqual(parsed["locale"], ["fr"])
+        finally:
+            translation.deactivate()
 
     @apply_blueprint("default/flow-default-authentication-flow.yaml")
     def test_ui_locales_invalid(self):
@@ -1,6 +1,8 @@
 """Device backchannel tests"""
 
+from base64 import b64encode
 from json import loads
+from urllib.parse import quote
 
 from django.urls import reverse
@@ -26,7 +28,7 @@ class TesOAuth2DeviceBackchannel(OAuthTestCase):
             provider=self.provider,
         )
 
-    def test_backchannel_invalid(self):
+    def test_backchannel_invalid_client_id_via_post_body(self):
         """Test backchannel"""
         res = self.client.post(
             reverse("authentik_providers_oauth2:device"),
@@ -50,7 +52,7 @@ class TesOAuth2DeviceBackchannel(OAuthTestCase):
         )
         self.assertEqual(res.status_code, 400)
 
-    def test_backchannel(self):
+    def test_backchannel_client_id_via_post_body(self):
         """Test backchannel"""
         res = self.client.post(
             reverse("authentik_providers_oauth2:device"),
@@ -61,3 +63,50 @@ class TesOAuth2DeviceBackchannel(OAuthTestCase):
         self.assertEqual(res.status_code, 200)
         body = loads(res.content.decode())
         self.assertEqual(body["expires_in"], 60)
+
+    def test_backchannel_invalid_client_id_via_auth_header(self):
+        """Test backchannel"""
+        creds = b64encode(b"foo:").decode()
+        res = self.client.post(
+            reverse("authentik_providers_oauth2:device"),
+            HTTP_AUTHORIZATION=f"Basic {creds}",
+        )
+        self.assertEqual(res.status_code, 400)
+        res = self.client.post(
+            reverse("authentik_providers_oauth2:device"),
+        )
+        self.assertEqual(res.status_code, 400)
+        # test without application
+        self.application.provider = None
+        self.application.save()
+        res = self.client.post(
+            reverse("authentik_providers_oauth2:device"),
+            data={
+                "client_id": "test",
+            },
+        )
+        self.assertEqual(res.status_code, 400)
+
+    def test_backchannel_client_id_via_auth_header(self):
+        """Test backchannel"""
+        creds = b64encode(f"{self.provider.client_id}:".encode()).decode()
+        res = self.client.post(
+            reverse("authentik_providers_oauth2:device"),
+            HTTP_AUTHORIZATION=f"Basic {creds}",
+        )
+        self.assertEqual(res.status_code, 200)
+        body = loads(res.content.decode())
+        self.assertEqual(body["expires_in"], 60)
+
+    def test_backchannel_client_id_via_auth_header_urlencoded(self):
+        """Test URL-encoded client IDs in Basic auth"""
+        self.provider.client_id = "test/client+id"
+        self.provider.save()
+        creds = b64encode(f"{quote(self.provider.client_id, safe='')}:".encode()).decode()
+        res = self.client.post(
+            reverse("authentik_providers_oauth2:device"),
+            HTTP_AUTHORIZATION=f"Basic {creds}",
+        )
+        self.assertEqual(res.status_code, 200)
+        body = loads(res.content.decode())
+        self.assertEqual(body["expires_in"], 60)
@@ -2,6 +2,7 @@
 
 from base64 import b64encode
 from json import dumps
+from urllib.parse import quote
 
 from django.test import RequestFactory
 from django.urls import reverse
@@ -28,6 +29,7 @@ from authentik.providers.oauth2.models import (
     ScopeMapping,
 )
 from authentik.providers.oauth2.tests.utils import OAuthTestCase
+from authentik.providers.oauth2.utils import extract_client_auth
 from authentik.providers.oauth2.views.token import TokenParams
 
 
@@ -115,6 +117,20 @@ class TestToken(OAuthTestCase):
         params = TokenParams.parse(request, provider, provider.client_id, provider.client_secret)
         self.assertEqual(params.provider, provider)
 
+    def test_extract_client_auth_basic_auth_percent_decodes(self):
+        """test percent-decoding of client credentials in Basic auth"""
+        header = b64encode(
+            f"{quote('client/id', safe='')}:{quote('secret+/==', safe='')}".encode()
+        ).decode()
+        request = self.factory.post("/", HTTP_AUTHORIZATION=f"Basic {header}")
+        self.assertEqual(extract_client_auth(request), ("client/id", "secret+/=="))
+
+    def test_extract_client_auth_basic_auth_preserves_raw_plus(self):
+        """test compatibility with clients that still send raw plus characters"""
+        header = b64encode(b"client:secret+plus").decode()
+        request = self.factory.post("/", HTTP_AUTHORIZATION=f"Basic {header}")
+        self.assertEqual(extract_client_auth(request), ("client", "secret+plus"))
+
     def test_auth_code_view(self):
         """test request param"""
         provider = OAuth2Provider.objects.create(
@@ -2,6 +2,7 @@
 
 from base64 import b64encode
 from json import loads
+from urllib.parse import quote
 
 from django.test import RequestFactory
 from django.urls import reverse
@@ -178,6 +179,41 @@ class TestTokenClientCredentialsStandardCompat(OAuthTestCase):
         self.assertEqual(jwt["given_name"], self.user.name)
         self.assertEqual(jwt["preferred_username"], self.user.username)
 
+    def test_successful_basic_auth_urlencoded_client_secret(self):
+        """test successful with URL-encoded Basic auth credentials"""
+        client_secret = b64encode(f"sa:{self.token.key}".encode()).decode()
+        header = b64encode(
+            f"{quote(self.provider.client_id, safe='')}:{quote(client_secret, safe='')}".encode()
+        ).decode()
+        response = self.client.post(
+            reverse("authentik_providers_oauth2:token"),
+            {
+                "grant_type": GRANT_TYPE_CLIENT_CREDENTIALS,
+                "scope": f"{SCOPE_OPENID} {SCOPE_OPENID_EMAIL} {SCOPE_OPENID_PROFILE}",
+            },
+            HTTP_AUTHORIZATION=f"Basic {header}",
+        )
+        self.assertEqual(response.status_code, 200)
+        body = loads(response.content.decode())
+        self.assertEqual(body["token_type"], TOKEN_TYPE)
+        _, alg = self.provider.jwt_key
+        jwt = decode(
+            body["access_token"],
+            key=self.provider.signing_key.public_key,
+            algorithms=[alg],
+            audience=self.provider.client_id,
+        )
+        self.assertEqual(jwt["given_name"], self.user.name)
+        self.assertEqual(jwt["preferred_username"], self.user.username)
+        jwt = decode(
+            body["id_token"],
+            key=self.provider.signing_key.public_key,
+            algorithms=[alg],
+            audience=self.provider.client_id,
+        )
+        self.assertEqual(jwt["given_name"], self.user.name)
+        self.assertEqual(jwt["preferred_username"], self.user.username)
+
     def test_successful_password(self):
         """test successful (password grant)"""
         response = self.client.post(
@@ -7,7 +7,7 @@ from binascii import Error
 from hashlib import sha256
 from hmac import compare_digest
 from typing import Any
-from urllib.parse import urlparse
+from urllib.parse import unquote, urlparse
 
 from django.http import HttpRequest, HttpResponse, JsonResponse
 from django.http.response import HttpResponseRedirect
@@ -122,6 +122,10 @@ def extract_client_auth(request: HttpRequest) -> tuple[str, str]:
     try:
         user_pass = b64decode(b64_user_pass).decode("utf-8").partition(":")
         client_id, _, client_secret = user_pass
+        # RFC 6749 requires client credentials in Basic auth to be form-encoded first.
+        # We only percent-decode here so raw `+` characters keep their previous meaning.
+        client_id = unquote(client_id)
+        client_secret = unquote(client_secret)
     except (ValueError, Error):
         client_id = client_secret = ""  # nosec
     else:
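The two unquote calls line up with the tests added above: credentials the client form-encoded round-trip cleanly, while a raw `+` is left untouched because unquote (unlike unquote_plus) only rewrites percent escapes. A minimal sketch, not part of the diff, with illustrative values:

from base64 import b64decode, b64encode
from urllib.parse import quote, unquote

client_id, client_secret = "client/id", "secret+/=="
header = b64encode(
    f"{quote(client_id, safe='')}:{quote(client_secret, safe='')}".encode()
).decode()

# Server side: split on the separator colon, then percent-decode each part.
sent_id, _, sent_secret = b64decode(header).decode("utf-8").partition(":")
assert (unquote(sent_id), unquote(sent_secret)) == (client_id, client_secret)

# A raw, unencoded "+" survives unchanged, keeping older clients working.
assert unquote("secret+plus") == "secret+plus"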
@@ -432,7 +432,7 @@ class AuthorizationFlowInitView(BufferedPolicyAccessView):
         return response
 
     def dispatch(self, request: HttpRequest, *args, **kwargs):
-        # Activate language before parsing params (error messages should be localized)
+        # Activate language before parsing params (error messages should be localised)
         return self.dispatch_with_language(request, *args, **kwargs)
 
     def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
@@ -16,7 +16,7 @@ from authentik.lib.config import CONFIG
 from authentik.lib.utils.time import timedelta_from_string
 from authentik.providers.oauth2.errors import DeviceCodeError
 from authentik.providers.oauth2.models import DeviceToken, OAuth2Provider
-from authentik.providers.oauth2.utils import TokenResponse
+from authentik.providers.oauth2.utils import TokenResponse, extract_client_auth
 from authentik.providers.oauth2.views.device_init import QS_KEY_CODE
 
 LOGGER = get_logger()
@@ -32,7 +32,7 @@ class DeviceView(View):
 
     def parse_request(self):
         """Parse incoming request"""
-        client_id = self.request.POST.get("client_id", None)
+        client_id, _ = extract_client_auth(self.request)
         if not client_id:
             raise DeviceCodeError("invalid_client")
         provider = OAuth2Provider.objects.filter(client_id=client_id).first()
@@ -368,7 +368,7 @@ class TokenParams:
     ) -> tuple[dict, OAuthSource] | tuple[None, None]:
         # Fully decode the JWT without verifying the signature, so we can get access to
         # the header.
-        # Get the Key ID from the header, and use that to optimize our source query to only find
+        # Get the Key ID from the header, and use that to optimise our source query to only find
         # sources that have a JWK for that Key ID
         # The Key ID doesn't have a fixed format, but must match between an issued JWT
         # and whatever is returned by the JWKS endpoint
@@ -27,6 +27,8 @@ class TraefikMiddlewareSpecForwardAuth:
 
     trustForwardHeader: bool = field(default=True)
 
+    maxResponseBodySize: int = field(default=1024 * 1024 * 4)
+
 
 @dataclass(slots=True)
 class TraefikMiddlewareSpec:
@@ -140,6 +142,7 @@ class Traefik3MiddlewareReconciler(KubernetesObjectReconciler[TraefikMiddleware]
                 ],
                 authResponseHeadersRegex="",
                 trustForwardHeader=True,
+                maxResponseBodySize=1024 * 1024 * 4,
             )
         ),
    )
@@ -1,13 +0,0 @@
-"""Proxy provider signals"""
-
-from django.db.models.signals import pre_delete
-from django.dispatch import receiver
-
-from authentik.core.models import AuthenticatedSession
-from authentik.providers.proxy.tasks import proxy_on_logout
-
-
-@receiver(pre_delete, sender=AuthenticatedSession)
-def logout_proxy_revoke(sender: type[AuthenticatedSession], instance: AuthenticatedSession, **_):
-    """Catch logout by expiring sessions being deleted"""
-    proxy_on_logout.send(instance.session.session_key)
@@ -1,26 +0,0 @@
-"""proxy provider tasks"""
-
-from asgiref.sync import async_to_sync
-from channels.layers import get_channel_layer
-from django.utils.translation import gettext_lazy as _
-from dramatiq.actor import actor
-
-from authentik.outposts.consumer import build_outpost_group
-from authentik.outposts.models import Outpost, OutpostType
-from authentik.providers.oauth2.id_token import hash_session_key
-
-
-@actor(description=_("Terminate session on Proxy outpost."))
-def proxy_on_logout(session_id: str):
-    layer = get_channel_layer()
-    hashed_session_id = hash_session_key(session_id)
-    for outpost in Outpost.objects.filter(type=OutpostType.PROXY):
-        group = build_outpost_group(outpost.pk)
-        async_to_sync(layer.group_send)(
-            group,
-            {
-                "type": "event.provider.specific",
-                "sub_type": "logout",
-                "session_id": hashed_session_id,
-            },
-        )
@@ -213,7 +213,6 @@ class SAMLProviderSerializer(ProviderSerializer):
             "sign_assertion",
             "sign_response",
             "sign_logout_request",
-            "sign_logout_response",
             "sp_binding",
             "sls_binding",
             "logout_method",
@@ -233,7 +232,7 @@ class SAMLMetadataSerializer(PassiveSerializer):
     """SAML Provider Metadata serializer"""
 
     metadata = CharField(read_only=True)
-    download_url = CharField(read_only=True, required=False)
+    download_url = CharField(read_only=True, required=False, allow_null=True)
 
 
 class SAMLProviderImportSerializer(PassiveSerializer):
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 5.2.7 on 2025-10-24 18:15
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_providers_saml", "0020_samlprovider_logout_method_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="samlprovider",
|
||||
name="sign_logout_response",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -227,7 +227,6 @@ class SAMLProvider(Provider):
|
||||
sign_assertion = models.BooleanField(default=True)
|
||||
sign_response = models.BooleanField(default=False)
|
||||
sign_logout_request = models.BooleanField(default=False)
|
||||
sign_logout_response = models.BooleanField(default=False)
|
||||
|
||||
@property
|
||||
def launch_url(self) -> str | None:
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
"""SAML Logout stages for automatic injection"""
|
||||
|
||||
from django.http import HttpResponse
|
||||
from rest_framework.fields import BooleanField, CharField, ChoiceField
|
||||
from rest_framework.fields import BooleanField, CharField
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.flows.challenge import Challenge, ChallengeResponse, HttpChallengeResponse
|
||||
from authentik.flows.stage import ChallengeStageView
|
||||
from authentik.providers.saml.models import SAMLBindings
|
||||
from authentik.providers.saml.views.flows import PLAN_CONTEXT_SAML_LOGOUT_NATIVE_SESSIONS
|
||||
|
||||
LOGGER = get_logger()
|
||||
@@ -20,16 +19,13 @@ class NativeLogoutChallenge(Challenge):
|
||||
"""Challenge for native browser logout"""
|
||||
|
||||
component = CharField(default="ak-provider-saml-native-logout")
|
||||
provider_name = CharField(required=False)
|
||||
is_complete = BooleanField(required=False, default=False)
|
||||
|
||||
post_url = CharField(required=False)
|
||||
redirect_url = CharField(required=False)
|
||||
|
||||
saml_binding = ChoiceField(choices=SAMLBindings.choices, required=False)
|
||||
saml_request = CharField(required=False)
|
||||
saml_response = CharField(required=False)
|
||||
saml_relay_state = CharField(required=False)
|
||||
relay_state = CharField(required=False)
|
||||
provider_name = CharField(required=False)
|
||||
binding = CharField(required=False)
|
||||
redirect_url = CharField(required=False)
|
||||
is_complete = BooleanField(required=False, default=False)
|
||||
|
||||
|
||||
class NativeLogoutChallengeResponse(ChallengeResponse):
|
||||
|
||||
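For quick reference, the renames this hunk applies to NativeLogoutChallenge, as confirmed by the test changes later in this diff:

# NativeLogoutChallenge field renames (old -> new):
#   saml_relay_state -> relay_state
#   saml_binding (ChoiceField over SAMLBindings) -> binding (plain CharField)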
@@ -1,196 +0,0 @@
"""LogoutResponse processor"""

import base64
from urllib.parse import quote, urlencode

import xmlsec
from lxml import etree
from lxml.etree import Element, SubElement

from authentik.common.saml.constants import (
    DIGEST_ALGORITHM_TRANSLATION_MAP,
    NS_MAP,
    NS_SAML_ASSERTION,
    NS_SAML_PROTOCOL,
    SIGN_ALGORITHM_TRANSFORM_MAP,
)
from authentik.providers.saml.models import SAMLProvider
from authentik.providers.saml.processors.logout_request_parser import LogoutRequest
from authentik.providers.saml.utils import get_random_id
from authentik.providers.saml.utils.encoding import deflate_and_base64_encode
from authentik.providers.saml.utils.time import get_time_string


class LogoutResponseProcessor:
    """Generate a SAML LogoutResponse"""

    provider: SAMLProvider
    logout_request: LogoutRequest
    destination: str | None
    relay_state: str | None
    _issue_instant: str
    _response_id: str

    def __init__(
        self,
        provider: SAMLProvider,
        logout_request: LogoutRequest,
        destination: str | None = None,
        relay_state: str | None = None,
    ):
        self.provider = provider
        self.logout_request = logout_request
        self.destination = destination
        self.relay_state = relay_state or (logout_request.relay_state if logout_request else None)
        self._issue_instant = get_time_string()
        self._response_id = get_random_id()

    def get_issuer(self) -> Element:
        """Get Issuer element"""
        issuer = Element(f"{{{NS_SAML_ASSERTION}}}Issuer")
        issuer.text = self.provider.issuer
        return issuer

    def build(self, status: str = "Success") -> Element:
        """Build a SAML LogoutResponse as etree Element"""
        response = Element(f"{{{NS_SAML_PROTOCOL}}}LogoutResponse", nsmap=NS_MAP)
        response.attrib["Version"] = "2.0"
        response.attrib["IssueInstant"] = self._issue_instant
        response.attrib["ID"] = self._response_id

        if self.destination:
            response.attrib["Destination"] = self.destination

        if self.logout_request and self.logout_request.id:
            response.attrib["InResponseTo"] = self.logout_request.id

        response.append(self.get_issuer())

        # Add Status element
        status_element = SubElement(response, f"{{{NS_SAML_PROTOCOL}}}Status")
        status_code = SubElement(status_element, f"{{{NS_SAML_PROTOCOL}}}StatusCode")
        status_code.attrib["Value"] = f"urn:oasis:names:tc:SAML:2.0:status:{status}"

        return response

    def build_response(self, status: str = "Success") -> str:
        """Build and sign LogoutResponse, return as XML string (not encoded)"""
        response = self.build(status)
        if self.provider.signing_kp and self.provider.sign_logout_response:
            self._add_signature(response)
            self._sign_response(response)
        return etree.tostring(response).decode()

    def encode_post(self, status: str = "Success") -> str:
        """Encode LogoutResponse for POST binding"""
        response = self.build(status)
        if self.provider.signing_kp and self.provider.sign_logout_response:
            self._add_signature(response)
            self._sign_response(response)
        return base64.b64encode(etree.tostring(response)).decode()

    def encode_redirect(self, status: str = "Success") -> str:
        """Encode LogoutResponse for Redirect binding"""
        response = self.build(status)
        # Note: For redirect binding, signatures are added as query parameters, not in XML
        xml_str = etree.tostring(response, encoding="UTF-8", xml_declaration=True)
        return deflate_and_base64_encode(xml_str.decode("UTF-8"))

    def get_redirect_url(self, status: str = "Success") -> str:
        """Build complete logout response URL for redirect binding with signature if needed"""
        encoded_response = self.encode_redirect(status)
        params = {
            "SAMLResponse": encoded_response,
        }

        if self.relay_state:
            params["RelayState"] = self.relay_state

        if self.provider.signing_kp and self.provider.sign_logout_response:
            sig_alg = self.provider.signature_algorithm
            params["SigAlg"] = sig_alg

            # Build the string to sign
            query_string = self._build_signable_query_string(params)

            signature = self._sign_query_string(query_string)
            params["Signature"] = base64.b64encode(signature).decode()

        # Some SPs use query params on their SLS endpoint
        if not self.destination:
            raise ValueError("destination is required for redirect URL")

        separator = "&" if "?" in self.destination else "?"
        return f"{self.destination}{separator}{urlencode(params)}"

    def _add_signature(self, element: Element):
        """Add signature placeholder to element"""
        sign_algorithm_transform = SIGN_ALGORITHM_TRANSFORM_MAP.get(
            self.provider.signature_algorithm, xmlsec.constants.TransformRsaSha1
        )
        signature = xmlsec.template.create(
            element,
            xmlsec.constants.TransformExclC14N,
            sign_algorithm_transform,
            ns=xmlsec.constants.DSigNs,
        )
        element.insert(1, signature)  # Insert after Issuer

    def _sign_response(self, response: Element):
        """Sign the response element"""
        digest_algorithm_transform = DIGEST_ALGORITHM_TRANSLATION_MAP.get(
            self.provider.digest_algorithm, xmlsec.constants.TransformSha1
        )

        xmlsec.tree.add_ids(response, ["ID"])
        signature_node = xmlsec.tree.find_node(response, xmlsec.constants.NodeSignature)

        ref = xmlsec.template.add_reference(
            signature_node,
            digest_algorithm_transform,
            uri="#" + response.attrib["ID"],
        )
        xmlsec.template.add_transform(ref, xmlsec.constants.TransformEnveloped)
        xmlsec.template.add_transform(ref, xmlsec.constants.TransformExclC14N)
        key_info = xmlsec.template.ensure_key_info(signature_node)
        xmlsec.template.add_x509_data(key_info)

        ctx = xmlsec.SignatureContext()
        ctx.key = xmlsec.Key.from_memory(
            self.provider.signing_kp.key_data,  # Use key_data for the private key
            xmlsec.constants.KeyDataFormatPem,
        )
        ctx.key.load_cert_from_memory(
            self.provider.signing_kp.certificate_data, xmlsec.constants.KeyDataFormatPem
        )
        ctx.sign(signature_node)

    def _build_signable_query_string(self, params: dict) -> str:
        """Build query string for signing (order matters per SAML spec)"""
        # SAML spec requires specific order: SAMLResponse, RelayState, SigAlg
        # Values must be URL-encoded individually before concatenation
        ordered = []
        if "SAMLResponse" in params:
            ordered.append(f"SAMLResponse={quote(params['SAMLResponse'], safe='')}")
        if "RelayState" in params:
            ordered.append(f"RelayState={quote(params['RelayState'], safe='')}")
        if "SigAlg" in params:
            ordered.append(f"SigAlg={quote(params['SigAlg'], safe='')}")
        return "&".join(ordered)

    def _sign_query_string(self, query_string: str) -> bytes:
        """Sign the query string for redirect binding"""
        signature_algorithm_transform = SIGN_ALGORITHM_TRANSFORM_MAP.get(
            self.provider.signature_algorithm, xmlsec.constants.TransformRsaSha256
        )

        key = xmlsec.Key.from_memory(
            self.provider.signing_kp.key_data,
            xmlsec.constants.KeyDataFormatPem,
            None,
        )

        ctx = xmlsec.SignatureContext()
        ctx.key = key

        return ctx.sign_binary(query_string.encode("utf-8"), signature_algorithm_transform)
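A minimal usage sketch of the deleted processor (provider and logout_request stand in for real objects from this module); the redirect binding signs the ordered query string SAMLResponse, RelayState, SigAlg and carries the signature in the URL, while the POST binding embeds the signature in the XML itself:

processor = LogoutResponseProcessor(provider, logout_request, destination=provider.sls_url)

# Redirect binding: deflated, base64-encoded response; signature as query parameters, e.g.
# https://sp.example.com/sls?SAMLResponse=...&RelayState=...&SigAlg=...&Signature=...
url = processor.get_redirect_url()

# POST binding: base64-encoded XML document with an enveloped signature.
payload = processor.encode_post()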
@@ -175,16 +175,16 @@ def handle_flow_pre_user_logout(
                logout_data = {
                    "post_url": session.provider.sls_url,
                    "saml_request": form_data["SAMLRequest"],
                    "saml_relay_state": form_data["RelayState"],
                    "relay_state": form_data["RelayState"],
                    "provider_name": session.provider.name,
                    "saml_binding": SAMLBindings.POST,
                    "binding": SAMLBindings.POST,
                }
            else:
                logout_url = processor.get_redirect_url()
                logout_data = {
                    "redirect_url": logout_url,
                    "provider_name": session.provider.name,
                    "saml_binding": SAMLBindings.REDIRECT,
                    "binding": SAMLBindings.REDIRECT,
                }

            native_sessions.append(logout_data)

@@ -5,11 +5,8 @@ from django.contrib.auth import get_user_model
from dramatiq.actor import actor
from structlog.stdlib import get_logger

from authentik.events.models import Event, EventAction
from authentik.providers.saml.models import SAMLProvider
from authentik.providers.saml.processors.logout_request import LogoutRequestProcessor
from authentik.providers.saml.processors.logout_request_parser import LogoutRequest
from authentik.providers.saml.processors.logout_response_processor import LogoutResponseProcessor

LOGGER = get_logger()
User = get_user_model()
@@ -81,86 +78,3 @@ def send_post_logout_request(provider: SAMLProvider, processor: LogoutRequestPro
    )

    return True


@actor(description="Send SAML LogoutResponse to a Service Provider (backchannel)")
def send_saml_logout_response(
    provider_pk: int,
    sls_url: str,
    logout_request_id: str | None = None,
    relay_state: str | None = None,
):
    """Send SAML LogoutResponse to a Service Provider using backchannel (server-to-server)"""
    provider = SAMLProvider.objects.filter(pk=provider_pk).first()
    if not provider:
        LOGGER.error(
            "Provider not found for SAML logout response",
            provider_pk=provider_pk,
        )
        return False

    LOGGER.debug(
        "Sending backchannel SAML logout response",
        provider=provider.name,
        sls_url=sls_url,
    )

    # Create a minimal LogoutRequest object for the response processor
    # We only need the ID and relay_state for building the response
    logout_request = None
    if logout_request_id:
        logout_request = LogoutRequest()
        logout_request.id = logout_request_id
        logout_request.relay_state = relay_state

    # Build the logout response
    processor = LogoutResponseProcessor(
        provider=provider,
        logout_request=logout_request,
        destination=sls_url,
        relay_state=relay_state,
    )

    encoded_response = processor.encode_post()

    form_data = {
        "SAMLResponse": encoded_response,
    }

    if relay_state:
        form_data["RelayState"] = relay_state

    # Send the logout response to the SP
    try:
        response = requests.post(
            sls_url,
            data=form_data,
            timeout=10,
            headers={
                "Content-Type": "application/x-www-form-urlencoded",
            },
            allow_redirects=True,
        )
        response.raise_for_status()

        LOGGER.info(
            "Successfully sent backchannel logout response to SP",
            provider=provider.name,
            sls_url=sls_url,
            status_code=response.status_code,
        )
        return True

    except requests.exceptions.RequestException as exc:
        LOGGER.warning(
            "Failed to send backchannel logout response to SP",
            provider=provider.name,
            sls_url=sls_url,
            error=str(exc),
        )
        Event.new(
            EventAction.CONFIGURATION_ERROR,
            provider=provider,
            message=f"Backchannel logout response failed: {str(exc)}",
        ).save()
        return False

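For context, the actor above is queued from the SLO view added later in this diff; the dispatch looks like this (copied from the sp_slo.py hunk below):

send_saml_logout_response.send(
    provider_pk=provider.pk,
    sls_url=provider.sls_url,
    logout_request_id=logout_request.id if logout_request else None,
    relay_state=relay_state,
)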
@@ -69,7 +69,7 @@ class TestNativeLogoutStageView(TestCase):
            {
                "redirect_url": "https://sp1.example.com/sls?SAMLRequest=encoded",
                "provider_name": "test-provider-1",
                "saml_binding": "redirect",
                "binding": "redirect",
            }
        ]
        stage_view = NativeLogoutStageView(
@@ -85,7 +85,7 @@ class TestNativeLogoutStageView(TestCase):

        # Should return a NativeLogoutChallenge
        self.assertIsInstance(challenge, NativeLogoutChallenge)
        self.assertEqual(challenge.initial_data["saml_binding"], "redirect")
        self.assertEqual(challenge.initial_data["binding"], "redirect")
        self.assertEqual(challenge.initial_data["provider_name"], "test-provider-1")
        self.assertIn("redirect_url", challenge.initial_data)

@@ -102,9 +102,9 @@ class TestNativeLogoutStageView(TestCase):
            {
                "post_url": "https://sp2.example.com/sls",
                "saml_request": "encoded_saml_request",
                "saml_relay_state": "https://idp.example.com/flow/test-flow",
                "relay_state": "https://idp.example.com/flow/test-flow",
                "provider_name": "test-provider-2",
                "saml_binding": "post",
                "binding": "post",
            }
        ]
        stage_view = NativeLogoutStageView(
@@ -120,11 +120,11 @@ class TestNativeLogoutStageView(TestCase):

        # Should return a NativeLogoutChallenge
        self.assertIsInstance(challenge, NativeLogoutChallenge)
        self.assertEqual(challenge.initial_data["saml_binding"], "post")
        self.assertEqual(challenge.initial_data["binding"], "post")
        self.assertEqual(challenge.initial_data["provider_name"], "test-provider-2")
        self.assertEqual(challenge.initial_data["post_url"], "https://sp2.example.com/sls")
        self.assertIn("saml_request", challenge.initial_data)
        self.assertIn("saml_relay_state", challenge.initial_data)
        self.assertIn("relay_state", challenge.initial_data)

    def test_get_challenge_all_complete(self):
        """Test get_challenge when all providers are done"""

@@ -1,139 +0,0 @@
"""logout response tests"""

from defusedxml import ElementTree
from django.test import TestCase

from authentik.blueprints.tests import apply_blueprint
from authentik.common.saml.constants import (
    NS_SAML_ASSERTION,
    NS_SAML_PROTOCOL,
    NS_SIGNATURE,
)
from authentik.core.tests.utils import create_test_cert, create_test_flow
from authentik.providers.saml.models import SAMLPropertyMapping, SAMLProvider
from authentik.providers.saml.processors.logout_request_parser import LogoutRequest
from authentik.providers.saml.processors.logout_response_processor import LogoutResponseProcessor


class TestLogoutResponse(TestCase):
    """Test LogoutResponse processor"""

    @apply_blueprint("system/providers-saml.yaml")
    def setUp(self):
        cert = create_test_cert()
        self.provider: SAMLProvider = SAMLProvider.objects.create(
            authorization_flow=create_test_flow(),
            acs_url="http://testserver/source/saml/provider/acs/",
            sls_url="http://testserver/source/saml/provider/sls/",
            signing_kp=cert,
            verification_kp=cert,
        )
        self.provider.property_mappings.set(SAMLPropertyMapping.objects.all())
        self.provider.save()

    def test_build_response(self):
        """Test building a LogoutResponse"""
        logout_request = LogoutRequest(
            id="test-request-id",
            issuer="test-sp",
            relay_state="test-relay-state",
        )

        processor = LogoutResponseProcessor(
            self.provider, logout_request, destination=self.provider.sls_url
        )
        response_xml = processor.build_response(status="Success")

        # Parse and verify
        root = ElementTree.fromstring(response_xml)
        self.assertEqual(root.tag, f"{{{NS_SAML_PROTOCOL}}}LogoutResponse")
        self.assertEqual(root.attrib["Version"], "2.0")
        self.assertEqual(root.attrib["Destination"], self.provider.sls_url)
        self.assertEqual(root.attrib["InResponseTo"], "test-request-id")

        # Check Issuer
        issuer = root.find(f"{{{NS_SAML_ASSERTION}}}Issuer")
        self.assertEqual(issuer.text, self.provider.issuer)

        # Check Status
        status = root.find(f".//{{{NS_SAML_PROTOCOL}}}StatusCode")
        self.assertEqual(status.attrib["Value"], "urn:oasis:names:tc:SAML:2.0:status:Success")

    def test_build_response_signed(self):
        """Test building a signed LogoutResponse"""
        self.provider.sign_logout_response = True
        self.provider.save()

        logout_request = LogoutRequest(
            id="test-request-id",
            issuer="test-sp",
            relay_state="test-relay-state",
        )

        processor = LogoutResponseProcessor(
            self.provider, logout_request, destination=self.provider.sls_url
        )
        response_xml = processor.build_response(status="Success")

        # Parse and verify signature is present
        root = ElementTree.fromstring(response_xml)
        signature = root.find(f".//{{{NS_SIGNATURE}}}Signature")
        self.assertIsNotNone(signature)

        # Verify signature structure
        signed_info = signature.find(f"{{{NS_SIGNATURE}}}SignedInfo")
        self.assertIsNotNone(signed_info)
        signature_value = signature.find(f"{{{NS_SIGNATURE}}}SignatureValue")
        self.assertIsNotNone(signature_value)
        self.assertIsNotNone(signature_value.text)

    def test_no_inresponseto(self):
        """Test building response without a logout request omits InResponseTo attribute"""
        processor = LogoutResponseProcessor(self.provider, None, destination=self.provider.sls_url)
        response_xml = processor.build_response(status="Success")

        root = ElementTree.fromstring(response_xml)
        self.assertEqual(root.tag, f"{{{NS_SAML_PROTOCOL}}}LogoutResponse")
        self.assertNotIn("InResponseTo", root.attrib)

    def test_no_destination(self):
        """Test building response without destination"""
        logout_request = LogoutRequest(
            id="test-request-id",
            issuer="test-sp",
        )

        processor = LogoutResponseProcessor(self.provider, logout_request, destination=None)
        response_xml = processor.build_response(status="Success")

        root = ElementTree.fromstring(response_xml)
        self.assertNotIn("Destination", root.attrib)

    def test_relay_state_from_logout_request(self):
        """Test that relay_state is taken from logout_request if not provided"""
        logout_request = LogoutRequest(
            id="test-request-id",
            issuer="test-sp",
            relay_state="request-relay-state",
        )

        processor = LogoutResponseProcessor(
            self.provider, logout_request, destination=self.provider.sls_url
        )
        self.assertEqual(processor.relay_state, "request-relay-state")

    def test_relay_state_override(self):
        """Test that explicit relay_state overrides logout_request relay_state"""
        logout_request = LogoutRequest(
            id="test-request-id",
            issuer="test-sp",
            relay_state="request-relay-state",
        )

        processor = LogoutResponseProcessor(
            self.provider,
            logout_request,
            destination=self.provider.sls_url,
            relay_state="explicit-relay-state",
        )
        self.assertEqual(processor.relay_state, "explicit-relay-state")
@@ -1,291 +0,0 @@
"""Tests for SAML provider tasks"""

from unittest.mock import MagicMock, patch

from django.test import TestCase
from requests.exceptions import ConnectionError, HTTPError

from authentik.common.saml.constants import SAML_NAME_ID_FORMAT_EMAIL
from authentik.core.tests.utils import create_test_cert, create_test_flow
from authentik.providers.saml.models import SAMLProvider
from authentik.providers.saml.tasks import (
    send_post_logout_request,
    send_saml_logout_request,
    send_saml_logout_response,
)


class TestSendSamlLogoutResponse(TestCase):
    """Tests for send_saml_logout_response task"""

    def setUp(self):
        """Set up test fixtures"""
        self.cert = create_test_cert()
        self.flow = create_test_flow()

        self.provider = SAMLProvider.objects.create(
            name="test-provider",
            authorization_flow=self.flow,
            acs_url="https://sp.example.com/acs",
            sls_url="https://sp.example.com/sls",
            issuer="https://idp.example.com",
            signing_kp=self.cert,
        )

    @patch("authentik.providers.saml.tasks.requests.post")
    def test_successful_logout_response(self, mock_post):
        """Test successful POST to SP returns True"""
        mock_response = MagicMock()
        mock_response.status_code = 200
        mock_response.raise_for_status = MagicMock()
        mock_post.return_value = mock_response

        result = send_saml_logout_response(
            provider_pk=self.provider.pk,
            sls_url=self.provider.sls_url,
            logout_request_id="test-request-id",
            relay_state="https://sp.example.com/return",
        )

        self.assertTrue(result)
        mock_post.assert_called_once()

        # Verify the POST was made with correct data
        call_kwargs = mock_post.call_args[1]
        self.assertEqual(call_kwargs["timeout"], 10)
        self.assertEqual(
            call_kwargs["headers"]["Content-Type"], "application/x-www-form-urlencoded"
        )

        # Verify form data contains SAMLResponse and RelayState
        form_data = call_kwargs["data"]
        self.assertIn("SAMLResponse", form_data)
        self.assertIn("RelayState", form_data)
        self.assertEqual(form_data["RelayState"], "https://sp.example.com/return")

    @patch("authentik.providers.saml.tasks.requests.post")
    def test_successful_logout_response_no_relay_state(self, mock_post):
        """Test successful POST without relay_state"""
        mock_response = MagicMock()
        mock_response.status_code = 200
        mock_response.raise_for_status = MagicMock()
        mock_post.return_value = mock_response

        result = send_saml_logout_response(
            provider_pk=self.provider.pk,
            sls_url=self.provider.sls_url,
            logout_request_id="test-request-id",
            relay_state=None,
        )

        self.assertTrue(result)

        # Verify form data does not contain RelayState
        form_data = mock_post.call_args[1]["data"]
        self.assertIn("SAMLResponse", form_data)
        self.assertNotIn("RelayState", form_data)

    def test_provider_not_found(self):
        """Test returns False when provider doesn't exist"""
        result = send_saml_logout_response(
            provider_pk=99999,  # Non-existent provider
            sls_url="https://sp.example.com/sls",
            logout_request_id="test-request-id",
            relay_state=None,
        )

        self.assertFalse(result)

    @patch("authentik.providers.saml.tasks.Event")
    @patch("authentik.providers.saml.tasks.requests.post")
    def test_http_error_creates_event(self, mock_post, mock_event_class):
        """Test HTTP error creates an error event"""
        mock_response = MagicMock()
        mock_response.status_code = 500
        mock_response.raise_for_status.side_effect = HTTPError("500 Server Error")
        mock_post.return_value = mock_response

        mock_event = MagicMock()
        mock_event_class.new.return_value = mock_event

        result = send_saml_logout_response(
            provider_pk=self.provider.pk,
            sls_url=self.provider.sls_url,
            logout_request_id="test-request-id",
            relay_state=None,
        )

        self.assertFalse(result)

        # Verify error event was created
        mock_event_class.new.assert_called_once()
        call_kwargs = mock_event_class.new.call_args[1]
        self.assertIn("Backchannel logout response failed", call_kwargs["message"])
        mock_event.save.assert_called_once()


class TestSendSamlLogoutRequest(TestCase):
    """Tests for send_saml_logout_request task"""

    def setUp(self):
        """Set up test fixtures"""
        self.cert = create_test_cert()
        self.flow = create_test_flow()

        self.provider = SAMLProvider.objects.create(
            name="test-provider",
            authorization_flow=self.flow,
            acs_url="https://sp.example.com/acs",
            sls_url="https://sp.example.com/sls",
            issuer="https://idp.example.com",
            signing_kp=self.cert,
        )

    @patch("authentik.providers.saml.tasks.requests.post")
    def test_successful_logout_request(self, mock_post):
        """Test successful POST logout request returns True"""
        mock_response = MagicMock()
        mock_response.status_code = 200
        mock_response.raise_for_status = MagicMock()
        mock_post.return_value = mock_response

        result = send_saml_logout_request(
            provider_pk=self.provider.pk,
            sls_url=self.provider.sls_url,
            name_id="test@example.com",
            name_id_format=SAML_NAME_ID_FORMAT_EMAIL,
            session_index="test-session-123",
        )

        self.assertTrue(result)
        mock_post.assert_called_once()

        # Verify the POST was made with correct data
        call_kwargs = mock_post.call_args[1]
        self.assertEqual(call_kwargs["timeout"], 10)
        self.assertEqual(
            call_kwargs["headers"]["Content-Type"], "application/x-www-form-urlencoded"
        )

        # Verify form data contains SAMLRequest
        form_data = call_kwargs["data"]
        self.assertIn("SAMLRequest", form_data)

    def test_provider_not_found(self):
        """Test returns False when provider doesn't exist"""
        result = send_saml_logout_request(
            provider_pk=99999,  # Non-existent provider
            sls_url="https://sp.example.com/sls",
            name_id="test@example.com",
            name_id_format=SAML_NAME_ID_FORMAT_EMAIL,
            session_index="test-session-123",
        )

        self.assertFalse(result)

    @patch("authentik.providers.saml.tasks.requests.post")
    def test_http_error_raises(self, mock_post):
        """Test HTTP error raises exception (no try/catch in send_post_logout_request)"""
        mock_response = MagicMock()
        mock_response.status_code = 500
        mock_response.raise_for_status.side_effect = HTTPError("500 Server Error")
        mock_post.return_value = mock_response

        with self.assertRaises(HTTPError):
            send_saml_logout_request(
                provider_pk=self.provider.pk,
                sls_url=self.provider.sls_url,
                name_id="test@example.com",
                name_id_format=SAML_NAME_ID_FORMAT_EMAIL,
                session_index="test-session-123",
            )


class TestSendPostLogoutRequest(TestCase):
    """Tests for send_post_logout_request function"""

    def setUp(self):
        """Set up test fixtures"""
        self.cert = create_test_cert()
        self.flow = create_test_flow()

        self.provider = SAMLProvider.objects.create(
            name="test-provider",
            authorization_flow=self.flow,
            acs_url="https://sp.example.com/acs",
            sls_url="https://sp.example.com/sls",
            issuer="https://idp.example.com",
            signing_kp=self.cert,
        )

    @patch("authentik.providers.saml.tasks.requests.post")
    def test_successful_post(self, mock_post):
        """Test successful POST returns True"""
        from authentik.providers.saml.processors.logout_request import LogoutRequestProcessor

        mock_response = MagicMock()
        mock_response.status_code = 200
        mock_response.raise_for_status = MagicMock()
        mock_post.return_value = mock_response

        processor = LogoutRequestProcessor(
            provider=self.provider,
            user=None,
            destination=self.provider.sls_url,
            name_id="test@example.com",
            name_id_format=SAML_NAME_ID_FORMAT_EMAIL,
            session_index="test-session-123",
        )

        result = send_post_logout_request(self.provider, processor)

        self.assertTrue(result)
        mock_post.assert_called_once()

    @patch("authentik.providers.saml.tasks.requests.post")
    def test_with_relay_state(self, mock_post):
        """Test POST includes RelayState when present"""
        from authentik.providers.saml.processors.logout_request import LogoutRequestProcessor

        mock_response = MagicMock()
        mock_response.status_code = 200
        mock_response.raise_for_status = MagicMock()
        mock_post.return_value = mock_response

        processor = LogoutRequestProcessor(
            provider=self.provider,
            user=None,
            destination=self.provider.sls_url,
            name_id="test@example.com",
            name_id_format=SAML_NAME_ID_FORMAT_EMAIL,
            session_index="test-session-123",
            relay_state="https://sp.example.com/return",
        )

        result = send_post_logout_request(self.provider, processor)

        self.assertTrue(result)

        # Verify RelayState is included
        form_data = mock_post.call_args[1]["data"]
        self.assertIn("RelayState", form_data)
        self.assertEqual(form_data["RelayState"], "https://sp.example.com/return")

    @patch("authentik.providers.saml.tasks.requests.post")
    def test_connection_error_raises(self, mock_post):
        """Test connection error raises exception"""
        from authentik.providers.saml.processors.logout_request import LogoutRequestProcessor

        mock_post.side_effect = ConnectionError("Connection refused")

        processor = LogoutRequestProcessor(
            provider=self.provider,
            user=None,
            destination=self.provider.sls_url,
            name_id="test@example.com",
            name_id_format=SAML_NAME_ID_FORMAT_EMAIL,
            session_index="test-session-123",
        )

        with self.assertRaises(ConnectionError):
            send_post_logout_request(self.provider, processor)
@@ -14,9 +14,7 @@ from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.providers.saml.exceptions import CannotHandleAssertion
from authentik.providers.saml.models import SAMLBindings, SAMLLogoutMethods, SAMLProvider
from authentik.providers.saml.processors.logout_request import LogoutRequestProcessor
from authentik.providers.saml.views.flows import (
    PLAN_CONTEXT_SAML_RELAY_STATE,
)
from authentik.providers.saml.views.flows import PLAN_CONTEXT_SAML_RELAY_STATE
from authentik.providers.saml.views.sp_slo import (
    SPInitiatedSLOBindingPOSTView,
    SPInitiatedSLOBindingRedirectView,
@@ -93,32 +91,33 @@ class TestSPInitiatedSLOViews(TestCase):
        self.assertEqual(logout_request.issuer, self.provider.issuer)
        self.assertEqual(logout_request.session_index, "test-session-123")

    def test_redirect_view_handles_logout_response_with_relay_state(self):
        """Test that redirect view handles logout response with RelayState"""
        # Use raw URL (no encoding needed)
        relay_state = "https://idp.example.com/flow/return"
    def test_redirect_view_handles_logout_response_with_plan_context(self):
        """Test that redirect view always redirects to plan context URL, ignoring RelayState"""
        plan_relay_state = "https://idp.example.com/flow/return"

        # Create request with SAML logout response
        request = self.factory.get(
            f"/slo/redirect/{self.application.slug}/",
            {
                "SAMLResponse": "dummy-response",
                "RelayState": relay_state,
                "RelayState": "https://somewhere-else.example.com/return",
            },
        )
        request.session = {}
        plan = FlowPlan(flow_pk="test-flow")
        plan.context[PLAN_CONTEXT_SAML_RELAY_STATE] = plan_relay_state
        request.session = {SESSION_KEY_PLAN: plan}
        request.brand = self.brand

        view = SPInitiatedSLOBindingRedirectView()
        view.setup(request, application_slug=self.application.slug)
        response = view.dispatch(request, application_slug=self.application.slug)

        # Should redirect to relay state URL
        # Should redirect to plan context URL, not the request's RelayState
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, relay_state)
        self.assertEqual(response.url, plan_relay_state)

    def test_redirect_view_handles_logout_response_plain_relay_state(self):
        """Test that redirect view handles logout response with plain RelayState"""
    def test_redirect_view_ignores_relay_state_without_plan(self):
        """Test that redirect view ignores RelayState and falls back to root when no plan context"""
        relay_state = "https://sp.example.com/plain"

        # Create request with SAML logout response
@@ -136,9 +135,9 @@ class TestSPInitiatedSLOViews(TestCase):
        view.setup(request, application_slug=self.application.slug)
        response = view.dispatch(request, application_slug=self.application.slug)

        # Should redirect to plain relay state
        # Should ignore relay_state and redirect to root (no plan context)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, relay_state)
        self.assertEqual(response.url, reverse("authentik_core:root-redirect"))

    def test_redirect_view_handles_logout_response_no_relay_state_with_plan_context(self):
        """Test that redirect view uses plan context fallback when no RelayState"""
@@ -230,29 +229,30 @@ class TestSPInitiatedSLOViews(TestCase):
        self.assertEqual(logout_request.issuer, self.provider.issuer)
        self.assertEqual(logout_request.session_index, "test-session-123")

    def test_post_view_handles_logout_response_with_relay_state(self):
        """Test that POST view handles logout response with RelayState"""
        # Use raw URL (no encoding needed)
        relay_state = "https://idp.example.com/flow/return"
    def test_post_view_handles_logout_response_with_plan_context(self):
        """Test that POST view always redirects to plan context URL, ignoring RelayState"""
        plan_relay_state = "https://idp.example.com/flow/return"

        # Create POST request with SAML logout response
        request = self.factory.post(
            f"/slo/post/{self.application.slug}/",
            {
                "SAMLResponse": "dummy-response",
                "RelayState": relay_state,
                "RelayState": "https://somewhere-else.example.com/return",
            },
        )
        request.session = {}
        plan = FlowPlan(flow_pk="test-flow")
        plan.context[PLAN_CONTEXT_SAML_RELAY_STATE] = plan_relay_state
        request.session = {SESSION_KEY_PLAN: plan}
        request.brand = self.brand

        view = SPInitiatedSLOBindingPOSTView()
        view.setup(request, application_slug=self.application.slug)
        response = view.dispatch(request, application_slug=self.application.slug)

        # Should redirect to relay state URL
        # Should redirect to plan context URL, not the request's RelayState
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, relay_state)
        self.assertEqual(response.url, plan_relay_state)

    def test_post_view_handles_logout_response_no_relay_state_with_plan_context(self):
        """Test that POST view uses plan context fallback when no RelayState"""
@@ -419,7 +419,7 @@ class TestSPInitiatedSLOViews(TestCase):
        view.resolve_provider_application()

    def test_relay_state_decoding_failure(self):
        """Test handling of RelayState that's a path"""
        """Test that arbitrary path RelayState is ignored and redirects to root"""
        # Create request with relay state that is a path
        request = self.factory.get(
            f"/slo/redirect/{self.application.slug}/",
@@ -435,9 +435,73 @@ class TestSPInitiatedSLOViews(TestCase):
        view.setup(request, application_slug=self.application.slug)
        response = view.dispatch(request, application_slug=self.application.slug)

        # Should treat it as plain URL and redirect to it
        # Should ignore relay_state and redirect to root (no plan context)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, "/some/invalid/path")
        self.assertEqual(response.url, reverse("authentik_core:root-redirect"))

    def test_redirect_view_blocks_external_relay_state(self):
        """Test that redirect view ignores external malicious URL and redirects to root"""
        request = self.factory.get(
            f"/slo/redirect/{self.application.slug}/",
            {
                "SAMLResponse": "dummy-response",
                "RelayState": "https://evil.com/phishing",
            },
        )
        request.session = {}
        request.brand = self.brand

        view = SPInitiatedSLOBindingRedirectView()
        view.setup(request, application_slug=self.application.slug)
        response = view.dispatch(request, application_slug=self.application.slug)

        # Should ignore relay_state and redirect to root (no plan context)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, reverse("authentik_core:root-redirect"))

    def test_redirect_view_ignores_relay_state_uses_plan_context(self):
        """Test that redirect view always uses plan context URL regardless of RelayState"""
        plan_relay_state = "https://authentik.example.com/if/flow/logout/"

        request = self.factory.get(
            f"/slo/redirect/{self.application.slug}/",
            {
                "SAMLResponse": "dummy-response",
                "RelayState": "https://evil.com/phishing",
            },
        )
        plan = FlowPlan(flow_pk="test-flow")
        plan.context[PLAN_CONTEXT_SAML_RELAY_STATE] = plan_relay_state
        request.session = {SESSION_KEY_PLAN: plan}
        request.brand = self.brand

        view = SPInitiatedSLOBindingRedirectView()
        view.setup(request, application_slug=self.application.slug)
        response = view.dispatch(request, application_slug=self.application.slug)

        # Should always use plan context value, ignoring malicious RelayState
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, plan_relay_state)

    def test_post_view_ignores_external_relay_state(self):
        """Test that POST view ignores external RelayState and redirects to root"""
        request = self.factory.post(
            f"/slo/post/{self.application.slug}/",
            {
                "SAMLResponse": "dummy-response",
                "RelayState": "https://evil.com/phishing",
            },
        )
        request.session = {}
        request.brand = self.brand

        view = SPInitiatedSLOBindingPOSTView()
        view.setup(request, application_slug=self.application.slug)
        response = view.dispatch(request, application_slug=self.application.slug)

        # Should ignore relay_state and redirect to root (no plan context)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, reverse("authentik_core:root-redirect"))


class TestSPInitiatedSLOLogoutMethods(TestCase):

@@ -15,22 +15,10 @@ from authentik.flows.stage import SessionEndStage
from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.lib.views import bad_request_message
from authentik.policies.views import PolicyAccessView
from authentik.providers.iframe_logout import IframeLogoutStageView
from authentik.providers.saml.exceptions import CannotHandleAssertion
from authentik.providers.saml.models import (
    SAMLBindings,
    SAMLLogoutMethods,
    SAMLProvider,
    SAMLSession,
)
from authentik.providers.saml.native_logout import NativeLogoutStageView
from authentik.providers.saml.models import SAMLProvider, SAMLSession
from authentik.providers.saml.processors.logout_request_parser import LogoutRequestParser
from authentik.providers.saml.processors.logout_response_processor import LogoutResponseProcessor
from authentik.providers.saml.tasks import send_saml_logout_response
from authentik.providers.saml.utils.encoding import nice64
from authentik.providers.saml.views.flows import (
    PLAN_CONTEXT_SAML_LOGOUT_IFRAME_SESSIONS,
    PLAN_CONTEXT_SAML_LOGOUT_NATIVE_SESSIONS,
    PLAN_CONTEXT_SAML_LOGOUT_REQUEST,
    PLAN_CONTEXT_SAML_RELAY_STATE,
    REQUEST_KEY_RELAY_STATE,
@@ -41,6 +29,24 @@ from authentik.providers.saml.views.flows import (
LOGGER = get_logger()


def _get_redirect_url(request: HttpRequest, relay_state: str = "") -> str:
    """Get the safe redirect URL from the plan context, logging a warning if the
    incoming relay_state doesn't match the stored value."""
    stored_relay_state = ""
    if SESSION_KEY_PLAN in request.session:
        plan: FlowPlan = request.session[SESSION_KEY_PLAN]
        stored_relay_state = plan.context.get(PLAN_CONTEXT_SAML_RELAY_STATE, "")

    if relay_state and relay_state != stored_relay_state:
        LOGGER.warning(
            "SAML logout relay_state mismatch, possible open redirect attempt",
            received_relay_state=relay_state,
            stored_relay_state=stored_relay_state,
        )

    return stored_relay_state


class SPInitiatedSLOView(PolicyAccessView):
    """Handle SP-initiated SAML Single Logout requests"""

@@ -80,102 +86,7 @@ class SPInitiatedSLOView(PolicyAccessView):
                **self.plan_context,
            },
        )

        if self.provider.sls_url:
            # Get logout request and extract relay state
            logout_request = self.plan_context.get(PLAN_CONTEXT_SAML_LOGOUT_REQUEST)
            relay_state = logout_request.relay_state if logout_request else None

            # Store relay state for the logout response
            plan.context[PLAN_CONTEXT_SAML_RELAY_STATE] = relay_state

            if self.provider.logout_method == SAMLLogoutMethods.FRONTCHANNEL_NATIVE:
                # Native mode - user will be redirected/posted away from authentik
                processor = LogoutResponseProcessor(
                    self.provider,
                    logout_request,
                    destination=self.provider.sls_url,
                )

                if self.provider.sls_binding == SAMLBindings.POST:
                    logout_response = processor.encode_post()
                    logout_data = {
                        "post_url": self.provider.sls_url,
                        "saml_response": logout_response,
                        "saml_relay_state": relay_state,
                        "provider_name": self.provider.name,
                        "saml_binding": SAMLBindings.POST,
                    }
                else:
                    logout_url = processor.get_redirect_url()
                    logout_data = {
                        "redirect_url": logout_url,
                        "provider_name": self.provider.name,
                        "saml_binding": SAMLBindings.REDIRECT,
                    }

                plan.context[PLAN_CONTEXT_SAML_LOGOUT_NATIVE_SESSIONS] = [logout_data]
                plan.append_stage(in_memory_stage(NativeLogoutStageView))
            elif self.provider.logout_method == SAMLLogoutMethods.BACKCHANNEL:
                # Backchannel mode - server sends logout response directly to SP in background
                # No user interaction needed
                if self.provider.sls_binding != SAMLBindings.POST:
                    LOGGER.warning(
                        "Backchannel logout requires POST binding, but provider is configured "
                        "with %s binding",
                        self.provider.sls_binding,
                        provider=self.provider,
                    )

                # Queue the logout response to be sent in the background
                # This doesn't block the user's logout from completing
                send_saml_logout_response.send(
                    provider_pk=self.provider.pk,
                    sls_url=self.provider.sls_url,
                    logout_request_id=logout_request.id if logout_request else None,
                    relay_state=relay_state,
                )

                LOGGER.debug(
                    "Queued backchannel logout response",
                    provider=self.provider,
                    sls_url=self.provider.sls_url,
                )

                # Just end the session - no user interaction needed
                plan.append_stage(in_memory_stage(SessionEndStage))
            else:
                # Iframe mode (default for FRONTCHANNEL_IFRAME) - user stays on authentik
                processor = LogoutResponseProcessor(
                    self.provider,
                    logout_request,
                    destination=self.provider.sls_url,
                )

                logout_response = processor.build_response()

                if self.provider.sls_binding == SAMLBindings.POST:
                    logout_data = {
                        "url": self.provider.sls_url,
                        "saml_response": nice64(logout_response),
                        "saml_relay_state": relay_state,
                        "provider_name": self.provider.name,
                        "binding": SAMLBindings.POST,
                    }
                else:
                    logout_url = processor.get_redirect_url()
                    logout_data = {
                        "url": logout_url,
                        "provider_name": self.provider.name,
                        "binding": SAMLBindings.REDIRECT,
                    }

                plan.context[PLAN_CONTEXT_SAML_LOGOUT_IFRAME_SESSIONS] = [logout_data]
                plan.append_stage(in_memory_stage(IframeLogoutStageView))
                plan.append_stage(in_memory_stage(SessionEndStage))
        else:
            # No SLS URL configured, just end session
            plan.append_stage(in_memory_stage(SessionEndStage))
        plan.append_stage(in_memory_stage(SessionEndStage))

        # Remove samlsession from database
        auth_session = AuthenticatedSession.from_request(self.request, self.request.user)
@@ -203,17 +114,9 @@ class SPInitiatedSLOBindingRedirectView(SPInitiatedSLOView):
        # IDP SLO, so we want to redirect to our next provider
        if REQUEST_KEY_SAML_RESPONSE in request.GET:
            relay_state = request.GET.get(REQUEST_KEY_RELAY_STATE, "")
            if relay_state:
                return redirect(relay_state)

            # No RelayState provided, try to get return URL from plan context
            if SESSION_KEY_PLAN in request.session:
                plan: FlowPlan = request.session[SESSION_KEY_PLAN]
                relay_state = plan.context.get(PLAN_CONTEXT_SAML_RELAY_STATE)
                if relay_state:
                    return redirect(relay_state)

            # No relay state and no plan context - redirect to root
            redirect_url = _get_redirect_url(request, relay_state)
            if redirect_url:
                return redirect(redirect_url)
            return redirect("authentik_core:root-redirect")

        # For SAML logout requests, use the parent dispatch with auth checks
@@ -254,17 +157,9 @@ class SPInitiatedSLOBindingPOSTView(SPInitiatedSLOView):
        # IDP SLO, so we want to redirect to our next provider
        if REQUEST_KEY_SAML_RESPONSE in request.POST:
            relay_state = request.POST.get(REQUEST_KEY_RELAY_STATE, "")
            if relay_state:
                return redirect(relay_state)

            # No RelayState provided, try to get return URL from plan context
            if SESSION_KEY_PLAN in request.session:
                plan: FlowPlan = request.session[SESSION_KEY_PLAN]
                relay_state = plan.context.get(PLAN_CONTEXT_SAML_RELAY_STATE)
                if relay_state:
                    return redirect(relay_state)

            # No relay state and no plan context - redirect to root
            redirect_url = _get_redirect_url(request, relay_state)
            if redirect_url:
                return redirect(redirect_url)
            return redirect("authentik_core:root-redirect")

        # For SAML logout requests, use the parent dispatch with auth checks

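Both dispatch methods now share the same fallback, condensed here as a sketch (names as in the hunks above): the attacker-controllable RelayState parameter is only used to detect mismatches, never followed.

relay_state = request.GET.get(REQUEST_KEY_RELAY_STATE, "")  # request.POST for the POST view
redirect_url = _get_redirect_url(request, relay_state)  # "" unless the flow plan stored a value
if redirect_url:
    return redirect(redirect_url)  # only ever a plan-context URL
return redirect("authentik_core:root-redirect")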
@@ -65,7 +65,7 @@ class EnterpriseUser(BaseModel):
    employeeNumber: str | None = Field(
        None,
        description="Numeric or alphanumeric identifier assigned to a person, "
        "typically based on order of hire or association with an organization.",
"typically based on order of hire or association with anorganization.",
|
||||
    )
    costCenter: str | None = Field(None, description="Identifies the name of a cost center.")
    organization: str | None = Field(None, description="Identifies the name of an organization.")
@@ -73,7 +73,7 @@ class EnterpriseUser(BaseModel):
    department: str | None = Field(
        None,
        description="Numeric or alphanumeric identifier assigned to a person,"
        " typically based on order of hire or association with an organization.",
" typically based on order of hire or association with anorganization.",
|
||||
    )
    manager: Manager | None = Field(
        None,

@@ -10,6 +10,7 @@ from authentik.blueprints.tests import apply_blueprint
from authentik.core.models import Application, Group, User
from authentik.lib.generators import generate_id
from authentik.providers.scim.models import SCIMMapping, SCIMProvider, SCIMProviderGroup
from authentik.providers.scim.tasks import scim_sync


class SCIMGroupTests(TestCase):
@@ -205,3 +206,80 @@ class SCIMGroupTests(TestCase):
        self.assertEqual(mock.request_history[1].method, "POST")
        self.assertEqual(mock.request_history[2].method, "GET")
        self.assertNotIn("PUT", [req.method for req in mock.request_history])

    def _create_stale_provider_group(self, scim_id: str) -> Group:
        """Create a group that is outside the provider's scope (via group_filters) with an
        existing SCIMProviderGroup, simulating a previously synced group now out of scope."""
        self.app.backchannel_providers.remove(self.provider)
        anchor = Group.objects.create(name=generate_id())
        stale = Group.objects.create(name=generate_id())
        self.app.backchannel_providers.add(self.provider)

        self.provider.group_filters.set([anchor])
        SCIMProviderGroup.objects.create(provider=self.provider, group=stale, scim_id=scim_id)
        return stale

    @Mocker()
    def test_sync_cleanup_stale_group_delete(self, mock: Mocker):
        """Stale (out-of-scope) groups are deleted during full sync cleanup"""
        scim_id = generate_id()
        mock.get("https://localhost/ServiceProviderConfig", json={})

        mock.post("https://localhost/Groups", json={"id": generate_id()})
        mock.delete(f"https://localhost/Groups/{scim_id}", status_code=204)
        self._create_stale_provider_group(scim_id)

        scim_sync.send(self.provider.pk).get_result()

        delete_reqs = [r for r in mock.request_history if r.method == "DELETE"]
        self.assertEqual(len(delete_reqs), 1)
        self.assertEqual(delete_reqs[0].url, f"https://localhost/Groups/{scim_id}")
        self.assertFalse(
            SCIMProviderGroup.objects.filter(provider=self.provider, scim_id=scim_id).exists()
        )

    @Mocker()
    def test_sync_cleanup_stale_group_not_found(self, mock: Mocker):
        """Stale group cleanup handles 404 from the remote gracefully"""
        scim_id = generate_id()
        mock.get("https://localhost/ServiceProviderConfig", json={})
        mock.post("https://localhost/Groups", json={"id": generate_id()})
        mock.delete(f"https://localhost/Groups/{scim_id}", status_code=404)
        self._create_stale_provider_group(scim_id)

        scim_sync.send(self.provider.pk).get_result()

        delete_reqs = [r for r in mock.request_history if r.method == "DELETE"]
        self.assertEqual(len(delete_reqs), 1)

        self.assertFalse(
            SCIMProviderGroup.objects.filter(provider=self.provider, scim_id=scim_id).exists()
        )

    @Mocker()
    def test_sync_cleanup_stale_group_transient_error(self, mock: Mocker):
        """Stale group cleanup logs and retries on transient HTTP errors"""
        scim_id = generate_id()
        mock.get("https://localhost/ServiceProviderConfig", json={})
        mock.post("https://localhost/Groups", json={"id": generate_id()})
        mock.delete(f"https://localhost/Groups/{scim_id}", status_code=429)
        self._create_stale_provider_group(scim_id)

        scim_sync.send(self.provider.pk)

        delete_reqs = [r for r in mock.request_history if r.method == "DELETE"]
        self.assertEqual(len(delete_reqs), 1)

    @Mocker()
    def test_sync_cleanup_stale_group_dry_run(self, mock: Mocker):
        """Stale group cleanup skips HTTP DELETE in dry_run mode"""
        self.provider.dry_run = True
        self.provider.save()
        scim_id = generate_id()
        mock.get("https://localhost/ServiceProviderConfig", json={})
        self._create_stale_provider_group(scim_id)

        scim_sync.send(self.provider.pk)

        delete_reqs = [r for r in mock.request_history if r.method == "DELETE"]
        self.assertEqual(len(delete_reqs), 0)

@@ -1,17 +1,19 @@
|
||||
"""SCIM User tests"""
|
||||
|
||||
from json import loads
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.test import TestCase
|
||||
from jsonschema import validate
|
||||
from requests_mock import Mocker
|
||||
|
||||
from authentik.blueprints.tests import apply_blueprint
|
||||
from authentik.core.models import Application, Group, User
|
||||
from authentik.core.models import Application, Group, User, UserTypes
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.sync.outgoing.base import SAFE_METHODS
|
||||
from authentik.lib.sync.outgoing.exceptions import TransientSyncException
|
||||
from authentik.providers.scim.models import SCIMMapping, SCIMProvider, SCIMProviderUser
|
||||
from authentik.providers.scim.tasks import scim_sync, scim_sync_objects
|
||||
from authentik.providers.scim.tasks import scim_sync, scim_sync_objects, sync_tasks
|
||||
from authentik.tasks.models import Task
|
||||
from authentik.tenants.models import Tenant
|
||||
|
||||
@@ -537,3 +539,104 @@ class SCIMUserTests(TestCase):
|
||||
self.assertEqual(mock.call_count, 2)
|
||||
self.assertEqual(mock.request_history[0].method, "GET")
|
||||
self.assertEqual(mock.request_history[1].method, "POST")
|
||||
|
||||
def _create_stale_provider_user(self, scim_id: str, uid: str) -> User:
|
||||
"""Create a service-account user (excluded from provider scope) with an existing
|
||||
SCIMProviderUser, simulating a previously synced user that is now out of scope."""
|
||||
user = User.objects.create(
|
||||
username=uid,
|
||||
name=f"{uid} {uid}",
|
||||
email=f"{uid}@goauthentik.io",
|
||||
type=UserTypes.SERVICE_ACCOUNT,
|
||||
)
|
||||
SCIMProviderUser.objects.create(provider=self.provider, user=user, scim_id=scim_id)
|
||||
return user
|
||||
|
    @Mocker()
    def test_sync_cleanup_stale_user_delete(self, mock: Mocker):
        """Stale (out-of-scope) users are deleted during full sync cleanup"""
        scim_id = generate_id()
        uid = generate_id()
        mock.get("https://localhost/ServiceProviderConfig", json={})
        mock.delete(f"https://localhost/Users/{scim_id}", status_code=204)
        self._create_stale_provider_user(scim_id, uid)

        scim_sync.send(self.provider.pk).get_result()

        delete_reqs = [r for r in mock.request_history if r.method == "DELETE"]
        self.assertEqual(len(delete_reqs), 1)
        self.assertEqual(delete_reqs[0].url, f"https://localhost/Users/{scim_id}")
        self.assertFalse(
            SCIMProviderUser.objects.filter(provider=self.provider, scim_id=scim_id).exists()
        )

    @Mocker()
    def test_sync_cleanup_stale_user_not_found(self, mock: Mocker):
        """Stale user cleanup handles 404 from the remote gracefully"""
        scim_id = generate_id()
        uid = generate_id()
        mock.get("https://localhost/ServiceProviderConfig", json={})
        mock.delete(f"https://localhost/Users/{scim_id}", status_code=404)
        self._create_stale_provider_user(scim_id, uid)

        scim_sync.send(self.provider.pk).get_result()

        delete_reqs = [r for r in mock.request_history if r.method == "DELETE"]
        self.assertEqual(len(delete_reqs), 1)

        self.assertFalse(
            SCIMProviderUser.objects.filter(provider=self.provider, scim_id=scim_id).exists()
        )

    @Mocker()
    def test_sync_cleanup_stale_user_transient_error(self, mock: Mocker):
        """Stale user cleanup logs and retries on transient HTTP errors"""
        scim_id = generate_id()
        uid = generate_id()
        mock.get("https://localhost/ServiceProviderConfig", json={})
        mock.delete(f"https://localhost/Users/{scim_id}", status_code=429)
        self._create_stale_provider_user(scim_id, uid)

        scim_sync.send(self.provider.pk)

        delete_reqs = [r for r in mock.request_history if r.method == "DELETE"]
        self.assertEqual(len(delete_reqs), 1)

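Taken together, the 204, 404, and 429 cases suggest a status-code triage in the remote delete path. A hedged sketch of such logic; TransientError and delete_remote are illustrative stand-ins, not the actual SCIM client code:

import requests

class TransientError(Exception):
    """Stand-in for authentik's TransientSyncException (illustrative)."""

def delete_remote(session: requests.Session, url: str) -> None:
    """Delete a remote object, tolerating already-deleted and retryable states."""
    resp = session.delete(url)
    if resp.status_code == 404:
        return  # already gone remotely; the local link can still be dropped
    if resp.status_code == 429 or resp.status_code >= 500:
        raise TransientError(f"retryable response: {resp.status_code}")
    resp.raise_for_status()  # any other non-2xx is a hard failure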
    @Mocker()
    def test_sync_cleanup_stale_user_dry_run(self, mock: Mocker):
        """Stale user cleanup skips HTTP DELETE in dry_run mode"""
        self.provider.dry_run = True
        self.provider.save()
        scim_id = generate_id()
        uid = generate_id()
        mock.get("https://localhost/ServiceProviderConfig", json={})
        self._create_stale_provider_user(scim_id, uid)

        scim_sync.send(self.provider.pk)

        delete_reqs = [r for r in mock.request_history if r.method == "DELETE"]
        self.assertEqual(len(delete_reqs), 0)

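The SAFE_METHODS import in this file hints at how dry_run gating could be implemented: mutating HTTP verbs are suppressed while read-only ones pass through. A minimal sketch under that assumption; DryRunClient is hypothetical, not authentik's actual SCIM client:

SAFE_METHODS = ["GET", "HEAD", "OPTIONS", "TRACE"]  # assumed definition

class DryRunClient:
    """Illustrative wrapper showing the dry_run gate."""

    def __init__(self, session, dry_run: bool):
        self.session = session
        self.dry_run = dry_run

    def request(self, method: str, url: str, **kwargs):
        # In dry_run mode, suppress every mutating verb; reads still go out.
        if self.dry_run and method.upper() not in SAFE_METHODS:
            return None
        return self.session.request(method, url, **kwargs)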
    def test_sync_cleanup_client_for_model_transient(self):
        """Cleanup silently skips an object type when client_for_model raises
        TransientSyncException"""
        with Mocker() as mock:
            mock.get("https://localhost/ServiceProviderConfig", json={})
            with patch.object(
                SCIMProvider,
                "client_for_model",
                side_effect=TransientSyncException("connection failed"),
            ):
                scim_sync.send(self.provider.pk).get_result()

    def test_sync_transient_exception(self):
        """TransientSyncException in _sync_cleanup is caught by sync() which then
        schedules a retry"""
        with Mocker() as mock:
            mock.get("https://localhost/ServiceProviderConfig", json={})
            with patch.object(
                sync_tasks,
                "_sync_cleanup",
                side_effect=TransientSyncException("connection failed"),
            ):
                scim_sync.send(self.provider.pk)

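Both tests fault-inject via unittest.mock.patch.object, which temporarily replaces an attribute with a mock whose side_effect raises on every call and restores the original on exit. A self-contained illustration with toy classes:

from unittest.mock import patch

class Client:
    def fetch(self) -> str:
        return "real result"

class Transient(Exception):
    pass

with patch.object(Client, "fetch", side_effect=Transient("connection failed")):
    try:
        Client().fetch()  # the patched attribute raises instead of returning
    except Transient as exc:
        print(f"caught: {exc}")

# Outside the context manager the original method is restored.
assert Client().fetch() == "real result"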
@@ -60,11 +60,7 @@ class LDAPSourceSerializer(SourceSerializer):
        sources = sources.exclude(pk=self.instance.pk)
        if sources.exists():
            raise ValidationError(
-               {
-                   "sync_users_password": _(
-                       "Only a single LDAP Source with password synchronization is allowed"
-                   )
-               }
+               _("Only a single LDAP Source with password synchronization is allowed")
            )
        return sync_users_password

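Because this code runs inside the field-level validator validate_sync_users_password, DRF already attributes a plain-message ValidationError to that field, so the explicit dict keyed by the field name was redundant. A standalone illustration with a toy serializer (assumes Django and DRF are installed; the uniqueness check is faked):

import django
from django.conf import settings

settings.configure()  # minimal Django config so DRF can run standalone
django.setup()

from rest_framework import serializers

class ExampleSerializer(serializers.Serializer):
    """Toy serializer, not authentik code."""

    sync_users_password = serializers.BooleanField()

    def validate_sync_users_password(self, value):
        if value:  # stand-in for the real "another source already syncs" check
            # A plain message raised here is attributed to this field automatically.
            raise serializers.ValidationError("only one source may sync passwords")
        return value

ser = ExampleSerializer(data={"sync_users_password": True})
ser.is_valid()
print(ser.errors)  # {'sync_users_password': ['only one source may sync passwords']}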
@@ -221,7 +217,7 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
        for sync_class in SYNC_CLASSES:
            class_name = sync_class.name()
            all_objects.setdefault(class_name, [])
-           for page in sync_class(source).get_objects(size_limit=10):
+           for page in sync_class(source, Task()).get_objects(size_limit=10):
                for obj in page:
                    obj: dict
                    obj.pop("raw_attributes", None)

@@ -14,6 +14,7 @@ from django.utils.translation import gettext_lazy as _
from ldap3 import ALL, NONE, RANDOM, Connection, Server, ServerPool, Tls
from ldap3.core.exceptions import LDAPException, LDAPInsufficientAccessRightsResult, LDAPSchemaError
from rest_framework.serializers import Serializer
from structlog.stdlib import get_logger

from authentik.core.models import (
    Group,

@@ -31,6 +32,7 @@ from authentik.tasks.schedules.common import ScheduleSpec
LDAP_TIMEOUT = 15
LDAP_UNIQUENESS = "ldap_uniq"
LDAP_DISTINGUISHED_NAME = "distinguishedName"
LOGGER = get_logger()


def flatten(value: Any) -> Any:

@@ -268,6 +270,7 @@ class LDAPSource(IncomingSyncSource):
        )

        if self.start_tls:
            LOGGER.debug("Connection StartTLS", source=self)
            conn.start_tls(read_server_info=False)
        try:
            successful = conn.bind()

@@ -278,7 +281,9 @@ class LDAPSource(IncomingSyncSource):
            # See https://github.com/goauthentik/authentik/issues/4590
            # See also https://github.com/goauthentik/authentik/issues/3399
            if server_kwargs.get("get_info", ALL) == NONE:
                LOGGER.warning("Failed to connect after schema downgrade", source=self, exc=exc)
                raise exc
            LOGGER.warning("Downgrading connection to no schema info", source=self, exc=exc)
            server_kwargs["get_info"] = NONE
            return self.connection(server, server_kwargs, connection_kwargs)
        finally:

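The final hunk shows a downgrade-and-retry pattern: on a schema error, reconnect with get_info=NONE, and re-raise if even the downgraded connection fails, so the recursion terminates after one retry. A generic sketch of the same control flow, with toy stand-ins for ldap3's constants and exceptions:

ALL, NONE = "ALL", "NONE"  # stand-ins for ldap3's get_info constants

class SchemaError(Exception):
    """Stand-in for ldap3's LDAPSchemaError (illustrative)."""

def connect(server_kwargs: dict):
    def attempt(kwargs):
        ...  # the real server setup, start_tls, and bind would happen here

    try:
        return attempt(server_kwargs)
    except SchemaError as exc:
        if server_kwargs.get("get_info", ALL) == NONE:
            raise exc  # already downgraded once; don't loop forever
        server_kwargs["get_info"] = NONE  # retry without fetching schema info
        return connect(server_kwargs)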
Some files were not shown because too many files have changed in this diff.