Compare commits

..

98 Commits

Author SHA1 Message Date
Jens Langhammer
4ca8f032f4 add endpoint to start sync
Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-04-05 22:12:30 +02:00
Jens Langhammer
97acd6288a split api
Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-04-05 21:47:56 +02:00
Jens L.
debd09135a sources/ldap: Better Active Directory tests (#21281)
* sources/ldap: Better Active Directory tests

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* sigh pytest

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

---------

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-04-05 21:41:17 +02:00
Jens L.
dc320df3a3 providers/rac: add e2e tests (#21390)
* add test_runner option to not capture stdout

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix exception for container failing to start not being raised

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* maybe use channels server for testing?

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* simplify and patch enterprise

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* simplify waiting for outpost

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* add rac SSH tests

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix rac missing in CI

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* retry on container failure

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* bump healthcheck tries

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* patch email port always

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fixup?

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix guardian cache

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* only build webui when using selenium

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* only use channels when needed

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix coverage and combine

based on https://github.com/django/channels/issues/2063#issuecomment-2067722400

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* dont even cache

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* test with delete_token_on_disconnect

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

---------

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-04-05 19:07:31 +02:00
authentik-automation[bot]
c93e0115d0 core, web: update translations (#21387)
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
2026-04-05 16:58:33 +02:00
Jens L.
adbc8ca335 root: fix scripts compose & gen-diff (#21389)
Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-04-05 16:58:19 +02:00
Jens L.
ea2bdde5a3 enterprise/providers/ssf: test conformance (#21383)
* bump conformance server

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* add support for rfc push

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* make format and aud optional

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix some endpoints

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* force 401

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* implement get and patch for streams

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* cleanup

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* enable async stream deletion

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* allow configuring remote certificate validation

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* add verification endpoint

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* add support for authorization_header

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* set default aud cause spec cant agree with itself

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* bump timeout

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix header `typ`

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* enabled -> status

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* re-migrate

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* gen

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* more tests

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* more tests and a fix

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* make streams deletable

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* and more logs and fix a silly bug

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* add stream status endpoint

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* move ssf out of preview

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* unrelated typing fix

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* format

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* sigh

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* more tests

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

---------

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-04-05 16:35:39 +02:00
Jens L.
f38584b343 root: misc API client and web typing fixes (#21388)
* fix relObjId type

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix slot comments

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* sigh

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* use prettier on generated ts code

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

---------

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-04-05 13:46:08 +02:00
Jens L.
d5ee53feb2 providers/ldap: inherit adjustable page size for LDAP searchers (#21377)
Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-04-04 22:42:35 +02:00
Jens L.
827a77dd52 web/admin: more and more polish (#21303)
* fix user edit button

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix impersonate button not aligned

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* cleanup oauth2 provider page

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* better desc for outpost health

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix static table not updating when items change

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix lint

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* include oidc providers in ssf provider retrieve

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* consistent oauth provider label

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* rework ssf view page

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* make client-rust makefile on macos

specifically when gnu sed is installed in the path

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

---------

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-04-04 22:35:11 +02:00
Lars
418fa620fe website/integrations: immich: set correct issuer url (#21379) 2026-04-03 21:51:11 +00:00
dependabot[bot]
1c8a082760 core: bump library/node from 25.8.2-trixie to 25.9.0-trixie in /website (#21372)
Bumps library/node from 25.8.2-trixie to 25.9.0-trixie.

---
updated-dependencies:
- dependency-name: library/node
  dependency-version: 25.9.0-trixie
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-03 15:42:03 +02:00
dependabot[bot]
7ebaf1d2c3 ci: bump taiki-e/install-action from 2.71.1 to 2.71.2 in /.github/actions/setup (#21370)
ci: bump taiki-e/install-action in /.github/actions/setup

Bumps [taiki-e/install-action](https://github.com/taiki-e/install-action) from 2.71.1 to 2.71.2.
- [Release notes](https://github.com/taiki-e/install-action/releases)
- [Changelog](https://github.com/taiki-e/install-action/blob/main/CHANGELOG.md)
- [Commits](0cccd59f03...d858f81139)

---
updated-dependencies:
- dependency-name: taiki-e/install-action
  dependency-version: 2.71.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-03 15:41:32 +02:00
dependabot[bot]
460abb2ab5 core: bump github.com/go-jose/go-jose/v4 from 4.1.3 to 4.1.4 (#21364)
Bumps [github.com/go-jose/go-jose/v4](https://github.com/go-jose/go-jose) from 4.1.3 to 4.1.4.
- [Release notes](https://github.com/go-jose/go-jose/releases)
- [Commits](https://github.com/go-jose/go-jose/compare/v4.1.3...v4.1.4)

---
updated-dependencies:
- dependency-name: github.com/go-jose/go-jose/v4
  dependency-version: 4.1.4
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-03 14:35:17 +01:00
dependabot[bot]
d40c1513ed core: bump mypy from 1.19.1 to 1.20.0 (#21365)
Bumps [mypy](https://github.com/python/mypy) from 1.19.1 to 1.20.0.
- [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md)
- [Commits](https://github.com/python/mypy/compare/v1.19.1...v1.20.0)

---
updated-dependencies:
- dependency-name: mypy
  dependency-version: 1.20.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-03 14:35:14 +01:00
dependabot[bot]
e1948de78e core: bump ruff from 0.15.8 to 0.15.9 (#21366)
Bumps [ruff](https://github.com/astral-sh/ruff) from 0.15.8 to 0.15.9.
- [Release notes](https://github.com/astral-sh/ruff/releases)
- [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md)
- [Commits](https://github.com/astral-sh/ruff/compare/0.15.8...0.15.9)

---
updated-dependencies:
- dependency-name: ruff
  dependency-version: 0.15.9
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-03 14:35:11 +01:00
dependabot[bot]
2433e92fb9 core: bump types-docker from 7.1.0.20260402 to 7.1.0.20260403 (#21367)
Bumps [types-docker](https://github.com/python/typeshed) from 7.1.0.20260402 to 7.1.0.20260403.
- [Commits](https://github.com/python/typeshed/commits)

---
updated-dependencies:
- dependency-name: types-docker
  dependency-version: 7.1.0.20260403
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-03 14:35:07 +01:00
dependabot[bot]
3a7842b4eb core: bump aws-cdk-lib from 2.246.0 to 2.247.0 (#21368)
Bumps [aws-cdk-lib](https://github.com/aws/aws-cdk) from 2.246.0 to 2.247.0.
- [Release notes](https://github.com/aws/aws-cdk/releases)
- [Changelog](https://github.com/aws/aws-cdk/blob/main/CHANGELOG.v2.alpha.md)
- [Commits](https://github.com/aws/aws-cdk/compare/v2.246.0...v2.247.0)

---
updated-dependencies:
- dependency-name: aws-cdk-lib
  dependency-version: 2.247.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-03 14:35:03 +01:00
dependabot[bot]
ea4f9b7832 ci: bump docker/login-action from 4.0.0 to 4.1.0 (#21369)
Bumps [docker/login-action](https://github.com/docker/login-action) from 4.0.0 to 4.1.0.
- [Release notes](https://github.com/docker/login-action/releases)
- [Commits](b45d80f862...4907a6ddec)

---
updated-dependencies:
- dependency-name: docker/login-action
  dependency-version: 4.1.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-03 14:35:00 +01:00
dependabot[bot]
31d6e08c0f web: bump the storybook group across 1 directory with 5 updates (#21371)
Bumps the storybook group with 4 updates in the /web directory: [@storybook/addon-docs](https://github.com/storybookjs/storybook/tree/HEAD/code/addons/docs), [@storybook/addon-links](https://github.com/storybookjs/storybook/tree/HEAD/code/addons/links), [@storybook/web-components](https://github.com/storybookjs/storybook/tree/HEAD/code/renderers/web-components) and [@storybook/web-components-vite](https://github.com/storybookjs/storybook/tree/HEAD/code/frameworks/web-components-vite).


Updates `@storybook/addon-docs` from 10.3.3 to 10.3.4
- [Release notes](https://github.com/storybookjs/storybook/releases)
- [Changelog](https://github.com/storybookjs/storybook/blob/next/CHANGELOG.md)
- [Commits](https://github.com/storybookjs/storybook/commits/v10.3.4/code/addons/docs)

Updates `@storybook/addon-links` from 10.3.3 to 10.3.4
- [Release notes](https://github.com/storybookjs/storybook/releases)
- [Changelog](https://github.com/storybookjs/storybook/blob/next/CHANGELOG.md)
- [Commits](https://github.com/storybookjs/storybook/commits/v10.3.4/code/addons/links)

Updates `@storybook/web-components` from 10.3.3 to 10.3.4
- [Release notes](https://github.com/storybookjs/storybook/releases)
- [Changelog](https://github.com/storybookjs/storybook/blob/next/CHANGELOG.md)
- [Commits](https://github.com/storybookjs/storybook/commits/v10.3.4/code/renderers/web-components)

Updates `@storybook/web-components-vite` from 10.3.3 to 10.3.4
- [Release notes](https://github.com/storybookjs/storybook/releases)
- [Changelog](https://github.com/storybookjs/storybook/blob/next/CHANGELOG.md)
- [Commits](https://github.com/storybookjs/storybook/commits/v10.3.4/code/frameworks/web-components-vite)

Updates `storybook` from 10.3.3 to 10.3.4
- [Release notes](https://github.com/storybookjs/storybook/releases)
- [Changelog](https://github.com/storybookjs/storybook/blob/next/CHANGELOG.md)
- [Commits](https://github.com/storybookjs/storybook/commits/v10.3.4/code/core)

---
updated-dependencies:
- dependency-name: "@storybook/addon-docs"
  dependency-version: 10.3.4
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: storybook
- dependency-name: "@storybook/addon-links"
  dependency-version: 10.3.4
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: storybook
- dependency-name: "@storybook/web-components"
  dependency-version: 10.3.4
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: storybook
- dependency-name: "@storybook/web-components-vite"
  dependency-version: 10.3.4
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: storybook
- dependency-name: storybook
  dependency-version: 10.3.4
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: storybook
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-03 14:34:55 +01:00
Dominic R
b96c477b6a website/docs: Clean up PostgreSQL documentation (#21131)
* Clean up PostgreSQL documentation

* Overview

* SSL wording

* Conn age

* Schema text

* Replica line

* Direct tip

* Backup text

* Restore text

* Access text

* Copy text

* Issue text

* Sentence case

* Section intro

* Primary reads

* Username text

* Password text

* TLS modes

* Health checks

* Replica case

* Replica intro

* Backup guides

* Docker intro

* Stop stack

* Stop wording

* Backup alt

* Dump wording

* Remove alt

* Network note

* Verify login

* Dump safety

* Log names
2026-04-02 13:37:38 -04:00
Marc 'risson' Schmitt
111f0c072f root: fix compose generation for patch releases release candidates (#21353)
* root: fix compose generation for patch releases release candidates

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>

* add comment

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>

---------

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-04-02 18:51:47 +02:00
dependabot[bot]
84581a0dbd web: bump @swc/cli from 0.8.0 to 0.8.1 in /web in the swc group across 1 directory (#21300)
web: bump @swc/cli in /web in the swc group across 1 directory

Bumps the swc group with 1 update in the /web directory: [@swc/cli](https://github.com/swc-project/pkgs).


Updates `@swc/cli` from 0.8.0 to 0.8.1
- [Commits](https://github.com/swc-project/pkgs/commits)

---
updated-dependencies:
- dependency-name: "@swc/cli"
  dependency-version: 0.8.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: swc
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-02 17:01:33 +02:00
Marc 'risson' Schmitt
1ceb46ca15 providers/proxy: fix oidc client not using socket in embedded outpost (#21280)
* providers/proxy: fix oidc client not using socket in embedded outpost

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>

* cleanup and switch

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

---------

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
Signed-off-by: Jens Langhammer <jens@goauthentik.io>
Co-authored-by: Jens Langhammer <jens@goauthentik.io>
2026-04-02 16:50:12 +02:00
Dominic R
78f98641be packages/client-rust: fix portable sed usage (#21337)
* packages/client-rust: fix portable sed usage

* cleanup

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

---------

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
Co-authored-by: Jens Langhammer <jens@goauthentik.io>
2026-04-02 14:30:51 +00:00
Marc 'risson' Schmitt
62ccf88512 packages/ak-common/tokio/proxy_procotol: init (#21311) 2026-04-02 13:40:38 +00:00
Marc 'risson' Schmitt
3355669274 packages/ak-common/config: init (#21256) 2026-04-02 15:05:35 +02:00
dependabot[bot]
ba82c97409 core: bump beryju.io/ldap from 0.1.0 to 0.2.1 (#21235)
* core: bump beryju.io/ldap from 0.1.0 to 0.2.1

Bumps [beryju.io/ldap](https://github.com/beryju/ldap) from 0.1.0 to 0.2.1.
- [Commits](https://github.com/beryju/ldap/compare/v0.1.0...v0.2.1)

---
updated-dependencies:
- dependency-name: beryju.io/ldap
  dependency-version: 0.2.1
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* update code

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

---------

Signed-off-by: dependabot[bot] <support@github.com>
Signed-off-by: Jens Langhammer <jens@goauthentik.io>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jens Langhammer <jens@goauthentik.io>
2026-04-02 14:37:00 +02:00
dependabot[bot]
478d76206f web: bump @sentry/browser from 10.46.0 to 10.47.0 in /web in the sentry group across 1 directory (#21297)
web: bump @sentry/browser in /web in the sentry group across 1 directory

Bumps the sentry group with 1 update in the /web directory: [@sentry/browser](https://github.com/getsentry/sentry-javascript).


Updates `@sentry/browser` from 10.46.0 to 10.47.0
- [Release notes](https://github.com/getsentry/sentry-javascript/releases)
- [Changelog](https://github.com/getsentry/sentry-javascript/blob/develop/CHANGELOG.md)
- [Commits](https://github.com/getsentry/sentry-javascript/compare/10.46.0...10.47.0)

---
updated-dependencies:
- dependency-name: "@sentry/browser"
  dependency-version: 10.47.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: sentry
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-02 14:28:10 +02:00
Marc 'risson' Schmitt
d3fca338b3 packages/ak-common/arbiter: init (#21253)
* packages/ak-arbiter: init

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>

* fixup

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>

* add tests

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>

* lint

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>

* sort out package versions

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>

* rename to ak-lib

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>

* fixup

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>

* packages/ak-lib: init

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>

* fixup

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>

* root: fix rustfmt config

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>

* packages/ak-common: rename from ak-lib

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>

---------

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-04-02 14:06:28 +02:00
Dominic R
b3036776ed website/docs: fix full dev setup ordering (#21332) 2026-04-02 07:11:47 -04:00
dependabot[bot]
fbd507e5fc core: bump types-docker from 7.1.0.20260328 to 7.1.0.20260402 (#21342)
Bumps [types-docker](https://github.com/python/typeshed) from 7.1.0.20260328 to 7.1.0.20260402.
- [Commits](https://github.com/python/typeshed/commits)

---
updated-dependencies:
- dependency-name: types-docker
  dependency-version: 7.1.0.20260402
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-02 11:05:49 +00:00
Marc 'risson' Schmitt
df6d580150 packages/ak-common: rename from ak-lib (#21314)
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-04-02 11:00:01 +00:00
Marc 'risson' Schmitt
a8db5f1bfa root: fix rustfmt config (#21312) 2026-04-02 12:37:08 +02:00
dependabot[bot]
5a5ca9aa02 core: bump types-ldap3 from 2.9.13.20260319 to 2.9.13.20260402 (#21343)
Bumps [types-ldap3](https://github.com/python/typeshed) from 2.9.13.20260319 to 2.9.13.20260402.
- [Commits](https://github.com/python/typeshed/commits)

---
updated-dependencies:
- dependency-name: types-ldap3
  dependency-version: 2.9.13.20260402
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-02 10:34:06 +00:00
dependabot[bot]
79654d9864 web: bump the bundler group across 1 directory with 4 updates (#21345)
Bumps the bundler group with 1 update in the /web directory: [esbuild](https://github.com/evanw/esbuild).


Updates `esbuild` from 0.27.4 to 0.27.5
- [Release notes](https://github.com/evanw/esbuild/releases)
- [Changelog](https://github.com/evanw/esbuild/blob/main/CHANGELOG.md)
- [Commits](https://github.com/evanw/esbuild/compare/v0.27.4...v0.27.5)

Updates `@esbuild/darwin-arm64` from 0.27.4 to 0.27.5
- [Release notes](https://github.com/evanw/esbuild/releases)
- [Changelog](https://github.com/evanw/esbuild/blob/main/CHANGELOG.md)
- [Commits](https://github.com/evanw/esbuild/compare/v0.27.4...v0.27.5)

Updates `@esbuild/linux-arm64` from 0.27.4 to 0.27.5
- [Release notes](https://github.com/evanw/esbuild/releases)
- [Changelog](https://github.com/evanw/esbuild/blob/main/CHANGELOG.md)
- [Commits](https://github.com/evanw/esbuild/compare/v0.27.4...v0.27.5)

Updates `@esbuild/linux-x64` from 0.27.4 to 0.27.5
- [Release notes](https://github.com/evanw/esbuild/releases)
- [Changelog](https://github.com/evanw/esbuild/blob/main/CHANGELOG.md)
- [Commits](https://github.com/evanw/esbuild/compare/v0.27.4...v0.27.5)

---
updated-dependencies:
- dependency-name: esbuild
  dependency-version: 0.27.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: bundler
- dependency-name: "@esbuild/darwin-arm64"
  dependency-version: 0.27.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: bundler
- dependency-name: "@esbuild/linux-arm64"
  dependency-version: 0.27.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: bundler
- dependency-name: "@esbuild/linux-x64"
  dependency-version: 0.27.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: bundler
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-02 12:00:42 +02:00
dependabot[bot]
e7bc1a88ef core: bump aiohttp from 3.13.3 to 3.13.4 (#21333)
---
updated-dependencies:
- dependency-name: aiohttp
  dependency-version: 3.13.4
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-02 10:57:34 +01:00
authentik-automation[bot]
2f65ff003e core, web: update translations (#21335)
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
2026-04-02 10:57:30 +01:00
dependabot[bot]
c06083ab87 lifecycle/aws: bump aws-cdk from 2.1115.1 to 2.1116.0 in /lifecycle/aws (#21338)
Bumps [aws-cdk](https://github.com/aws/aws-cdk-cli/tree/HEAD/packages/aws-cdk) from 2.1115.1 to 2.1116.0.
- [Release notes](https://github.com/aws/aws-cdk-cli/releases)
- [Commits](https://github.com/aws/aws-cdk-cli/commits/aws-cdk@v2.1116.0/packages/aws-cdk)

---
updated-dependencies:
- dependency-name: aws-cdk
  dependency-version: 2.1116.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-02 10:57:27 +01:00
dependabot[bot]
07753ce8bb core: bump types-requests from 2.33.0.20260327 to 2.33.0.20260402 (#21339)
Bumps [types-requests](https://github.com/python/typeshed) from 2.33.0.20260327 to 2.33.0.20260402.
- [Commits](https://github.com/python/typeshed/commits)

---
updated-dependencies:
- dependency-name: types-requests
  dependency-version: 2.33.0.20260402
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-02 10:57:23 +01:00
dependabot[bot]
aefd583b0a core: bump django-stubs[compatible-mypy] from 6.0.1 to 6.0.2 (#21340)
Bumps [django-stubs[compatible-mypy]](https://github.com/typeddjango/django-stubs) from 6.0.1 to 6.0.2.
- [Release notes](https://github.com/typeddjango/django-stubs/releases)
- [Commits](https://github.com/typeddjango/django-stubs/compare/6.0.1...6.0.2)

---
updated-dependencies:
- dependency-name: django-stubs[compatible-mypy]
  dependency-version: 6.0.2
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-02 10:57:20 +01:00
dependabot[bot]
b6df1a8058 core: bump types-channels from 4.3.0.20260321 to 4.3.0.20260402 (#21341)
Bumps [types-channels](https://github.com/python/typeshed) from 4.3.0.20260321 to 4.3.0.20260402.
- [Commits](https://github.com/python/typeshed/commits)

---
updated-dependencies:
- dependency-name: types-channels
  dependency-version: 4.3.0.20260402
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-02 10:57:16 +01:00
dependabot[bot]
25a44ca35f core: bump types-jwcrypto from 1.5.0.20251102 to 1.5.0.20260402 (#21344)
Bumps [types-jwcrypto](https://github.com/python/typeshed) from 1.5.0.20251102 to 1.5.0.20260402.
- [Commits](https://github.com/python/typeshed/commits)

---
updated-dependencies:
- dependency-name: types-jwcrypto
  dependency-version: 1.5.0.20260402
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-02 10:57:09 +01:00
dependabot[bot]
fe870ea0f0 core: bump astral-sh/uv from 0.11.2 to 0.11.3 in /lifecycle/container (#21346)
Bumps [astral-sh/uv](https://github.com/astral-sh/uv) from 0.11.2 to 0.11.3.
- [Release notes](https://github.com/astral-sh/uv/releases)
- [Changelog](https://github.com/astral-sh/uv/blob/main/CHANGELOG.md)
- [Commits](https://github.com/astral-sh/uv/compare/0.11.2...0.11.3)

---
updated-dependencies:
- dependency-name: astral-sh/uv
  dependency-version: 0.11.3
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-02 10:57:06 +01:00
dependabot[bot]
c085be8d1b ci: bump taiki-e/install-action from 2.70.4 to 2.71.1 in /.github/actions/setup (#21347)
ci: bump taiki-e/install-action in /.github/actions/setup

Bumps [taiki-e/install-action](https://github.com/taiki-e/install-action) from 2.70.4 to 2.71.1.
- [Release notes](https://github.com/taiki-e/install-action/releases)
- [Changelog](https://github.com/taiki-e/install-action/blob/main/CHANGELOG.md)
- [Commits](bfadeaba21...0cccd59f03)

---
updated-dependencies:
- dependency-name: taiki-e/install-action
  dependency-version: 2.71.1
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-02 10:57:02 +01:00
Jens L.
1964394399 ci: allow setting working directory for setup action (#21329)
Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-04-02 00:33:24 +02:00
Jens L.
5bf11f71f1 security: update policy to include explicit intended functionality (#21308)
Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-04-01 23:39:00 +02:00
Marc 'risson' Schmitt
7a8a25a6ff packages/django-postgres-cache: fix expiry and delete (#21307) 2026-04-01 14:28:40 +00:00
Dewi Roberts
dea66394c7 website/docs: entra scim: add note about validator (#21273)
Add note
2026-04-01 14:13:45 +00:00
dependabot[bot]
4dd1f0c346 core: bump djangorestframework-stubs[compatible-mypy] from 3.16.8 to 3.16.9 (#21294)
core: bump djangorestframework-stubs[compatible-mypy]

Bumps [djangorestframework-stubs[compatible-mypy]](https://github.com/typeddjango/djangorestframework-stubs) from 3.16.8 to 3.16.9.
- [Release notes](https://github.com/typeddjango/djangorestframework-stubs/releases)
- [Commits](https://github.com/typeddjango/djangorestframework-stubs/compare/3.16.8...3.16.9)

---
updated-dependencies:
- dependency-name: djangorestframework-stubs[compatible-mypy]
  dependency-version: 3.16.9
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-01 13:20:46 +00:00
dependabot[bot]
b58e673f96 web: bump @xmldom/xmldom from 0.8.11 to 0.8.12 in /web (#21301)
Bumps [@xmldom/xmldom](https://github.com/xmldom/xmldom) from 0.8.11 to 0.8.12.
- [Release notes](https://github.com/xmldom/xmldom/releases)
- [Changelog](https://github.com/xmldom/xmldom/blob/master/CHANGELOG.md)
- [Commits](https://github.com/xmldom/xmldom/compare/0.8.11...0.8.12)

---
updated-dependencies:
- dependency-name: "@xmldom/xmldom"
  dependency-version: 0.8.12
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-01 15:03:52 +02:00
Jens L.
8610c25bd3 blueprints: rework one-time import (#18074)
* initial move

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* rework permissions

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* initial UI rework

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* add option to one-time import from file

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* adjust ui

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* update api

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix import form logs

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* reset correctly

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* improve error handling

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

---------

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-04-01 15:03:16 +02:00
dependabot[bot]
82c8b3ff75 lifecycle/aws: bump aws-cdk from 2.1115.0 to 2.1115.1 in /lifecycle/aws (#21293)
Bumps [aws-cdk](https://github.com/aws/aws-cdk-cli/tree/HEAD/packages/aws-cdk) from 2.1115.0 to 2.1115.1.
- [Release notes](https://github.com/aws/aws-cdk-cli/releases)
- [Commits](https://github.com/aws/aws-cdk-cli/commits/aws-cdk@v2.1115.1/packages/aws-cdk)

---
updated-dependencies:
- dependency-name: aws-cdk
  dependency-version: 2.1115.1
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-01 12:59:26 +00:00
authentik-automation[bot]
e2379f9c3b core, web: update translations (#21288)
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
2026-04-01 12:59:12 +00:00
dependabot[bot]
2e9f40b4ce core: bump sentry-sdk from 2.56.0 to 2.57.0 (#21295)
Bumps [sentry-sdk](https://github.com/getsentry/sentry-python) from 2.56.0 to 2.57.0.
- [Release notes](https://github.com/getsentry/sentry-python/releases)
- [Changelog](https://github.com/getsentry/sentry-python/blob/master/CHANGELOG.md)
- [Commits](https://github.com/getsentry/sentry-python/compare/2.56.0...2.57.0)

---
updated-dependencies:
- dependency-name: sentry-sdk
  dependency-version: 2.57.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-01 12:52:20 +00:00
dependabot[bot]
f0270e1151 core: bump aws-cdk-lib from 2.245.0 to 2.246.0 (#21296)
Bumps [aws-cdk-lib](https://github.com/aws/aws-cdk) from 2.245.0 to 2.246.0.
- [Release notes](https://github.com/aws/aws-cdk/releases)
- [Changelog](https://github.com/aws/aws-cdk/blob/main/CHANGELOG.v2.alpha.md)
- [Commits](https://github.com/aws/aws-cdk/compare/v2.245.0...v2.246.0)

---
updated-dependencies:
- dependency-name: aws-cdk-lib
  dependency-version: 2.246.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-01 12:52:08 +00:00
authentik-automation[bot]
1faa2cdbb7 stages/authenticator_webauthn: Update FIDO MDS3 & Passkey aaguid blobs (#21290)
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
2026-04-01 13:52:02 +01:00
dependabot[bot]
197934837d ci: bump getsentry/action-release from 3.5.0 to 3.6.0 (#21298)
Bumps [getsentry/action-release](https://github.com/getsentry/action-release) from 3.5.0 to 3.6.0.
- [Release notes](https://github.com/getsentry/action-release/releases)
- [Changelog](https://github.com/getsentry/action-release/blob/master/CHANGELOG.md)
- [Commits](dab6548b3c...5657c9e888)

---
updated-dependencies:
- dependency-name: getsentry/action-release
  dependency-version: 3.6.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-01 12:51:46 +00:00
dependabot[bot]
5ffa209515 ci: bump taiki-e/install-action from 2.70.3 to 2.70.4 in /.github/actions/setup (#21299)
ci: bump taiki-e/install-action in /.github/actions/setup

Bumps [taiki-e/install-action](https://github.com/taiki-e/install-action) from 2.70.3 to 2.70.4.
- [Release notes](https://github.com/taiki-e/install-action/releases)
- [Changelog](https://github.com/taiki-e/install-action/blob/main/CHANGELOG.md)
- [Commits](6ef672efc2...bfadeaba21)

---
updated-dependencies:
- dependency-name: taiki-e/install-action
  dependency-version: 2.70.4
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-01 12:51:25 +00:00
Jens L.
dc96bda2d3 website/docs: add example recovery flow with MFA (#19497)
* website/docs: add example recovery flow with MFA

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* Apply suggestion from @tanberry

Co-authored-by: Tana M Berry <tanamarieberry@yahoo.com>
Signed-off-by: Jens L. <jens@beryju.org>

---------

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
Signed-off-by: Jens L. <jens@beryju.org>
Co-authored-by: Tana M Berry <tanamarieberry@yahoo.com>
Co-authored-by: Dewi Roberts <dewi@goauthentik.io>
2026-04-01 12:24:33 +00:00
Dominic R
fabe43127a website/docs: format cache settings (#21289) 2026-04-01 07:08:41 -04:00
Connor Peshek
8dddc05bc0 source/saml: Add forceauthn to saml authnrequest (#20883)
* source/saml: Add ForceAuthn support to SAML AuthnRequest
2026-03-31 22:54:01 -05:00
transifex-integration[bot]
1f872d1721 translate: Updates for project authentik and language fr_FR (#21285)
Co-authored-by: transifex-integration[bot] <43880903+transifex-integration[bot]@users.noreply.github.com>
2026-03-31 17:43:07 +00:00
Marc 'risson' Schmitt
fd3196744e packages/django-postgres-cache: rework to use ORM (#17771) 2026-03-31 17:05:14 +00:00
Connor Peshek
a6064ec334 providers/saml: Fix redirect for saml slo (#21258)
* providers/saml: fix redirect for logouts

* lint

* update logic

* fix tests

* update build

* fix makefile

* remove sed backup artifacts (.rs-e files)
2026-03-31 18:27:36 +02:00
Jens L.
06408cba59 core: fix provider not nullable (#21275)
* core: fix provider not nullable

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix more inconsistencies

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* idk man

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

---------

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-03-31 18:27:22 +02:00
Dewi Roberts
f4ba5ee885 website/docs: ad source: add note about ldap signing (#21274)
Add note
2026-03-31 11:24:20 -04:00
Marc 'risson' Schmitt
be77dc910e website/api: update API clients doc (#21202) 2026-03-31 07:52:28 -05:00
dependabot[bot]
b9b34102ac ci: bump taiki-e/install-action from 2.70.2 to 2.70.3 in /.github/actions/setup (#21267)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-31 14:07:30 +02:00
dependabot[bot]
9d9be53d6f lifecycle/aws: bump aws-cdk from 2.1114.1 to 2.1115.0 in /lifecycle/aws (#21265)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-31 14:01:13 +02:00
authentik-automation[bot]
2d73ea6cb4 core, web: update translations (#21264)
Co-authored-by: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
2026-03-31 14:00:42 +02:00
Marc 'risson' Schmitt
55e555c047 packages/ak-lib: init (#21257)
* packages/ak-lib: init

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>

* fixup

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>

---------

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-31 11:33:46 +02:00
Shiv Tyagi
b9cc9e9cc3 website/docs: document group_uuid as a property for group object (#20865)
The application might need a unique id for a group to uniquely identify it. It can help in various cases like detecting group renames and more.
We should document `group_uuid` field of the group object to make users aware that it can be used in custom property mappings.

Signed-off-by: Shiv Tyagi <67995771+shiv-tyagi@users.noreply.github.com>
Co-authored-by: Dewi Roberts <dewi@goauthentik.io>
2026-03-31 08:41:32 +01:00
Ken Sternberg
86f16921a3 web/flow: extract lifecycle events peripheral to stage management into their own controllers (#20898)
* web/flow: extract lifecycle events peripheral to stage management into their own controllers

## What

Three features embedded in FlowExecutor, Iframe message handling (from captchas), Multitab message handling, and Websocket message handling, have been extracted from the FlowExecutor and placed into their own controllers.

The `renderFrameBackground()` method has been removed.

## Why

The three features mentioned are all *peripheral* to the task of coordinating challenges. The Iframe message handling may result in a challenge being returned, but there’s a bit of set-up and tear down that doesn’t really correspond well to the central concern of the FlowExecutor; it’s more like a sub-stage of IdentificationStage. By being attached to the executor as Controllers they participate in the executor’s lifecycle and have access to it, but their own internal logic is separated out, making them easier to understand and maintain. As a result, all of the associated machinery– attaching to `window`, disconnecting the websocket client, and so on– can be removed from the FlowExecutor.

The `renderFrameBackground()` method is not used.

* Darn spelling errors.

* Removed debugging line; added some comments.

* Restore frame-based backgrounds to executor; fix comments in FlowIframeMessageController

* Fix comment.

* Prettier and its opinions.

* Web/elements/drawer (#21149)

* .

* .

* .

* .

* .

* .

* Prettier had opinions.

* ## What

Componentize the drawer.

Remove unused CSS.

Provide a better mechanism for manipulating classes than “classMap”;

## Why

### The drawer

The drawer was the last thing that we loaded “native” into the UI. This is “the stupidest thing that could work,” just pasting @beryju’s drawer pattern into a component and giving it some functionality. It’s an excellent start to P5 the thing, however.

The two portions of the drawer, the “content” and the “panel”, are slots; the content is from the anonymous slot. This mirrors my philosophy that components are for layout and control, but the look and feel of their content should be driven by the content, not the component.

### Remove unused CSS

I literally could not find a reason any of these were in the top-level CSS; they don’t set CSS Custom Properties not accessible within the components that use them, they don’t affect the visuals of the components that are present within the top-level DOM, and they were just filling up space.

### class-list

ClassMap always bothered me as an especially clunky solution to what is essentially a problem in set theory: the `element.classlist` needs to be adjusted to match “the set of all classes currently active on this component.” ClassList is my solution: a directive that takes a *list* of classes and does the same set-theoretic comparisons as ClassMap, but with a cleaner API. Anything in the list that is a non-empty string is valid: like ClassMap, it will be left or added to ClassList; everything else (`false`, `""`, `null`, `undefined`) will be removed. (Symbols, numbers, and objects are technically possible and will be rejected as “not part of the classList set”, but Typescript won’t allow you to pass those in.)

This allows us to say things like:

    const open = (this.open && "pf-m-expanded") || "pf-m-collapsed"
    ...
    class="pf-c-drawer ${classList(open)}"

… which I think is cleaner than:

    const open = {
       "pf-m-expanded": this.open,
       "pf-m-collapsed": !this.open
    };
    ...
    class="pf-c-drawer ${classMap(open)}"

- \[🦤\] The code has been formatted (`make web`)

* Revised comments; changed a variable name.

* Update after merge.

---------

Co-authored-by: Jens Langhammer <jens@goauthentik.io>
2026-03-30 15:46:02 -07:00
dependabot[bot]
18ee19e49c core: bump pygments from 2.19.2 to 2.20.0 (#21260)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-30 18:49:49 +00:00
Jens L.
20e2d3fac7 website/docs: add grafana dashboard (#21254)
* website/docs: add grafana dashboard

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* Optimised images with calibre/image-actions

* Optimised images with calibre/image-actions

---------

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
Co-authored-by: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
2026-03-30 19:32:49 +02:00
Jens L.
0b1ba60354 stages/authenticator_webauthn: save attestation certificate when creating credential (#20095)
* stages/authenticator_webauthn: save attestation certificate when creating credential

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* add toggle

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix migration

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* gen

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* squash

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* better test

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* ui

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* docs

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* gen

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

---------

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-03-30 13:55:39 +02:00
Jens L.
0748a3800f web/admin: fix missing icon on app view page (#21251)
Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-03-30 12:30:09 +02:00
Jens L.
453c0c04a2 web/elements: allow table per-column options (#21250)
* web/elements: allow table per-column options

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* style param instead

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

---------

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-03-30 12:02:55 +02:00
dependabot[bot]
7ff87bb401 ci: bump actions/setup-go from 6.3.0 to 6.4.0 (#21245)
Bumps [actions/setup-go](https://github.com/actions/setup-go) from 6.3.0 to 6.4.0.
- [Release notes](https://github.com/actions/setup-go/releases)
- [Commits](4b73464bb3...4a3601121d)

---
updated-dependencies:
- dependency-name: actions/setup-go
  dependency-version: 6.4.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-30 11:59:31 +02:00
dependabot[bot]
8045b141c1 web: bump knip from 6.0.6 to 6.1.0 in /web (#21241)
Bumps [knip](https://github.com/webpro-nl/knip/tree/HEAD/packages/knip) from 6.0.6 to 6.1.0.
- [Release notes](https://github.com/webpro-nl/knip/releases)
- [Commits](https://github.com/webpro-nl/knip/commits/knip@6.1.0/packages/knip)

---
updated-dependencies:
- dependency-name: knip
  dependency-version: 6.1.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-30 11:59:01 +02:00
dependabot[bot]
1538f74acc web: bump globby from 16.1.1 to 16.2.0 in /web (#21242)
Bumps [globby](https://github.com/sindresorhus/globby) from 16.1.1 to 16.2.0.
- [Release notes](https://github.com/sindresorhus/globby/releases)
- [Commits](https://github.com/sindresorhus/globby/compare/v16.1.1...v16.2.0)

---
updated-dependencies:
- dependency-name: globby
  dependency-version: 16.2.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-30 11:58:46 +02:00
dependabot[bot]
b1c2535c85 core: bump types-requests from 2.32.4.20260324 to 2.33.0.20260327 (#21236)
Bumps [types-requests](https://github.com/python/typeshed) from 2.32.4.20260324 to 2.33.0.20260327.
- [Commits](https://github.com/python/typeshed/commits)

---
updated-dependencies:
- dependency-name: types-requests
  dependency-version: 2.33.0.20260327
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-30 10:58:12 +01:00
dependabot[bot]
c78514ed01 core: bump types-docker from 7.1.0.20260322 to 7.1.0.20260328 (#21237)
Bumps [types-docker](https://github.com/python/typeshed) from 7.1.0.20260322 to 7.1.0.20260328.
- [Commits](https://github.com/python/typeshed/commits)

---
updated-dependencies:
- dependency-name: types-docker
  dependency-version: 7.1.0.20260328
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-30 10:58:08 +01:00
dependabot[bot]
44db237ce9 core: bump aws-cdk-lib from 2.244.0 to 2.245.0 (#21238)
Bumps [aws-cdk-lib](https://github.com/aws/aws-cdk) from 2.244.0 to 2.245.0.
- [Release notes](https://github.com/aws/aws-cdk/releases)
- [Changelog](https://github.com/aws/aws-cdk/blob/main/CHANGELOG.v2.alpha.md)
- [Commits](https://github.com/aws/aws-cdk/compare/v2.244.0...v2.245.0)

---
updated-dependencies:
- dependency-name: aws-cdk-lib
  dependency-version: 2.245.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-30 10:58:03 +01:00
dependabot[bot]
e45a76e26d ci: bump int128/docker-manifest-create-action from 2.16.0 to 2.17.0 (#21244)
Bumps [int128/docker-manifest-create-action](https://github.com/int128/docker-manifest-create-action) from 2.16.0 to 2.17.0.
- [Release notes](https://github.com/int128/docker-manifest-create-action/releases)
- [Commits](8aac06098a...44422a4b04)

---
updated-dependencies:
- dependency-name: int128/docker-manifest-create-action
  dependency-version: 2.17.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-30 10:57:59 +01:00
dependabot[bot]
d5055eba1a ci: bump astral-sh/setup-uv from 7.6.0 to 8.0.0 in /.github/actions/setup (#21246)
ci: bump astral-sh/setup-uv in /.github/actions/setup

Bumps [astral-sh/setup-uv](https://github.com/astral-sh/setup-uv) from 7.6.0 to 8.0.0.
- [Release notes](https://github.com/astral-sh/setup-uv/releases)
- [Commits](37802adc94...cec208311d)

---
updated-dependencies:
- dependency-name: astral-sh/setup-uv
  dependency-version: 8.0.0
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-30 10:57:54 +01:00
dependabot[bot]
e00cf88867 ci: bump taiki-e/install-action from 2.69.12 to 2.70.2 in /.github/actions/setup (#21247)
ci: bump taiki-e/install-action in /.github/actions/setup

Bumps [taiki-e/install-action](https://github.com/taiki-e/install-action) from 2.69.12 to 2.70.2.
- [Release notes](https://github.com/taiki-e/install-action/releases)
- [Changelog](https://github.com/taiki-e/install-action/blob/main/CHANGELOG.md)
- [Commits](80a23c5ba9...e9e8e031bc)

---
updated-dependencies:
- dependency-name: taiki-e/install-action
  dependency-version: 2.70.2
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-30 10:57:50 +01:00
dependabot[bot]
d2eba75203 ci: bump actions/setup-go from 6.3.0 to 6.4.0 in /.github/actions/setup (#21248)
Bumps [actions/setup-go](https://github.com/actions/setup-go) from 6.3.0 to 6.4.0.
- [Release notes](https://github.com/actions/setup-go/releases)
- [Commits](4b73464bb3...4a3601121d)

---
updated-dependencies:
- dependency-name: actions/setup-go
  dependency-version: 6.4.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-30 10:57:47 +01:00
authentik-automation[bot]
9f8aefe304 core, web: update translations (#21233)
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
2026-03-30 03:08:54 +02:00
transifex-integration[bot]
2e8c402a0f translate: Updates for project authentik and language fr_FR (#21214)
* translate: Translate django.po in fr_FR

100% translated source file: 'django.po'
on 'fr_FR'.

* translate: Translate web/xliff/en.xlf in fr_FR

100% translated source file: 'web/xliff/en.xlf'
on 'fr_FR'.

---------

Co-authored-by: transifex-integration[bot] <43880903+transifex-integration[bot]@users.noreply.github.com>
2026-03-30 01:33:51 +02:00
Jens L.
480bffd5ac web/admin: polish recent events, various button alignments and labels (#21232)
* clarify stats durations

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix button alignment

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix user list button alignment

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix page size for recent events card

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* more renderDescriptionList, related actions

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

---------

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-03-30 01:32:26 +02:00
Jens L.
1848c6c380 outposts: Create separate metrics service in Kubernetes (#21229)
* outposts: create separate metrics service

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix service monitor plumbing

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* update docs

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* format

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* add some static tests

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* make metrics service ClusterIP

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* update service monitor when labels mismatch

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

---------

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-03-29 23:51:10 +02:00
Jens L.
416dd0cf86 events: fix exception in volume endpoint, adjust simple table size (#21230)
* admin: set SimpleEventTable default page size

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix event endpoint broken

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

---------

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-03-29 23:30:11 +02:00
Jens L.
d1c997b2fe core: Application stats, device events & cleanup (#21225)
* core: app stats

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* refactor

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* rework to generic API

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* oops

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* handling

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* fix docs

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* more docs

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* unrelated fix

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* allow filtering events by device

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* show device events on device page

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* simplify event tables

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

* tests

Signed-off-by: Jens Langhammer <jens@goauthentik.io>

---------

Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-03-29 21:58:12 +02:00
dependabot[bot]
a62c6c92a8 core: bump axllent/mailpit from v1.29.4 to v1.29.5 in /tests/e2e (#21226)
Bumps axllent/mailpit from v1.29.4 to v1.29.5.

---
updated-dependencies:
- dependency-name: axllent/mailpit
  dependency-version: v1.29.5
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-29 20:12:26 +01:00
Jens L.
189251dc26 providers/ldap: avoid concurrent header writes in API Client (#21223)
Signed-off-by: Jens Langhammer <jens@goauthentik.io>
2026-03-29 20:52:49 +02:00
dependabot[bot]
2b3b6e045a core: bump github.com/grafana/pyroscope-go from 1.2.7 to 1.2.8 (#21168)
Bumps [github.com/grafana/pyroscope-go](https://github.com/grafana/pyroscope-go) from 1.2.7 to 1.2.8.
- [Release notes](https://github.com/grafana/pyroscope-go/releases)
- [Commits](https://github.com/grafana/pyroscope-go/compare/v1.2.7...v1.2.8)

---
updated-dependencies:
- dependency-name: github.com/grafana/pyroscope-go
  dependency-version: 1.2.8
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-29 17:53:29 +02:00
1194 changed files with 98081 additions and 49587 deletions

5
.cargo/config.toml Normal file
View File

@@ -0,0 +1,5 @@
[alias]
t = ["nextest", "run"]
[build]
rustflags = ["--cfg", "tokio_unstable"]

View File

@@ -2,12 +2,14 @@
allow = [
"Apache-2.0",
"BSD-3-Clause",
"CC0-1.0",
"CDLA-Permissive-2.0",
"ISC",
"MIT",
"MPL-2.0",
"OpenSSL",
"Unicode-3.0",
"Zlib",
]
[licenses.private]

View File

@@ -12,5 +12,4 @@ reorder_impl_items = true
style_edition = "2024"
use_field_init_shorthand = true
use_try_shorthand = true
where_single_line = true
wrap_comments = true

View File

@@ -10,4 +10,4 @@ build_docs/**
blueprints/local
.git
.venv
target/
target

View File

@@ -8,6 +8,11 @@ inputs:
postgresql_version:
description: "Optional postgresql image tag"
default: "16"
working-directory:
description: |
Optional working directory if this repo isn't in the root of the actions workspace.
When set, needs to contain a trailing slash
default: ""
runs:
using: "composite"
@@ -22,17 +27,18 @@ runs:
sudo rm -rf /usr/local/lib/android
- name: Install uv
if: ${{ contains(inputs.dependencies, 'python') }}
uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v5
uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v5
with:
enable-cache: true
- name: Setup python
if: ${{ contains(inputs.dependencies, 'python') }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v5
with:
python-version-file: "pyproject.toml"
python-version-file: "${{ inputs.working-directory }}pyproject.toml"
- name: Install Python deps
if: ${{ contains(inputs.dependencies, 'python') }}
shell: bash
working-directory: ${{ inputs.working-directory }}
run: uv sync --all-extras --dev --frozen
- name: Setup rust (stable)
if: ${{ contains(inputs.dependencies, 'rust') && !contains(inputs.dependencies, 'rust-nightly') }}
@@ -48,34 +54,35 @@ runs:
rustflags: ""
- name: Setup rust dependencies
if: ${{ contains(inputs.dependencies, 'rust') }}
uses: taiki-e/install-action@80a23c5ba9e1100fd8b777106e810018ed662a7b # v2
uses: taiki-e/install-action@d858f8113943481093e02986a7586a4819a3bfd6 # v2
with:
tool: cargo-deny cargo-machete cargo-llvm-cov nextest
- name: Setup node (web)
if: ${{ contains(inputs.dependencies, 'node') }}
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v4
with:
node-version-file: web/package.json
node-version-file: "${{ inputs.working-directory }}web/package.json"
cache: "npm"
cache-dependency-path: web/package-lock.json
cache-dependency-path: "${{ inputs.working-directory }}web/package-lock.json"
registry-url: "https://registry.npmjs.org"
- name: Setup node (root)
if: ${{ contains(inputs.dependencies, 'node') }}
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v4
with:
node-version-file: package.json
node-version-file: "${{ inputs.working-directory }}package.json"
cache: "npm"
cache-dependency-path: package-lock.json
cache-dependency-path: "${{ inputs.working-directory }}package-lock.json"
registry-url: "https://registry.npmjs.org"
- name: Install Node deps
if: ${{ contains(inputs.dependencies, 'node') }}
shell: bash
working-directory: ${{ inputs.working-directory }}
run: npm ci
- name: Setup go
if: ${{ contains(inputs.dependencies, 'go') }}
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v5
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v5
with:
go-version-file: "go.mod"
go-version-file: "${{ inputs.working-directory }}go.mod"
- name: Setup docker cache
if: ${{ contains(inputs.dependencies, 'runtime') }}
uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7
@@ -84,6 +91,7 @@ runs:
- name: Setup dependencies
if: ${{ contains(inputs.dependencies, 'runtime') }}
shell: bash
working-directory: ${{ inputs.working-directory }}
run: |
export PSQL_TAG=${{ inputs.postgresql_version }}
docker compose -f .github/actions/setup/compose.yml up -d
@@ -91,6 +99,7 @@ runs:
- name: Generate config
if: ${{ contains(inputs.dependencies, 'python') }}
shell: uv run python {0}
working-directory: ${{ inputs.working-directory }}
run: |
from authentik.lib.generators import generate_id
from yaml import safe_dump

View File

@@ -56,13 +56,13 @@ jobs:
release: ${{ inputs.release }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -72,7 +72,7 @@ jobs:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version-file: "go.mod"
- name: Build Docker Image

View File

@@ -79,18 +79,18 @@ jobs:
image-name: ${{ inputs.image_name }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: int128/docker-manifest-create-action@8aac06098a12365ccdf99372dcfb453ccce8a0b0 # v2
- uses: int128/docker-manifest-create-action@44422a4b046d55dc036df622039ed3aec43c613c # v2
id: build
with:
tags: ${{ matrix.tag }}

View File

@@ -89,7 +89,7 @@ jobs:
image-name: ghcr.io/goauthentik/dev-docs
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

View File

@@ -196,6 +196,7 @@ jobs:
- name: run integration
run: |
uv run coverage run manage.py test tests/integration
uv run coverage combine
uv run coverage xml
- uses: ./.github/actions/test-results
if: ${{ always() }}
@@ -223,6 +224,9 @@ jobs:
profiles: selenium
- name: ldap
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
- name: rac
glob: tests/e2e/test_provider_rac*
profiles: selenium
- name: ws-fed
glob: tests/e2e/test_provider_ws_fed*
profiles: selenium
@@ -247,11 +251,12 @@ jobs:
docker compose -f tests/e2e/compose.yml up -d --quiet-pull
- id: cache-web
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v4
if: contains(matrix.job.profiles, 'selenium')
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true'
if: steps.cache-web.outputs.cache-hit != 'true' && contains(matrix.job.profiles, 'selenium')
working-directory: web
run: |
npm ci
@@ -260,6 +265,7 @@ jobs:
- name: run e2e
run: |
uv run coverage run manage.py test ${{ matrix.job.glob }}
uv run coverage combine
uv run coverage xml
- uses: ./.github/actions/test-results
if: ${{ always() }}
@@ -304,6 +310,7 @@ jobs:
- name: run conformance
run: |
uv run coverage run manage.py test ${{ matrix.job.glob }}
uv run coverage combine
uv run coverage xml
- uses: ./.github/actions/test-results
if: ${{ always() }}

View File

@@ -22,7 +22,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version-file: "go.mod"
- name: Prepare and generate API
@@ -41,7 +41,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version-file: "go.mod"
- name: Setup authentik env
@@ -98,7 +98,7 @@ jobs:
image-name: ghcr.io/goauthentik/dev-${{ matrix.type }}
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -142,7 +142,7 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5

View File

@@ -44,7 +44,7 @@ jobs:
with:
image-name: ghcr.io/goauthentik/docs
- name: Login to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -84,7 +84,7 @@ jobs:
- rac
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
@@ -104,12 +104,12 @@ jobs:
with:
image-name: ghcr.io/goauthentik/${{ matrix.type }},authentik/${{ matrix.type }}
- name: Docker Login Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -148,7 +148,7 @@ jobs:
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
@@ -236,7 +236,7 @@ jobs:
container=$(docker container create ${{ steps.ev.outputs.imageMainName }})
docker cp ${container}:web/ .
- name: Create a Sentry.io release
uses: getsentry/action-release@dab6548b3c03c4717878099e43782cf5be654289 # v3
uses: getsentry/action-release@5657c9e888b4e2cc85f4d29143ea4131fde4a73a # v3
continue-on-error: true
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}

View File

@@ -27,6 +27,7 @@ Makefile @goauthentik/infrastructure
.editorconfig @goauthentik/infrastructure
CODEOWNERS @goauthentik/infrastructure
# Backend packages
packages/ak-* @goauthentik/backend
packages/client-rust @goauthentik/backend
packages/django-channels-postgres @goauthentik/backend
packages/django-postgres-cache @goauthentik/backend

284
Cargo.lock generated
View File

@@ -67,6 +67,21 @@ version = "1.0.102"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c"
[[package]]
name = "arc-swap"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a07d1f37ff60921c83bdfc7407723bdefe89b44b98a9b772f225c8f9d67141a6"
dependencies = [
"rustversion",
]
[[package]]
name = "arraydeque"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d902e3d592a523def97af8f317b08ce16b7ab854c1985a0c671e6f15cebc236"
[[package]]
name = "async-trait"
version = "0.1.89"
@@ -102,6 +117,28 @@ dependencies = [
"uuid",
]
[[package]]
name = "authentik-common"
version = "2026.5.0-rc1"
dependencies = [
"arc-swap",
"axum-server",
"config",
"eyre",
"glob",
"nix",
"notify",
"pin-project-lite",
"serde",
"serde_json",
"tempfile",
"thiserror 2.0.18",
"tokio",
"tokio-util",
"tracing",
"url",
]
[[package]]
name = "autocfg"
version = "1.5.0"
@@ -146,6 +183,28 @@ dependencies = [
"fs_extra",
]
[[package]]
name = "axum-server"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1df331683d982a0b9492b38127151e6453639cd34926eb9c07d4cd8c6d22bfc"
dependencies = [
"arc-swap",
"bytes",
"either",
"fs-err",
"http",
"http-body",
"hyper",
"hyper-util",
"pin-project-lite",
"rustls",
"rustls-pki-types",
"tokio",
"tokio-rustls",
"tower-service",
]
[[package]]
name = "base64"
version = "0.22.1"
@@ -158,7 +217,7 @@ version = "0.72.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895"
dependencies = [
"bitflags",
"bitflags 2.11.0",
"cexpr",
"clang-sys",
"itertools",
@@ -172,6 +231,12 @@ dependencies = [
"syn",
]
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.11.0"
@@ -324,6 +389,19 @@ dependencies = [
"memchr",
]
[[package]]
name = "config"
version = "0.15.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e68cfe19cd7d23ffde002c24ffa5cda73931913ef394d5eaaa32037dc940c0c"
dependencies = [
"async-trait",
"pathdiff",
"serde_core",
"winnow",
"yaml-rust2",
]
[[package]]
name = "core-foundation"
version = "0.9.4"
@@ -434,6 +512,12 @@ dependencies = [
"once_cell",
]
[[package]]
name = "fastrand"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "find-msvc-tools"
version = "0.1.9"
@@ -461,12 +545,31 @@ dependencies = [
"percent-encoding",
]
[[package]]
name = "fs-err"
version = "3.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73fde052dbfc920003cfd2c8e2c6e6d4cc7c1091538c3a24226cec0665ab08c0"
dependencies = [
"autocfg",
"tokio",
]
[[package]]
name = "fs_extra"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c"
[[package]]
name = "fsevent-sys"
version = "4.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2"
dependencies = [
"libc",
]
[[package]]
name = "futures-channel"
version = "0.3.32"
@@ -607,6 +710,15 @@ version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
[[package]]
name = "hashlink"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1"
dependencies = [
"hashbrown 0.15.5",
]
[[package]]
name = "heck"
version = "0.5.0"
@@ -658,6 +770,12 @@ version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
[[package]]
name = "httpdate"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]]
name = "hyper"
version = "1.8.1"
@@ -672,6 +790,7 @@ dependencies = [
"http",
"http-body",
"httparse",
"httpdate",
"itoa",
"pin-project-lite",
"pin-utils",
@@ -847,6 +966,26 @@ dependencies = [
"serde_core",
]
[[package]]
name = "inotify"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd5b3eaf1a28b758ac0faa5a4254e8ab2705605496f1b1f3fbbc3988ad73d199"
dependencies = [
"bitflags 2.11.0",
"inotify-sys",
"libc",
]
[[package]]
name = "inotify-sys"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb"
dependencies = [
"libc",
]
[[package]]
name = "ipnet"
version = "2.12.0"
@@ -948,6 +1087,26 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "kqueue"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a"
dependencies = [
"kqueue-sys",
"libc",
]
[[package]]
name = "kqueue-sys"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b"
dependencies = [
"bitflags 1.3.2",
"libc",
]
[[package]]
name = "leb128fmt"
version = "0.1.0"
@@ -970,6 +1129,12 @@ dependencies = [
"windows-link",
]
[[package]]
name = "linux-raw-sys"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53"
[[package]]
name = "litemap"
version = "0.8.1"
@@ -1032,10 +1197,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc"
dependencies = [
"libc",
"log",
"wasi",
"windows-sys 0.61.2",
]
[[package]]
name = "nix"
version = "0.31.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d6d0705320c1e6ba1d912b5e37cf18071b6c2e9b7fa8215a1e8a7651966f5d3"
dependencies = [
"bitflags 2.11.0",
"cfg-if",
"cfg_aliases",
"libc",
]
[[package]]
name = "nom"
version = "7.1.3"
@@ -1046,6 +1224,33 @@ dependencies = [
"minimal-lexical",
]
[[package]]
name = "notify"
version = "8.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3"
dependencies = [
"bitflags 2.11.0",
"fsevent-sys",
"inotify",
"kqueue",
"libc",
"log",
"mio",
"notify-types",
"walkdir",
"windows-sys 0.60.2",
]
[[package]]
name = "notify-types"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42b8cfee0e339a0337359f3c88165702ac6e600dc01c0cc9579a92d62b08477a"
dependencies = [
"bitflags 2.11.0",
]
[[package]]
name = "num-conv"
version = "0.2.0"
@@ -1102,6 +1307,12 @@ dependencies = [
"windows-link",
]
[[package]]
name = "pathdiff"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3"
[[package]]
name = "percent-encoding"
version = "2.3.2"
@@ -1275,7 +1486,7 @@ version = "0.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d"
dependencies = [
"bitflags",
"bitflags 2.11.0",
]
[[package]]
@@ -1387,6 +1598,19 @@ version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
[[package]]
name = "rustix"
version = "1.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190"
dependencies = [
"bitflags 2.11.0",
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.61.2",
]
[[package]]
name = "rustls"
version = "0.23.37"
@@ -1505,7 +1729,7 @@ version = "3.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7f4bc775c73d9a02cde8bf7b2ec4c9d12743edf609006c7facc23998404cd1d"
dependencies = [
"bitflags",
"bitflags 2.11.0",
"core-foundation 0.10.1",
"core-foundation-sys",
"libc",
@@ -1701,7 +1925,7 @@ version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a13f3d0daba03132c0aa9767f98351b3488edc2c100cda2d2ec2b04f3d8d3c8b"
dependencies = [
"bitflags",
"bitflags 2.11.0",
"core-foundation 0.9.4",
"system-configuration-sys",
]
@@ -1716,6 +1940,19 @@ dependencies = [
"libc",
]
[[package]]
name = "tempfile"
version = "3.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32497e9a4c7b38532efcdebeef879707aa9f794296a4f0244f6f69e9bc8574bd"
dependencies = [
"fastrand",
"getrandom 0.4.2",
"once_cell",
"rustix",
"windows-sys 0.61.2",
]
[[package]]
name = "thiserror"
version = "1.0.69"
@@ -1814,6 +2051,7 @@ dependencies = [
"signal-hook-registry",
"socket2",
"tokio-macros",
"tracing",
"windows-sys 0.61.2",
]
@@ -1876,7 +2114,7 @@ version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8"
dependencies = [
"bitflags",
"bitflags 2.11.0",
"bytes",
"futures-util",
"http",
@@ -1907,9 +2145,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100"
dependencies = [
"pin-project-lite",
"tracing-attributes",
"tracing-core",
]
[[package]]
name = "tracing-attributes"
version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "tracing-core"
version = "0.1.36"
@@ -2134,7 +2384,7 @@ version = "0.244.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe"
dependencies = [
"bitflags",
"bitflags 2.11.0",
"hashbrown 0.15.5",
"indexmap",
"semver",
@@ -2435,6 +2685,15 @@ version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650"
[[package]]
name = "winnow"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09dac053f1cd375980747450bfc7250c264eaae0583872e845c0c7cd578872b5"
dependencies = [
"memchr",
]
[[package]]
name = "wit-bindgen"
version = "0.51.0"
@@ -2493,7 +2752,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2"
dependencies = [
"anyhow",
"bitflags",
"bitflags 2.11.0",
"indexmap",
"log",
"serde",
@@ -2529,6 +2788,17 @@ version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9"
[[package]]
name = "yaml-rust2"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2462ea039c445496d8793d052e13787f2b90e750b833afee748e601c17621ed9"
dependencies = [
"arraydeque",
"encoding_rs",
"hashlink",
]
[[package]]
name = "yoke"
version = "0.8.1"

View File

@@ -1,5 +1,9 @@
[workspace]
members = ["packages/client-rust", "website/scripts/docsmg"]
members = [
"packages/ak-common",
"packages/client-rust",
"website/scripts/docsmg",
]
resolver = "3"
[workspace.package]
@@ -14,11 +18,21 @@ license-file = "LICENSE"
publish = false
[workspace.dependencies]
arc-swap = "= 1.9.0"
axum-server = { version = "= 0.8.0", features = ["tls-rustls-no-provider"] }
aws-lc-rs = { version = "= 1.16.2", features = ["fips"] }
clap = { version = "= 4.6.0", features = ["derive", "env"] }
colored = "= 3.1.1"
config-rs = { package = "config", version = "= 0.15.22", default-features = false, features = [
"yaml",
"async",
] }
dotenvy = "= 0.15.7"
eyre = "= 0.6.12"
glob = "= 0.3.3"
nix = { version = "= 0.31.2", features = ["signal"] }
notify = "= 8.2.0"
pin-project-lite = "= 0.2.17"
regex = "= 1.12.3"
reqwest = { version = "= 0.13.2", features = [
"form",
@@ -42,11 +56,16 @@ serde_repr = "= 0.1.20"
serde_with = { version = "= 3.18.0", default-features = false, features = [
"base64",
] }
tokio = { version = "= 1.50.0", features = ["full"] }
tempfile = "= 3.27.0"
thiserror = "= 2.0.18"
tokio = { version = "= 1.50.0", features = ["full", "tracing"] }
tokio-util = { version = "= 0.7.18", features = ["full"] }
tracing = "= 0.1.44"
url = "= 2.5.8"
uuid = { version = "= 1.23.0", features = ["serde", "v4"] }
ak-common = { package = "authentik-common", version = "2026.5.0-rc1", path = "./packages/ak-common" }
[profile.dev.package.backtrace]
opt-level = 3
@@ -89,12 +108,20 @@ perf = { priority = -1, level = "warn" }
style = { priority = -1, level = "warn" }
suspicious = { priority = -1, level = "warn" }
### and disable the ones we don't want
### cargo group
multiple_crate_versions = "allow"
### pedantic group
missing_errors_doc = "allow"
missing_panics_doc = "allow"
must_use_candidate = "allow"
redundant_closure_for_method_calls = "allow"
struct_field_names = "allow"
too_many_lines = "allow"
### nursery
redundant_pub_crate = "allow"
missing_const_for_fn = "allow"
option_if_let_else = "allow"
redundant_pub_crate = "allow"
significant_drop_tightening = "allow"
### restriction group
allow_attributes = "warn"
allow_attributes_without_reason = "warn"
@@ -107,7 +134,6 @@ create_dir = "warn"
dbg_macro = "warn"
default_numeric_fallback = "warn"
disallowed_script_idents = "warn"
doc_paragraphs_missing_punctuation = "warn"
empty_drop = "warn"
empty_enum_variants_with_brackets = "warn"
empty_structs_with_brackets = "warn"

View File

@@ -74,6 +74,7 @@ rust-test: ## Run the Rust tests
test: ## Run the server tests and produce a coverage report (locally)
$(UV) run coverage run manage.py test --keepdb $(or $(filter-out $@,$(MAKECMDGOALS)),authentik)
$(UV) run coverage combine
$(UV) run coverage html
$(UV) run coverage report
@@ -153,6 +154,7 @@ endif
$(eval current_version := $(shell cat ${PWD}/internal/constants/VERSION))
$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' ${PWD}/pyproject.toml
$(SED_INPLACE) 's/^VERSION = ".*"/VERSION = "$(version)"/' ${PWD}/authentik/__init__.py
$(SED_INPLACE) "s/version = \"${current_version}\"/version = \"$(version)\"" ${PWD}/Cargo.toml ${PWD}/Cargo.lock
$(MAKE) gen-build gen-compose aws-cfn
$(SED_INPLACE) "s/\"${current_version}\"/\"$(version)\"/" ${PWD}/package.json ${PWD}/package-lock.json ${PWD}/web/package.json ${PWD}/web/package-lock.json
echo -n $(version) > ${PWD}/internal/constants/VERSION
@@ -284,7 +286,7 @@ docs-api-build:
npm run --prefix website -w api build
docs-api-watch: ## Build and watch the API documentation
npm run --prefix website -w api build:api
npm run --prefix website -w api generate
npm run --prefix website -w api start
docs-api-clean: ## Clean generated API documentation
@@ -343,5 +345,6 @@ ci-lint-clippy: ci--meta-debug
ci-test: ci--meta-debug
$(UV) run coverage run manage.py test --keepdb authentik
$(UV) run coverage combine
$(UV) run coverage report
$(UV) run coverage xml

View File

@@ -60,6 +60,36 @@ authentik reserves the right to reclassify CVSS as necessary. To determine sever
| 7.0 8.9 | High |
| 9.0 10.0 | Critical |
## Intended functionality
The following capabilities are part of intentional system design and should not be reported as security vulnerabilities:
- Expressions (property mappings/policies/prompts) can execute arbitrary Python code without safeguards.
This is expected behavior. Any user with permission to create or modify objects containing expression fields can write code that is executed within authentik. If a vulnerability allows a user without the required permissions to write or modify code and have it executed, that would be a valid security report.
However, the fact that expressions are executed as part of normal operations is not considered a privilege escalation or security vulnerability.
- Blueprints can access all files on the filesystem.
This access is intentional to allow legitimate configuration and deployment tasks. It does not represent a security problem by itself.
- Importing blueprints allows arbitrary modification of application objects.
This is intended functionality. This behavior reflects the privileged design of blueprint imports. It is "exploitable" when importing blueprints from untrusted sources without reviewing the blueprint beforehand. However, any method to create, modify or execute blueprints without the required permissions would be a valid security report.
- Flow imports may contain objects other than flows (such as policies, users, groups, etc.)
This is expected behavior as flow imports are blueprint files.
- Prompt HTML is not escaped.
Prompts intentionally allow raw HTML, including script tags, so they can be used to create interactive or customized user interface elements. Because of this, scripts within prompts may affect or interact with the surrounding page as designed.
- Open redirects that do not include tokens or other sensitive information are not considered a security vulnerability.
Redirects that only change navigation flow and do not expose session tokens, API keys, or other confidential data are considered acceptable and do not require reporting.
## Disclosure process
1. Report from Github or Issue is reported via Email as listed above.

View File

@@ -1,24 +1,60 @@
"""Serializer mixin for managed models"""
from typing import cast
from django.conf import settings
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, DateTimeField
from rest_framework.exceptions import PermissionDenied, ValidationError
from rest_framework.fields import (
BooleanField,
CharField,
DateTimeField,
FileField,
)
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.viewsets import ModelViewSet
from authentik.api.validation import validate
from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.v1.common import Blueprint
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer
from authentik.core.models import User
from authentik.events.logs import LogEventSerializer
from authentik.rbac.decorators import permission_required
def get_blueprints():
if settings.DEBUG:
return blueprints_find_dict()
return blueprints_find_dict.send().get_result(block=True)
class BlueprintUploadSerializer(PassiveSerializer):
"""Serializer to upload file"""
file = FileField(required=False)
path = CharField(required=False)
def validate_path(self, path: str) -> str:
"""Ensure the path (if set) specified is retrievable"""
if path == "":
return path
files: list[dict] = get_blueprints()
if path not in [file["path"] for file in files]:
raise ValidationError(_("Blueprint file does not exist"))
return path
class ManagedSerializer:
"""Managed Serializer"""
@@ -39,7 +75,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
"""Ensure the path (if set) specified is retrievable"""
if path == "" or path.startswith(OCI_PREFIX):
return path
files: list[dict] = blueprints_find_dict.send().get_result(block=True)
files: list[dict] = get_blueprints()
if path not in [file["path"] for file in files]:
raise ValidationError(_("Blueprint file does not exist"))
return path
@@ -88,6 +124,33 @@ class BlueprintInstanceSerializer(ModelSerializer):
}
def check_blueprint_perms(blueprint: Blueprint, user: User, explicit_action: str | None = None):
"""Check for individual permissions for each model in a blueprint"""
for entry in blueprint.entries:
full_model = entry.get_model(blueprint)
app, __, model = full_model.partition(".")
perms = [
f"{app}.add_{model}",
f"{app}.change_{model}",
f"{app}.delete_{model}",
]
if explicit_action:
perms = [f"{app}.{explicit_action}_{model}"]
for perm in perms:
if not user.has_perm(perm):
raise PermissionDenied(
{
entry.id: _(
"User lacks permission to create {model}".format_map(
{
"model": full_model,
}
)
)
}
)
class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
"""Blueprint instances"""
@@ -97,6 +160,12 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
filterset_fields = ["name", "path"]
ordering = ["name"]
class BlueprintImportResultSerializer(PassiveSerializer):
"""Logs of an attempted blueprint import"""
logs = LogEventSerializer(many=True, read_only=True)
success = BooleanField(read_only=True)
@extend_schema(
responses={
200: ListSerializer(
@@ -115,7 +184,7 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
@action(detail=False, pagination_class=None, filter_backends=[])
def available(self, request: Request) -> Response:
"""Get blueprints"""
files: list[dict] = blueprints_find_dict.send().get_result(block=True)
files: list[dict] = get_blueprints()
return Response(files)
@permission_required("authentik_blueprints.view_blueprintinstance")
@@ -131,3 +200,53 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
blueprint = self.get_object()
apply_blueprint.send_with_options(args=(blueprint.pk,), rel_obj=blueprint)
return self.retrieve(request, *args, **kwargs)
@extend_schema(
request={"multipart/form-data": BlueprintUploadSerializer},
responses={
204: BlueprintImportResultSerializer,
400: BlueprintImportResultSerializer,
},
)
@action(url_path="import", detail=False, methods=["POST"], parser_classes=(MultiPartParser,))
@validate(
BlueprintUploadSerializer,
)
def import_(self, request: Request, body: BlueprintUploadSerializer) -> Response:
"""Import blueprint from .yaml file and apply it once, without creating an instance"""
string_contents = ""
if body.validated_data.get("file"):
file = cast(InMemoryUploadedFile, body.validated_data["file"])
string_contents = file.read().decode()
elif body.validated_data.get("path"):
string_contents = BlueprintInstance(
path=body.validated_data.get("path")
).retrieve_file()
else:
raise ValidationError("Either path or file must be set")
importer = Importer.from_string(string_contents)
check_blueprint_perms(importer.blueprint, request.user)
valid, logs = importer.validate()
import_response = self.BlueprintImportResultSerializer(
data={
"logs": [],
"success": False,
}
)
import_response.is_valid(raise_exception=True)
import_response.initial_data["logs"] = [LogEventSerializer(log).data for log in logs]
import_response.initial_data["success"] = valid
import_response.is_valid()
if not valid:
return Response(data=import_response.initial_data, status=200)
successful = importer.apply()
import_response.initial_data["success"] = successful
import_response.is_valid()
if not successful:
return Response(data=import_response.initial_data, status=200)
return Response(data=import_response.initial_data, status=200)

View File

@@ -48,7 +48,12 @@ class ApplicationSerializer(ModelSerializer):
"""Application Serializer"""
launch_url = SerializerMethodField()
provider_obj = ProviderSerializer(source="get_provider", required=False, read_only=True)
provider_obj = ProviderSerializer(
source="get_provider",
required=False,
read_only=True,
allow_null=True,
)
backchannel_providers_obj = ProviderSerializer(
source="backchannel_providers", required=False, read_only=True, many=True
)

View File

@@ -2,9 +2,8 @@
from django.apps import apps
from django.db.models import Model
from django.utils.translation import gettext as _
from drf_spectacular.utils import PolymorphicProxySerializer, extend_schema, extend_schema_field
from rest_framework.exceptions import PermissionDenied, ValidationError
from rest_framework.exceptions import ValidationError
from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, ListField
from rest_framework.permissions import IsAuthenticated
from rest_framework.request import Request
@@ -13,6 +12,7 @@ from rest_framework.views import APIView
from yaml import ScalarNode
from authentik.api.validation import validate
from authentik.blueprints.api import check_blueprint_perms
from authentik.blueprints.v1.common import (
Blueprint,
BlueprintEntry,
@@ -165,21 +165,7 @@ class TransactionalApplicationView(APIView):
def put(self, request: Request, body: TransactionApplicationSerializer) -> Response:
"""Convert data into a blueprint, validate it and apply it"""
blueprint: Blueprint = body.validated_data
for entry in blueprint.entries:
full_model = entry.get_model(blueprint)
app, __, model = full_model.partition(".")
if not request.user.has_perm(f"{app}.add_{model}"):
raise PermissionDenied(
{
entry.id: _(
"User lacks permission to create {model}".format_map(
{
"model": full_model,
}
)
)
}
)
check_blueprint_perms(blueprint, request.user, explicit_action="add")
importer = Importer(blueprint, {})
applied = importer.apply()
response = {"applied": False, "logs": []}

View File

@@ -796,11 +796,11 @@ class Application(SerializerModel, PolicyBindingModel):
def backchannel_provider_for[T: Provider](self, provider_type: type[T], **kwargs) -> T | None:
"""Get Backchannel provider for a specific type"""
providers = self.backchannel_providers.filter(
provider: BackchannelProvider | None = self.backchannel_providers.filter(
**{f"{provider_type._meta.model_name}__isnull": False},
**kwargs,
)
return getattr(providers.first(), provider_type._meta.model_name)
).first()
return getattr(provider, provider_type._meta.model_name) if provider else None
def __str__(self):
return str(self.name)

View File

@@ -18,7 +18,10 @@ from authentik.rbac.decorators import permission_required
class EnrollmentTokenSerializer(ModelSerializer):
device_group_obj = DeviceAccessGroupSerializer(
source="device_group", read_only=True, required=False
source="device_group",
read_only=True,
required=False,
allow_null=True,
)
def __init__(self, *args, **kwargs) -> None:

View File

@@ -18,6 +18,10 @@ class SSFProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
ssf_url = SerializerMethodField()
token_obj = TokenSerializer(source="token", required=False, read_only=True)
oidc_auth_providers_obj = ProviderSerializer(
read_only=True, source="oidc_auth_providers", many=True
)
def get_ssf_url(self, instance: SSFProvider) -> str | None:
request: Request = self._context.get("request")
if not request:
@@ -45,8 +49,10 @@ class SSFProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
"signing_key",
"token_obj",
"oidc_auth_providers",
"oidc_auth_providers_obj",
"ssf_url",
"event_retention",
"push_verify_certificates",
]
extra_kwargs = {}
@@ -54,7 +60,7 @@ class SSFProviderSerializer(EnterpriseRequiredMixin, ProviderSerializer):
class SSFProviderViewSet(UsedByMixin, ModelViewSet):
"""SSFProvider Viewset"""
queryset = SSFProvider.objects.all()
queryset = SSFProvider.objects.all().prefetch_related("oidc_auth_providers")
serializer_class = SSFProviderSerializer
filterset_fields = {
"application": ["isnull"],

View File

@@ -1,6 +1,7 @@
"""SSF Stream API Views"""
from rest_framework.viewsets import ReadOnlyModelViewSet
from rest_framework import mixins
from rest_framework.viewsets import GenericViewSet
from authentik.core.api.utils import ModelSerializer
from authentik.enterprise.providers.ssf.api.providers import SSFProviderSerializer
@@ -16,6 +17,7 @@ class SSFStreamSerializer(ModelSerializer):
model = Stream
fields = [
"pk",
"status",
"provider",
"provider_obj",
"delivery_method",
@@ -27,7 +29,12 @@ class SSFStreamSerializer(ModelSerializer):
]
class SSFStreamViewSet(ReadOnlyModelViewSet):
class SSFStreamViewSet(
mixins.RetrieveModelMixin,
mixins.DestroyModelMixin,
mixins.ListModelMixin,
GenericViewSet,
):
"""SSFStream Viewset"""
queryset = Stream.objects.all()

View File

@@ -0,0 +1,43 @@
# Generated by Django 5.2.12 on 2026-04-04 16:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_providers_ssf", "0001_initial"),
]
operations = [
migrations.AddField(
model_name="ssfprovider",
name="push_verify_certificates",
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name="stream",
name="authorization_header",
field=models.TextField(default=None, null=True),
),
migrations.AddField(
model_name="stream",
name="status",
field=models.TextField(
choices=[("enabled", "Enabled"), ("paused", "Paused"), ("disabled", "Disabled")],
default="enabled",
),
),
migrations.AlterField(
model_name="stream",
name="delivery_method",
field=models.TextField(
choices=[
("https://schemas.openid.net/secevent/risc/delivery-method/push", "Risc Push"),
("https://schemas.openid.net/secevent/risc/delivery-method/poll", "Risc Poll"),
("urn:ietf:rfc:8935", "SSF RFC Push"),
("urn:ietf:rfc:8936", "SSF RFC Pull"),
]
),
),
]

View File

@@ -33,6 +33,8 @@ class DeliveryMethods(models.TextChoices):
RISC_PUSH = "https://schemas.openid.net/secevent/risc/delivery-method/push"
RISC_POLL = "https://schemas.openid.net/secevent/risc/delivery-method/poll"
RFC_PUSH = "urn:ietf:rfc:8935", _("SSF RFC Push")
RFC_PULL = "urn:ietf:rfc:8936", _("SSF RFC Pull")
class SSFEventStatus(models.TextChoices):
@@ -43,6 +45,13 @@ class SSFEventStatus(models.TextChoices):
SENT = "sent"
class StreamStatus(models.TextChoices):
ENABLED = "enabled"
PAUSED = "paused"
DISABLED = "disabled"
class SSFProvider(TasksModel, BackchannelProvider):
"""Shared Signals Framework provider to allow applications to
receive user events from authentik."""
@@ -54,6 +63,8 @@ class SSFProvider(TasksModel, BackchannelProvider):
help_text=_("Key used to sign the SSF Events."),
)
push_verify_certificates = models.BooleanField(default=True)
oidc_auth_providers = models.ManyToManyField(OAuth2Provider, blank=True, default=None)
token = models.ForeignKey(Token, on_delete=models.CASCADE, null=True, default=None)
@@ -106,10 +117,14 @@ class Stream(models.Model):
"""SSF Stream"""
uuid = models.UUIDField(default=uuid4, primary_key=True, editable=False)
status = models.TextField(choices=StreamStatus.choices, default=StreamStatus.ENABLED)
provider = models.ForeignKey(SSFProvider, on_delete=models.CASCADE)
delivery_method = models.TextField(choices=DeliveryMethods.choices)
endpoint_url = models.TextField(null=True)
authorization_header = models.TextField(null=True, default=None)
events_requested = ArrayField(models.TextField(choices=EventTypes.choices), default=list)
format = models.TextField()
@@ -146,7 +161,7 @@ class Stream(models.Model):
}
def encode(self, data: dict) -> str:
headers = {}
headers = {"typ": "secevent+jwt"}
if self.provider.signing_key:
headers["kid"] = self.provider.signing_key.kid
key, alg = self.provider.jwt_key

View File

@@ -16,6 +16,7 @@ from authentik.enterprise.providers.ssf.models import (
SSFEventStatus,
Stream,
StreamEvent,
StreamStatus,
)
from authentik.lib.utils.http import get_http_session
from authentik.lib.utils.time import timedelta_from_string
@@ -88,23 +89,42 @@ def send_ssf_event(stream_uuid: UUID, event_data: dict[str, Any]):
self.set_uid(event.pk)
if event.status == SSFEventStatus.SENT:
return
if stream.delivery_method != DeliveryMethods.RISC_PUSH:
if stream.delivery_method not in [DeliveryMethods.RISC_PUSH, DeliveryMethods.RFC_PUSH]:
return
headers = {"Content-Type": "application/secevent+jwt", "Accept": "application/json"}
if stream.authorization_header:
headers["Authorization"] = stream.authorization_header
try:
response = session.post(
event.stream.endpoint_url,
data=event.stream.encode(event.payload),
headers={"Content-Type": "application/secevent+jwt", "Accept": "application/json"},
headers=headers,
verify=stream.provider.push_verify_certificates,
timeout=180,
)
response.raise_for_status()
event.status = SSFEventStatus.SENT
event.save()
return
self.info("Event successfully sent", status=response.status_code)
# Cleanup, if we were the last pending message for this stream and it has been deleted
# (status=StreamStatus.DISABLED), then we can delete the stream
if (
not StreamEvent.objects.filter(
stream=stream,
status__in=[SSFEventStatus.PENDING_FAILED, SSFEventStatus.PENDING_NEW],
).exists()
and stream.status == StreamStatus.DISABLED
):
LOGGER.info(
"Deleting inactive stream as all pending messages were sent.", stream=stream
)
self.info("Deleting inactive stream as all pending messages were sent.")
stream.delete()
except RequestException as exc:
LOGGER.warning("Failed to send SSF event", exc=exc)
LOGGER.warning("Failed to send SSF event", exc=exc, stream=stream)
attrs = {}
if exc.response:
if exc.response is not None:
attrs["response"] = {
"content": exc.response.text,
"status": exc.response.status_code,
@@ -113,5 +133,6 @@ def send_ssf_event(stream_uuid: UUID, event_data: dict[str, Any]):
self.warning("Failed to send request", **attrs)
# Re-up the expiry of the stream event
event.expires = now() + timedelta_from_string(event.stream.provider.event_retention)
self.info(f"Event will be re-sent at {event.expires}")
event.status = SSFEventStatus.PENDING_FAILED
event.save()

View File

@@ -0,0 +1,170 @@
import json
from dataclasses import asdict
from django.urls import reverse
from django.utils import timezone
from rest_framework.test import APITestCase
from authentik.core.models import Application, Token, TokenIntents
from authentik.core.tests.utils import (
create_test_admin_user,
create_test_cert,
create_test_flow,
create_test_user,
)
from authentik.enterprise.providers.ssf.models import (
SSFEventStatus,
SSFProvider,
Stream,
StreamEvent,
)
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.id_token import IDToken
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider
class TestSSFAuth(APITestCase):
def setUp(self):
self.application = Application.objects.create(name=generate_id(), slug=generate_id())
self.provider = SSFProvider.objects.create(
name=generate_id(),
signing_key=create_test_cert(),
backchannel_application=self.application,
)
def test_stream_add_token(self):
"""test stream add (token auth)"""
res = self.client.post(
reverse(
"authentik_providers_ssf:stream",
kwargs={"application_slug": self.application.slug},
),
data={
"iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
"aud": ["https://app.authentik.company"],
"delivery": {
"method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
"endpoint_url": "https://app.authentik.company",
},
"events_requested": [
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
],
"format": "iss_sub",
},
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
)
self.assertEqual(res.status_code, 201)
stream = Stream.objects.filter(provider=self.provider).first()
self.assertIsNotNone(stream)
event = StreamEvent.objects.filter(stream=stream).first()
self.assertIsNotNone(event)
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
self.assertEqual(
event.payload["events"],
{"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
)
def test_stream_add_oidc(self):
"""test stream add (oidc auth)"""
provider = OAuth2Provider.objects.create(
name=generate_id(),
authorization_flow=create_test_flow(),
)
self.application.provider = provider
self.application.save()
user = create_test_admin_user()
token = AccessToken.objects.create(
provider=provider,
user=user,
token=generate_id(),
auth_time=timezone.now(),
_scope="openid user profile",
_id_token=json.dumps(
asdict(
IDToken("foo", "bar"),
)
),
)
res = self.client.post(
reverse(
"authentik_providers_ssf:stream",
kwargs={"application_slug": self.application.slug},
),
data={
"iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
"aud": ["https://app.authentik.company"],
"delivery": {
"method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
"endpoint_url": "https://app.authentik.company",
},
"events_requested": [
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
],
"format": "iss_sub",
},
HTTP_AUTHORIZATION=f"Bearer {token.token}",
)
self.assertEqual(res.status_code, 201)
stream = Stream.objects.filter(provider=self.provider).first()
self.assertIsNotNone(stream)
event = StreamEvent.objects.filter(stream=stream).first()
self.assertIsNotNone(event)
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
self.assertEqual(
event.payload["events"],
{"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
)
def test_token_invalid(self):
res = self.client.post(
reverse(
"authentik_providers_ssf:stream",
kwargs={"application_slug": self.application.slug},
),
data={
"iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
"aud": ["https://app.authentik.company"],
"delivery": {
"method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
"endpoint_url": "https://app.authentik.company",
},
"events_requested": [
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
],
"format": "iss_sub",
},
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}a",
)
# Response code needs to be 401 according to spec
self.assertEqual(res.status_code, 401)
def test_token_unrelated(self):
token = Token.objects.create(
identifier=generate_id(), user=create_test_user(), intent=TokenIntents.INTENT_API
)
res = self.client.post(
reverse(
"authentik_providers_ssf:stream",
kwargs={"application_slug": self.application.slug},
),
data={
"iss": "https://authentik.company/.well-known/ssf-configuration/foo/5",
"aud": ["https://app.authentik.company"],
"delivery": {
"method": "https://schemas.openid.net/secevent/risc/delivery-method/push",
"endpoint_url": "https://app.authentik.company",
},
"events_requested": [
"https://schemas.openid.net/secevent/caep/event-type/credential-change",
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
],
"format": "iss_sub",
},
HTTP_AUTHORIZATION=f"Bearer {token.key}",
)
# Response code needs to be 401 according to spec
self.assertEqual(res.status_code, 401)

View File

@@ -44,3 +44,15 @@ class TestConfiguration(APITestCase):
self.assertEqual(res.status_code, 200)
content = json.loads(res.content)
self.assertEqual(content["spec_version"], "1_0-ID2")
def test_config_not_found(self):
"""test SSF configuration (authenticated)"""
self.provider.delete()
res = self.client.get(
reverse(
"authentik_providers_ssf:configuration",
kwargs={"application_slug": self.application.slug},
),
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
)
self.assertEqual(res.status_code, 404)

View File

@@ -1,21 +1,18 @@
import json
from dataclasses import asdict
from uuid import uuid4
from django.urls import reverse
from django.utils import timezone
from rest_framework.test import APITestCase
from authentik.core.models import Application
from authentik.core.tests.utils import create_test_admin_user, create_test_cert, create_test_flow
from authentik.core.tests.utils import create_test_cert
from authentik.enterprise.providers.ssf.models import (
SSFEventStatus,
SSFProvider,
Stream,
StreamEvent,
StreamStatus,
)
from authentik.lib.generators import generate_id
from authentik.providers.oauth2.id_token import IDToken
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider
class TestStream(APITestCase):
@@ -87,29 +84,71 @@ class TestStream(APITestCase):
{"delivery": {"method": ["Polling for SSF events is not currently supported."]}},
)
def test_stream_add_oidc(self):
"""test stream add (oidc auth)"""
provider = OAuth2Provider.objects.create(
name=generate_id(),
authorization_flow=create_test_flow(),
)
self.application.provider = provider
self.application.save()
user = create_test_admin_user()
token = AccessToken.objects.create(
provider=provider,
user=user,
token=generate_id(),
auth_time=timezone.now(),
_scope="openid user profile",
_id_token=json.dumps(
asdict(
IDToken("foo", "bar"),
)
def test_stream_delete(self):
"""delete stream"""
stream = Stream.objects.create(provider=self.provider)
res = self.client.delete(
reverse(
"authentik_providers_ssf:stream",
kwargs={"application_slug": self.application.slug},
),
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
)
self.assertEqual(res.status_code, 204)
stream.refresh_from_db()
self.assertEqual(stream.status, StreamStatus.DISABLED)
res = self.client.post(
def test_stream_get(self):
"""get stream"""
Stream.objects.create(provider=self.provider)
res = self.client.get(
reverse(
"authentik_providers_ssf:stream",
kwargs={"application_slug": self.application.slug},
),
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
)
self.assertEqual(res.status_code, 200)
def test_stream_get_filter_query(self):
"""get stream"""
other_stream = Stream.objects.create(provider=self.provider)
stream = Stream.objects.create(provider=self.provider)
res = self.client.get(
reverse(
"authentik_providers_ssf:stream",
kwargs={"application_slug": self.application.slug},
)
+ f"?stream_id={stream.pk}",
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
)
self.assertEqual(res.status_code, 200)
self.assertIn(str(stream.pk), res.content.decode())
self.assertNotIn(str(other_stream.pk), res.content.decode())
def test_stream_patch(self):
"""patch stream"""
other_stream = Stream.objects.create(provider=self.provider)
stream = Stream.objects.create(provider=self.provider)
res = self.client.patch(
reverse(
"authentik_providers_ssf:stream",
kwargs={"application_slug": self.application.slug},
),
data={
"delivery": {"endpoint_url": "https://localhost"},
"stream_id": str(stream.pk),
},
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
)
self.assertEqual(res.status_code, 200)
self.assertIn(str(stream.pk), res.content.decode())
self.assertNotIn(str(other_stream.pk), res.content.decode())
def test_stream_put(self):
"""put stream"""
stream = Stream.objects.create(provider=self.provider)
res = self.client.put(
reverse(
"authentik_providers_ssf:stream",
kwargs={"application_slug": self.application.slug},
@@ -126,29 +165,63 @@ class TestStream(APITestCase):
"https://schemas.openid.net/secevent/caep/event-type/session-revoked",
],
"format": "iss_sub",
"stream_id": str(stream.pk),
},
HTTP_AUTHORIZATION=f"Bearer {token.token}",
)
self.assertEqual(res.status_code, 201)
stream = Stream.objects.filter(provider=self.provider).first()
self.assertIsNotNone(stream)
event = StreamEvent.objects.filter(stream=stream).first()
self.assertIsNotNone(event)
self.assertEqual(event.status, SSFEventStatus.PENDING_FAILED)
self.assertEqual(
event.payload["events"],
{"https://schemas.openid.net/secevent/ssf/event-type/verification": {"state": None}},
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
)
self.assertEqual(res.status_code, 200)
self.assertIn(str(stream.pk), res.content.decode())
stream.refresh_from_db()
self.assertEqual(stream.aud, ["https://app.authentik.company"])
def test_stream_delete(self):
"""delete stream"""
def test_stream_verify(self):
"""Test stream verify"""
stream = Stream.objects.create(provider=self.provider)
res = self.client.delete(
res = self.client.post(
reverse(
"authentik_providers_ssf:stream",
"authentik_providers_ssf:stream-verify",
kwargs={"application_slug": self.application.slug},
),
data={
"stream_id": str(stream.pk),
},
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
)
self.assertEqual(res.status_code, 204)
self.assertFalse(Stream.objects.filter(pk=stream.pk).exists())
def test_stream_status(self):
"""Test stream status"""
stream = Stream.objects.create(provider=self.provider)
res = self.client.get(
reverse(
"authentik_providers_ssf:stream-status",
kwargs={"application_slug": self.application.slug},
),
data={
"stream_id": str(stream.pk),
},
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
)
self.assertEqual(res.status_code, 200)
self.assertJSONEqual(
res.content,
{
"stream_id": str(stream.pk),
"status": str(stream.status),
},
)
def test_stream_status_not_found(self):
"""Test stream status"""
Stream.objects.create(provider=self.provider)
res = self.client.get(
reverse(
"authentik_providers_ssf:stream-status",
kwargs={"application_slug": self.application.slug},
),
data={
"stream_id": str(uuid4()),
},
HTTP_AUTHORIZATION=f"Bearer {self.provider.token.key}",
)
self.assertEqual(res.status_code, 404)

View File

@@ -0,0 +1,123 @@
from jwt import decode_complete
from requests_mock import Mocker
from rest_framework.test import APITestCase
from authentik.core.models import Application
from authentik.core.tests.utils import create_test_cert
from authentik.enterprise.providers.ssf.models import (
DeliveryMethods,
EventTypes,
SSFProvider,
Stream,
StreamStatus,
)
from authentik.enterprise.providers.ssf.tasks import send_ssf_event
from authentik.lib.generators import generate_id
from authentik.tasks.models import TaskLog
class TestTasks(APITestCase):
def setUp(self):
self.application = Application.objects.create(name=generate_id(), slug=generate_id())
self.provider = SSFProvider.objects.create(
name=generate_id(),
signing_key=create_test_cert(),
backchannel_application=self.application,
)
def test_push_simple(self):
stream = Stream.objects.create(
provider=self.provider,
delivery_method=DeliveryMethods.RFC_PUSH,
endpoint_url="http://localhost/ssf-push",
)
event_data = stream.prepare_event_payload(
EventTypes.SET_VERIFICATION,
{"state": None},
sub_id={"format": "opaque", "id": str(stream.uuid)},
)
with Mocker() as mocker:
mocker.post("http://localhost/ssf-push", status_code=202)
send_ssf_event.send_with_options(
args=(stream.pk, event_data), rel_obj=stream.provider
).get_result(block=True, timeout=1)
self.assertEqual(
mocker.request_history[0].headers["Content-Type"], "application/secevent+jwt"
)
jwt = decode_complete(mocker.request_history[0].body, options={"verify_signature": False})
self.assertEqual(jwt["header"]["typ"], "secevent+jwt")
self.assertIsNone(jwt["payload"]["events"][EventTypes.SET_VERIFICATION]["state"])
def test_push_auth(self):
auth = generate_id()
stream = Stream.objects.create(
provider=self.provider,
delivery_method=DeliveryMethods.RFC_PUSH,
endpoint_url="http://localhost/ssf-push",
authorization_header=auth,
)
event_data = stream.prepare_event_payload(
EventTypes.SET_VERIFICATION,
{"state": None},
sub_id={"format": "opaque", "id": str(stream.uuid)},
)
with Mocker() as mocker:
mocker.post("http://localhost/ssf-push", status_code=202)
send_ssf_event.send_with_options(
args=(stream.pk, event_data), rel_obj=stream.provider
).get_result(block=True, timeout=1)
self.assertEqual(mocker.request_history[0].headers["Authorization"], auth)
self.assertEqual(
mocker.request_history[0].headers["Content-Type"], "application/secevent+jwt"
)
jwt = decode_complete(mocker.request_history[0].body, options={"verify_signature": False})
self.assertEqual(jwt["header"]["typ"], "secevent+jwt")
self.assertIsNone(jwt["payload"]["events"][EventTypes.SET_VERIFICATION]["state"])
def test_push_stream_disable(self):
auth = generate_id()
stream = Stream.objects.create(
provider=self.provider,
delivery_method=DeliveryMethods.RFC_PUSH,
endpoint_url="http://localhost/ssf-push",
authorization_header=auth,
status=StreamStatus.DISABLED,
)
event_data = stream.prepare_event_payload(
EventTypes.SET_VERIFICATION,
{"state": None},
sub_id={"format": "opaque", "id": str(stream.uuid)},
)
with Mocker() as mocker:
mocker.post("http://localhost/ssf-push", status_code=202)
send_ssf_event.send_with_options(
args=(stream.pk, event_data), rel_obj=stream.provider
).get_result(block=True, timeout=1)
jwt = decode_complete(mocker.request_history[0].body, options={"verify_signature": False})
self.assertEqual(jwt["header"]["typ"], "secevent+jwt")
self.assertIsNone(jwt["payload"]["events"][EventTypes.SET_VERIFICATION]["state"])
self.assertFalse(Stream.objects.filter(pk=stream.pk).exists())
def test_push_error(self):
stream = Stream.objects.create(
provider=self.provider,
delivery_method=DeliveryMethods.RFC_PUSH,
endpoint_url="http://localhost/ssf-push",
)
event_data = stream.prepare_event_payload(
EventTypes.SET_VERIFICATION,
{"state": None},
sub_id={"format": "opaque", "id": str(stream.uuid)},
)
with Mocker() as mocker:
mocker.post("http://localhost/ssf-push", text="error", status_code=400)
send_ssf_event.send_with_options(
args=(stream.pk, event_data), rel_obj=stream.provider
).get_result(block=True, timeout=1)
logs = (
TaskLog.objects.filter(task__actor_name=send_ssf_event.actor_name)
.order_by("timestamp")
.filter(event="Failed to send request")
.first()
)
self.assertEqual(logs.attributes, {"response": {"status": 400, "content": "error"}})

View File

@@ -6,7 +6,11 @@ from authentik.enterprise.providers.ssf.api.providers import SSFProviderViewSet
from authentik.enterprise.providers.ssf.api.streams import SSFStreamViewSet
from authentik.enterprise.providers.ssf.views.configuration import ConfigurationView
from authentik.enterprise.providers.ssf.views.jwks import JWKSview
from authentik.enterprise.providers.ssf.views.stream import StreamView
from authentik.enterprise.providers.ssf.views.stream import (
StreamStatusView,
StreamVerifyView,
StreamView,
)
urlpatterns = [
path(
@@ -24,6 +28,16 @@ urlpatterns = [
StreamView.as_view(),
name="stream",
),
path(
"application/ssf/<slug:application_slug>/stream/verify/",
StreamVerifyView.as_view(),
name="stream-verify",
),
path(
"application/ssf/<slug:application_slug>/stream/status/",
StreamStatusView.as_view(),
name="stream-status",
),
]
api_urlpatterns = [

View File

@@ -64,3 +64,7 @@ class SSFTokenAuth(BaseAuthentication):
if jwt_token:
return (jwt_token.user, token)
return None
# Required to correctly propagate a 401 header which the SSF spec requires
def authenticate_header(self, request):
return "SSF"

View File

@@ -1,10 +1,10 @@
from django.http import HttpRequest
from django.http import Http404, HttpRequest
from rest_framework.permissions import IsAuthenticated
from rest_framework.views import APIView
from structlog.stdlib import BoundLogger, get_logger
from authentik.core.models import Application
from authentik.enterprise.providers.ssf.models import SSFProvider
from authentik.enterprise.providers.ssf.models import SSFProvider, Stream, StreamStatus
from authentik.enterprise.providers.ssf.views.auth import SSFTokenAuth
@@ -21,3 +21,18 @@ class SSFView(APIView):
def get_authenticators(self):
return [SSFTokenAuth(self)]
class SSFStreamView(SSFView):
def get_object(self, any_status=False) -> Stream:
streams = Stream.objects.filter(provider=self.provider)
if not any_status:
streams = streams.filter(status__in=[StreamStatus.ENABLED, StreamStatus.PAUSED])
if "stream_id" in self.request.query_params:
streams = streams.filter(pk=self.request.query_params["stream_id"])
if "stream_id" in self.request.data:
streams = streams.filter(pk=self.request.data["stream_id"])
stream = streams.first()
if not stream:
raise Http404()
return stream

View File

@@ -47,9 +47,23 @@ class ConfigurationView(SSFView):
},
)
),
"delivery_methods_supported": [
DeliveryMethods.RISC_PUSH,
],
"verification_endpoint": self.request.build_absolute_uri(
reverse(
"authentik_providers_ssf:stream-verify",
kwargs={
"application_slug": application.slug,
},
)
),
"status_endpoint": self.request.build_absolute_uri(
reverse(
"authentik_providers_ssf:stream-status",
kwargs={
"application_slug": application.slug,
},
)
),
"delivery_methods_supported": [DeliveryMethods.RISC_PUSH, DeliveryMethods.RFC_PUSH],
"authorization_schemes": [{"spec_urn": "urn:ietf:rfc:6749"}],
}
return JsonResponse(data)

View File

@@ -1,3 +1,5 @@
from uuid import uuid4
from django.http import HttpRequest
from django.urls import reverse
from rest_framework.exceptions import PermissionDenied, ValidationError
@@ -13,9 +15,10 @@ from authentik.enterprise.providers.ssf.models import (
EventTypes,
SSFProvider,
Stream,
StreamStatus,
)
from authentik.enterprise.providers.ssf.tasks import send_ssf_events
from authentik.enterprise.providers.ssf.views.base import SSFView
from authentik.enterprise.providers.ssf.views.base import SSFStreamView
LOGGER = get_logger()
@@ -23,6 +26,7 @@ LOGGER = get_logger()
class StreamDeliverySerializer(PassiveSerializer):
method = ChoiceField(choices=[(x.value, x.value) for x in DeliveryMethods])
endpoint_url = CharField(required=False)
authorization_header = CharField(required=False)
def validate_method(self, method: DeliveryMethods):
"""Currently only push is supported"""
@@ -31,7 +35,7 @@ class StreamDeliverySerializer(PassiveSerializer):
return method
def validate(self, attrs: dict) -> dict:
if attrs["method"] == DeliveryMethods.RISC_PUSH:
if attrs.get("method") in [DeliveryMethods.RISC_PUSH, DeliveryMethods.RFC_PUSH]:
if not attrs.get("endpoint_url"):
raise ValidationError("Endpoint URL is required when using push.")
return attrs
@@ -42,8 +46,8 @@ class StreamSerializer(ModelSerializer):
events_requested = ListField(
child=ChoiceField(choices=[(x.value, x.value) for x in EventTypes])
)
format = CharField()
aud = ListField(child=CharField())
format = CharField(default="iss_sub")
aud = ListField(child=CharField(), allow_empty=True, default=list)
def create(self, validated_data):
provider: SSFProvider = validated_data["provider"]
@@ -58,15 +62,19 @@ class StreamSerializer(ModelSerializer):
)
# Ensure that streams always get SET verification events sent to them
validated_data["events_requested"].append(EventTypes.SET_VERIFICATION)
stream_id = uuid4()
default_aud = f"goauthentik.io/providers/ssf/{str(stream_id)}"
return super().create(
{
"delivery_method": validated_data["delivery"]["method"],
"endpoint_url": validated_data["delivery"].get("endpoint_url"),
"authorization_header": validated_data["delivery"].get("authorization_header"),
"format": validated_data["format"],
"provider": validated_data["provider"],
"events_requested": validated_data["events_requested"],
"aud": validated_data["aud"],
"aud": validated_data["aud"] or [default_aud],
"iss": iss,
"pk": stream_id,
}
)
@@ -101,7 +109,14 @@ class StreamResponseSerializer(PassiveSerializer):
return [x.value for x in EventTypes]
class StreamView(SSFView):
class StreamView(SSFStreamView):
def get(self, request: Request, *args, **kwargs):
stream = self.get_object()
return Response(
StreamResponseSerializer(instance=stream, context={"request": request}).data
)
@validate(StreamSerializer)
def post(self, request: Request, *args, body: StreamSerializer, **kwargs) -> Response:
if not request.user.has_perm("authentik_providers_ssf.add_stream", self.provider):
@@ -109,6 +124,8 @@ class StreamView(SSFView):
"User does not have permission to create stream for this provider."
)
instance: Stream = body.save(provider=self.provider)
LOGGER.info("Sending verification event", stream=instance)
send_ssf_events(
EventTypes.SET_VERIFICATION,
{
@@ -120,10 +137,56 @@ class StreamView(SSFView):
response = StreamResponseSerializer(instance=instance, context={"request": request}).data
return Response(response, status=201)
def patch(self, request: Request, *args, **kwargs) -> Response:
stream = self.get_object()
serializer = StreamSerializer(stream, data=request.data, partial=True)
serializer.is_valid(raise_exception=True)
serializer.save()
response = StreamResponseSerializer(
instance=serializer.instance, context={"request": request}
).data
return Response(response, status=200)
def put(self, request: Request, *args, **kwargs) -> Response:
stream = self.get_object()
serializer = StreamSerializer(stream, data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
response = StreamResponseSerializer(
instance=serializer.instance, context={"request": request}
).data
return Response(response, status=200)
def delete(self, request: Request, *args, **kwargs) -> Response:
streams = Stream.objects.filter(provider=self.provider)
# Technically this parameter is required by the spec...
if "stream_id" in request.query_params:
streams = streams.filter(stream_id=request.query_params["stream_id"])
streams.delete()
stream = self.get_object()
stream.status = StreamStatus.DISABLED
stream.save()
return Response(status=204)
class StreamVerifyView(SSFStreamView):
def post(self, request: Request, *args, **kwargs):
stream = self.get_object()
state = request.data.get("state", None)
send_ssf_events(
EventTypes.SET_VERIFICATION,
{
"state": state,
},
stream_filter={"pk": stream.uuid},
sub_id={"format": "opaque", "id": str(stream.uuid)},
)
return Response(status=204)
class StreamStatusView(SSFStreamView):
    """Expose the current status of a single SSF stream."""

    def get(self, request: Request, *args, **kwargs):
        """Return the stream identifier and its status as strings."""
        stream = self.get_object(any_status=True)
        body = {
            "stream_id": str(stream.pk),
            "status": str(stream.status),
        }
        return Response(body)

View File

@@ -9,30 +9,49 @@ from django.db.models import DateTimeField as DjangoDateTimeField
from django.db.models.fields.json import KeyTextTransform, KeyTransform
from django.db.models.functions import TruncHour
from django.db.models.query_utils import Q
from django.utils.text import slugify
from django.utils.timezone import now
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter, extend_schema
from guardian.shortcuts import get_objects_for_user
from rest_framework.decorators import action
from rest_framework.fields import ChoiceField, DateTimeField, DictField, IntegerField
from rest_framework.fields import (
CharField,
ChoiceField,
DateTimeField,
DictField,
IntegerField,
ListField,
)
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from authentik.api.validation import validate
from authentik.core.api.object_types import TypeCreateSerializer
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
from authentik.events.models import Event, EventAction
from authentik.lib.utils.reflection import ConditionalInheritance
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
AGGR_MAX_AGE = timedelta(days=90)
class EventVolumeSerializer(PassiveSerializer):
    """Count of events of action created on day for a single event action"""

    # NOTE: the original carried two stacked docstring literals (an old and a
    # new copy left by a merge); the stale one was a dead statement and has
    # been removed.
    action = ChoiceField(choices=EventAction.choices)
    # Timestamp of the bucket the count applies to
    time = DateTimeField()
    # Number of events of `action` within that bucket
    count = IntegerField()
class EventStatsSerializer(PassiveSerializer):
    """Count of unique users in events and aggregated counts per specified deltas"""

    # Number of distinct users seen across the filtered events
    unique_users = IntegerField()
    # Mapping of slugified count-step name -> event count within that window
    count_step = DictField()
class EventSerializer(ModelSerializer):
"""Event Serializer"""
@@ -84,6 +103,11 @@ class EventsFilter(django_filters.FilterSet):
lookup_expr="authorized_application__pk",
label="Context Authorized application",
)
context_device = django_filters.CharFilter(
field_name="context",
lookup_expr="device__pk",
label="Context Device Primary Key",
)
action = django_filters.CharFilter(
field_name="action",
lookup_expr="icontains",
@@ -123,6 +147,16 @@ class EventViewSet(
):
"""Event Read-Only Viewset"""
    class EventVolumeParameters(PassiveSerializer):
        """Query parameters accepted by the `volume` action."""

        # Number of days of history to aggregate (defaults to one week)
        history_days = IntegerField(default=7, required=False)
    class EventStatsParameters(PassiveSerializer):
        """Query parameters accepted by the `stats` action."""

        # One or more timedelta strings; each becomes a counted time window
        count_steps = ListField(
            child=CharField(validators=[timedelta_string_validator]),
            required=True,
            help_text="Timedelta, format of 'weeks=3;days=2;hours=3,seconds=2'",
        )
queryset = Event.objects.all()
serializer_class = EventSerializer
ordering = ["-created"]
@@ -225,24 +259,16 @@ class EventViewSet(
@extend_schema(
responses={200: EventVolumeSerializer(many=True)},
parameters=[
OpenApiParameter(
"history_days",
type=OpenApiTypes.NUMBER,
location=OpenApiParameter.QUERY,
required=False,
default=7,
),
],
parameters=[EventVolumeParameters],
)
@action(detail=False, methods=["GET"], pagination_class=None)
def volume(self, request: Request) -> Response:
@validate(EventVolumeParameters, "query")
def volume(self, request: Request, query: EventVolumeParameters) -> Response:
"""Get event volume for specified filters and timeframe"""
queryset: QuerySet[Event] = self.filter_queryset(self.get_queryset())
delta = timedelta(days=7)
time_delta = request.query_params.get("history_days", 7)
if time_delta:
delta = timedelta(days=min(int(time_delta), 60))
delta = timedelta(days=query.validated_data.get("history_days", 7))
if delta.total_seconds() > AGGR_MAX_AGE.total_seconds():
delta = AGGR_MAX_AGE
return Response(
queryset.filter(created__gte=now() - delta)
.annotate(hour=TruncHour("created"))
@@ -257,6 +283,40 @@ class EventViewSet(
.order_by("time", "action")
)
@extend_schema(
responses={200: EventStatsSerializer()},
parameters=[EventStatsParameters],
filters=True,
)
@action(detail=False, methods=["GET"], pagination_class=None)
@validate(EventStatsParameters, "query")
def stats(self, request: Request, query: EventStatsParameters) -> Response:
"""Get event stats for specified filters and count steps"""
_now = now()
aggrs = {
"unique_users": Count("user__pk", distinct=True),
}
largest_delta = 0
for step in query.validated_data.get("count_steps"):
delta = timedelta_from_string(step)
if delta.total_seconds() > AGGR_MAX_AGE.total_seconds():
delta = AGGR_MAX_AGE
largest_delta = max(largest_delta, delta.total_seconds())
aggrs[slugify(step).replace("-", "_")] = Count(
"event_uuid", filter=Q(created__gte=_now - delta)
)
data = (
self.filter_queryset(self.get_queryset())
.filter(created__gte=now() - timedelta(days=60))
.aggregate(**aggrs)
)
return Response(
{
"unique_users": data.pop("unique_users"),
"count_step": data,
}
)
@extend_schema(responses={200: TypeCreateSerializer(many=True)})
@action(detail=False, pagination_class=None, filter_backends=[])
def actions(self, request: Request) -> Response:

View File

@@ -1,8 +1,12 @@
"""Event API tests"""
from datetime import timedelta
from json import loads
from django.urls import reverse
from django.utils.datastructures import MultiValueDict
from django.utils.http import urlencode
from django.utils.timezone import now
from rest_framework.test import APITestCase
from authentik.core.tests.utils import create_test_admin_user
@@ -91,3 +95,52 @@ class TestEventsAPI(APITestCase):
},
)
self.assertEqual(response.status_code, 400)
def test_volume(self):
Event.objects.all().delete()
Event.new(EventAction.LOGIN).set_user(self.user).save()
evt = Event.new(EventAction.LOGIN).set_user(self.user)
evt.created = now() - timedelta(days=6)
evt.save()
res = self.client.get(
reverse("authentik_api:event-volume")
+ "?"
+ urlencode(
{
"action": EventAction.LOGIN,
}
)
)
self.assertEqual(res.status_code, 200)
data = loads(res.content)
self.assertEqual(len(data), 1)
    def test_stats(self):
        """Stats endpoint returns per-step counts keyed by slugified step names."""
        Event.objects.all().delete()
        # Two login events by the same user: one now, one six days ago
        Event.new(EventAction.LOGIN).set_user(self.user).save()
        evt = Event.new(EventAction.LOGIN).set_user(self.user)
        evt.created = now() - timedelta(days=6)
        evt.save()
        # MultiValueDict + doseq=True repeats count_steps once per value
        res = self.client.get(
            reverse("authentik_api:event-stats")
            + "?"
            + urlencode(
                MultiValueDict({"count_steps": ["hours=24", "days=7", "days=240"]}), doseq=True
            )
        )
        self.assertEqual(res.status_code, 200, res.content)
        # Both events fall inside every (clamped) step window, hence 2 each;
        # response keys are the slugified step strings
        self.assertJSONEqual(
            res.content, {"unique_users": 1, "count_step": {"hours24": 2, "days7": 2, "days240": 2}}
        )
    def test_stats_invalid(self):
        """Malformed count_steps values are rejected with a per-index field error."""
        res = self.client.get(
            reverse("authentik_api:event-stats")
            + "?"
            + urlencode({"count_steps": "24d"}, doseq=True)
        )
        self.assertEqual(res.status_code, 400)
        # Error payload is keyed by the list index ("0") of the offending step
        self.assertJSONEqual(
            res.content,
            {"count_steps": {"0": ["24d is not in the correct format of 'hours=3;minutes=1'."]}},
        )

View File

@@ -7,15 +7,18 @@ from django.utils.translation import gettext as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework.decorators import action
from rest_framework.fields import BooleanField, FileField, ReadOnlyField, SerializerMethodField
from rest_framework.parsers import MultiPartParser
from rest_framework.fields import (
BooleanField,
FileField,
ReadOnlyField,
SerializerMethodField,
)
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger
from authentik.blueprints.v1.exporter import FlowExporter
from authentik.blueprints.v1.importer import Importer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import (
CacheSerializer,
@@ -24,7 +27,6 @@ from authentik.core.api.utils import (
PassiveSerializer,
ThemedUrlsSerializer,
)
from authentik.events.logs import LogEventSerializer
from authentik.flows.api.flows_diagram import FlowDiagram, FlowDiagramSerializer
from authentik.flows.exceptions import FlowNonApplicableException
from authentik.flows.models import Flow
@@ -106,13 +108,6 @@ class FlowSetSerializer(FlowSerializer):
]
class FlowImportResultSerializer(PassiveSerializer):
    """Logs of an attempted flow import"""

    # Structured log events emitted by the importer during validate/apply
    logs = LogEventSerializer(many=True, read_only=True)
    # Whether validation (and, if attempted, application) succeeded
    success = BooleanField(read_only=True)
class FlowViewSet(UsedByMixin, ModelViewSet):
"""Flow Viewset"""
@@ -146,59 +141,6 @@ class FlowViewSet(UsedByMixin, ModelViewSet):
LOGGER.debug("Cleared flow cache", keys=len(keys))
return Response(status=204)
    @permission_required(
        None,
        [
            "authentik_flows.add_flow",
            "authentik_flows.change_flow",
            "authentik_flows.add_flowstagebinding",
            "authentik_flows.change_flowstagebinding",
            "authentik_flows.add_stage",
            "authentik_flows.change_stage",
            "authentik_policies.add_policy",
            "authentik_policies.change_policy",
            "authentik_policies.add_policybinding",
            "authentik_policies.change_policybinding",
            "authentik_stages_prompt.add_prompt",
            "authentik_stages_prompt.change_prompt",
        ],
    )
    @extend_schema(
        request={"multipart/form-data": FlowUploadSerializer},
        responses={
            204: FlowImportResultSerializer,
            400: FlowImportResultSerializer,
        },
    )
    @action(url_path="import", detail=False, methods=["POST"], parser_classes=(MultiPartParser,))
    def import_flow(self, request: Request) -> Response:
        """Import flow from .yaml file"""
        # Seed a default (unsuccessful) result payload; is_valid() is called
        # without raise_exception purely to populate serializer state before
        # initial_data is mutated below.
        import_response = FlowImportResultSerializer(
            data={
                "logs": [],
                "success": False,
            }
        )
        import_response.is_valid()
        file = request.FILES.get("file", None)
        if not file:
            # No upload provided - reply with the unsuccessful default payload
            return Response(data=import_response.initial_data, status=400)
        importer = Importer.from_string(file.read().decode())
        valid, logs = importer.validate()
        import_response.initial_data["logs"] = [LogEventSerializer(log).data for log in logs]
        import_response.initial_data["success"] = valid
        import_response.is_valid()
        if not valid:
            # Validation failed; logs explain why (HTTP 200 with success=False)
            return Response(data=import_response.initial_data, status=200)
        successful = importer.apply()
        import_response.initial_data["success"] = successful
        import_response.is_valid()
        # NOTE(review): the schema above declares 204/400 responses, but the
        # handler only ever returns 200 or 400 - confirm which is intended.
        # The final branch is also redundant: both paths return 200.
        if not successful:
            return Response(data=import_response.initial_data, status=200)
        return Response(data=import_response.initial_data, status=200)
@permission_required(
"authentik_flows.export_flow",
[

View File

@@ -47,33 +47,23 @@
{% block body %}
<ak-skip-to-content></ak-skip-to-content>
<ak-message-container></ak-message-container>
<div class="pf-c-page__drawer">
<div class="pf-c-drawer pf-m-collapsed" id="flow-drawer">
<div class="pf-c-drawer__main">
<div class="pf-c-drawer__content">
<div class="pf-c-drawer__body">
<ak-flow-executor
slug="{{ flow.slug }}"
class="pf-c-login"
data-layout="{{ flow.layout|default:'stacked' }}"
loading
>
{% include "base/placeholder.html" %}
<ak-brand-links name="flow-links" slot="footer"></ak-brand-links>
</ak-flow-executor>
</div>
</div>
<ak-flow-inspector
id="flow-inspector"
data-registration="lazy"
class="pf-c-drawer__panel pf-m-width-33"
slug="{{ flow.slug }}"
></ak-flow-inspector>
</div>
</div>
</div>
</div>
<ak-drawer id="flow-drawer">
<ak-flow-executor
slug="{{ flow.slug }}"
class="pf-c-login"
data-layout="{{ flow.layout|default:'stacked' }}"
loading
>
{% include "base/placeholder.html" %}
<ak-brand-links name="flow-links" slot="footer"></ak-brand-links>
</ak-flow-executor>
<ak-flow-inspector
slot="panel"
id="flow-inspector"
data-registration="lazy"
slug="{{ flow.slug }}"
></ak-flow-inspector>
</ak-drawer>
{% endblock %}

View File

@@ -47,6 +47,7 @@ listen:
- "[::]:9300"
debug: 0.0.0.0:9900
debug_py: 0.0.0.0:9901
debug_tokio: "[::]:6669"
trusted_proxy_cidrs:
- 127.0.0.0/8
- 10.0.0.0/8
@@ -73,6 +74,19 @@ log_level: info
log:
http_headers:
- User-Agent
rust_log:
"console_subscriber": info
"h2": info
"hyper_util": warn
"mio": info
"notify": info
"reqwest": info
"runtime": info
"rustls": info
"sqlx": info
"sqlx_postgres": info
"tokio": info
"tungstenite": info
sessions:
unauthenticated_age: days=1

View File

@@ -1,14 +1,16 @@
"""authentik sentry integration"""
from asyncio.exceptions import CancelledError
from typing import TYPE_CHECKING, Any
from typing import Any
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation, ValidationError
from django.db import DatabaseError, InternalError, OperationalError, ProgrammingError
from django.http.response import Http404
from docker.errors import DockerException
from dramatiq.errors import Retry
from h11 import LocalProtocolError
from ldap3.core.exceptions import LDAPException
from psycopg.errors import Error
from rest_framework.exceptions import APIException
from sentry_sdk import HttpTransport, get_current_scope
@@ -28,11 +30,6 @@ from authentik import authentik_build_hash, authentik_version
from authentik.lib.config import CONFIG
from authentik.lib.utils.http import authentik_user_agent
from authentik.lib.utils.reflection import get_env
from authentik.tasks import TASK_WORKER
if TYPE_CHECKING or TASK_WORKER:
from docker.errors import DockerException
from ldap3.core.exceptions import LDAPException
LOGGER = get_logger()
_root_path = CONFIG.get("web.path", "/")
@@ -66,6 +63,10 @@ ignored_classes = (
Retry,
# custom baseclass
SentryIgnoredException,
# ldap errors
LDAPException,
# Docker errors
DockerException,
# End-user errors
Http404,
# AsyncIO
@@ -131,14 +132,6 @@ def traces_sampler(sampling_context: dict) -> float:
def should_ignore_exception(exc: Exception) -> bool:
"""Check if an exception should be dropped"""
if TASK_WORKER and isinstance(
exc,
# ldap errors
LDAPException |
# Docker errors
DockerException,
):
return True
return isinstance(exc, ignored_classes)

View File

@@ -4,12 +4,15 @@ from inspect import currentframe
from pathlib import Path
def load_fixture(path: str, **kwargs) -> str:
def load_fixture(path: str, path_only=False, **kwargs) -> str:
"""Load fixture, optionally formatting it with kwargs"""
current = currentframe()
parent = current.f_back
calling_file_path = parent.f_globals["__file__"]
with open(Path(calling_file_path).resolve().parent / Path(path), encoding="utf-8") as _fixture:
fixture_path = Path(calling_file_path).resolve().parent / Path(path)
if path_only:
return fixture_path
with open(fixture_path, encoding="utf-8") as _fixture:
fixture = _fixture.read()
try:
return fixture % kwargs

View File

@@ -4,10 +4,12 @@ from dataclasses import asdict
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema
from kubernetes.client.configuration import Configuration
from kubernetes.config.config_exception import ConfigException
from kubernetes.config.kube_config import load_kube_config_from_dict
from rest_framework import mixins, serializers
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import BooleanField, CharField, ReadOnlyField
from rest_framework.mixins import DestroyModelMixin, ListModelMixin, RetrieveModelMixin
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet, ModelViewSet
@@ -24,7 +26,6 @@ from authentik.outposts.models import (
KubernetesServiceConnection,
OutpostServiceConnection,
)
from authentik.outposts.tasks import outpost_validate_kubeconfig
from authentik.rbac.filters import ObjectFilter
@@ -61,10 +62,10 @@ class ServiceConnectionStateSerializer(PassiveSerializer):
class ServiceConnectionViewSet(
TypesMixin,
RetrieveModelMixin,
DestroyModelMixin,
mixins.RetrieveModelMixin,
mixins.DestroyModelMixin,
UsedByMixin,
ListModelMixin,
mixins.ListModelMixin,
GenericViewSet,
):
"""ServiceConnection Viewset"""
@@ -111,12 +112,16 @@ class KubernetesServiceConnectionSerializer(ServiceConnectionSerializer):
"""Validate kubeconfig by attempting to load it"""
if kubeconfig == {}:
if not self.initial_data["local"]:
raise ValidationError(
raise serializers.ValidationError(
_("You can only use an empty kubeconfig when connecting to a local cluster.")
)
# Empty kubeconfig is valid
return kubeconfig
outpost_validate_kubeconfig.send_with_options((kubeconfig,))
config = Configuration()
try:
load_kube_config_from_dict(kubeconfig, client_configuration=config)
except ConfigException:
raise serializers.ValidationError(_("Invalid kubeconfig")) from None
return kubeconfig
class Meta:

View File

@@ -58,6 +58,9 @@ class BaseController:
self.connection = connection
self.logger = get_logger()
self.deployment_ports = []
self.metrics_ports = [
DeploymentPort(9300, "http-metrics", "tcp"),
]
def up(self):
"""Called by scheduled task to reconcile deployment/service/etc"""

View File

@@ -2,7 +2,7 @@
from typing import TYPE_CHECKING
from kubernetes.client import CoreV1Api, V1Service, V1ServicePort, V1ServiceSpec
from kubernetes.client import CoreV1Api, V1ObjectMeta, V1Service, V1ServicePort, V1ServiceSpec
from authentik.outposts.controllers.base import FIELD_MANAGER
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
@@ -84,3 +84,47 @@ class ServiceReconciler(KubernetesObjectReconciler[V1Service]):
reference,
field_manager=FIELD_MANAGER,
)
class MetricsServiceReconciler(ServiceReconciler):
    """Reconcile a dedicated ClusterIP service exposing the outpost's metrics ports."""

    @property
    def noop(self) -> bool:
        # Skip for the embedded outpost - no separate metrics service is
        # created for it.
        return self.is_embedded

    @staticmethod
    def reconciler_name() -> str:
        return "service-metrics"

    @property
    def name(self):
        # Append "-metrics" while keeping the total length within the
        # 63-character Kubernetes object-name limit.
        name_suffix = "-metrics"
        name = super().name
        return name[: 63 - len(name_suffix)] + name_suffix

    def get_object_meta(self, **kwargs) -> V1ObjectMeta:
        meta: V1ObjectMeta = super().get_object_meta(**kwargs)
        # Label the service so it can be distinguished/selected as the
        # metrics endpoint (e.g. by the Prometheus ServiceMonitor).
        meta.labels["goauthentik.io/service-type"] = "metrics"
        return meta

    def get_reference_object(self) -> V1Service:
        """Get metrics service object for outpost"""
        meta = self.get_object_meta(name=self.name)
        ports = []
        for port in self.controller.metrics_ports:
            ports.append(
                V1ServicePort(
                    name=port.name,
                    port=port.port,
                    protocol=port.protocol.upper(),
                    target_port=port.inner_port or port.port,
                )
            )
        # Select the same pods as the outpost deployment so metrics traffic
        # reaches the outpost containers.
        selector_labels = DeploymentReconciler(self.controller).get_pod_meta()
        return V1Service(
            metadata=meta,
            spec=V1ServiceSpec(
                ports=ports,
                selector=selector_labels,
                type="ClusterIP",
            ),
        )

View File

@@ -8,6 +8,8 @@ from kubernetes.client import ApiextensionsV1Api, CustomObjectsApi
from authentik.outposts.controllers.base import FIELD_MANAGER
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
from authentik.outposts.controllers.k8s.service import MetricsServiceReconciler
from authentik.outposts.controllers.k8s.triggers import NeedsUpdate
if TYPE_CHECKING:
from authentik.outposts.controllers.kubernetes import KubernetesController
@@ -55,6 +57,10 @@ class PrometheusServiceMonitor:
metadata: PrometheusServiceMonitorMetadata
spec: PrometheusServiceMonitorSpec
def to_dict(self):
"""`to_dict` to conform to how the kubernetes client converts objects to dicts"""
return asdict(self)
CRD_NAME = "servicemonitors.monitoring.coreos.com"
CRD_GROUP = "monitoring.coreos.com"
@@ -74,6 +80,11 @@ class PrometheusServiceMonitorReconciler(KubernetesObjectReconciler[PrometheusSe
def reconciler_name() -> str:
return "prometheus servicemonitor"
    def reconcile(self, current: PrometheusServiceMonitor, reference: PrometheusServiceMonitor):
        """Flag the monitor for update when its label selector has drifted."""
        # A changed selector means the monitor would scrape the wrong
        # service; NeedsUpdate signals the controller to re-apply it.
        if current.spec.selector.matchLabels != reference.spec.selector.matchLabels:
            raise NeedsUpdate()
        super().reconcile(current, reference)
@property
def noop(self) -> bool:
if not self._crd_exists():
@@ -108,7 +119,9 @@ class PrometheusServiceMonitorReconciler(KubernetesObjectReconciler[PrometheusSe
)
],
selector=PrometheusServiceMonitorSpecSelector(
matchLabels=self.get_object_meta(name=self.name).labels,
matchLabels=MetricsServiceReconciler(self.controller)
.get_object_meta(name=self.name)
.labels,
),
),
)

View File

@@ -1,19 +1,17 @@
"""k8s utils"""
from pathlib import Path
from typing import TYPE_CHECKING
from kubernetes.client.models.v1_container_port import V1ContainerPort
from kubernetes.client.models.v1_service_port import V1ServicePort
from kubernetes.config.incluster_config import SERVICE_TOKEN_FILENAME
from authentik.outposts.controllers.k8s.triggers import NeedsRecreate
from authentik.tasks import TASK_WORKER
if TYPE_CHECKING or TASK_WORKER:
from kubernetes.client.models.v1_container_port import V1ContainerPort
from kubernetes.client.models.v1_service_port import V1ServicePort
def get_namespace() -> str:
"""Get the namespace if we're running in a pod, otherwise default to default"""
path = Path("/var/run/secrets/kubernetes.io/serviceaccount/namespace")
path = Path(SERVICE_TOKEN_FILENAME.replace("token", "namespace"))
if path.exists():
with open(path, encoding="utf8") as _namespace_file:
return _namespace_file.read()

View File

@@ -18,7 +18,7 @@ from authentik.outposts.controllers.base import BaseClient, BaseController, Cont
from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
from authentik.outposts.controllers.k8s.deployment import DeploymentReconciler
from authentik.outposts.controllers.k8s.secret import SecretReconciler
from authentik.outposts.controllers.k8s.service import ServiceReconciler
from authentik.outposts.controllers.k8s.service import MetricsServiceReconciler, ServiceReconciler
from authentik.outposts.controllers.k8s.service_monitor import PrometheusServiceMonitorReconciler
from authentik.outposts.models import (
KubernetesServiceConnection,
@@ -74,6 +74,7 @@ class KubernetesController(BaseController):
SecretReconciler.reconciler_name(): SecretReconciler,
DeploymentReconciler.reconciler_name(): DeploymentReconciler,
ServiceReconciler.reconciler_name(): ServiceReconciler,
MetricsServiceReconciler.reconciler_name(): MetricsServiceReconciler,
PrometheusServiceMonitorReconciler.reconciler_name(): (
PrometheusServiceMonitorReconciler
),
@@ -82,6 +83,7 @@ class KubernetesController(BaseController):
SecretReconciler.reconciler_name(),
DeploymentReconciler.reconciler_name(),
ServiceReconciler.reconciler_name(),
MetricsServiceReconciler.reconciler_name(),
PrometheusServiceMonitorReconciler.reconciler_name(),
]

View File

@@ -4,54 +4,41 @@ from hashlib import sha256
from os import R_OK, access
from pathlib import Path
from socket import gethostname
from typing import TYPE_CHECKING, Any
from typing import Any
from urllib.parse import urlparse
from channels.layers import get_channel_layer
from django.core.cache import cache
from django.utils.translation import gettext_lazy as _
from django_dramatiq_postgres.middleware import CurrentTask
from docker.constants import DEFAULT_UNIX_SOCKET
from dramatiq.actor import actor
from rest_framework.exceptions import ValidationError
from kubernetes.config.incluster_config import SERVICE_TOKEN_FILENAME
from kubernetes.config.kube_config import KUBE_CONFIG_DEFAULT_LOCATION
from structlog.stdlib import get_logger
from yaml import safe_load
from authentik.lib.config import CONFIG
from authentik.outposts.consumer import build_outpost_group
from authentik.tasks import TASK_WORKER
if TYPE_CHECKING or TASK_WORKER:
from docker.constants import DEFAULT_UNIX_SOCKET
from kubernetes.client.configuration import Configuration
from kubernetes.config.config_exception import ConfigException
from kubernetes.config.incluster_config import SERVICE_TOKEN_FILENAME
from kubernetes.config.kube_config import (
KUBE_CONFIG_DEFAULT_LOCATION,
load_kube_config_from_dict,
)
from authentik.outposts.controllers.base import BaseController, ControllerException
from authentik.outposts.controllers.docker import DockerClient
from authentik.outposts.controllers.kubernetes import KubernetesClient
from authentik.outposts.models import (
DockerServiceConnection,
KubernetesServiceConnection,
Outpost,
OutpostServiceConnection,
OutpostType,
ServiceConnectionInvalid,
)
from authentik.providers.ldap.controllers.docker import LDAPDockerController
from authentik.providers.ldap.controllers.kubernetes import LDAPKubernetesController
from authentik.providers.proxy.controllers.docker import ProxyDockerController
from authentik.providers.proxy.controllers.kubernetes import ProxyKubernetesController
from authentik.providers.rac.controllers.docker import RACDockerController
from authentik.providers.rac.controllers.kubernetes import RACKubernetesController
from authentik.providers.radius.controllers.docker import RadiusDockerController
from authentik.providers.radius.controllers.kubernetes import RadiusKubernetesController
from authentik.tasks.middleware import CurrentTask
from authentik.outposts.controllers.base import BaseController, ControllerException
from authentik.outposts.controllers.docker import DockerClient
from authentik.outposts.controllers.kubernetes import KubernetesClient
from authentik.outposts.models import (
DockerServiceConnection,
KubernetesServiceConnection,
Outpost,
OutpostServiceConnection,
OutpostType,
ServiceConnectionInvalid,
)
from authentik.providers.ldap.controllers.docker import LDAPDockerController
from authentik.providers.ldap.controllers.kubernetes import LDAPKubernetesController
from authentik.providers.proxy.controllers.docker import ProxyDockerController
from authentik.providers.proxy.controllers.kubernetes import ProxyKubernetesController
from authentik.providers.rac.controllers.docker import RACDockerController
from authentik.providers.rac.controllers.kubernetes import RACKubernetesController
from authentik.providers.radius.controllers.docker import RadiusDockerController
from authentik.providers.radius.controllers.kubernetes import RadiusKubernetesController
from authentik.tasks.middleware import CurrentTask
LOGGER = get_logger()
CACHE_KEY_OUTPOST_DOWN = "goauthentik.io/outposts/teardown/%s"
@@ -229,13 +216,3 @@ def outpost_session_end(session_id: str):
"session_id": hashed_session_id,
},
)
@actor(description=_("Validate kubeconfig"), throws=ValidationError)
def outpost_validate_kubeconfig(kubeconfig: dict[str, Any]):
config = Configuration()
try:
load_kube_config_from_dict(kubeconfig, client_configuration=config)
except ConfigException:
raise ValidationError(_("Invalid kubeconfig")) from None
return kubeconfig

View File

@@ -1,11 +1,16 @@
"""Kubernetes controller tests"""
from unittest.mock import MagicMock, patch
from django.test import TestCase
from kubernetes.client import ApiClient
from yaml import SafeLoader, load_all
from authentik.blueprints.tests import reconcile_app
from authentik.lib.generators import generate_id
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.controllers.k8s.deployment import DeploymentReconciler
from authentik.outposts.controllers.k8s.service_monitor import PrometheusServiceMonitorReconciler
from authentik.outposts.controllers.kubernetes import KubernetesController
from authentik.outposts.models import KubernetesServiceConnection, Outpost, OutpostType
@@ -28,7 +33,7 @@ class KubernetesControllerTests(TestCase):
self.integration,
# Pass something not-none as client so we don't
# attempt to connect to K8s as that's not needed
client=self,
client=ApiClient(),
)
rec = DeploymentReconciler(controller)
self.assertEqual(rec.name, "ak-outpost-authentik-embedded-outpost")
@@ -42,3 +47,18 @@ class KubernetesControllerTests(TestCase):
controller.outpost.config = _cfg
self.assertEqual(rec.name, f"outpost-{controller.outpost.uuid.hex}")
self.assertLess(len(rec.name), 64)
    def test_static(self):
        """Static deployment manifest renders one document per reconciler."""
        self.controller = KubernetesController(
            self.outpost,
            self.integration,
            # Pass something not-none as client so we don't
            # attempt to connect to K8s as that's not needed
            client=ApiClient(),
        )
        # Pretend the ServiceMonitor CRD exists so its manifest is included
        with patch.object(
            PrometheusServiceMonitorReconciler, "_crd_exists", MagicMock(return_value=True)
        ):
            manifest = self.controller.get_static_deployment()
        manifests = list(load_all(manifest, Loader=SafeLoader))
        # secret, deployment, service, metrics service, servicemonitor
        self.assertEqual(len(manifests), 5)

View File

@@ -15,7 +15,6 @@ class ProxyKubernetesController(KubernetesController):
super().__init__(outpost, connection)
self.deployment_ports = [
DeploymentPort(9000, "http", "tcp"),
DeploymentPort(9300, "http-metrics", "tcp"),
DeploymentPort(9443, "https", "tcp"),
]
self.reconcilers[IngressReconciler.reconciler_name()] = IngressReconciler

View File

@@ -233,7 +233,7 @@ class SAMLMetadataSerializer(PassiveSerializer):
"""SAML Provider Metadata serializer"""
metadata = CharField(read_only=True)
download_url = CharField(read_only=True, required=False)
download_url = CharField(read_only=True, required=False, allow_null=True)
class SAMLProviderImportSerializer(PassiveSerializer):

View File

@@ -93,32 +93,33 @@ class TestSPInitiatedSLOViews(TestCase):
self.assertEqual(logout_request.issuer, self.provider.issuer)
self.assertEqual(logout_request.session_index, "test-session-123")
def test_redirect_view_handles_logout_response_with_relay_state(self):
"""Test that redirect view handles logout response with RelayState"""
# Use raw URL (no encoding needed)
relay_state = "https://idp.example.com/flow/return"
def test_redirect_view_handles_logout_response_with_plan_context(self):
"""Test that redirect view always redirects to plan context URL, ignoring RelayState"""
plan_relay_state = "https://idp.example.com/flow/return"
# Create request with SAML logout response
request = self.factory.get(
f"/slo/redirect/{self.application.slug}/",
{
"SAMLResponse": "dummy-response",
"RelayState": relay_state,
"RelayState": "https://somewhere-else.example.com/return",
},
)
request.session = {}
plan = FlowPlan(flow_pk="test-flow")
plan.context[PLAN_CONTEXT_SAML_RELAY_STATE] = plan_relay_state
request.session = {SESSION_KEY_PLAN: plan}
request.brand = self.brand
view = SPInitiatedSLOBindingRedirectView()
view.setup(request, application_slug=self.application.slug)
response = view.dispatch(request, application_slug=self.application.slug)
# Should redirect to relay state URL
# Should redirect to plan context URL, not the request's RelayState
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, relay_state)
self.assertEqual(response.url, plan_relay_state)
def test_redirect_view_handles_logout_response_plain_relay_state(self):
"""Test that redirect view handles logout response with plain RelayState"""
def test_redirect_view_ignores_relay_state_without_plan(self):
"""Test that redirect view ignores RelayState and falls back to root when no plan context"""
relay_state = "https://sp.example.com/plain"
# Create request with SAML logout response
@@ -136,9 +137,9 @@ class TestSPInitiatedSLOViews(TestCase):
view.setup(request, application_slug=self.application.slug)
response = view.dispatch(request, application_slug=self.application.slug)
# Should redirect to plain relay state
# Should ignore relay_state and redirect to root (no plan context)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, relay_state)
self.assertEqual(response.url, reverse("authentik_core:root-redirect"))
def test_redirect_view_handles_logout_response_no_relay_state_with_plan_context(self):
"""Test that redirect view uses plan context fallback when no RelayState"""
@@ -230,29 +231,30 @@ class TestSPInitiatedSLOViews(TestCase):
self.assertEqual(logout_request.issuer, self.provider.issuer)
self.assertEqual(logout_request.session_index, "test-session-123")
def test_post_view_handles_logout_response_with_relay_state(self):
"""Test that POST view handles logout response with RelayState"""
# Use raw URL (no encoding needed)
relay_state = "https://idp.example.com/flow/return"
def test_post_view_handles_logout_response_with_plan_context(self):
"""Test that POST view always redirects to plan context URL, ignoring RelayState"""
plan_relay_state = "https://idp.example.com/flow/return"
# Create POST request with SAML logout response
request = self.factory.post(
f"/slo/post/{self.application.slug}/",
{
"SAMLResponse": "dummy-response",
"RelayState": relay_state,
"RelayState": "https://somewhere-else.example.com/return",
},
)
request.session = {}
plan = FlowPlan(flow_pk="test-flow")
plan.context[PLAN_CONTEXT_SAML_RELAY_STATE] = plan_relay_state
request.session = {SESSION_KEY_PLAN: plan}
request.brand = self.brand
view = SPInitiatedSLOBindingPOSTView()
view.setup(request, application_slug=self.application.slug)
response = view.dispatch(request, application_slug=self.application.slug)
# Should redirect to relay state URL
# Should redirect to plan context URL, not the request's RelayState
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, relay_state)
self.assertEqual(response.url, plan_relay_state)
def test_post_view_handles_logout_response_no_relay_state_with_plan_context(self):
"""Test that POST view uses plan context fallback when no RelayState"""
@@ -419,7 +421,7 @@ class TestSPInitiatedSLOViews(TestCase):
view.resolve_provider_application()
def test_relay_state_decoding_failure(self):
"""Test handling of RelayState that's a path"""
"""Test that arbitrary path RelayState is ignored and redirects to root"""
# Create request with relay state that is a path
request = self.factory.get(
f"/slo/redirect/{self.application.slug}/",
@@ -435,9 +437,73 @@ class TestSPInitiatedSLOViews(TestCase):
view.setup(request, application_slug=self.application.slug)
response = view.dispatch(request, application_slug=self.application.slug)
# Should treat it as plain URL and redirect to it
# Should ignore relay_state and redirect to root (no plan context)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/some/invalid/path")
self.assertEqual(response.url, reverse("authentik_core:root-redirect"))
def test_redirect_view_blocks_external_relay_state(self):
"""Test that redirect view ignores external malicious URL and redirects to root"""
request = self.factory.get(
f"/slo/redirect/{self.application.slug}/",
{
"SAMLResponse": "dummy-response",
"RelayState": "https://evil.com/phishing",
},
)
request.session = {}
request.brand = self.brand
view = SPInitiatedSLOBindingRedirectView()
view.setup(request, application_slug=self.application.slug)
response = view.dispatch(request, application_slug=self.application.slug)
# Should ignore relay_state and redirect to root (no plan context)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, reverse("authentik_core:root-redirect"))
def test_redirect_view_ignores_relay_state_uses_plan_context(self):
"""Test that redirect view always uses plan context URL regardless of RelayState"""
plan_relay_state = "https://authentik.example.com/if/flow/logout/"
request = self.factory.get(
f"/slo/redirect/{self.application.slug}/",
{
"SAMLResponse": "dummy-response",
"RelayState": "https://evil.com/phishing",
},
)
plan = FlowPlan(flow_pk="test-flow")
plan.context[PLAN_CONTEXT_SAML_RELAY_STATE] = plan_relay_state
request.session = {SESSION_KEY_PLAN: plan}
request.brand = self.brand
view = SPInitiatedSLOBindingRedirectView()
view.setup(request, application_slug=self.application.slug)
response = view.dispatch(request, application_slug=self.application.slug)
# Should always use plan context value, ignoring malicious RelayState
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, plan_relay_state)
def test_post_view_ignores_external_relay_state(self):
"""Test that POST view ignores external RelayState and redirects to root"""
request = self.factory.post(
f"/slo/post/{self.application.slug}/",
{
"SAMLResponse": "dummy-response",
"RelayState": "https://evil.com/phishing",
},
)
request.session = {}
request.brand = self.brand
view = SPInitiatedSLOBindingPOSTView()
view.setup(request, application_slug=self.application.slug)
response = view.dispatch(request, application_slug=self.application.slug)
# Should ignore relay_state and redirect to root (no plan context)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, reverse("authentik_core:root-redirect"))
class TestSPInitiatedSLOLogoutMethods(TestCase):

View File

@@ -41,6 +41,24 @@ from authentik.providers.saml.views.flows import (
LOGGER = get_logger()
def _get_redirect_url(request: HttpRequest, relay_state: str = "") -> str:
    """Get the safe redirect URL from the plan context, logging a warning if the
    incoming relay_state doesn't match the stored value."""
    # Only the RelayState stored in the flow plan (i.e. set by authentik itself
    # earlier in the logout flow) is trusted; the request-supplied relay_state is
    # never used as a redirect target, which prevents open-redirect attacks.
    stored_relay_state = ""
    if SESSION_KEY_PLAN in request.session:
        plan: FlowPlan = request.session[SESSION_KEY_PLAN]
        stored_relay_state = plan.context.get(PLAN_CONTEXT_SAML_RELAY_STATE, "")
    if relay_state and relay_state != stored_relay_state:
        # A mismatch is logged for auditing but otherwise ignored; the caller
        # falls back to a safe default when the returned value is empty.
        LOGGER.warning(
            "SAML logout relay_state mismatch, possible open redirect attempt",
            received_relay_state=relay_state,
            stored_relay_state=stored_relay_state,
        )
    return stored_relay_state
class SPInitiatedSLOView(PolicyAccessView):
"""Handle SP-initiated SAML Single Logout requests"""
@@ -203,17 +221,9 @@ class SPInitiatedSLOBindingRedirectView(SPInitiatedSLOView):
# IDP SLO, so we want to redirect to our next provider
if REQUEST_KEY_SAML_RESPONSE in request.GET:
relay_state = request.GET.get(REQUEST_KEY_RELAY_STATE, "")
if relay_state:
return redirect(relay_state)
# No RelayState provided, try to get return URL from plan context
if SESSION_KEY_PLAN in request.session:
plan: FlowPlan = request.session[SESSION_KEY_PLAN]
relay_state = plan.context.get(PLAN_CONTEXT_SAML_RELAY_STATE)
if relay_state:
return redirect(relay_state)
# No relay state and no plan context - redirect to root
redirect_url = _get_redirect_url(request, relay_state)
if redirect_url:
return redirect(redirect_url)
return redirect("authentik_core:root-redirect")
# For SAML logout requests, use the parent dispatch with auth checks
@@ -254,17 +264,9 @@ class SPInitiatedSLOBindingPOSTView(SPInitiatedSLOView):
# IDP SLO, so we want to redirect to our next provider
if REQUEST_KEY_SAML_RESPONSE in request.POST:
relay_state = request.POST.get(REQUEST_KEY_RELAY_STATE, "")
if relay_state:
return redirect(relay_state)
# No RelayState provided, try to get return URL from plan context
if SESSION_KEY_PLAN in request.session:
plan: FlowPlan = request.session[SESSION_KEY_PLAN]
relay_state = plan.context.get(PLAN_CONTEXT_SAML_RELAY_STATE)
if relay_state:
return redirect(relay_state)
# No relay state and no plan context - redirect to root
redirect_url = _get_redirect_url(request, relay_state)
if redirect_url:
return redirect(redirect_url)
return redirect("authentik_core:root-redirect")
# For SAML logout requests, use the parent dispatch with auth checks

View File

View File

@@ -0,0 +1,35 @@
"""Source API Views"""
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.sources import (
GroupSourceConnectionSerializer,
GroupSourceConnectionViewSet,
UserSourceConnectionSerializer,
UserSourceConnectionViewSet,
)
from authentik.sources.ldap.models import (
GroupLDAPSourceConnection,
UserLDAPSourceConnection,
)
class UserLDAPSourceConnectionSerializer(UserSourceConnectionSerializer):
class Meta(UserSourceConnectionSerializer.Meta):
model = UserLDAPSourceConnection
class UserLDAPSourceConnectionViewSet(UserSourceConnectionViewSet, ModelViewSet):
queryset = UserLDAPSourceConnection.objects.all()
serializer_class = UserLDAPSourceConnectionSerializer
class GroupLDAPSourceConnectionSerializer(GroupSourceConnectionSerializer):
class Meta(GroupSourceConnectionSerializer.Meta):
model = GroupLDAPSourceConnection
class GroupLDAPSourceConnectionViewSet(GroupSourceConnectionViewSet, ModelViewSet):
queryset = GroupLDAPSourceConnection.objects.all()
serializer_class = GroupLDAPSourceConnectionSerializer

View File

@@ -0,0 +1,33 @@
"""LDAP source property mapping API Views"""
from rest_framework.viewsets import ModelViewSet

from authentik.core.api.property_mappings import PropertyMappingFilterSet, PropertyMappingSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.sources.ldap.models import (
    LDAPSourcePropertyMapping,
)


class LDAPSourcePropertyMappingSerializer(PropertyMappingSerializer):
    """LDAP PropertyMapping Serializer"""

    class Meta:
        model = LDAPSourcePropertyMapping
        fields = PropertyMappingSerializer.Meta.fields


class LDAPSourcePropertyMappingFilter(PropertyMappingFilterSet):
    """Filter for LDAPSourcePropertyMapping"""

    class Meta(PropertyMappingFilterSet.Meta):
        model = LDAPSourcePropertyMapping


class LDAPSourcePropertyMappingViewSet(UsedByMixin, ModelViewSet):
    """LDAP PropertyMapping Viewset"""

    queryset = LDAPSourcePropertyMapping.objects.all()
    serializer_class = LDAPSourcePropertyMappingSerializer
    filterset_class = LDAPSourcePropertyMappingFilter
    search_fields = ["name"]
    ordering = ["name"]

View File

@@ -4,7 +4,7 @@ from typing import Any
from django.core.cache import cache
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema, inline_serializer
from drf_spectacular.utils import OpenApiResponse, extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import DictField, ListField, SerializerMethodField
@@ -13,23 +13,15 @@ from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from authentik.core.api.property_mappings import PropertyMappingFilterSet, PropertyMappingSerializer
from authentik.core.api.sources import (
GroupSourceConnectionSerializer,
GroupSourceConnectionViewSet,
SourceSerializer,
UserSourceConnectionSerializer,
UserSourceConnectionViewSet,
)
from authentik.core.api.used_by import UsedByMixin
from authentik.crypto.models import CertificateKeyPair
from authentik.lib.sync.api import SyncStatusSerializer
from authentik.rbac.filters import ObjectFilter
from authentik.sources.ldap.models import (
GroupLDAPSourceConnection,
LDAPSource,
LDAPSourcePropertyMapping,
UserLDAPSourceConnection,
)
from authentik.sources.ldap.tasks import CACHE_KEY_STATUS, SYNC_CLASSES, ldap_sync
from authentik.tasks.models import Task, TaskStatus
@@ -153,6 +145,25 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
search_fields = ["name", "slug"]
ordering = ["name"]
@extend_schema(
responses={
204: OpenApiResponse(description="Sync started"),
},
request=None,
)
@action(
methods=["POST"],
detail=True,
pagination_class=None,
url_path="sync/start",
filter_backends=[ObjectFilter],
)
def sync_start(self, request: Request, slug: str) -> Response:
"""Start source sync"""
source: LDAPSource = self.get_object()
ldap_sync.send(source.pk)
return Response(status=204)
@extend_schema(responses={200: SyncStatusSerializer()})
@action(
methods=["GET"],
@@ -162,7 +173,7 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
filter_backends=[ObjectFilter],
)
def sync_status(self, request: Request, slug: str) -> Response:
"""Get provider's sync status"""
"""Get sources's sync status"""
source: LDAPSource = self.get_object()
status = {}
@@ -224,48 +235,3 @@ class LDAPSourceViewSet(UsedByMixin, ModelViewSet):
obj.pop("raw_dn", None)
all_objects[class_name].append(obj)
return Response(data=all_objects)
class LDAPSourcePropertyMappingSerializer(PropertyMappingSerializer):
"""LDAP PropertyMapping Serializer"""
class Meta:
model = LDAPSourcePropertyMapping
fields = PropertyMappingSerializer.Meta.fields
class LDAPSourcePropertyMappingFilter(PropertyMappingFilterSet):
"""Filter for LDAPSourcePropertyMapping"""
class Meta(PropertyMappingFilterSet.Meta):
model = LDAPSourcePropertyMapping
class LDAPSourcePropertyMappingViewSet(UsedByMixin, ModelViewSet):
"""LDAP PropertyMapping Viewset"""
queryset = LDAPSourcePropertyMapping.objects.all()
serializer_class = LDAPSourcePropertyMappingSerializer
filterset_class = LDAPSourcePropertyMappingFilter
search_fields = ["name"]
ordering = ["name"]
class UserLDAPSourceConnectionSerializer(UserSourceConnectionSerializer):
class Meta(UserSourceConnectionSerializer.Meta):
model = UserLDAPSourceConnection
class UserLDAPSourceConnectionViewSet(UserSourceConnectionViewSet, ModelViewSet):
queryset = UserLDAPSourceConnection.objects.all()
serializer_class = UserLDAPSourceConnectionSerializer
class GroupLDAPSourceConnectionSerializer(GroupSourceConnectionSerializer):
class Meta(GroupSourceConnectionSerializer.Meta):
model = GroupLDAPSourceConnection
class GroupLDAPSourceConnectionViewSet(GroupSourceConnectionViewSet, ModelViewSet):
queryset = GroupLDAPSourceConnection.objects.all()
serializer_class = GroupLDAPSourceConnectionSerializer

View File

@@ -159,7 +159,7 @@ class LDAPSource(IncomingSyncSource):
@property
def serializer(self) -> type[Serializer]:
from authentik.sources.ldap.api import LDAPSourceSerializer
from authentik.sources.ldap.api.sources import LDAPSourceSerializer
return LDAPSourceSerializer
@@ -356,7 +356,7 @@ class LDAPSourcePropertyMapping(PropertyMapping):
@property
def serializer(self) -> type[Serializer]:
from authentik.sources.ldap.api import LDAPSourcePropertyMappingSerializer
from authentik.sources.ldap.api.property_mappings import LDAPSourcePropertyMappingSerializer
return LDAPSourcePropertyMappingSerializer
@@ -377,7 +377,7 @@ class UserLDAPSourceConnection(UserSourceConnection):
@property
def serializer(self) -> type[Serializer]:
from authentik.sources.ldap.api import (
from authentik.sources.ldap.api.connections import (
UserLDAPSourceConnectionSerializer,
)
@@ -400,7 +400,7 @@ class GroupLDAPSourceConnection(GroupSourceConnection):
@property
def serializer(self) -> type[Serializer]:
from authentik.sources.ldap.api import (
from authentik.sources.ldap.api.connections import (
GroupLDAPSourceConnectionSerializer,
)

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,154 @@
{
"raw": {
"altServer": [],
"configurationNamingContext": [
"CN=Configuration,DC=t,DC=goauthentik,DC=io"
],
"currentTime": [
"20260331161910.0Z"
],
"defaultNamingContext": [
"DC=t,DC=goauthentik,DC=io"
],
"dnsHostName": [
"ak-dc.t.goauthentik.io"
],
"domainControllerFunctionality": [
"10"
],
"domainFunctionality": [
"10"
],
"dsServiceName": [
"CN=NTDS Settings,CN=AK-DC,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=t,DC=goauthentik,DC=io"
],
"forestFunctionality": [
"10"
],
"highestCommittedUSN": [
"20594"
],
"isGlobalCatalogReady": [
"TRUE"
],
"isSynchronized": [
"TRUE"
],
"ldapServiceName": [
"t.goauthentik.io:ak-dc$@T.GOAUTHENTIK.IO"
],
"namingContexts": [
"DC=t,DC=goauthentik,DC=io",
"CN=Configuration,DC=t,DC=goauthentik,DC=io",
"CN=Schema,CN=Configuration,DC=t,DC=goauthentik,DC=io",
"DC=DomainDnsZones,DC=t,DC=goauthentik,DC=io",
"DC=ForestDnsZones,DC=t,DC=goauthentik,DC=io"
],
"rootDomainNamingContext": [
"DC=t,DC=goauthentik,DC=io"
],
"schemaNamingContext": [
"CN=Schema,CN=Configuration,DC=t,DC=goauthentik,DC=io"
],
"serverName": [
"CN=AK-DC,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=t,DC=goauthentik,DC=io"
],
"subschemaSubentry": [
"CN=Aggregate,CN=Schema,CN=Configuration,DC=t,DC=goauthentik,DC=io"
],
"supportedCapabilities": [
"1.2.840.113556.1.4.800",
"1.2.840.113556.1.4.1670",
"1.2.840.113556.1.4.1791",
"1.2.840.113556.1.4.1935",
"1.2.840.113556.1.4.2080",
"1.2.840.113556.1.4.2237"
],
"supportedControl": [
"1.2.840.113556.1.4.319",
"1.2.840.113556.1.4.801",
"1.2.840.113556.1.4.473",
"1.2.840.113556.1.4.528",
"1.2.840.113556.1.4.417",
"1.2.840.113556.1.4.619",
"1.2.840.113556.1.4.841",
"1.2.840.113556.1.4.529",
"1.2.840.113556.1.4.805",
"1.2.840.113556.1.4.521",
"1.2.840.113556.1.4.970",
"1.2.840.113556.1.4.1338",
"1.2.840.113556.1.4.474",
"1.2.840.113556.1.4.1339",
"1.2.840.113556.1.4.1340",
"1.2.840.113556.1.4.1413",
"2.16.840.1.113730.3.4.9",
"2.16.840.1.113730.3.4.10",
"1.2.840.113556.1.4.1504",
"1.2.840.113556.1.4.1852",
"1.2.840.113556.1.4.802",
"1.2.840.113556.1.4.1907",
"1.2.840.113556.1.4.1948",
"1.2.840.113556.1.4.1974",
"1.2.840.113556.1.4.1341",
"1.2.840.113556.1.4.2026",
"1.2.840.113556.1.4.2064",
"1.2.840.113556.1.4.2065",
"1.2.840.113556.1.4.2066",
"1.2.840.113556.1.4.2090",
"1.2.840.113556.1.4.2205",
"1.2.840.113556.1.4.2204",
"1.2.840.113556.1.4.2206",
"1.2.840.113556.1.4.2211",
"1.2.840.113556.1.4.2239",
"1.2.840.113556.1.4.2255",
"1.2.840.113556.1.4.2256",
"1.2.840.113556.1.4.2309",
"1.2.840.113556.1.4.2330",
"1.2.840.113556.1.4.2354"
],
"supportedExtension": [
"1.3.6.1.4.1.1466.20037",
"1.3.6.1.4.1.1466.101.119.1",
"1.2.840.113556.1.4.1781",
"1.3.6.1.4.1.4203.1.11.3",
"1.2.840.113556.1.4.2212"
],
"supportedFeatures": [],
"supportedLDAPPolicies": [
"MaxPoolThreads",
"MaxPercentDirSyncRequests",
"MaxDatagramRecv",
"MaxReceiveBuffer",
"InitRecvTimeout",
"MaxConnections",
"MaxConnIdleTime",
"MaxPageSize",
"MaxBatchReturnMessages",
"MaxQueryDuration",
"MaxDirSyncDuration",
"MaxTempTableSize",
"MaxResultSetSize",
"MinResultSets",
"MaxResultSetsPerConn",
"MaxNotificationPerConn",
"MaxValRange",
"MaxValRangeTransitive",
"ThreadMemoryLimit",
"SystemMemoryLimitPercent",
"SecurityDescriptorWarningSize"
],
"supportedLDAPVersion": [
"3",
"2"
],
"supportedSASLMechanisms": [
"GSSAPI",
"GSS-SPNEGO",
"EXTERNAL",
"DIGEST-MD5"
],
"vendorName": [],
"vendorVersion": []
},
"type": "DsaInfo"
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,38 @@
# Fixture setup for Active Directory integration tests.
# Creates a dedicated OU, two users (one enabled, one disabled) and a
# security group, then adds the enabled user to the group.
$root = "dc=t,dc=goauthentik,dc=io"
$domain = "t.goauthentik.io"

# Container OU so all test objects live under one deletable subtree
$rootOU = New-ADOrganizationalUnit `
    -Name "ak-test" `
    -Path $root `
    -PassThru

# Enabled test user; -PassThru so the object can be added to the group below
$userErinH = New-ADUser `
    -GivenName "Erin M." `
    -Surname "Hagens" `
    -Name "Erin M. Hagens" `
    -UserPrincipalName "erin.h@$domain" `
    -SamAccountName "erin.h" `
    -PasswordNotRequired $true `
    -Path $rootOU.DistinguishedName `
    -Enabled $true `
    -PassThru

# Disabled account, used to verify sync marks it inactive
New-ADUser `
    -GivenName "Deactivated" `
    -Surname "Account" `
    -Name "Deactivated Account" `
    -UserPrincipalName "deactivated.a@$domain" `
    -SamAccountName "deactivated.a" `
    -Enabled $false `
    -Path $rootOU.DistinguishedName

# Group with a single member, used to verify group/membership sync
$groupTest = New-ADGroup `
    -Name "Test Group" `
    -Path $rootOU.DistinguishedName `
    -GroupCategory Security `
    -GroupScope Global `
    -PassThru

Add-ADGroupMember `
    -Identity $groupTest `
    -Members $userErinH

View File

@@ -0,0 +1,27 @@
"""Dump LDAP server metadata (DSA info, schema and all entries) to JSON fixtures.

Usage: ``python dump.py <server_uri> <bind_dn>``; the bind password is read
interactively via getpass so it never appears in the shell history.
"""
import ssl
from getpass import getpass
from pathlib import Path
from sys import argv

from ldap3 import ALL, ALL_ATTRIBUTES, Connection, Server, Tls

if __name__ == "__main__":
    # Keep the URI and the Server object in separate names (the original
    # re-assigned `server`, shadowing the CLI argument).
    server_uri = argv[1]
    user = argv[2]
    password = getpass()
    # Fixtures are written next to this script
    output_dir = Path(__file__).parent
    # CERT_NONE: dumps are taken from throwaway lab servers with self-signed certs
    server = Server(server_uri, get_info=ALL, tls=Tls(validate=ssl.CERT_NONE), use_ssl=True)
    connection = Connection(server, user, password, raise_exceptions=True)
    connection.bind()
    try:
        server.info.to_file(str(output_dir / "info.json"))
        server.schema.to_file(str(output_dir / "schema.json"))
        if connection.search(
            server.info.naming_contexts[0], "(objectclass=*)", attributes=ALL_ATTRIBUTES
        ):
            connection.response_to_file(str(output_dir / "entries.json"), raw=True)
    finally:
        # Always release the connection, even if a dump step raises
        connection.unbind()

View File

@@ -1,102 +1,26 @@
"""ldap testing utils"""
from ldap3 import MOCK_SYNC, OFFLINE_AD_2012_R2, Connection, Server
from ldap3 import MOCK_SYNC, Connection, Server
from ldap3.strategy.mockSync import MockSyncStrategy
from authentik.sources.ldap.sync.vendor.ms_ad import UserAccountControl
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture
def mock_ad_connection(password: str) -> Connection:
def mock_ad_connection() -> Connection:
"""Create mock AD connection"""
server = Server("my_fake_server", get_info=OFFLINE_AD_2012_R2)
_pass = "foo" # noqa # nosec
server = Server.from_definition(
"my_fake_server",
dsa_info=load_fixture("fixtures/ms_ad_2025/info.json"),
dsa_schema=load_fixture("fixtures/ms_ad_2025/schema.json"),
)
connection = Connection(
server,
user="cn=my_user,dc=goauthentik,dc=io",
password=_pass,
user="cn=ak-service-account,dc=t,dc=goauthentik,dc=io",
password=generate_id(),
client_strategy=MOCK_SYNC,
)
# Entry for password checking
connection.strategy.add_entry(
"cn=user,ou=users,dc=goauthentik,dc=io",
{
"name": "test-user",
"objectSid": "unique-test-group",
"objectClass": "person",
"displayName": "Erin M. Hagens",
"sAMAccountName": "sAMAccountName",
"distinguishedName": "cn=user,ou=users,dc=goauthentik,dc=io",
},
)
connection.strategy.add_entry(
"cn=group1,ou=groups,dc=goauthentik,dc=io",
{
"name": "test-group",
"objectSid": "unique-test-group",
"objectClass": "group",
"distinguishedName": "cn=group1,ou=groups,dc=goauthentik,dc=io",
"member": ["cn=user,ou=users,dc=goauthentik,dc=io"],
},
)
# Group without SID
connection.strategy.add_entry(
"cn=group2,ou=groups,dc=goauthentik,dc=io",
{
"name": "test-group",
"objectClass": "group",
"distinguishedName": "cn=group2,ou=groups,dc=goauthentik,dc=io",
},
)
connection.strategy.add_entry(
"cn=user0,ou=foo,ou=users,dc=goauthentik,dc=io",
{
"userPassword": password,
"sAMAccountName": "user0_sn",
"name": "user0_sn",
"revision": 0,
"objectSid": "user0",
"objectClass": "person",
"distinguishedName": "cn=user0,ou=foo,ou=users,dc=goauthentik,dc=io",
"userAccountControl": (
UserAccountControl.ACCOUNTDISABLE + UserAccountControl.NORMAL_ACCOUNT
),
},
)
# User without SID
connection.strategy.add_entry(
"cn=user1,ou=users,dc=goauthentik,dc=io",
{
"userPassword": "test1111",
"sAMAccountName": "user2_sn",
"name": "user1_sn",
"revision": 0,
"objectClass": "person",
"distinguishedName": "cn=user1,ou=users,dc=goauthentik,dc=io",
},
)
# Duplicate users
connection.strategy.add_entry(
"cn=user2,ou=users,dc=goauthentik,dc=io",
{
"userPassword": "test2222",
"sAMAccountName": "user2_sn",
"name": "user2_sn",
"revision": 0,
"objectSid": "unique-test2222",
"objectClass": "person",
"distinguishedName": "cn=user2,ou=users,dc=goauthentik,dc=io",
},
)
connection.strategy.add_entry(
"cn=user3,ou=users,dc=goauthentik,dc=io",
{
"userPassword": "test2222",
"sAMAccountName": "user2_sn",
"name": "user2_sn",
"revision": 0,
"objectSid": "unique-test2222",
"objectClass": "person",
"distinguishedName": "cn=user3,ou=users,dc=goauthentik,dc=io",
},
)
strategy: MockSyncStrategy = connection.strategy
strategy.entries_from_json(load_fixture("fixtures/ms_ad_2025/entries.json", path_only=True))
connection.bind()
return connection

View File

@@ -10,13 +10,11 @@ from rest_framework.test import APITestCase
from authentik.blueprints.tests import apply_blueprint
from authentik.core.tests.utils import create_test_admin_user
from authentik.lib.generators import generate_id, generate_key
from authentik.lib.generators import generate_id
from authentik.sources.ldap.api import LDAPSourceSerializer
from authentik.sources.ldap.models import LDAPSource, LDAPSourcePropertyMapping
from authentik.sources.ldap.tests.mock_ad import mock_ad_connection
LDAP_PASSWORD = generate_key()
class LDAPAPITests(APITestCase):
"""LDAP API tests"""
@@ -29,7 +27,7 @@ class LDAPAPITests(APITestCase):
"slug": " foo",
"server_uri": "ldaps://1.2.3.4",
"bind_cn": "",
"bind_password": LDAP_PASSWORD,
"bind_password": generate_id(),
"base_dn": "dc=foo",
"sync_users_password": True,
}
@@ -44,7 +42,7 @@ class LDAPAPITests(APITestCase):
slug=generate_id(),
server_uri="ldaps://1.2.3.4",
bind_cn="",
bind_password=LDAP_PASSWORD,
bind_password=generate_id(),
base_dn="dc=foo",
sync_users_password=True,
)
@@ -54,7 +52,7 @@ class LDAPAPITests(APITestCase):
"slug": generate_id(),
"server_uri": "ldaps://1.2.3.4",
"bind_cn": "",
"bind_password": LDAP_PASSWORD,
"bind_password": generate_id(),
"base_dn": "dc=foo",
"sync_users_password": True,
}
@@ -80,7 +78,7 @@ class LDAPAPITests(APITestCase):
"slug": " foo",
"server_uri": "ldaps://1.2.3.4",
"bind_cn": "",
"bind_password": LDAP_PASSWORD,
"bind_password": generate_id(),
"base_dn": "dc=foo",
"sync_users": True,
"user_property_mappings": [],
@@ -96,7 +94,7 @@ class LDAPAPITests(APITestCase):
"slug": " foo",
"server_uri": "ldaps://1.2.3.4",
"bind_cn": "",
"bind_password": LDAP_PASSWORD,
"bind_password": generate_id(),
"base_dn": "dc=foo",
"sync_groups": True,
"group_property_mappings": [],
@@ -122,7 +120,7 @@ class LDAPAPITests(APITestCase):
| Q(managed__startswith="goauthentik.io/sources/ldap/ms")
)
)
connection = MagicMock(return_value=mock_ad_connection(LDAP_PASSWORD))
connection = MagicMock(return_value=mock_ad_connection())
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
res = self.client.get(
reverse("authentik_api:ldapsource-debug", kwargs={"slug": source.slug})

View File

@@ -33,13 +33,17 @@ class LDAPSyncTests(TestCase):
def test_auth_direct_user_ad(self):
"""Test direct auth"""
self.source.base_dn = "dc=t,dc=goauthentik,dc=io"
self.source.additional_user_dn = ""
self.source.additional_group_dn = ""
self.source.save()
self.source.user_property_mappings.set(
LDAPSourcePropertyMapping.objects.filter(
Q(managed__startswith="goauthentik.io/sources/ldap/default-")
| Q(managed__startswith="goauthentik.io/sources/ldap/ms-")
)
)
raw_conn = mock_ad_connection(LDAP_PASSWORD)
raw_conn = mock_ad_connection()
bind_mock = Mock(wraps=raw_conn.bind)
raw_conn.bind = bind_mock
connection = MagicMock(return_value=raw_conn)
@@ -47,16 +51,16 @@ class LDAPSyncTests(TestCase):
user_sync = UserLDAPSynchronizer(self.source, Task())
user_sync.sync_full()
user = User.objects.get(username="user0_sn")
user = User.objects.get(username="erin.h")
# auth_user_by_bind = Mock(return_value=user)
backend = LDAPBackend()
self.assertEqual(
backend.authenticate(None, username="user0_sn", password=LDAP_PASSWORD),
backend.authenticate(None, username="erin.h", password=LDAP_PASSWORD),
user,
)
connection.assert_called_with(
connection_kwargs={
"user": "cn=user0,ou=foo,ou=users,dc=goauthentik,dc=io",
"user": "CN=Erin M. Hagens,OU=ak-test,DC=t,DC=goauthentik,DC=io",
"password": LDAP_PASSWORD,
}
)
@@ -64,18 +68,22 @@ class LDAPSyncTests(TestCase):
def test_auth_synced_user_ad(self):
"""Test Cached auth"""
self.source.base_dn = "dc=t,dc=goauthentik,dc=io"
self.source.additional_user_dn = ""
self.source.additional_group_dn = ""
self.source.save()
self.source.user_property_mappings.set(
LDAPSourcePropertyMapping.objects.filter(
Q(managed__startswith="goauthentik.io/sources/ldap/default-")
| Q(managed__startswith="goauthentik.io/sources/ldap/ms-")
)
)
connection = MagicMock(return_value=mock_ad_connection(LDAP_PASSWORD))
connection = MagicMock(return_value=mock_ad_connection())
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
user_sync = UserLDAPSynchronizer(self.source, Task())
user_sync.sync_full()
user = User.objects.get(username="user0_sn")
user = User.objects.get(username="erin.h")
auth_user_by_bind = Mock(return_value=user)
with patch(
"authentik.sources.ldap.auth.LDAPBackend.auth_user_by_bind",
@@ -83,7 +91,7 @@ class LDAPSyncTests(TestCase):
):
backend = LDAPBackend()
self.assertEqual(
backend.authenticate(None, username="user0_sn", password=LDAP_PASSWORD),
backend.authenticate(None, username="erin.h", password=LDAP_PASSWORD),
user,
)

View File

@@ -5,13 +5,11 @@ from unittest.mock import MagicMock, patch
from django.test import TestCase
from authentik.core.models import User
from authentik.lib.generators import generate_key
from authentik.sources.ldap.models import LDAPSource, LDAPSourcePropertyMapping
from authentik.sources.ldap.password import LDAPPasswordChanger
from authentik.sources.ldap.tests.mock_ad import mock_ad_connection
LDAP_PASSWORD = generate_key()
LDAP_CONNECTION_PATCH = MagicMock(return_value=mock_ad_connection(LDAP_PASSWORD))
LDAP_CONNECTION_PATCH = MagicMock(return_value=mock_ad_connection())
class LDAPPasswordTests(TestCase):
@@ -21,9 +19,9 @@ class LDAPPasswordTests(TestCase):
self.source = LDAPSource.objects.create(
name="ldap",
slug="ldap",
base_dn="dc=goauthentik,dc=io",
additional_user_dn="ou=users",
additional_group_dn="ou=groups",
base_dn="dc=t,dc=goauthentik,dc=io",
additional_user_dn="",
additional_group_dn="",
)
self.source.user_property_mappings.set(LDAPSourcePropertyMapping.objects.all())
self.source.save()
@@ -42,7 +40,9 @@ class LDAPPasswordTests(TestCase):
pwc = LDAPPasswordChanger(self.source)
user = User.objects.create(
username="test",
attributes={"distinguishedName": "cn=user,ou=users,dc=goauthentik,dc=io"},
attributes={
"distinguishedName": "CN=Erin M. Hagens,OU=ak-test,DC=t,DC=goauthentik,DC=io"
},
)
self.assertFalse(pwc.ad_password_complexity("test", user)) # 1 category
self.assertFalse(pwc.ad_password_complexity("test1", user)) # 2 categories

View File

@@ -54,12 +54,16 @@ class LDAPSyncTests(TestCase):
def test_sync_missing_page(self):
"""Test sync with missing page"""
connection = MagicMock(return_value=mock_ad_connection(LDAP_PASSWORD))
connection = MagicMock(return_value=mock_ad_connection())
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync_page.send(self.source.pk, class_to_path(UserLDAPSynchronizer), "foo")
def test_sync_error(self):
"""Test user sync"""
self.source.base_dn = "dc=t,dc=goauthentik,dc=io"
self.source.additional_user_dn = ""
self.source.additional_group_dn = ""
self.source.save()
self.source.user_property_mappings.set(
LDAPSourcePropertyMapping.objects.filter(
Q(managed__startswith="goauthentik.io/sources/ldap/default")
@@ -72,7 +76,7 @@ class LDAPSyncTests(TestCase):
)
self.source.user_property_mappings.set([mapping])
self.source.save()
connection = MagicMock(return_value=mock_ad_connection(LDAP_PASSWORD))
connection = MagicMock(return_value=mock_ad_connection())
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
user_sync = UserLDAPSynchronizer(self.source, Task())
with self.assertRaises(StopSync):
@@ -101,7 +105,7 @@ class LDAPSyncTests(TestCase):
)
)
self.source.user_property_mappings.add(none, byte_mapping)
connection = MagicMock(return_value=mock_ad_connection(LDAP_PASSWORD))
connection = MagicMock(return_value=mock_ad_connection())
# we basically just test that the mappings don't throw errors
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
@@ -110,24 +114,23 @@ class LDAPSyncTests(TestCase):
def test_sync_users_ad(self):
"""Test user sync"""
self.source.base_dn = "dc=t,dc=goauthentik,dc=io"
self.source.additional_user_dn = ""
self.source.additional_group_dn = ""
self.source.save()
self.source.user_property_mappings.set(
LDAPSourcePropertyMapping.objects.filter(
Q(managed__startswith="goauthentik.io/sources/ldap/default")
| Q(managed__startswith="goauthentik.io/sources/ldap/ms")
)
)
connection = MagicMock(return_value=mock_ad_connection(LDAP_PASSWORD))
connection = MagicMock(return_value=mock_ad_connection())
# Create the user beforehand so we can set attributes and check they aren't removed
user = User.objects.create(
username="user0_sn",
username="erin.h",
attributes={
"ldap_uniq": (
"S-117-6648368-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-"
"0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-"
"0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-"
"0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0-0"
),
"ldap_uniq": "S-1-5-21-1955698215-2946288202-2760262721-1114",
"foo": "bar",
},
)
@@ -135,11 +138,16 @@ class LDAPSyncTests(TestCase):
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
user_sync = UserLDAPSynchronizer(self.source, Task())
user_sync.sync_full()
user = User.objects.filter(username="user0_sn").first()
user.refresh_from_db()
self.assertEqual(user.name, "Erin M. Hagens")
self.assertEqual(user.attributes["foo"], "bar")
self.assertFalse(user.is_active)
self.assertEqual(user.path, "goauthentik.io/sources/ldap/users/foo")
self.assertFalse(User.objects.filter(username="user1_sn").exists())
self.assertTrue(user.is_active)
self.assertEqual(user.path, "goauthentik.io/sources/ldap/ak-test")
deactivated = User.objects.filter(username="deactivated.a").first()
self.assertIsNotNone(deactivated)
self.assertFalse(deactivated.is_active)
def test_sync_users_openldap(self):
"""Test user sync"""
@@ -213,6 +221,10 @@ class LDAPSyncTests(TestCase):
def test_sync_groups_ad(self):
"""Test group sync"""
self.source.base_dn = "dc=t,dc=goauthentik,dc=io"
self.source.additional_user_dn = ""
self.source.additional_group_dn = ""
self.source.save()
self.source.user_property_mappings.set(
LDAPSourcePropertyMapping.objects.filter(
Q(managed__startswith="goauthentik.io/sources/ldap/default")
@@ -224,7 +236,7 @@ class LDAPSyncTests(TestCase):
managed="goauthentik.io/sources/ldap/default-name"
)
)
connection = MagicMock(return_value=mock_ad_connection(LDAP_PASSWORD))
connection = MagicMock(return_value=mock_ad_connection())
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
_user = create_test_admin_user()
parent_group = Group.objects.get(name=_user.username)
@@ -234,7 +246,7 @@ class LDAPSyncTests(TestCase):
group_sync.sync_full()
membership_sync = MembershipLDAPSynchronizer(self.source, Task())
membership_sync.sync_full()
group: Group = Group.objects.filter(name="test-group").first()
group: Group = Group.objects.filter(name="Test Group").first()
self.assertIsNotNone(group)
self.assertEqual(group.parents.first(), parent_group)
@@ -346,7 +358,7 @@ class LDAPSyncTests(TestCase):
)
)
self.source.save()
connection = MagicMock(return_value=mock_ad_connection(LDAP_PASSWORD))
connection = MagicMock(return_value=mock_ad_connection())
with patch("authentik.sources.ldap.models.LDAPSource.connection", connection):
ldap_sync.send(self.source.pk)

View File

@@ -1,11 +1,11 @@
"""API URLs"""
from authentik.sources.ldap.api import (
from authentik.sources.ldap.api.connections import (
GroupLDAPSourceConnectionViewSet,
LDAPSourcePropertyMappingViewSet,
LDAPSourceViewSet,
UserLDAPSourceConnectionViewSet,
)
from authentik.sources.ldap.api.property_mappings import LDAPSourcePropertyMappingViewSet
from authentik.sources.ldap.api.sources import LDAPSourceViewSet
api_urlpatterns = [
("propertymappings/source/ldap", LDAPSourcePropertyMappingViewSet),

View File

@@ -40,6 +40,7 @@ class SAMLSourceSerializer(SourceSerializer):
"sso_url",
"slo_url",
"allow_idp_initiated",
"force_authn",
"name_id_policy",
"binding_type",
"verification_kp",
@@ -74,6 +75,7 @@ class SAMLSourceViewSet(UsedByMixin, ModelViewSet):
"sso_url",
"slo_url",
"allow_idp_initiated",
"force_authn",
"name_id_policy",
"binding_type",
"verification_kp",

View File

@@ -0,0 +1,21 @@
# Generated by Django 5.1.12
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_sources_saml", "0021_samlsource_signed_assertion_and_more"),
]
operations = [
migrations.AddField(
model_name="samlsource",
name="force_authn",
field=models.BooleanField(
default=False,
help_text="When enabled, the IdP will re-authenticate the user even if a session exists.",
),
),
]

View File

@@ -116,6 +116,12 @@ class SAMLSource(Source):
"as no validation of the request ID is done."
),
)
force_authn = models.BooleanField(
default=False,
help_text=_(
"When enabled, the IdP will re-authenticate the user even if a session exists."
),
)
name_id_policy = models.TextField(
choices=SAMLNameIDPolicy.choices,
default=SAMLNameIDPolicy.PERSISTENT,

View File

@@ -67,6 +67,8 @@ class RequestProcessor:
auth_n_request.attrib["IssueInstant"] = self.issue_instant
auth_n_request.attrib["ProtocolBinding"] = SAML_BINDING_POST
auth_n_request.attrib["Version"] = "2.0"
if self.source.force_authn:
auth_n_request.attrib["ForceAuthn"] = "true"
# Create issuer object
auth_n_request.append(self.get_issuer())

View File

@@ -0,0 +1,35 @@
"""SAML Source AuthnRequest tests"""
from django.test import RequestFactory, TestCase
from authentik.core.tests.utils import create_test_flow
from authentik.lib.generators import generate_id
from authentik.sources.saml.models import SAMLSource
from authentik.sources.saml.processors.request import RequestProcessor
class TestRequestProcessor(TestCase):
"""Test SAML AuthnRequest generation"""
def setUp(self):
self.factory = RequestFactory()
self.source = SAMLSource.objects.create(
name=generate_id(),
slug=generate_id(),
issuer="authentik",
sso_url="https://idp.example.com/sso",
pre_authentication_flow=create_test_flow(),
)
def test_force_authn_flag(self):
"""Test that ForceAuthn attribute is set when force_authn is True"""
self.source.force_authn = True
self.source.save()
request = self.factory.get("/")
request.session = {}
processor = RequestProcessor(self.source, request, "")
auth_n = processor.get_auth_n()
self.assertEqual(auth_n.attrib.get("ForceAuthn"), "true")

View File

@@ -133,7 +133,7 @@ class AuthenticatorValidateStageWebAuthnTests(FlowTestCase):
def test_device_challenge_webauthn_restricted(self):
"""Test webauthn (getting device challenges with a webauthn
device that is not allowed due to aaguid restrictions)"""
webauthn_mds_import(force=True)
webauthn_mds_import.send(force=True).get_result()
request = self.request_factory.get("/")
request.user = self.user
@@ -358,7 +358,7 @@ class AuthenticatorValidateStageWebAuthnTests(FlowTestCase):
def test_validate_challenge_unrestricted(self):
"""Test webauthn authentication (unrestricted webauthn device)"""
webauthn_mds_import(force=True)
webauthn_mds_import.send(force=True).get_result()
device = WebAuthnDevice.objects.create(
user=self.user,
public_key=(
@@ -432,7 +432,7 @@ class AuthenticatorValidateStageWebAuthnTests(FlowTestCase):
def test_validate_challenge_restricted(self):
"""Test webauthn authentication (restricted device type, failure)"""
webauthn_mds_import(force=True)
webauthn_mds_import.send(force=True).get_result()
device = WebAuthnDevice.objects.create(
user=self.user,
public_key=(

View File

@@ -26,6 +26,7 @@ class AuthenticatorWebAuthnStageSerializer(StageSerializer):
"hints",
"device_type_restrictions",
"device_type_restrictions_obj",
"prevent_duplicate_devices",
"max_attempts",
]

View File

@@ -194,5 +194,10 @@
},
"70617373-7761-6c6c-6669-646f32303236": {
"name": "Passwall"
},
"c9cadfc9-89a9-489e-a25a-c7e86a4d5f15": {
"name": "Burp Suite Navigation Recorder",
"icon_dark": "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTUiIGhlaWdodD0iMTUiIHZpZXdCb3g9IjAgMCAxNSAxNSIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KICAgIDxyZWN0IHg9IjAuNSIgeT0iMC41IiB3aWR0aD0iMTQiIGhlaWdodD0iMTQiIHJ4PSIyLjEwMTA2IiBmaWxsPSIjRkY2NjMzIi8+CiAgICA8cGF0aCBkPSJNOC4wMTAyOCAxMi41SDYuOTg5NzJWMTAuOTA1N0w4LjM2NTM0IDkuMTE4OUg2Ljk4OTcyVjYuOTUxNDFINC41TDYuOTg5NzIgMy43MTU4NVYyLjVIOC4wMTAyOFY0LjA5NDI3TDYuNjM2NzggNS44ODExSDguMDEwMjhWOC4wNDg1OUgxMC41TDguMDEwMjggMTEuMjg0MVYxMi41WiIgZmlsbD0id2hpdGUiLz4KPC9zdmc+Cg==",
"icon_light": "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTUiIGhlaWdodD0iMTUiIHZpZXdCb3g9IjAgMCAxNSAxNSIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KICAgIDxyZWN0IHg9IjAuNSIgeT0iMC41IiB3aWR0aD0iMTQiIGhlaWdodD0iMTQiIHJ4PSIyLjEwMTA2IiBmaWxsPSIjRkY2NjMzIi8+CiAgICA8cGF0aCBkPSJNOC4wMTAyOCAxMi41SDYuOTg5NzJWMTAuOTA1N0w4LjM2NTM0IDkuMTE4OUg2Ljk4OTcyVjYuOTUxNDFINC41TDYuOTg5NzIgMy43MTU4NVYyLjVIOC4wMTAyOFY0LjA5NDI3TDYuNjM2NzggNS44ODExSDguMDEwMjhWOC4wNDg1OUgxMC41TDguMDEwMjggMTEuMjg0MVYxMi41WiIgZmlsbD0id2hpdGUiLz4KPC9zdmc+Cg=="
}
}

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,95 @@
# Generated by Django 5.2.12 on 2026-03-24 13:00
import datetime
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
replaces = [
(
"authentik_stages_authenticator_webauthn",
"0012_webauthndevice_created_webauthndevice_last_updated_and_more",
),
("authentik_stages_authenticator_webauthn", "0013_authenticatorwebauthnstage_max_attempts"),
(
"authentik_stages_authenticator_webauthn",
"0014_alter_authenticatorwebauthnstage_friendly_name",
),
("authentik_stages_authenticator_webauthn", "0015_authenticatorwebauthnstage_hints"),
(
"authentik_stages_authenticator_webauthn",
"0016_authenticatorwebauthnstage_prevent_duplicate_devices_and_more",
),
]
dependencies = [
("authentik_stages_authenticator_webauthn", "0001_squashed_0011_webauthndevice_aaguid"),
]
operations = [
migrations.AddField(
model_name="webauthndevice",
name="created",
field=models.DateTimeField(
auto_now_add=True,
default=datetime.datetime(1, 1, 1, 0, 0, tzinfo=datetime.timezone.utc),
),
preserve_default=False,
),
migrations.AddField(
model_name="webauthndevice",
name="last_updated",
field=models.DateTimeField(auto_now=True),
),
migrations.AddField(
model_name="webauthndevice",
name="last_used",
field=models.DateTimeField(null=True),
),
migrations.AddField(
model_name="authenticatorwebauthnstage",
name="max_attempts",
field=models.PositiveIntegerField(default=0),
),
migrations.AlterField(
model_name="authenticatorwebauthnstage",
name="friendly_name",
field=models.TextField(blank=True, default=""),
preserve_default=False,
),
migrations.AddField(
model_name="authenticatorwebauthnstage",
name="hints",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(
choices=[
("security-key", "Security Key"),
("client-device", "Client Device"),
("hybrid", "Hybrid"),
]
),
blank=True,
default=list,
size=None,
),
),
migrations.AddField(
model_name="authenticatorwebauthnstage",
name="prevent_duplicate_devices",
field=models.BooleanField(
default=True, help_text="When enabled, a given device can only be registered once."
),
),
migrations.AddField(
model_name="webauthndevice",
name="attestation_certificate_fingerprint",
field=models.TextField(default=None, null=True),
),
migrations.AddField(
model_name="webauthndevice",
name="attestation_certificate_pem",
field=models.TextField(default=None, null=True),
),
]

View File

@@ -0,0 +1,30 @@
# Generated by Django 5.2.11 on 2026-03-24 12:59
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_stages_authenticator_webauthn", "0015_authenticatorwebauthnstage_hints"),
]
operations = [
migrations.AddField(
model_name="authenticatorwebauthnstage",
name="prevent_duplicate_devices",
field=models.BooleanField(
default=True, help_text="When enabled, a given device can only be registered once."
),
),
migrations.AddField(
model_name="webauthndevice",
name="attestation_certificate_fingerprint",
field=models.TextField(default=None, null=True),
),
migrations.AddField(
model_name="webauthndevice",
name="attestation_certificate_pem",
field=models.TextField(default=None, null=True),
),
]

View File

@@ -1,5 +1,6 @@
"""WebAuthn stage"""
from cryptography.x509 import Certificate, load_pem_x509_certificate
from django.contrib.auth import get_user_model
from django.contrib.postgres.fields.array import ArrayField
from django.db import models
@@ -101,6 +102,9 @@ class AuthenticatorWebAuthnStage(ConfigurableStage, FriendlyNamedStage, Stage):
choices=AuthenticatorAttachment.choices, default=None, null=True
)
prevent_duplicate_devices = models.BooleanField(
default=True, help_text=_("When enabled, a given device can only be registered once.")
)
hints = ArrayField(
models.TextField(choices=WebAuthnHint.choices),
default=list,
@@ -159,6 +163,8 @@ class WebAuthnDevice(SerializerModel, Device):
created_on = models.DateTimeField(auto_now_add=True)
last_t = models.DateTimeField(default=now)
attestation_certificate_pem = models.TextField(null=True, default=None)
attestation_certificate_fingerprint = models.TextField(null=True, default=None)
aaguid = models.TextField(default=UNKNOWN_DEVICE_TYPE_AAGUID)
device_type = models.ForeignKey(
"WebAuthnDeviceType", on_delete=models.SET_DEFAULT, null=True, default=None
@@ -169,6 +175,12 @@ class WebAuthnDevice(SerializerModel, Device):
"""Get a publickeydescriptor for this device"""
return PublicKeyCredentialDescriptor(id=base64url_to_bytes(self.credential_id))
@property
def attestation_certificate(self) -> Certificate | None:
if not self.attestation_certificate_pem:
return None
return load_pem_x509_certificate(self.attestation_certificate_pem.encode())
def set_sign_count(self, sign_count: int) -> None:
"""Set the sign_count and update the last_t datetime."""
self.sign_count = sign_count

View File

@@ -1,7 +1,11 @@
"""WebAuthn stage"""
from dataclasses import dataclass
from uuid import UUID
from cryptography.hazmat.primitives.serialization import Encoding
from cryptography.x509 import load_der_x509_certificate
from django.db.models import Q
from django.http import HttpRequest, HttpResponse
from django.http.request import QueryDict
from django.utils.translation import gettext as __
@@ -11,6 +15,7 @@ from rest_framework.serializers import ValidationError
from webauthn.helpers.bytes_to_base64url import bytes_to_base64url
from webauthn.helpers.exceptions import WebAuthnException
from webauthn.helpers.options_to_json_dict import options_to_json_dict
from webauthn.helpers.parse_attestation_object import parse_attestation_object
from webauthn.helpers.structs import (
AttestationConveyancePreference,
AuthenticatorAttachment,
@@ -28,6 +33,7 @@ from webauthn.registration.verify_registration_response import (
from authentik.core.api.utils import JSONDictField
from authentik.core.models import User
from authentik.crypto.models import fingerprint_sha256
from authentik.flows.challenge import (
Challenge,
ChallengeResponse,
@@ -46,6 +52,14 @@ PLAN_CONTEXT_WEBAUTHN_CHALLENGE = "goauthentik.io/stages/authenticator_webauthn/
PLAN_CONTEXT_WEBAUTHN_ATTEMPT = "goauthentik.io/stages/authenticator_webauthn/attempt"
@dataclass
class VerifiedRegistrationData:
registration: VerifiedRegistration
exists_query: Q
attest_cert: str | None = None
attest_cert_fingerprint: str | None = None
class AuthenticatorWebAuthnChallenge(WithUserInfoChallenge):
"""WebAuthn Challenge"""
@@ -62,7 +76,7 @@ class AuthenticatorWebAuthnChallengeResponse(ChallengeResponse):
request: HttpRequest
user: User
def validate_response(self, response: dict) -> dict:
def validate_response(self, response: dict) -> VerifiedRegistrationData:
"""Validate webauthn challenge response"""
challenge = self.stage.executor.plan.context[PLAN_CONTEXT_WEBAUTHN_CHALLENGE]
@@ -77,13 +91,33 @@ class AuthenticatorWebAuthnChallengeResponse(ChallengeResponse):
self.stage.logger.warning("registration failed", exc=exc)
raise ValidationError(f"Registration failed. Error: {exc}") from None
credential_id_exists = WebAuthnDevice.objects.filter(
credential_id=bytes_to_base64url(registration.credential_id)
).first()
registration_data = VerifiedRegistrationData(
registration,
exists_query=Q(credential_id=bytes_to_base64url(registration.credential_id)),
)
stage: AuthenticatorWebAuthnStage = self.stage.executor.current_stage
att_obj = parse_attestation_object(registration.attestation_object)
if (
att_obj
and att_obj.att_stmt
and att_obj.att_stmt.x5c is not None
and len(att_obj.att_stmt.x5c) > 0
):
cert = load_der_x509_certificate(att_obj.att_stmt.x5c[0])
registration_data.attest_cert = cert.public_bytes(
encoding=Encoding.PEM,
).decode("utf-8")
registration_data.attest_cert_fingerprint = fingerprint_sha256(cert)
if stage.prevent_duplicate_devices:
registration_data.exists_query |= Q(
attestation_certificate_fingerprint=registration_data.attest_cert_fingerprint
)
credential_id_exists = WebAuthnDevice.objects.filter(registration_data.exists_query).first()
if credential_id_exists:
raise ValidationError("Credential ID already exists.")
stage: AuthenticatorWebAuthnStage = self.stage.executor.current_stage
aaguid = registration.aaguid
allowed_aaguids = stage.device_type_restrictions.values_list("aaguid", flat=True)
if allowed_aaguids.exists():
@@ -103,11 +137,11 @@ class AuthenticatorWebAuthnChallengeResponse(ChallengeResponse):
UUID(UNKNOWN_DEVICE_TYPE_AAGUID) in allowed_aaguids
and not WebAuthnDeviceType.objects.filter(aaguid=aaguid).exists()
):
return registration
return registration_data
# Otherwise just check if the given aaguid is in the allowed aaguids
if UUID(aaguid) not in allowed_aaguids:
raise invalid_error
return registration
return registration_data
class AuthenticatorWebAuthnStageView(ChallengeStageView):
@@ -190,26 +224,28 @@ class AuthenticatorWebAuthnStageView(ChallengeStageView):
def challenge_valid(self, response: ChallengeResponse) -> HttpResponse:
# Webauthn Challenge has already been validated
webauthn_credential: VerifiedRegistration = response.validated_data["response"]
existing_device = WebAuthnDevice.objects.filter(
credential_id=bytes_to_base64url(webauthn_credential.credential_id)
).first()
webauthn_credential: VerifiedRegistrationData = response.validated_data["response"]
existing_device = WebAuthnDevice.objects.filter(webauthn_credential.exists_query).first()
if not existing_device:
name = "WebAuthn Device"
device_type = WebAuthnDeviceType.objects.filter(
aaguid=webauthn_credential.aaguid
aaguid=webauthn_credential.registration.aaguid
).first()
if device_type and device_type.description:
name = device_type.description
WebAuthnDevice.objects.create(
name=name,
user=self.get_pending_user(),
public_key=bytes_to_base64url(webauthn_credential.credential_public_key),
credential_id=bytes_to_base64url(webauthn_credential.credential_id),
sign_count=webauthn_credential.sign_count,
public_key=bytes_to_base64url(
webauthn_credential.registration.credential_public_key
),
credential_id=bytes_to_base64url(webauthn_credential.registration.credential_id),
sign_count=webauthn_credential.registration.sign_count,
rp_id=get_rp_id(self.request),
device_type=device_type,
aaguid=webauthn_credential.aaguid,
aaguid=webauthn_credential.registration.aaguid,
attestation_certificate_pem=webauthn_credential.attest_cert,
attestation_certificate_fingerprint=webauthn_credential.attest_cert_fingerprint,
)
else:
return self.executor.stage_invalid("Device with Credential ID already exists.")

View File

@@ -3,23 +3,19 @@
from functools import lru_cache
from json import loads
from pathlib import Path
from typing import TYPE_CHECKING
from django.core.cache import cache
from django.db.transaction import atomic
from django.utils.translation import gettext_lazy as _
from dramatiq.actor import actor
from fido2.mds3 import filter_revoked, parse_blob
from authentik.stages.authenticator_webauthn.models import (
UNKNOWN_DEVICE_TYPE_AAGUID,
WebAuthnDeviceType,
)
from authentik.tasks import TASK_WORKER
from authentik.tasks.middleware import CurrentTask
if TYPE_CHECKING or TASK_WORKER:
from fido2.mds3 import filter_revoked, parse_blob
CACHE_KEY_MDS_NO = "goauthentik.io/stages/authenticator_webauthn/mds_no"
AAGUID_BLOB_PATH = Path(__file__).parent / "mds" / "aaguid.json"
MDS_BLOB_PATH = Path(__file__).parent / "mds" / "blob.jwt"

View File

@@ -0,0 +1,10 @@
{
"id": "f7wv8mP-poSxh-567eWxZntzCBDW8hWlvzf92QJkT--Y2oBRz4IEAZ6M2PI9_KEQ",
"rawId": "f7wv8mP-poSxh-567eWxZntzCBDW8hWlvzf92QJkT--Y2oBRz4IEAZ6M2PI9_KEQ",
"type": "public-key",
"registrationClientExtensions": "{}",
"response": {
"clientDataJSON": "eyJ0eXBlIjoid2ViYXV0aG4uY3JlYXRlIiwiY2hhbGxlbmdlIjoiaUhJWDNBdGtaWkN4U1lMeE9oazgwWlhJN1JuQUMwUGI0V1RrOWRFSjRlTEpkem9oOGpSbWpLVzJVOW9FX0NCbjVuNlpqNjdCSUladkZMM2xwaXdKd2ciLCJvcmlnaW4iOiJodHRwOi8vbG9jYWxob3N0OjkwMDAiLCJjcm9zc09yaWdpbiI6ZmFsc2V9",
"attestationObject": "o2NmbXRmcGFja2VkZ2F0dFN0bXSjY2FsZyZjc2lnWEYwRAIgRkCRBg_Z0-cS8M4HyiZpar7cy6PRbGW_G0yTnG_lMUUCIHOKwNqU_Mr4sip5zUECezH-NJWdIGUbFR7D7mSC1wMSY3g1Y4FZAt0wggLZMIIBwaADAgECAgkA8Oq7fWgETIowDQYJKoZIhvcNAQELBQAwLjEsMCoGA1UEAxMjWXViaWNvIFUyRiBSb290IENBIFNlcmlhbCA0NTcyMDA2MzEwIBcNMTQwODAxMDAwMDAwWhgPMjA1MDA5MDQwMDAwMDBaMG8xCzAJBgNVBAYTAlNFMRIwEAYDVQQKDAlZdWJpY28gQUIxIjAgBgNVBAsMGUF1dGhlbnRpY2F0b3IgQXR0ZXN0YXRpb24xKDAmBgNVBAMMH1l1YmljbyBVMkYgRUUgU2VyaWFsIDIxMDk0NjczNzYwWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAATmZ9M7upxFm4Ce_MtqC64sXPxL14HVc0g9lv3pJR9kLM3mwgZVFPMzgkasmVKAACrSOK-8A3G21_rDv8ueedIwo4GBMH8wEwYKKwYBBAGCxAoNAQQFBAMFBAMwIgYJKwYBBAGCxAoCBBUxLjMuNi4xLjQuMS40MTQ4Mi4xLjcwEwYLKwYBBAGC5RwCAQEEBAMCBDAwIQYLKwYBBAGC5RwBAQQEEgQQL8BXn4ETR-qxFrtajbkgKjAMBgNVHRMBAf8EAjAAMA0GCSqGSIb3DQEBCwUAA4IBAQC2Mago15M4rSkAig1_eaOgPc8uDJsfYvrPtIqeVZV3p1FslZtkKxjwDEx3Io0Z-dRCIlwSaL0jGKCMahdzBk8CmcmbskOKR7tnsdDbJSuUln4SAVqaK-nkLdRUJoiQYf4fIlb--Hbdc5kyRoNxGrBt6rxvRWhq-e7hgXlsIzs-2ew9wKy98vkNqE8ZJ-lz1jIA0bj05AE5miU0XcwEoquyk4AjtF9bQlJBjQ1SdYVjH2HEVs25iwoU3g1uUn9nP20yTVhhKRMnpV_EdOjm18hxot9nV0isx5jXb5Z6-My58Vb-oHgStjkaN-3dxuJkEQuZtD1AtTItfvyUeIsL2kkiaGF1dGhEYXRhWMJJlg3liA6MaHQ0Fw9kdmBbj-SuuaKGMseZXPO6gx2XY8UAAAADL8BXn4ETR-qxFrtajbkgKgAwf7wv8mP-poSxh-567eWxZntzCBDW8hWlvzf92QJkT--Y2oBRz4IEAZ6M2PI9_KEQpQECAyYgASFYIH-8L_Jj_qaEsYfueu2KcYEacayeFjsZ1LowkryCG3MYIlggKCjYkvnPmx-ZcyOs3em0ZseMtwDga1j0Hi-WmFLboNmha2NyZWRQcm90ZWN0Ag"
}
}

View File

@@ -1,6 +1,7 @@
"""Test WebAuthn API"""
from base64 import b64decode
from json import loads
from django.urls import reverse
from webauthn.helpers.bytes_to_base64url import bytes_to_base64url
@@ -12,6 +13,7 @@ from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlan
from authentik.flows.tests import FlowTestCase
from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.lib.generators import generate_id
from authentik.lib.tests.utils import load_fixture
from authentik.stages.authenticator_webauthn.models import (
UNKNOWN_DEVICE_TYPE_AAGUID,
AuthenticatorWebAuthnStage,
@@ -102,7 +104,7 @@ class TestAuthenticatorWebAuthnStage(FlowTestCase):
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
plan.context[PLAN_CONTEXT_WEBAUTHN_CHALLENGE] = b64decode(
b"03Xodi54gKsfnP5I9VFfhaGXVVE2NUyZpBBXns/JI+x6V9RY2Tw2QmxRJkhh7174EkRazUntIwjMVY9bFG60Lw=="
b"iHIX3AtkZZCxSYLxOhk80ZXI7RnAC0Pb4WTk9dEJ4eLJdzoh8jRmjKW2U9oE/CBn5n6Zj67BIIZvFL3lpiwJwg=="
)
session = self.client.session
session[SESSION_KEY_PLAN] = plan
@@ -111,35 +113,22 @@ class TestAuthenticatorWebAuthnStage(FlowTestCase):
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
data={
"component": "ak-stage-authenticator-webauthn",
"response": {
"id": "kqnmrVLnDG-OwsSNHkihYZaNz5s",
"rawId": "kqnmrVLnDG-OwsSNHkihYZaNz5s",
"type": "public-key",
"registrationClientExtensions": "{}",
"response": {
"clientDataJSON": (
"eyJ0eXBlIjoid2ViYXV0aG4uY3JlYXRlIiwiY2hhbGxlbmd"
"lIjoiMDNYb2RpNTRnS3NmblA1STlWRmZoYUdYVlZFMk5VeV"
"pwQkJYbnNfSkkteDZWOVJZMlR3MlFteFJKa2hoNzE3NEVrU"
"mF6VW50SXdqTVZZOWJGRzYwTHciLCJvcmlnaW4iOiJodHRw"
"Oi8vbG9jYWxob3N0OjkwMDAiLCJjcm9zc09yaWdpbiI6ZmFsc2V9"
),
"attestationObject": (
"o2NmbXRkbm9uZWdhdHRTdG10oGhhdXRoRGF0YViYSZYN5Yg"
"OjGh0NBcPZHZgW4_krrmihjLHmVzzuoMdl2NdAAAAAPv8MA"
"cVTk7MjAtuAgVX170AFJKp5q1S5wxvjsLEjR5IoWGWjc-bp"
"QECAyYgASFYIKtcZHPumH37XHs0IM1v3pUBRIqHVV_SE-Lq"
"2zpJAOVXIlgg74Fg_WdB0kuLYqCKbxogkEPaVtR_iR3IyQFIJAXBzds"
),
},
},
"response": loads(load_fixture("fixtures/register.json")),
},
SERVER_NAME="localhost",
SERVER_PORT="9000",
)
self.assertEqual(response.status_code, 200)
self.assertStageRedirects(response, reverse("authentik_core:root-redirect"))
self.assertTrue(WebAuthnDevice.objects.filter(user=self.user).exists())
device = WebAuthnDevice.objects.filter(user=self.user).first()
self.assertIsNotNone(device)
self.assertEqual(
device.credential_id, "f7wv8mP-poSxh-567eWxZntzCBDW8hWlvzf92QJkT--Y2oBRz4IEAZ6M2PI9_KEQ"
)
self.assertEqual(
device.attestation_certificate_fingerprint,
"3e:28:fc:df:45:19:bb:94:0a:0c:90:98:f2:08:72:53:2a:9e:e2:76:13:02:3e:69:61:4a:d9:90:49:80:3d:34",
)
def test_register_restricted_device_type_deny(self):
"""Test registration with restricted devices (fail)"""

View File

@@ -1,12 +0,0 @@
class TaskWorkerFlag:
_set = False
def enable(self):
self._set = True
def __bool__(self):
return self._set
TASK_WORKER = TaskWorkerFlag()

View File

@@ -26,6 +26,7 @@ from dramatiq.broker import Broker
from dramatiq.message import Message
from dramatiq.middleware import Middleware
from psycopg.errors import Error
from setproctitle import setthreadtitle
from structlog.stdlib import get_logger
from authentik import authentik_full_version
@@ -250,6 +251,7 @@ class WorkerHealthcheckMiddleware(Middleware):
@staticmethod
def run(addr: str, port: int):
setthreadtitle("authentik Worker Healthcheck server")
try:
server = HTTPServer((addr, port), _healthcheck_handler)
thread = cast(HTTPServerThread, current_thread())
@@ -278,6 +280,7 @@ class WorkerStatusMiddleware(Middleware):
@staticmethod
def run(event: TEvent):
setthreadtitle("authentik Worker status")
with transaction.atomic():
hostname = socket.gethostname()
WorkerStatus.objects.filter(hostname=hostname).delete()

View File

@@ -1,8 +1,6 @@
from authentik.root.setup import setup
from authentik.tasks import TASK_WORKER
setup()
TASK_WORKER.enable()
import django # noqa: E402

View File

@@ -0,0 +1,178 @@
version: 1
metadata:
labels:
blueprints.goauthentik.io/instantiate: "false"
name: Example - Recovery with email and MFA verification
entries:
- identifiers:
slug: default-recovery-flow
id: flow
model: authentik_flows.flow
attrs:
name: Default recovery flow
title: Reset your password
designation: recovery
authentication: require_unauthenticated
- identifiers:
name: default-recovery-field-password
id: prompt-field-password
model: authentik_stages_prompt.prompt
attrs:
field_key: password
label: Password
type: password
required: true
placeholder: Password
order: 0
placeholder_expression: false
- identifiers:
name: default-recovery-field-password-repeat
id: prompt-field-password-repeat
model: authentik_stages_prompt.prompt
attrs:
field_key: password_repeat
label: Password (repeat)
type: password
required: true
placeholder: Password (repeat)
order: 1
placeholder_expression: false
- identifiers:
name: default-recovery-skip-if-restored
id: default-recovery-skip-if-restored
model: authentik_policies_expression.expressionpolicy
attrs:
expression: |
return bool(request.context.get('is_restored', True))
- identifiers:
name: default-recovery-email
id: default-recovery-email
model: authentik_stages_email.emailstage
attrs:
use_global_settings: true
host: localhost
port: 25
username: ""
use_tls: false
use_ssl: false
timeout: 10
from_address: system@authentik.local
token_expiry: minutes=30
subject: authentik
template: email/password_reset.html
activate_user_on_success: true
recovery_max_attempts: 5
recovery_cache_timeout: minutes=5
- identifiers:
name: default-recovery-mfa
id: default-recovery-mfa
model: authentik_stages_authenticator_validate.authenticatorvalidatestage
- identifiers:
name: default-recovery-user-write
id: default-recovery-user-write
model: authentik_stages_user_write.userwritestage
attrs:
user_creation_mode: never_create
- identifiers:
name: default-recovery-identification
id: default-recovery-identification
model: authentik_stages_identification.identificationstage
attrs:
user_fields:
- email
- username
- identifiers:
name: default-recovery-user-login
id: default-recovery-user-login
model: authentik_stages_user_login.userloginstage
- identifiers:
name: Change your password
id: stages-prompt-password
model: authentik_stages_prompt.promptstage
attrs:
fields:
- !KeyOf prompt-field-password
- !KeyOf prompt-field-password-repeat
validation_policies: []
- identifiers:
target: !KeyOf flow
stage: !KeyOf default-recovery-identification
order: 10
model: authentik_flows.flowstagebinding
id: flow-binding-identification
attrs:
evaluate_on_plan: true
re_evaluate_policies: true
policy_engine_mode: any
invalid_response_action: retry
- identifiers:
target: !KeyOf flow
stage: !KeyOf default-recovery-email
order: 20
model: authentik_flows.flowstagebinding
id: flow-binding-email
attrs:
evaluate_on_plan: true
re_evaluate_policies: true
policy_engine_mode: any
invalid_response_action: retry
- identifiers:
target: !KeyOf flow
stage: !KeyOf default-recovery-mfa
order: 21
model: authentik_flows.flowstagebinding
id: flow-binding-mfa
attrs:
evaluate_on_plan: true
re_evaluate_policies: true
policy_engine_mode: any
invalid_response_action: retry
- identifiers:
target: !KeyOf flow
stage: !KeyOf stages-prompt-password
order: 30
model: authentik_flows.flowstagebinding
attrs:
evaluate_on_plan: true
re_evaluate_policies: false
policy_engine_mode: any
invalid_response_action: retry
- identifiers:
target: !KeyOf flow
stage: !KeyOf default-recovery-user-write
order: 40
model: authentik_flows.flowstagebinding
attrs:
evaluate_on_plan: true
re_evaluate_policies: false
policy_engine_mode: any
invalid_response_action: retry
- identifiers:
target: !KeyOf flow
stage: !KeyOf default-recovery-user-login
order: 100
model: authentik_flows.flowstagebinding
attrs:
evaluate_on_plan: true
re_evaluate_policies: false
policy_engine_mode: any
invalid_response_action: retry
- identifiers:
policy: !KeyOf default-recovery-skip-if-restored
target: !KeyOf flow-binding-identification
order: 0
model: authentik_policies.policybinding
attrs:
negate: false
enabled: true
timeout: 30
- identifiers:
policy: !KeyOf default-recovery-skip-if-restored
target: !KeyOf flow-binding-email
order: 0
state: absent
model: authentik_policies.policybinding
attrs:
negate: false
enabled: true
timeout: 30

View File

@@ -7507,6 +7507,10 @@
"type": "string",
"minLength": 1,
"title": "Event retention"
},
"push_verify_certificates": {
"type": "boolean",
"title": "Push verify certificates"
}
},
"required": []
@@ -13493,6 +13497,11 @@
"title": "Allow idp initiated",
"description": "Allows authentication flows initiated by the IdP. This can be a security risk, as no validation of the request ID is done."
},
"force_authn": {
"type": "boolean",
"title": "Force authn",
"description": "When enabled, the IdP will re-authenticate the user even if a session exists."
},
"name_id_policy": {
"type": "string",
"enum": [
@@ -14854,6 +14863,11 @@
},
"title": "Device type restrictions"
},
"prevent_duplicate_devices": {
"type": "boolean",
"title": "Prevent duplicate devices",
"description": "When enabled, a given device can only be registered once."
},
"max_attempts": {
"type": "integer",
"minimum": 0,

View File

@@ -275,10 +275,11 @@
"htmlcov", // Coverage HTML output
"coverage.txt", // Coverage text output
//#endregion
//#region Media
//#region Media/Static
"./data", // Media files
"./media", // Legacy media files
"*.{png,jpg,pdf,svg}" // Binary files
"*.{png,jpg,pdf,svg}", // Binary files
"*dashboard.json" // Dashboards
//#endregion
],
"useGitignore": true,

6
go.mod
View File

@@ -3,7 +3,7 @@ module goauthentik.io
go 1.26.0
require (
beryju.io/ldap v0.1.0
beryju.io/ldap v0.2.1
beryju.io/radius-eap v0.1.0
github.com/avast/retry-go/v4 v4.7.0
github.com/coreos/go-oidc/v3 v3.17.0
@@ -18,7 +18,7 @@ require (
github.com/gorilla/securecookie v1.1.2
github.com/gorilla/sessions v1.4.0
github.com/gorilla/websocket v1.5.3
github.com/grafana/pyroscope-go v1.2.7
github.com/grafana/pyroscope-go v1.2.8
github.com/jackc/pgx/v5 v5.9.1
github.com/jellydator/ttlcache/v3 v3.4.0
github.com/mitchellh/mapstructure v1.5.0
@@ -48,7 +48,7 @@ require (
github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 // indirect
github.com/go-http-utils/fresh v0.0.0-20161124030543-7231e26a4b27 // indirect
github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a // indirect
github.com/go-jose/go-jose/v4 v4.1.3 // indirect
github.com/go-jose/go-jose/v4 v4.1.4 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-openapi/analysis v0.24.3 // indirect

12
go.sum
View File

@@ -1,5 +1,5 @@
beryju.io/ldap v0.1.0 h1:rPjGE3qR1Klbvn9N+iECWdzt/tK87XHgz8W5wZJg9B8=
beryju.io/ldap v0.1.0/go.mod h1:sOrYV+ZlDTDu/IvIiEiuAaXzjcpMBE+XXr4V+NJ0pWI=
beryju.io/ldap v0.2.1 h1:rhTAP2CXqrKZy/UycLC/aPSSBMcgJMzooKqk3TwVFxY=
beryju.io/ldap v0.2.1/go.mod h1:GJSw3pVOON/3+L5att3Eysmj7j0GmjLvA6/WNmPajD4=
beryju.io/radius-eap v0.1.0 h1:5M3HwkzH3nIEBcKDA2z5+sb4nCY3WdKL/SDDKTBvoqw=
beryju.io/radius-eap v0.1.0/go.mod h1:yYtO59iyoLNEepdyp1gZ0i1tGdjPbrR2M/v5yOz7Fkc=
github.com/Azure/go-ntlmssp v0.1.0 h1:DjFo6YtWzNqNvQdrwEyr/e4nhU3vRiwenz5QX7sFz+A=
@@ -32,8 +32,8 @@ github.com/go-http-utils/fresh v0.0.0-20161124030543-7231e26a4b27 h1:O6yi4xa9b2D
github.com/go-http-utils/fresh v0.0.0-20161124030543-7231e26a4b27/go.mod h1:AYvN8omj7nKLmbcXS2dyABYU6JB1Lz1bHmkkq1kf4I4=
github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a h1:v6zMvHuY9yue4+QkG/HQ/W67wvtQmWJ4SDo9aK/GIno=
github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a/go.mod h1:I79BieaU4fxrw4LMXby6q5OS9XnoR9UIKLOzDFjUmuw=
github.com/go-jose/go-jose/v4 v4.1.3 h1:CVLmWDhDVRa6Mi/IgCgaopNosCaHz7zrMeF9MlZRkrs=
github.com/go-jose/go-jose/v4 v4.1.3/go.mod h1:x4oUasVrzR7071A4TnHLGSPpNOm2a21K9Kf04k1rs08=
github.com/go-jose/go-jose/v4 v4.1.4 h1:moDMcTHmvE6Groj34emNPLs/qtYXRVcd6S7NHbHz3kA=
github.com/go-jose/go-jose/v4 v4.1.4/go.mod h1:x4oUasVrzR7071A4TnHLGSPpNOm2a21K9Kf04k1rs08=
github.com/go-ldap/ldap/v3 v3.4.13 h1:+x1nG9h+MZN7h/lUi5Q3UZ0fJ1GyDQYbPvbuH38baDQ=
github.com/go-ldap/ldap/v3 v3.4.13/go.mod h1:LxsGZV6vbaK0sIvYfsv47rfh4ca0JXokCoKjZxsszv0=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
@@ -105,8 +105,8 @@ github.com/gorilla/sessions v1.4.0/go.mod h1:FLWm50oby91+hl7p/wRxDth9bWSuk0qVL2e
github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/grafana/pyroscope-go v1.2.7 h1:VWBBlqxjyR0Cwk2W6UrE8CdcdD80GOFNutj0Kb1T8ac=
github.com/grafana/pyroscope-go v1.2.7/go.mod h1:o/bpSLiJYYP6HQtvcoVKiE9s5RiNgjYTj1DhiddP2Pc=
github.com/grafana/pyroscope-go v1.2.8 h1:UvCwIhlx9DeV7F6TW/z8q1Mi4PIm3vuUJ2ZlCEvmA4M=
github.com/grafana/pyroscope-go v1.2.8/go.mod h1:SSi59eQ1/zmKoY/BKwa5rSFsJaq+242Bcrr4wPix1g8=
github.com/grafana/pyroscope-go/godeltaprof v0.1.9 h1:c1Us8i6eSmkW+Ez05d3co8kasnuOY813tbMN8i/a3Og=
github.com/grafana/pyroscope-go/godeltaprof v0.1.9/go.mod h1:2+l7K7twW49Ct4wFluZD3tZ6e0SjanjcUUBPVD/UuGU=
github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=

View File

@@ -16,6 +16,7 @@ type Config struct {
Listen ListenConfig `yaml:"listen" env:", prefix=AUTHENTIK_LISTEN__"`
Web WebConfig `yaml:"web" env:", prefix=AUTHENTIK_WEB__"`
Log LogConfig `yaml:"log" env:", prefix=AUTHENTIK_LOG__"`
LDAP LDAPConfig `yaml:"ldap" env:", prefix=AUTHENTIK_LDAP__"`
// Outpost specific config
// These are only relevant for proxy/ldap outposts, and cannot be set via YAML
@@ -114,3 +115,7 @@ type WebConfig struct {
type LogConfig struct {
HttpHeaders []string `yaml:"http_headers" env:"HTTP_HEADERS, overwrite"`
}
type LDAPConfig struct {
PageSize int `yaml:"page_size" env:"PAGE_SIZE, overwrite"`
}

View File

@@ -12,8 +12,8 @@ import (
"goauthentik.io/internal/outpost/ldap/metrics"
)
func (ls *LDAPServer) Bind(bindDN string, bindPW string, conn net.Conn) (ldap.LDAPResultCode, error) {
req, span := bind.NewRequest(bindDN, bindPW, conn)
func (ls *LDAPServer) Bind(r ldap.BindRequest, conn net.Conn) (ldap.LDAPResultCode, error) {
req, span := bind.NewRequest(r, conn)
selectedApp := ""
defer func() {
span.Finish()
@@ -35,7 +35,7 @@ func (ls *LDAPServer) Bind(bindDN string, bindPW string, conn net.Conn) (ldap.LD
}()
for _, instance := range ls.providers {
username, err := instance.binder.GetUsername(bindDN)
username, err := instance.binder.GetUsername(r.BindDN)
if err == nil {
selectedApp = instance.GetAppSlug()
c, err := instance.binder.Bind(username, req)

View File

@@ -23,7 +23,7 @@ func (db *DirectBinder) Bind(username string, req *bind.Request) (ldap.LDAPResul
fe.Params.Add("goauthentik.io/outpost/ldap", "true")
fe.Answers[flow.StageIdentification] = username
fe.SetSecrets(req.BindPW, db.si.GetMFASupport())
fe.SetSecrets(req.Password, db.si.GetMFASupport())
passed, err := fe.Execute()
flags := flags.UserFlags{

View File

@@ -46,7 +46,7 @@ func NewSessionBinder(si server.LDAPServerInstance, oldBinder bind.Binder) *Sess
func (sb *SessionBinder) Bind(username string, req *bind.Request) (ldap.LDAPResultCode, error) {
item := sb.sessions.Get(Credentials{
DN: req.BindDN,
Password: req.BindPW,
Password: req.Password,
})
if item != nil {
sb.log.WithField("bindDN", req.BindDN).Info("authenticated from session")
@@ -63,7 +63,7 @@ func (sb *SessionBinder) Bind(username string, req *bind.Request) (ldap.LDAPResu
}
sb.sessions.Set(Credentials{
DN: req.BindDN,
Password: req.BindPW,
Password: req.Password,
}, result, time.Until(flags.Session.Expires))
}
return result, err

View File

@@ -5,6 +5,7 @@ import (
"net"
"strings"
"beryju.io/ldap"
"github.com/getsentry/sentry-go"
"github.com/google/uuid"
log "github.com/sirupsen/logrus"
@@ -12,15 +13,17 @@ import (
)
type Request struct {
BindDN string
BindPW string
id string
conn net.Conn
log *log.Entry
ctx context.Context
ldap.BindRequest
id string
conn net.Conn
log *log.Entry
ctx context.Context
}
func NewRequest(bindDN string, bindPW string, conn net.Conn) (*Request, *sentry.Span) {
func NewRequest(req ldap.BindRequest, conn net.Conn) (*Request, *sentry.Span) {
bindDN := strings.ToLower(req.BindDN)
req.BindDN = bindDN
span := sentry.StartSpan(context.TODO(), "authentik.providers.ldap.bind",
sentry.WithTransactionName("authentik.providers.ldap.bind"))
span.Description = bindDN
@@ -38,12 +41,11 @@ func NewRequest(bindDN string, bindPW string, conn net.Conn) (*Request, *sentry.
bindDN = strings.ToLower(bindDN)
return &Request{
BindDN: bindDN,
BindPW: bindPW,
conn: conn,
log: log.WithField("bindDN", bindDN).WithField("requestId", rid).WithField("client", utils.GetIP(conn.RemoteAddr())),
id: rid,
ctx: span.Context(),
BindRequest: req,
conn: conn,
log: log.WithField("bindDN", bindDN).WithField("requestId", rid).WithField("client", utils.GetIP(conn.RemoteAddr())),
id: rid,
ctx: span.Context(),
}, span
}

Some files were not shown because too many files have changed in this diff Show More