Compare commits

..

178 Commits

Author SHA1 Message Date
Marc 'risson' Schmitt
04c066d8b0 lint
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-19 19:20:34 +01:00
Marc 'risson' Schmitt
f3341a4b83 Merge branch 'main' into rust-server
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-19 18:46:47 +01:00
Marc 'risson' Schmitt
27f652dcf3 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-19 16:22:45 +01:00
Marc 'risson' Schmitt
dca2c2f536 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-19 16:10:43 +01:00
Marc 'risson' Schmitt
5d426411dd wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-19 15:59:31 +01:00
Marc 'risson' Schmitt
35ec2ea930 remove useless change
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-19 15:58:09 +01:00
Marc 'risson' Schmitt
b7c4d04c16 Merge remote-tracking branch 'origin/multiple-listeners' into rust-server
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-19 15:56:52 +01:00
Marc 'risson' Schmitt
8ef1b945e8 lint
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-19 15:47:44 +01:00
Marc 'risson' Schmitt
7fab5b6e93 Merge branch 'main' into multiple-listeners 2026-03-19 14:23:47 +00:00
Marc 'risson' Schmitt
7468a7271c Update website/docs/releases/2026/v2026.5.md
Co-authored-by: Tana M Berry <tanamarieberry@yahoo.com>
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-19 15:23:30 +01:00
Marc 'risson' Schmitt
1a270f9c6e Apply suggestions from code review
Co-authored-by: Tana M Berry <tanamarieberry@yahoo.com>
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-19 15:23:01 +01:00
Marc 'risson' Schmitt
3ae126cd99 Update website/docs/install-config/configuration/configuration.mdx
Co-authored-by: Tana M Berry <tanamarieberry@yahoo.com>
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-19 15:22:42 +01:00
Marc 'risson' Schmitt
6db2fbc8aa wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-19 14:38:19 +01:00
Marc 'risson' Schmitt
32f6738a40 errgroup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-19 14:33:42 +01:00
Marc 'risson' Schmitt
1ddc596362 better unix listener
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-19 14:22:41 +01:00
Marc 'risson' Schmitt
1281371077 Merge branch 'main' into rust-server 2026-03-18 15:29:43 +01:00
Marc 'risson' Schmitt
58508ebc4e Merge branch 'main' into rust-server 2026-03-18 15:29:25 +01:00
Marc 'risson' Schmitt
aa614ad31c lint
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-18 15:22:47 +01:00
Marc 'risson' Schmitt
b9b1c7ccf6 metrics socket and healthchecks for all outposts
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-18 15:17:33 +01:00
Marc 'risson' Schmitt
f8209680fa server healthcheck and unix socket
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-18 14:58:55 +01:00
Marc 'risson' Schmitt
2b2c6a3b9b Merge branch 'main' into multiple-listeners 2026-03-18 13:43:09 +01:00
Marc 'risson' Schmitt
62644a79fd root: allow listening on multiple IPs
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-16 18:40:45 +01:00
Marc 'risson' Schmitt
c426c94a25 Merge branch 'main' into rust-server 2026-03-16 14:09:37 +01:00
Marc 'risson' Schmitt
2e04738306 use waitgroups for multiple servers, TODO: fix healthchecks
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-13 16:13:56 +01:00
Marc 'risson' Schmitt
297e8db6eb use waitgroups for multiple servers
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-13 16:03:45 +01:00
Marc 'risson' Schmitt
5b9a30be4b Merge branch 'main' into rust-server 2026-03-13 15:59:30 +01:00
Marc 'risson' Schmitt
457429f261 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-12 14:45:48 +01:00
Marc 'risson' Schmitt
a0bac73c59 go ak api controller: add support for unix urls
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-12 14:44:15 +01:00
Marc 'risson' Schmitt
b82abaf230 fixup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-12 13:45:21 +01:00
Marc 'risson' Schmitt
c4b1e4bd44 http timeouts
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-12 13:41:50 +01:00
Marc 'risson' Schmitt
5592c4769a Merge branch 'main' into rust-server 2026-03-12 13:26:15 +01:00
Marc 'risson' Schmitt
f71f5b7278 fixup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-11 19:12:03 +01:00
Marc 'risson' Schmitt
d7159cfce2 fixup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-11 19:02:21 +01:00
Marc 'risson' Schmitt
30dc4e120b fixup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-11 19:01:15 +01:00
Marc 'risson' Schmitt
619023be75 fixup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-11 19:00:17 +01:00
Marc 'risson' Schmitt
de63473cd2 more ci
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-11 18:59:23 +01:00
Marc 'risson' Schmitt
6aa50b962c rustfmt in ci
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-11 18:50:01 +01:00
Marc 'risson' Schmitt
f240ca1708 more ci
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-11 18:48:21 +01:00
Marc 'risson' Schmitt
550da2005e start ci
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-11 18:46:28 +01:00
Marc 'risson' Schmitt
8818a0b06c revert
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-11 17:54:18 +01:00
Marc 'risson' Schmitt
013190ddd0 fix tests?
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-11 17:50:43 +01:00
Marc 'risson' Schmitt
6fb777ae5b make server listen on unix socket
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-11 17:45:55 +01:00
Marc 'risson' Schmitt
41f13d8805 fix sentry tracing
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-11 16:26:12 +01:00
Marc 'risson' Schmitt
fc5f0e7dc5 spellcheck
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-11 15:20:51 +01:00
Marc 'risson' Schmitt
9b9379ac8f lint
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-11 15:09:28 +01:00
Marc 'risson' Schmitt
c4b0825dad tasks/test: remove worker health check
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-11 15:06:39 +01:00
Marc 'risson' Schmitt
946ace14c1 fix makefile
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-11 15:05:50 +01:00
Marc 'risson' Schmitt
6a9eb8e9c7 Merge branch 'main' into rust-server 2026-03-11 14:28:31 +01:00
Marc 'risson' Schmitt
4f0d0e72d5 disable sentry span handling, it's broken
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-10 18:55:59 +01:00
Marc 'risson' Schmitt
411648672e config: separate initial loading and starting the reloader
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-10 18:47:51 +01:00
Marc 'risson' Schmitt
d5f6d30aeb update deps
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-10 17:26:24 +01:00
Marc 'risson' Schmitt
1508ad0ab8 subpath
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-10 17:10:56 +01:00
Marc 'risson' Schmitt
892e8fd856 config fallback values
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-10 17:04:58 +01:00
Marc 'risson' Schmitt
d4b0ac7c14 more todos
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-10 16:14:19 +01:00
Marc 'risson' Schmitt
fe4857abbb db: use connection callbacks for search path and application name
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-10 16:13:02 +01:00
Marc 'risson' Schmitt
8b73872c0d refine sentry setup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-10 16:00:19 +01:00
Marc 'risson' Schmitt
d22597377a finally finish tracing
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-10 15:40:34 +01:00
Marc 'risson' Schmitt
58d198d60a Merge branch 'main' into rust-server
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-10 13:31:55 +01:00
Marc 'risson' Schmitt
1de19546d7 tracing almost done
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-09 18:00:38 +01:00
Marc 'risson' Schmitt
8ad054ce65 Merge branch 'main' into rust-server 2026-03-09 16:22:12 +01:00
Marc 'risson' Schmitt
df95fc89eb wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-06 19:53:19 +01:00
Marc 'risson' Schmitt
75898710f1 update, some logging
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-06 15:30:46 +01:00
Marc 'risson' Schmitt
3a5a0c2e4f fmt
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-06 13:45:37 +01:00
Marc 'risson' Schmitt
b806e14a00 Merge branch 'main' into rust-server
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-06 13:44:28 +01:00
Marc 'risson' Schmitt
c2d02cd807 fix deny
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-05 17:47:54 +01:00
Marc 'risson' Schmitt
1212402231 remove deprecated rustls-pemfile
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-05 17:43:43 +01:00
Marc 'risson' Schmitt
2927f414c5 extract tracelayer
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-05 17:33:37 +01:00
Marc 'risson' Schmitt
5ba18fbd55 fmt
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-05 13:46:35 +01:00
Marc 'risson' Schmitt
1b108e40d6 Merge branch 'main' into rust-server 2026-03-05 13:46:07 +01:00
Marc 'risson' Schmitt
982ae7b261 nit
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-05 13:35:24 +01:00
Marc 'risson' Schmitt
294a656ad2 worker status
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-05 13:34:35 +01:00
Marc 'risson' Schmitt
dab8bab916 better handling of socket path for future testing, healthcheck
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-04 16:54:57 +01:00
Marc 'risson' Schmitt
ee1803a0ae pedantic
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-03 18:33:13 +01:00
Marc 'risson' Schmitt
99c9894a04 extract brands
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-03 16:48:25 +01:00
Marc 'risson' Schmitt
2352ce72c9 Merge branch 'main' into rust-server 2026-03-03 14:03:27 +01:00
Marc 'risson' Schmitt
bb28e6425d small fixes
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-02 18:08:57 +01:00
Marc 'risson' Schmitt
f2149dfd90 write mode to authentik-mode file
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-02 15:40:30 +01:00
Marc 'risson' Schmitt
2ff0f09db1 spawn_blocking for tls computations
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-02 15:24:46 +01:00
Marc 'risson' Schmitt
40a91fd4fb fix minor stuff
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-02 15:08:55 +01:00
Marc 'risson' Schmitt
2e3f76441c Merge branch 'main' into rust-server
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-03-02 15:05:46 +01:00
Marc 'risson' Schmitt
f91474dd91 Merge branch 'main' into rust-server
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-27 18:19:54 +01:00
Marc 'risson' Schmitt
61dbd5976f some todos
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-27 18:16:25 +01:00
Marc 'risson' Schmitt
8099ac6508 some cleanup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-27 18:15:50 +01:00
Marc 'risson' Schmitt
61ed26e3f6 worker started from rust
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-27 16:54:14 +01:00
Marc 'risson' Schmitt
ea17d4cbf1 finish tls
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-27 13:58:38 +01:00
Marc 'risson' Schmitt
ac388667d0 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-26 17:14:08 +01:00
Marc 'risson' Schmitt
cdc42de5b5 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-24 18:50:08 +01:00
Marc 'risson' Schmitt
2770c3a7e0 Merge branch 'main' into rust-server 2026-02-24 15:20:32 +01:00
Marc 'risson' Schmitt
f41f501702 finish metrics
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-24 15:20:09 +01:00
Marc 'risson' Schmitt
08685a574a better metrics
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-23 18:40:32 +01:00
Marc 'risson' Schmitt
15377f5154 better arbiter again
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-23 15:30:33 +01:00
Marc 'risson' Schmitt
52da505aab Merge branch 'main' into rust-server 2026-02-23 13:47:34 +01:00
Marc 'risson' Schmitt
d8a2a069aa add tests for extractors
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-20 18:38:09 +01:00
Marc 'risson' Schmitt
fec9dcc2e7 better logging
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-20 16:54:32 +01:00
Marc 'risson' Schmitt
b644fa5a2c revert unintended change
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-20 13:30:00 +01:00
Marc 'risson' Schmitt
9a5d59533e remove rust worker, fixup main
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-20 13:28:02 +01:00
Marc 'risson' Schmitt
3c64570398 use arcswap instead of lock for config
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-20 13:19:54 +01:00
Marc 'risson' Schmitt
a735f6dcf3 support proxying websockets to core
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-19 18:44:16 +01:00
Marc 'risson' Schmitt
f33e7f13eb autoreload
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-19 17:47:21 +01:00
Marc 'risson' Schmitt
eee00fa29b fix returned headers
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-19 16:45:42 +01:00
Marc 'risson' Schmitt
5a95a14a8f wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-19 16:38:21 +01:00
Marc 'risson' Schmitt
7b46fac608 fixup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-19 15:31:35 +01:00
Marc 'risson' Schmitt
bb488e1c2c some better lints
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-19 15:29:02 +01:00
Marc 'risson' Schmitt
138aa0e4e9 Merge branch 'main' into rust-server
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-19 14:33:17 +01:00
Marc 'risson' Schmitt
e65cd2999f tls headers
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-18 19:36:53 +01:00
Marc 'risson' Schmitt
490790c272 cleanup forwarding
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-18 19:03:30 +01:00
Marc 'risson' Schmitt
b640b42dbb wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-18 18:16:42 +01:00
Marc 'risson' Schmitt
1371465ebe fixup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-18 18:13:48 +01:00
Marc 'risson' Schmitt
c623b96dc2 some more cleanup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-18 18:13:18 +01:00
Marc 'risson' Schmitt
43fe1918db cleanup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-18 17:38:59 +01:00
Marc 'risson' Schmitt
3e2489834d remove cargo.lock for docsmg
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-18 15:56:28 +01:00
Marc 'risson' Schmitt
7ba86b7de3 introduce the arbiter, treewide fmt
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-18 15:55:45 +01:00
Marc 'risson' Schmitt
85ef3cda04 bring docsmg up to standards
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-18 15:18:48 +01:00
Marc 'risson' Schmitt
62911536bf fix tests
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-18 14:14:29 +01:00
Marc 'risson' Schmitt
1a27971399 move everything to a single crate
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-18 14:13:51 +01:00
Marc 'risson' Schmitt
7a0e946bb5 start moving to a single crate
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-17 18:45:49 +01:00
Marc 'risson' Schmitt
428ccc2c14 fix metrics endpoint
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-17 18:27:17 +01:00
Marc 'risson' Schmitt
0b706d5830 Merge branch 'main' into rust-server
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-17 13:44:03 +01:00
Marc 'risson' Schmitt
b9f4a1aed7 Merge branch 'main' into rust-server 2026-02-16 14:07:56 +01:00
Marc 'risson' Schmitt
d2cb45aadf start cleanup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-11 15:15:43 +01:00
Marc 'risson' Schmitt
de12748f25 Merge branch 'main' into rust-server 2026-02-11 14:41:18 +01:00
Marc 'risson' Schmitt
f8f39b8edc start on metrics
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-09 19:25:06 +01:00
Marc 'risson' Schmitt
986385a951 initialize python globally when needed
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-09 18:45:20 +01:00
Marc 'risson' Schmitt
129ed95cf0 Merge branch 'main' into rust-server 2026-02-09 18:39:57 +01:00
Marc 'risson' Schmitt
dc0d535fcc small improvements to storage token checks
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-09 18:39:40 +01:00
Marc 'risson' Schmitt
5c0e23a78f features for which process to build
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-09 18:07:39 +01:00
Marc 'risson' Schmitt
b4bf082864 Merge branch 'main' into rust-server 2026-02-09 14:37:11 +01:00
Dominic R
2f00983c29 static file handling 2026-02-08 11:41:30 -05:00
Dominic R
af93a1e230 rev tls_state to what it was before 2026-02-08 09:47:32 -05:00
Dominic R
dbb3898621 fix client error 2026-02-08 09:44:47 -05:00
Dominic R
a668ddcaf5 make it work on macos 2026-02-08 09:32:38 -05:00
Marc 'risson' Schmitt
051aea6f99 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-06 14:47:51 +01:00
Marc 'risson' Schmitt
b8104ec156 Merge branch 'main' into rust-server
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-05 17:30:12 +01:00
Marc 'risson' Schmitt
e59970e6ab fmt
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-04 19:04:18 +01:00
Marc 'risson' Schmitt
0b50b0aa13 actually use the proxy protocol acceptor
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-04 19:03:45 +01:00
Marc 'risson' Schmitt
7b9b1c2c70 proxy protocol and tls extractors finally
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-04 18:55:41 +01:00
Marc 'risson' Schmitt
1e1cdffb33 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-04 14:29:20 +01:00
Marc 'risson' Schmitt
8ad572ba35 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-02 19:14:14 +01:00
Marc 'risson' Schmitt
8a5b8ad047 custom tls acceptor, wip proxy protocol acceptor
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-02 18:43:27 +01:00
Marc 'risson' Schmitt
907a4ce478 Merge branch 'main' into rust-server
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-02-02 17:07:24 +01:00
Marc 'risson' Schmitt
a26254df02 compression and loading page
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-30 17:58:59 +01:00
Marc 'risson' Schmitt
bf9679dcb5 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-30 17:01:10 +01:00
Marc 'risson' Schmitt
71ee2f6c66 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-30 17:00:49 +01:00
Marc 'risson' Schmitt
90fb12a804 proxying works correctly now
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-30 16:09:41 +01:00
Marc 'risson' Schmitt
e271a8a0af static files
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-29 15:43:36 +01:00
Marc 'risson' Schmitt
6100fd7800 remove sea-orm
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-29 13:59:11 +01:00
Marc 'risson' Schmitt
b78d62f550 add console-subscriber for tokio-console debugging
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-28 15:56:14 +01:00
Marc 'risson' Schmitt
21eb1bb7d0 fix gunicorn shutdown detection
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-28 15:56:01 +01:00
Marc 'risson' Schmitt
e4445a44c4 db
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-28 15:22:41 +01:00
Marc 'risson' Schmitt
6fecbb41ca start on db
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-28 01:53:37 +01:00
Marc 'risson' Schmitt
4a840796bf Merge branch 'main' into rust-server
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-28 01:36:55 +01:00
Marc 'risson' Schmitt
cc7f190735 config reloading
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-28 01:33:25 +01:00
Marc 'risson' Schmitt
c4962f86dd wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-27 20:17:32 +01:00
Marc 'risson' Schmitt
ad672338e0 Merge branch 'main' into rust-server 2026-01-27 15:13:32 +01:00
Marc 'risson' Schmitt
fadf344955 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-26 14:25:40 +01:00
Marc 'risson' Schmitt
8c58873a3a wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-22 17:19:21 +01:00
Marc 'risson' Schmitt
ac7dd69be2 fixup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-21 17:57:24 +01:00
Marc 'risson' Schmitt
f01ab7ccb2 we proxying
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-21 17:53:56 +01:00
Marc 'risson' Schmitt
13f7ac6eca wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-20 17:57:09 +01:00
Marc 'risson' Schmitt
24202f9a3f wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-20 15:26:56 +01:00
Marc 'risson' Schmitt
5a72130576 cleanup
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-19 17:52:35 +01:00
Marc 'risson' Schmitt
fe5d24004e Merge branch 'main' into rust-server
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-19 16:30:12 +01:00
Marc 'risson' Schmitt
dd7c13c5bd wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-12 18:12:52 +01:00
Marc 'risson' Schmitt
32de1ab6c6 Merge branch 'main' into rust-server
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2026-01-12 14:07:32 +01:00
Marc 'risson' Schmitt
6e4384d672 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-11-17 14:26:40 +01:00
Marc 'risson' Schmitt
79f7759d4b Merge branch 'main' into rust-server 2025-11-14 14:48:26 +01:00
Marc 'risson' Schmitt
0ca41cb184 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-11-13 19:01:40 +01:00
Marc 'risson' Schmitt
f8e5c895d6 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-11-13 18:58:30 +01:00
Marc 'risson' Schmitt
2ba8991a3b wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-11-13 18:30:57 +01:00
Marc 'risson' Schmitt
19b36d2e0d wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-11-13 14:08:54 +01:00
Marc 'risson' Schmitt
fb802a53bc Merge branch 'main' into rust-server 2025-11-13 03:28:17 +01:00
Marc 'risson' Schmitt
2f6465d5a0 Merge branch 'main' into rust-server 2025-11-12 15:16:35 +01:00
Marc 'risson' Schmitt
c5437d2b0b wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-11-07 18:46:21 +01:00
Marc 'risson' Schmitt
8e2e90a87f wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-11-06 19:14:07 +01:00
Marc 'risson' Schmitt
4deb3d45cf wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-11-06 18:43:33 +01:00
Marc 'risson' Schmitt
b61bb3cc17 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-11-05 16:07:11 +01:00
Marc 'risson' Schmitt
af3332df9f wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-11-04 19:30:43 +01:00
Marc 'risson' Schmitt
0849df7478 wip
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
2025-11-04 17:53:54 +01:00
2605 changed files with 38000 additions and 330838 deletions

View File

@@ -1,5 +1,2 @@
[alias]
t = ["nextest", "run", "--workspace"]
[build]
rustflags = ["--cfg", "tokio_unstable"]

View File

@@ -12,4 +12,5 @@ reorder_impl_items = true
style_edition = "2024"
use_field_init_shorthand = true
use_try_shorthand = true
where_single_line = true
wrap_comments = true

View File

@@ -9,5 +9,7 @@ build_docs/**
**/*Dockerfile
blueprints/local
.git
!gen-ts-api/node_modules
!gen-ts-api/dist/**
!gen-go-api/
.venv
target

9
.gitattributes vendored
View File

@@ -1,9 +0,0 @@
packages/client-*/** linguist-generated
web/packages/lex/* linguist-vendored
web/packages/node-domexception/* linguist-vendored
web/packages/formdata-polyfill/* linguist-vendored
web/packages/sfe/vendored/* linguist-vendored
website/vendored/* linguist-vendored
website/docs/** linguist-documentation
website/integrations/** linguist-documentation
website/api/** linguist-documentation

View File

@@ -115,13 +115,20 @@ runs:
shell: bash
env:
GITHUB_TOKEN: ${{ inputs.token }}
PR_NUMBER: ${{ steps.should_run.outputs.pr_number }}
REASON: ${{ steps.should_run.outputs.reason }}
run: |
set -e -o pipefail
PR_NUMBER="${{ steps.should_run.outputs.pr_number }}"
# Get PR details
PR_DATA=$(gh api repos/${{ github.repository }}/pulls/$PR_NUMBER)
PR_TITLE=$(echo "$PR_DATA" | jq -r '.title')
PR_AUTHOR=$(echo "$PR_DATA" | jq -r '.user.login')
echo "pr_title=$PR_TITLE" >> $GITHUB_OUTPUT
echo "pr_author=$PR_AUTHOR" >> $GITHUB_OUTPUT
# Determine which labels to process
if [ "${REASON}" = "label_added_to_merged_pr" ]; then
if [ "${{ steps.should_run.outputs.reason }}" = "label_added_to_merged_pr" ]; then
# Only process the specific label that was just added
if [ "${{ github.event_name }}" = "issues" ]; then
LABEL_NAME="${{ github.event.label.name }}"
@@ -145,13 +152,13 @@ runs:
shell: bash
env:
GITHUB_TOKEN: ${{ inputs.token }}
PR_NUMBER: '${{ steps.should_run.outputs.pr_number }}'
COMMIT_SHA: '${{ steps.should_run.outputs.merge_commit_sha }}'
PR_TITLE: ${{ github.event.pull_request.title }}
PR_AUTHOR: ${{ github.event.pull_request.user.login }}
LABELS: '${{ steps.pr_details.outputs.labels }}'
run: |
set -e -o pipefail
PR_NUMBER='${{ steps.should_run.outputs.pr_number }}'
COMMIT_SHA='${{ steps.should_run.outputs.merge_commit_sha }}'
PR_TITLE='${{ steps.pr_details.outputs.pr_title }}'
PR_AUTHOR='${{ steps.pr_details.outputs.pr_author }}'
LABELS='${{ steps.pr_details.outputs.labels }}'
echo "Processing PR #$PR_NUMBER (reason: ${{ steps.should_run.outputs.reason }})"
echo "Found backport labels: $LABELS"

View File

@@ -54,6 +54,10 @@ outputs:
runs:
using: "composite"
steps:
- name: Setup authentik env
uses: ./.github/actions/setup
with:
dependencies: "python"
- name: Generate config
id: ev
shell: bash
@@ -64,4 +68,4 @@ runs:
PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
REF: ${{ github.ref }}
run: |
python3 ${{ github.action_path }}/push_vars.py
uv run python3 ${{ github.action_path }}/push_vars.py

View File

@@ -2,19 +2,10 @@
import os
from json import dumps
from pathlib import Path
from sys import exit as sysexit
from time import time
from typing import Any
def authentik_version() -> str:
init = Path(__file__).parent.parent.parent.parent / "authentik" / "__init__.py"
with open(init) as f:
content = f.read()
locals: dict[str, Any] = {}
exec(content, None, locals) # nosec
return str(locals["VERSION"])
from authentik import authentik_version
def must_or_fail(input: str | None, error: str) -> str:
@@ -106,7 +97,6 @@ if os.getenv("RELEASE", "false").lower() == "true":
image_build_args = [f"VERSION={os.getenv('REF')}"]
else:
image_build_args = [f"GIT_BUILD_HASH={sha}"]
image_build_args_str = "\n".join(image_build_args)
with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print(f"shouldPush={str(should_push).lower()}", file=_output)
@@ -119,4 +109,4 @@ with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
print(f"imageMainTag={image_main_tag}", file=_output)
print(f"imageMainName={image_tags[0]}", file=_output)
print(f"cacheTo={cache_to}", file=_output)
print(f"imageBuildArgs={image_build_args_str}", file=_output)
print(f"imageBuildArgs={"\n".join(image_build_args)}", file=_output)

View File

@@ -8,91 +8,71 @@ inputs:
postgresql_version:
description: "Optional postgresql image tag"
default: "16"
working-directory:
description: |
Optional working directory if this repo isn't in the root of the actions workspace.
When set, needs to contain a trailing slash
default: ""
runs:
using: "composite"
steps:
- name: Cleanup apt
if: ${{ contains(inputs.dependencies, 'system') || contains(inputs.dependencies, 'python') }}
shell: bash
run: sudo apt-get remove --purge man-db
- name: Install apt deps
if: ${{ contains(inputs.dependencies, 'system') || contains(inputs.dependencies, 'python') }}
uses: gerlero/apt-install@f4fa5265092af9e750549565d28c99aec7189639
with:
packages: libpq-dev openssl libxmlsec1-dev pkg-config gettext krb5-multidev libkrb5-dev heimdal-multidev libclang-dev krb5-kdc krb5-user krb5-admin-server
update: true
upgrade: false
install-recommends: false
- name: Make space on disk
- name: Install apt deps & cleanup
if: ${{ contains(inputs.dependencies, 'system') || contains(inputs.dependencies, 'python') }}
shell: bash
run: |
sudo mkdir -p /tmp/empty/
sudo rsync -a --delete /tmp/empty/ /usr/local/lib/android/
sudo apt-get remove --purge man-db
sudo apt-get update
sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext krb5-multidev libkrb5-dev heimdal-multidev libclang-dev krb5-kdc krb5-user krb5-admin-server
sudo rm -rf /usr/local/lib/android
- name: Install uv
if: ${{ contains(inputs.dependencies, 'python') }}
uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v5
uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v5
with:
enable-cache: true
- name: Setup python
if: ${{ contains(inputs.dependencies, 'python') }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v5
with:
python-version-file: "${{ inputs.working-directory }}pyproject.toml"
python-version-file: "pyproject.toml"
- name: Install Python deps
if: ${{ contains(inputs.dependencies, 'python') }}
shell: bash
working-directory: ${{ inputs.working-directory }}
run: uv sync --all-extras --dev --frozen
- name: Setup rust (stable)
if: ${{ contains(inputs.dependencies, 'rust') && !contains(inputs.dependencies, 'rust-nightly') }}
uses: actions-rust-lang/setup-rust-toolchain@2b1f5e9b395427c92ee4e3331786ca3c37afe2d7 # v1
with:
rustflags: ""
uses: actions-rust-lang/setup-rust-toolchain@a0b538fa0b742a6aa35d6e2c169b4bd06d225a98 # v1
- name: Setup rust (nightly)
if: ${{ contains(inputs.dependencies, 'rust-nightly') }}
uses: actions-rust-lang/setup-rust-toolchain@2b1f5e9b395427c92ee4e3331786ca3c37afe2d7 # v1
uses: actions-rust-lang/setup-rust-toolchain@a0b538fa0b742a6aa35d6e2c169b4bd06d225a98 # v1
with:
toolchain: nightly
components: rustfmt
rustflags: ""
- name: Setup rust dependencies
if: ${{ contains(inputs.dependencies, 'rust') }}
uses: taiki-e/install-action@787505cde8a44ea468a00478fe52baf23b15bccd # v2
uses: taiki-e/install-action@64c5c20c872907b6f7cd50994ac189e7274160f2 # v2
with:
tool: cargo-deny cargo-machete cargo-llvm-cov nextest
- name: Setup node (web)
if: ${{ contains(inputs.dependencies, 'node') }}
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v4
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v4
with:
node-version-file: "${{ inputs.working-directory }}web/package.json"
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: "${{ inputs.working-directory }}web/package-lock.json"
cache-dependency-path: web/package-lock.json
registry-url: "https://registry.npmjs.org"
- name: Setup node (root)
if: ${{ contains(inputs.dependencies, 'node') }}
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v4
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v4
with:
node-version-file: "${{ inputs.working-directory }}package.json"
node-version-file: package.json
cache: "npm"
cache-dependency-path: "${{ inputs.working-directory }}package-lock.json"
cache-dependency-path: package-lock.json
registry-url: "https://registry.npmjs.org"
- name: Install Node deps
if: ${{ contains(inputs.dependencies, 'node') }}
shell: bash
working-directory: ${{ inputs.working-directory }}
run: npm ci
- name: Setup go
if: ${{ contains(inputs.dependencies, 'go') }}
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v5
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v5
with:
go-version-file: "${{ inputs.working-directory }}go.mod"
go-version-file: "go.mod"
- name: Setup docker cache
if: ${{ contains(inputs.dependencies, 'runtime') }}
uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7
@@ -101,7 +81,6 @@ runs:
- name: Setup dependencies
if: ${{ contains(inputs.dependencies, 'runtime') }}
shell: bash
working-directory: ${{ inputs.working-directory }}
run: |
export PSQL_TAG=${{ inputs.postgresql_version }}
docker compose -f .github/actions/setup/compose.yml up -d
@@ -109,7 +88,6 @@ runs:
- name: Generate config
if: ${{ contains(inputs.dependencies, 'python') }}
shell: uv run python {0}
working-directory: ${{ inputs.working-directory }}
run: |
from authentik.lib.generators import generate_id
from yaml import safe_dump

View File

@@ -2,7 +2,7 @@ services:
postgresql:
image: docker.io/library/postgres:${PSQL_TAG:-16}
volumes:
- db-data:/var/lib/postgresql
- db-data:/var/lib/postgresql/data
command: "-c log_statement=all"
environment:
POSTGRES_USER: authentik

View File

@@ -10,12 +10,12 @@ inputs:
runs:
using: "composite"
steps:
- uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
- uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
files: ${{ inputs.files }}
flags: ${{ inputs.flags }}
use_oidc: true
- uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
- uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
with:
files: ${{ inputs.files }}
flags: ${{ inputs.flags }}

3
.github/codecov.yml vendored
View File

@@ -8,6 +8,3 @@ coverage:
threshold: 1%
comment:
after_n_builds: 3
ignore:
- packages/client-rust
- packages/client-ts

View File

@@ -20,8 +20,6 @@ updates:
prefix: "ci:"
labels:
- dependencies
cooldown:
default-days: 3
#endregion
@@ -37,44 +35,11 @@ updates:
prefix: "core:"
labels:
- dependencies
cooldown:
default-days: 7
semver-major-days: 14
semver-patch-days: 3
exclude:
- "golang.org/x/crypto"
- "golang.org/x/net"
- "github.com/golang-jwt/jwt/*"
- "github.com/coreos/go-oidc/*"
- "github.com/go-ldap/ldap/*"
#endregion
#region Rust
- package-ecosystem: cargo
directory: "/"
schedule:
interval: daily
time: "04:00"
open-pull-requests-limit: 10
commit-message:
prefix: "core:"
labels:
- dependencies
cooldown:
default-days: 7
semver-major-days: 14
semver-patch-days: 3
exclude:
- aws-lc-fips-sys
- aws-lc-rs
- aws-lc-sys
- rustls
- rustls-pki-types
- rustls-platform-verifier
- rustls-webpki
- package-ecosystem: rust-toolchain
directory: "/"
schedule:
@@ -85,8 +50,6 @@ updates:
prefix: "core:"
labels:
- dependencies
cooldown:
default-days: 3
#endregion
@@ -105,10 +68,6 @@ updates:
open-pull-requests-limit: 10
commit-message:
prefix: "web:"
cooldown:
default-days: 7
semver-major-days: 14
semver-patch-days: 3
groups:
sentry:
patterns:
@@ -172,10 +131,6 @@ updates:
open-pull-requests-limit: 10
commit-message:
prefix: "core, web:"
cooldown:
default-days: 7
semver-major-days: 14
semver-patch-days: 3
groups:
sentry:
patterns:
@@ -234,10 +189,6 @@ updates:
prefix: "website:"
labels:
- dependencies
cooldown:
default-days: 7
semver-major-days: 14
semver-patch-days: 3
groups:
docusaurus:
patterns:
@@ -276,10 +227,6 @@ updates:
prefix: "lifecycle/aws:"
labels:
- dependencies
cooldown:
default-days: 7
semver-major-days: 14
semver-patch-days: 3
#endregion
@@ -295,18 +242,6 @@ updates:
prefix: "core:"
labels:
- dependencies
cooldown:
default-days: 7
semver-major-days: 14
semver-patch-days: 3
exclude:
- "django"
- "cryptography"
- "pyjwt"
- "xmlsec"
- "lxml"
- "psycopg"
- "pyopenssl"
#endregion
@@ -324,14 +259,10 @@ updates:
prefix: "core:"
labels:
- dependencies
cooldown:
default-days: 3
- package-ecosystem: docker-compose
directories:
- /packages/client-go
- /packages/client-rust
- /packages/client-ts
# - /scripts # Maybe
- /scripts/api
- /tests/e2e
schedule:
interval: daily
@@ -341,7 +272,5 @@ updates:
prefix: "core:"
labels:
- dependencies
cooldown:
default-days: 3
#endregion

View File

@@ -26,7 +26,7 @@ REPLACE ME
If an API change has been made
- [ ] The API schema and clients have been updated (`make gen`)
- [ ] The API schema has been updated (`make gen-build`)
If changes to the frontend have been made

View File

@@ -56,19 +56,31 @@ jobs:
release: ${{ inputs.release }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version-file: "go.mod"
- name: Generate API Clients
run: |
make gen-client-ts
make gen-client-go
- name: Build Docker Image
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
id: push
with:
context: .

View File

@@ -79,18 +79,18 @@ jobs:
image-name: ${{ inputs.image_name }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: int128/docker-manifest-create-action@7df7f9e221d927eaadf87db231ddf728047308a4 # v2
- uses: int128/docker-manifest-create-action@8aac06098a12365ccdf99372dcfb453ccce8a0b0 # v2
id: build
with:
tags: ${{ matrix.tag }}

66
.github/workflows/api-ts-publish.yml vendored Normal file
View File

@@ -0,0 +1,66 @@
---
name: API - Publish Typescript client
on:
push:
branches: [main]
paths:
- "schema.yml"
workflow_dispatch:
permissions:
# Required for NPM OIDC trusted publisher
id-token: write
contents: read
jobs:
build:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
token: ${{ steps.generate_token.outputs.token }}
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
with:
node-version-file: web/package.json
registry-url: "https://registry.npmjs.org"
- name: Generate API Client
run: make gen-client-ts
- name: Publish package
working-directory: gen-ts-api/
run: |
npm i
npm publish --tag generated
- name: Upgrade /web
working-directory: web
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- name: Upgrade /web/packages/sfe
working-directory: web/packages/sfe
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}
branch: update-web-api-client
commit-message: "web: bump API Client version"
title: "web: bump API Client version"
body: "web: bump API Client version"
delete-branch: true
signoff: true
# ID from https://api.github.com/users/authentik-automation[bot]
author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
labels: dependencies
- uses: peter-evans/enable-pull-request-automerge@a660677d5469627102a1c1e11409dd063606628d # v3
with:
token: ${{ steps.generate_token.outputs.token }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash

View File

@@ -33,7 +33,7 @@ jobs:
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
with:
node-version-file: website/package.json
cache: "npm"
@@ -41,7 +41,7 @@ jobs:
- working-directory: website/
name: Install Dependencies
run: npm ci
- uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v4
- uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v4
with:
path: |
${{ github.workspace }}/website/api/.docusaurus
@@ -55,7 +55,7 @@ jobs:
env:
NODE_ENV: production
run: npm run build -w api
- uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v4
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v4
with:
name: api-docs
path: website/api/build
@@ -71,7 +71,7 @@ jobs:
with:
name: api-docs
path: website/api/build
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
with:
node-version-file: website/package.json
cache: "npm"

View File

@@ -24,7 +24,7 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
with:
node-version-file: lifecycle/aws/package.json
cache: "npm"

View File

@@ -36,7 +36,7 @@ jobs:
NODE_ENV: production
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
with:
node-version-file: website/package.json
cache: "npm"
@@ -53,7 +53,7 @@ jobs:
NODE_ENV: production
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
with:
node-version-file: website/package.json
cache: "npm"
@@ -89,14 +89,14 @@ jobs:
image-name: ghcr.io/goauthentik/dev-docs
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
id: push
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: website/Dockerfile

View File

@@ -20,19 +20,13 @@ jobs:
version:
- docs
- version-2025-12
- version-2026-2
- version-2025-10
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- run: |
set -euo pipefail
current="$(pwd)"
dir="/tmp/authentik/${{ matrix.version }}"
# 2025.12 still serves the legacy docker-compose filename; newer sites use compose.yml.
compose_path="compose.yml"
if [ "${{ matrix.version }}" = "version-2025-12" ]; then
compose_path="docker-compose.yml"
fi
mkdir -p "${dir}/lifecycle/container"
cd "${dir}"
wget "https://${{ matrix.version }}.goauthentik.io/${compose_path}" -O "${dir}/lifecycle/container/compose.yml"
wget "https://${{ matrix.version }}.goauthentik.io/docker-compose.yml" -O "${dir}/lifecycle/container/compose.yml"
"${current}/scripts/test_docker.sh"

View File

@@ -16,6 +16,7 @@ env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
RUSTFLAGS: "-Dwarnings"
permissions:
# Needed for checkout
@@ -58,22 +59,16 @@ jobs:
dependencies: ${{ matrix.deps }}
- name: run job
run: make ci-lint-${{ matrix.job }}
test-gen:
test-gen-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Setup authentik env
uses: ./.github/actions/setup
with:
dependencies: "system,python,go,node,runtime,rust-nightly"
- name: generate schema
run: make migrate gen-build
- name: generate API clients
run: make gen-clients
- name: ensure schema is up-to-date
run: git diff --exit-code -- schema.yml blueprints/schema.json packages/client-go packages/client-rust packages/client-ts
run: git diff --exit-code -- schema.yml blueprints/schema.json
test-migrations:
runs-on: ubuntu-latest
steps:
@@ -127,10 +122,7 @@ jobs:
with:
postgresql_version: ${{ matrix.psql }}
- name: run migrations to stable
run: |
docker ps
docker logs setup-postgresql-1
uv run python -m lifecycle.migrate
run: uv run python -m lifecycle.migrate
- name: checkout current code
run: |
set -x
@@ -152,6 +144,7 @@ jobs:
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
PROMETHEUS_MULTIPROC_DIR: /tmp
run: |
uv run make ci-test
- uses: ./.github/actions/test-results
@@ -181,6 +174,7 @@ jobs:
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
PROMETHEUS_MULTIPROC_DIR: /tmp
run: |
uv run make ci-test
- uses: ./.github/actions/test-results
@@ -197,9 +191,10 @@ jobs:
- name: Create k8s Kind Cluster
uses: helm/kind-action@ef37e7f390d99f746eb8b610417061a60e82a6cc # v1.14.0
- name: run integration
env:
PROMETHEUS_MULTIPROC_DIR: /tmp
run: |
uv run coverage run manage.py test tests/integration
uv run coverage combine
uv run coverage xml
- uses: ./.github/actions/test-results
if: ${{ always() }}
@@ -215,60 +210,49 @@ jobs:
job:
- name: proxy
glob: tests/e2e/test_provider_proxy*
profiles: selenium
- name: oauth
glob: tests/e2e/test_provider_oauth2* tests/e2e/test_source_oauth*
profiles: selenium
- name: oauth-oidc
glob: tests/e2e/test_provider_oidc*
profiles: selenium
- name: saml
glob: tests/e2e/test_provider_saml* tests/e2e/test_source_saml*
profiles: selenium
- name: ldap
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
- name: rac
glob: tests/e2e/test_provider_rac*
profiles: selenium
- name: ws-fed
glob: tests/e2e/test_provider_ws_fed*
profiles: selenium
- name: radius
glob: tests/e2e/test_provider_radius*
- name: scim
glob: tests/e2e/test_source_scim*
- name: flows
glob: tests/e2e/test_flows*
profiles: selenium
- name: endpoints
glob: tests/e2e/test_endpoints_*
profiles: selenium
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Setup e2e env
env:
COMPOSE_PROFILES: ${{ matrix.job.profiles }}
- name: Setup e2e env (chrome, etc)
run: |
docker compose -f tests/e2e/compose.yml up -d --quiet-pull
- id: cache-web
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v4
if: contains(matrix.job.profiles, 'selenium')
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v4
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true' && contains(matrix.job.profiles, 'selenium')
if: steps.cache-web.outputs.cache-hit != 'true'
working-directory: web
run: |
npm ci
make -C .. gen-client-ts
npm run build
npm run build:sfe
- name: run e2e
env:
PROMETHEUS_MULTIPROC_DIR: /tmp
run: |
uv run coverage run manage.py test ${{ matrix.job.glob }}
uv run coverage combine
uv run coverage xml
- uses: ./.github/actions/test-results
if: ${{ always() }}
@@ -291,15 +275,13 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc)
env:
COMPOSE_PROFILES: selenium
run: |
docker compose -f tests/e2e/compose.yml up -d --quiet-pull
- name: Setup conformance suite
run: |
docker compose -f tests/openid_conformance/compose.yml up -d --quiet-pull
- id: cache-web
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v4
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v4
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
@@ -308,19 +290,21 @@ jobs:
working-directory: web
run: |
npm ci
make -C .. gen-client-ts
npm run build
npm run build:sfe
- name: run conformance
env:
PROMETHEUS_MULTIPROC_DIR: /tmp
run: |
uv run coverage run manage.py test ${{ matrix.job.glob }}
uv run coverage combine
uv run coverage xml
- uses: ./.github/actions/test-results
if: ${{ always() }}
with:
flags: conformance
- if: ${{ !cancelled() }}
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: conformance-certification-${{ matrix.job.name }}
path: tests/openid_conformance/exports/
@@ -332,7 +316,7 @@ jobs:
- name: Setup authentik env
uses: ./.github/actions/setup
with:
dependencies: rust,runtime
dependencies: rust
- name: run tests
run: |
cargo llvm-cov --no-report nextest --workspace
@@ -343,7 +327,7 @@ jobs:
files: target/llvm-cov-target/rust.json
flags: rust
- if: ${{ !cancelled() }}
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: test-rust
path: target/llvm-cov-target/rust.json
@@ -351,7 +335,7 @@ jobs:
if: always()
needs:
- lint
- test-gen
- test-gen-build
- test-migrations
- test-migrations-from-stable
- test-unittest

View File

@@ -22,7 +22,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version-file: "go.mod"
- name: Prepare and generate API
@@ -31,6 +31,8 @@ jobs:
mkdir -p web/dist
mkdir -p website/help
touch web/dist/test website/help/test
- name: Generate API
run: make gen-client-go
- name: golangci-lint
uses: golangci/golangci-lint-action@1e7e51e771db61008b38414a730f564565cf7c20 # v8
with:
@@ -41,11 +43,13 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version-file: "go.mod"
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Generate API
run: make gen-client-go
- name: prepare database
run: |
uv run make migrate
@@ -98,14 +102,16 @@ jobs:
image-name: ghcr.io/goauthentik/dev-${{ matrix.type }}
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate API
run: make gen-client-go
- name: Build Docker Image
id: push
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: lifecycle/container/${{ matrix.type }}.Dockerfile
@@ -142,14 +148,16 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Generate API
run: make gen-client-go
- name: Build web
working-directory: web/
run: |

View File

@@ -32,7 +32,7 @@ jobs:
project: web
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
with:
node-version-file: ${{ matrix.project }}/package.json
cache: "npm"
@@ -40,6 +40,8 @@ jobs:
- working-directory: ${{ matrix.project }}/
run: |
npm ci
- name: Generate API
run: make gen-client-ts
- name: Lint
working-directory: ${{ matrix.project }}/
run: npm run ${{ matrix.command }}
@@ -47,13 +49,15 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: build
working-directory: web/
run: npm run build
@@ -73,13 +77,15 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- working-directory: web/
run: npm ci
- name: Generate API
run: make gen-client-ts
- name: test
working-directory: web/
run: npm run test || exit 0

View File

@@ -29,20 +29,20 @@ jobs:
github.event.pull_request.head.repo.full_name == github.repository)
steps:
- id: generate_token
uses: actions/create-github-app-token@1b10c78c7865c340bc4f6099eb2f838309f1e8c3 # v2
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Compress images
id: compress
uses: calibreapp/image-actions@e2cc8db5d49c849e00844dfebf01438318e96fa2 # main
uses: calibreapp/image-actions@03c976c29803442fc4040a9de5509669e7759b81 # main
with:
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
compressOnly: ${{ github.event_name != 'pull_request' }}
- uses: peter-evans/create-pull-request@5f6978faf089d4d20b00c7766989d076bb2fc7f1 # v7
- uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
id: cpr
with:

View File

@@ -16,17 +16,17 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@1b10c78c7865c340bc4f6099eb2f838309f1e8c3 # v2
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Setup authentik env
uses: ./.github/actions/setup
- run: uv run ak update_webauthn_mds
- uses: peter-evans/create-pull-request@5f6978faf089d4d20b00c7766989d076bb2fc7f1 # v7
- uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}

View File

@@ -10,11 +10,11 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@1b10c78c7865c340bc4f6099eb2f838309f1e8c3 # v2
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
if: ${{ env.GH_APP_ID != '' }}
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
env:
GH_APP_ID: ${{ secrets.GH_APP_ID }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5

View File

@@ -16,10 +16,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@1b10c78c7865c340bc4f6099eb2f838309f1e8c3 # v2
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Delete 'dev' containers older than a week
uses: snok/container-retention-policy@3b0972b2276b171b212f8c4efbca59ebba26eceb # v3.0.1
with:

View File

@@ -35,13 +35,13 @@ jobs:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
fetch-depth: 2
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
with:
node-version-file: ${{ matrix.package }}/package.json
registry-url: "https://registry.npmjs.org"
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@9426d40962ed5378910ee2e21d5f8c6fcbf2dd96 # 24d32ffd492484c1d75e0c0b894501ddb9d30d62
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # 24d32ffd492484c1d75e0c0b894501ddb9d30d62
with:
files: |
${{ matrix.package }}/package.json

View File

@@ -29,10 +29,10 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@1b10c78c7865c340bc4f6099eb2f838309f1e8c3 # v2
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Checkout main
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
@@ -57,10 +57,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@1b10c78c7865c340bc4f6099eb2f838309f1e8c3 # v2
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Checkout main
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
with:
@@ -73,7 +73,7 @@ jobs:
- name: Bump version
run: "make bump version=${{ inputs.next_version }}.0-rc1"
- name: Create pull request
uses: peter-evans/create-pull-request@5f6978faf089d4d20b00c7766989d076bb2fc7f1 # v7
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
with:
token: ${{ steps.generate_token.outputs.token }}
branch: release-bump-${{ inputs.next_version }}

View File

@@ -44,14 +44,14 @@ jobs:
with:
image-name: ghcr.io/goauthentik/docs
- name: Login to GitHub Container Registry
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
id: push
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: website/Dockerfile
@@ -84,10 +84,10 @@ jobs:
- rac
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
with:
node-version-file: web/package.json
cache: "npm"
@@ -103,19 +103,23 @@ jobs:
DOCKER_USERNAME: ${{ secrets.DOCKER_CORP_USERNAME }}
with:
image-name: ghcr.io/goauthentik/${{ matrix.type }},authentik/${{ matrix.type }}
- name: Generate API Clients
run: |
make gen-client-ts
make gen-client-go
- name: Docker Login Registry
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker Image
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
id: push
with:
push: true
@@ -148,10 +152,10 @@ jobs:
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
- uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v5
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v5
with:
node-version-file: web/package.json
cache: "npm"
@@ -160,6 +164,10 @@ jobs:
working-directory: web/
run: |
npm ci
- name: Generate API Clients
run: |
make gen-client-ts
make gen-client-go
- name: Build web
working-directory: web/
run: |
@@ -191,7 +199,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: aws-actions/configure-aws-credentials@ec61189d14ec14c8efccab744f656cffd0e33f37 # v6.1.0
- uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6.0.0
with:
role-to-assume: "arn:aws:iam::016170277896:role/github_goauthentik_authentik"
aws-region: ${{ env.AWS_REGION }}
@@ -236,7 +244,7 @@ jobs:
container=$(docker container create ${{ steps.ev.outputs.imageMainName }})
docker cp ${container}:web/ .
- name: Create a Sentry.io release
uses: getsentry/action-release@5657c9e888b4e2cc85f4d29143ea4131fde4a73a # v3
uses: getsentry/action-release@dab6548b3c03c4717878099e43782cf5be654289 # v3
continue-on-error: true
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}

View File

@@ -67,10 +67,10 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@1b10c78c7865c340bc4f6099eb2f838309f1e8c3 # v2
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- id: get-user-id
name: Get GitHub app user ID
run: echo "user-id=$(gh api "/users/${{ steps.app-token.outputs.app-slug }}[bot]" --jq .id)" >> "$GITHUB_OUTPUT"
@@ -96,7 +96,7 @@ jobs:
git tag "version/${{ inputs.version }}" HEAD -m "version/${{ inputs.version }}"
git push --follow-tags
- name: Create Release
uses: softprops/action-gh-release@b4309332981a82ec1c5618f44dd2e27cc8bfbfda # v3.0.0
uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2.6.1
with:
token: "${{ steps.app-token.outputs.token }}"
tag_name: "version/${{ inputs.version }}"
@@ -115,10 +115,10 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@1b10c78c7865c340bc4f6099eb2f838309f1e8c3 # v2
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
repositories: helm
- id: get-user-id
name: Get GitHub app user ID
@@ -137,7 +137,7 @@ jobs:
sed -E -i 's/[0-9]{4}\.[0-9]{1,2}\.[0-9]+$/${{ inputs.version }}/' charts/authentik/Chart.yaml
./scripts/helm-docs.sh
- name: Create pull request
uses: peter-evans/create-pull-request@5f6978faf089d4d20b00c7766989d076bb2fc7f1 # v7
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
with:
token: "${{ steps.app-token.outputs.token }}"
branch: bump-${{ inputs.version }}
@@ -157,10 +157,10 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@1b10c78c7865c340bc4f6099eb2f838309f1e8c3 # v2
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
repositories: version
- id: get-user-id
name: Get GitHub app user ID
@@ -196,7 +196,7 @@ jobs:
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url | .stable.reason = $reason' version.json > version.new.json
mv version.new.json version.json
- name: Create pull request
uses: peter-evans/create-pull-request@5f6978faf089d4d20b00c7766989d076bb2fc7f1 # v7
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
with:
token: "${{ steps.app-token.outputs.token }}"
branch: bump-${{ inputs.version }}

View File

@@ -15,10 +15,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@1b10c78c7865c340bc4f6099eb2f838309f1e8c3 # v2
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f # v10
with:
repo-token: ${{ steps.generate_token.outputs.token }}

View File

@@ -21,10 +21,10 @@ jobs:
steps:
- id: generate_token
if: ${{ github.event_name != 'pull_request' }}
uses: actions/create-github-app-token@1b10c78c7865c340bc4f6099eb2f838309f1e8c3 # v2
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIV_KEY }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
if: ${{ github.event_name != 'pull_request' }}
with:
@@ -33,6 +33,8 @@ jobs:
if: ${{ github.event_name == 'pull_request' }}
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Generate API
run: make gen-client-ts
- name: run extract
run: |
uv run make i18n-extract
@@ -42,7 +44,7 @@ jobs:
make web-check-compile
- name: Create Pull Request
if: ${{ github.event_name != 'pull_request' }}
uses: peter-evans/create-pull-request@5f6978faf089d4d20b00c7766989d076bb2fc7f1 # v7
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
with:
token: ${{ steps.generate_token.outputs.token }}
branch: extract-compile-backend-translation

1
.gitignore vendored
View File

@@ -220,6 +220,7 @@ media/
*mmdb
.idea/
/gen-*/
data/
# Local Netlify folder

View File

@@ -1,7 +1,6 @@
# Prettier Ignorefile
## Static Files
CODEOWNERS
**/LICENSE
authentik/stages/**/*

View File

@@ -17,6 +17,6 @@
"ms-python.vscode-pylance",
"redhat.vscode-yaml",
"Tobermory.es6-string-html",
"unifiedjs.vscode-mdx"
"unifiedjs.vscode-mdx",
]
}

18
.vscode/settings.json vendored
View File

@@ -38,10 +38,10 @@
"!AtIndex scalar",
"!ParseJSON scalar"
],
"js/ts.preferences.importModuleSpecifier": "non-relative",
"js/ts.preferences.importModuleSpecifierEnding": "index",
"js/ts.tsdk.path": "./node_modules/typescript/lib",
"js/ts.tsdk.promptToUseWorkspaceVersion": true,
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.importModuleSpecifierEnding": "index",
"typescript.tsdk": "./node_modules/typescript/lib",
"typescript.enablePromptUseWorkspaceTsdk": true,
"yaml.schemas": {
"./blueprints/schema.json": "blueprints/**/*.yaml"
},
@@ -57,13 +57,5 @@
"go.testEnvVars": {
"WORKSPACE_DIR": "${workspaceFolder}"
},
"github-actions.workflows.pinned.workflows": [".github/workflows/ci-main.yml"],
"search.exclude": {
"**/*.code-search": true,
"**/bower_components": true,
"**/node_modules": true,
"**/playwright-report/**": true,
"**/website/**/build": true,
"**/client-*": true
}
"github-actions.workflows.pinned.workflows": [".github/workflows/ci-main.yml"]
}

View File

@@ -16,7 +16,7 @@ Cargo.toml @goauthentik/backend
Cargo.lock @goauthentik/backend
go.mod @goauthentik/backend
go.sum @goauthentik/backend
.cargo/ @goauthentik/backend
.config/ @goauthentik/backend
rust-toolchain.toml @goauthentik/backend
# Infrastructure
.github/ @goauthentik/infrastructure
@@ -27,18 +27,14 @@ Makefile @goauthentik/infrastructure
.editorconfig @goauthentik/infrastructure
CODEOWNERS @goauthentik/infrastructure
# Backend packages
packages/ak-* @goauthentik/backend
packages/client-rust @goauthentik/backend
packages/django-channels-postgres @goauthentik/backend
packages/django-postgres-cache @goauthentik/backend
packages/django-dramatiq-postgres @goauthentik/backend
# Web packages
tsconfig.json @goauthentik/frontend
package.json @goauthentik/frontend
package-lock.json @goauthentik/frontend
packages/package.json @goauthentik/frontend
packages/package-lock.json @goauthentik/frontend
packages/client-ts @goauthentik/frontend
packages/docusaurus-config @goauthentik/frontend
packages/esbuild-plugin-live-reload @goauthentik/frontend
packages/eslint-config @goauthentik/frontend

710
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,16 +1,9 @@
[workspace]
members = [
"packages/ak-axum",
"packages/ak-common",
"packages/client-rust",
"website/scripts/docsmg",
]
members = [".", "website/scripts/docsmg"]
resolver = "3"
[workspace.package]
version = "2026.5.0-rc1"
authors = ["authentik Team <hello@goauthentik.io>"]
description = "Making authentication simple."
edition = "2024"
readme = "README.md"
homepage = "https://goauthentik.io"
@@ -19,54 +12,54 @@ license-file = "LICENSE"
publish = false
[workspace.dependencies]
arc-swap = "= 1.9.1"
argh = "= 0.1.19"
axum-server = { version = "= 0.8.0", features = ["tls-rustls-no-provider"] }
aws-lc-rs = { version = "= 1.16.3", features = ["fips"] }
axum = { version = "= 0.8.9", features = ["http2", "macros", "ws"] }
clap = { version = "= 4.6.1", features = ["derive", "env"] }
arc-swap = "1.8.2"
argh = "0.1.17"
async-trait = "0.1.89"
aws-lc-rs = { version = "1.16.1", features = ["fips"] }
axum = { version = "0.8.8", features = ["http2", "macros", "ws"] }
axum-server = { version = "0.8.0", features = ["tls-rustls-no-provider"] }
bytes = "1.11.1"
chrono = "0.4.44"
clap = { version = "4.5.59", features = ["derive", "env"] }
client-ip = { version = "0.2.1", features = ["forwarded-header"] }
color-eyre = "= 0.6.5"
colored = "= 3.1.1"
config-rs = { package = "config", version = "= 0.15.22", default-features = false, features = [
"json",
color-eyre = "0.6.5"
colored = "3.1.1"
config = { version = "0.15.19", default-features = false, features = [
"yaml",
"async",
] }
console-subscriber = "= 0.5.0"
dotenvy = "= 0.15.7"
durstr = "= 0.5.1"
eyre = "= 0.6.12"
forwarded-header-value = "= 0.1.1"
futures = "= 0.3.32"
glob = "= 0.3.3"
hyper-unix-socket = "= 0.6.1"
hyper-util = "= 0.1.20"
ipnet = { version = "= 2.12.0", features = ["serde"] }
json-subscriber = "= 0.2.8"
metrics = "= 0.24.3"
metrics-exporter-prometheus = { version = "= 0.18.1", default-features = false }
nix = { version = "= 0.31.2", features = ["hostname", "signal"] }
notify = "= 8.2.0"
pin-project-lite = "= 0.2.17"
pyo3 = "= 0.28.3"
regex = "= 1.12.3"
reqwest = { version = "= 0.13.2", features = [
"form",
"json",
"multipart",
"query",
"rustls",
"stream",
console-subscriber = "0.5.0"
dotenvy = "0.15.7"
durstr = "0.4.0"
eyre = "0.6.12"
forwarded-header-value = "0.1.1"
futures = "0.3.32"
glob = "0.3.3"
http-body-util = "0.1.3"
hyper = "1.8.1"
hyper-unix-socket = "0.3.0"
hyper-util = "0.1.20"
ipnet = { version = "2.12.0", features = ["serde"] }
# See https://github.com/mladedav/json-subscriber/pull/23
json-subscriber = { git = "https://github.com/rissson/json-subscriber.git", rev = "950ad7cb887a0a14fd5cb8afb8e76db1f456c032" }
jsonwebtoken = { version = "10.3.0", default-features = false, features = [
"aws_lc_rs",
] }
reqwest-middleware = { version = "= 0.5.1", features = [
"form",
"json",
"multipart",
"query",
"rustls",
metrics = "0.24.3"
metrics-exporter-prometheus = { version = "0.18.1", default-features = false }
nix = { version = "0.31.2", features = ["hostname", "signal"] }
notify = "8.2.0"
pem = "3.0.6"
pin-project-lite = "0.2.17"
pyo3 = "0.28.2"
percent-encoding = "2.3.2"
rcgen = { version = "0.14.7", default-features = false, features = [
"aws_lc_rs",
"fips",
] }
rustls = { version = "= 0.23.39", features = ["fips"] }
sentry = { version = "= 0.47.0", default-features = false, features = [
regex = "1.12.3"
rustls = { version = "0.23.37", features = ["fips"] }
sentry = { version = "0.47.0", default-features = false, features = [
"backtrace",
"contexts",
"debug-images",
@@ -76,13 +69,9 @@ sentry = { version = "= 0.47.0", default-features = false, features = [
"tower",
"tracing",
] }
serde = { version = "= 1.0.228", features = ["derive"] }
serde_json = "= 1.0.149"
serde_repr = "= 0.1.20"
serde_with = { version = "= 3.18.0", default-features = false, features = [
"base64",
] }
sqlx = { version = "= 0.8.6", default-features = false, features = [
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
sqlx = { version = "0.8.6", default-features = false, features = [
"runtime-tokio",
"tls-rustls-aws-lc-rs",
"postgres",
@@ -93,29 +82,38 @@ sqlx = { version = "= 0.8.6", default-features = false, features = [
"ipnet",
"json",
] }
tempfile = "= 3.27.0"
thiserror = "= 2.0.18"
time = { version = "= 0.3.47", features = ["macros"] }
tokio = { version = "= 1.52.1", features = ["full", "tracing"] }
tokio-retry2 = "= 0.9.1"
tokio-rustls = "= 0.26.4"
tokio-util = { version = "= 0.7.18", features = ["full"] }
tower = "= 0.5.3"
tower-http = { version = "= 0.6.8", features = ["timeout"] }
tracing = "= 0.1.44"
tracing-error = "= 0.2.1"
tracing-subscriber = { version = "= 0.3.23", features = [
time = "0.3.47"
thiserror = "2.0.18"
tokio = { version = "1.50.0", features = ["full"] }
tokio-rustls = "0.26.4"
tokio-tungstenite = "0.28.0"
tokio-util = "0.7.18"
tower = "0.5.3"
tower-http = { version = "0.6.8", features = [
"compression-br",
"compression-deflate",
"compression-gzip",
"compression-zstd",
"fs",
"timeout",
] }
tower-service = "0.3.3"
tracing = "0.1.44"
tracing-error = "0.2.1"
tracing-subscriber = { version = "0.3.22", features = [
"env-filter",
"json",
"local-time",
"tracing-log",
] }
url = "= 2.5.8"
uuid = { version = "= 1.23.1", features = ["serde", "v4"] }
url = "2.5.8"
uuid = { version = "1.22.0", features = ["v4"] }
ak-axum = { package = "authentik-axum", version = "2026.5.0-rc1", path = "./packages/ak-axum" }
ak-client = { package = "authentik-client", version = "2026.5.0-rc1", path = "./packages/client-rust" }
ak-common = { package = "authentik-common", version = "2026.5.0-rc1", path = "./packages/ak-common", default-features = false }
[profile.dev.package.backtrace]
opt-level = 3
[profile.release]
lto = true
debug = 2
[workspace.lints.rust]
ambiguous_negative_literals = "warn"
@@ -155,17 +153,13 @@ suspicious = { priority = -1, level = "warn" }
### cargo group
multiple_crate_versions = "allow"
### pedantic group
missing_errors_doc = "allow"
missing_panics_doc = "allow"
must_use_candidate = "allow"
redundant_closure_for_method_calls = "allow"
struct_field_names = "allow"
too_many_lines = "allow"
### nursery
missing_const_for_fn = "allow"
option_if_let_else = "allow"
redundant_pub_crate = "allow"
significant_drop_tightening = "allow"
option_if_let_else = "allow"
### restriction group
allow_attributes = "warn"
allow_attributes_without_reason = "warn"
@@ -231,22 +225,9 @@ unwrap_in_result = "warn"
unwrap_used = "warn"
verbose_file_reads = "warn"
[profile.dev.package.backtrace]
opt-level = 3
[profile.dev]
panic = "abort"
[profile.release]
debug = 2
lto = "fat"
# Because of the async runtime, we want to die straightaway if we panic.
panic = "abort"
strip = true
[package]
name = "authentik"
version.workspace = true
version = "2026.5.0-rc1"
authors.workspace = true
edition.workspace = true
readme.workspace = true
@@ -257,26 +238,58 @@ publish.workspace = true
[features]
default = ["core", "proxy"]
core = ["ak-common/core", "dep:pyo3", "dep:sqlx"]
proxy = ["ak-common/proxy"]
proxy = []
core = ["proxy", "dep:sqlx", "dep:pyo3"]
[dependencies]
ak-axum.workspace = true
ak-common.workspace = true
arc-swap.workspace = true
argh.workspace = true
async-trait.workspace = true
aws-lc-rs.workspace = true
axum-server.workspace = true
axum.workspace = true
client-ip.workspace = true
color-eyre.workspace = true
config.workspace = true
console-subscriber.workspace = true
durstr.workspace = true
eyre.workspace = true
forwarded-header-value.workspace = true
futures.workspace = true
glob.workspace = true
http-body-util.workspace = true
hyper-unix-socket.workspace = true
hyper-util.workspace = true
hyper.workspace = true
ipnet.workspace = true
json-subscriber.workspace = true
jsonwebtoken.workspace = true
metrics.workspace = true
metrics-exporter-prometheus.workspace = true
nix.workspace = true
notify.workspace = true
pem.workspace = true
percent-encoding.workspace = true
pin-project-lite.workspace = true
pyo3 = { workspace = true, optional = true }
rcgen.workspace = true
rustls.workspace = true
sentry.workspace = true
serde.workspace = true
serde_json.workspace = true
sqlx = { workspace = true, optional = true }
thiserror.workspace = true
time.workspace = true
tokio-rustls.workspace = true
tokio-tungstenite.workspace = true
tokio-util.workspace = true
tokio.workspace = true
tower-http.workspace = true
tower.workspace = true
tracing-error.workspace = true
tracing-subscriber.workspace = true
tracing.workspace = true
url.workspace = true
uuid.workspace = true
[lints]

105
Makefile
View File

@@ -15,6 +15,10 @@ else
SED_INPLACE = sed -i
endif
GEN_API_TS = gen-ts-api
GEN_API_PY = gen-py-api
GEN_API_GO = gen-go-api
BREW_LDFLAGS :=
BREW_CPPFLAGS :=
BREW_PKG_CONFIG_PATH :=
@@ -74,16 +78,13 @@ rust-test: ## Run the Rust tests
test: ## Run the server tests and produce a coverage report (locally)
$(UV) run coverage run manage.py test --keepdb $(or $(filter-out $@,$(MAKECMDGOALS)),authentik)
$(UV) run coverage combine
$(UV) run coverage html
$(UV) run coverage report
lint-fix-rust:
$(CARGO) +nightly fmt --all -- --config-path "${PWD}/.cargo/rustfmt.toml"
lint-fix: lint-fix-rust ## Lint and automatically fix errors in the python source code. Reports spelling errors.
lint-fix: ## Lint and automatically fix errors in the python source code. Reports spelling errors.
$(UV) run black $(PY_SOURCES)
$(UV) run ruff check --fix $(PY_SOURCES)
$(CARGO) +nightly fmt --all -- --config-path .cargo/rustfmt.toml
lint-spellcheck: ## Reports spelling errors.
npm run lint:spellcheck
@@ -109,14 +110,23 @@ i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that requir
aws-cfn:
cd lifecycle/aws && npm i && $(UV) run npm run aws-cfn
run-server: ## Run the main authentik server process
run: ## Run the authentik server and worker, without auto reloading
$(UV) run ak allinone
run-watch: ## Run the authentik server and worker, with auto reloading
$(UV) run watchexec --on-busy-update=restart --stop-signal=SIGINT --exts py,rs --no-meta --notify -- ak allinone
run-server: ## Run the authentik server, without auto reloading
$(UV) run ak server
run-worker: ## Run the main authentik worker process
run-server-watch: ## Run the authentik server, with auto reloading
$(UV) run watchexec --on-busy-update=restart --stop-signal=SIGINT --exts py,rs --no-meta --notify -- ak server
run-worker: ## Run the authentik worker, without auto reloading
$(UV) run ak worker
run-worker-watch: ## Run the authentik worker, with auto reloading
watchexec --on-busy-update=restart --stop-signal=SIGINT --exts py,rs --no-meta --notify -- $(UV) run ak worker
$(UV) run watchexec --on-busy-update=restart --stop-signal=SIGINT --exts py,rs --no-meta --notify -- ak worker
core-i18n-extract:
$(UV) run ak makemessages \
@@ -124,7 +134,8 @@ core-i18n-extract:
--no-obsolete \
--ignore web \
--ignore internal \
--ignore packages/client-ts \
--ignore ${GEN_API_TS} \
--ignore ${GEN_API_GO} \
--ignore website \
-l en
@@ -147,23 +158,16 @@ dev-create-db:
dev-reset: dev-drop-db dev-create-db migrate ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state.
update-test-mmdb: ## Update test GeoIP and ASN Databases
curl \
-L \
-o ${PWD}/tests/geoip/GeoLite2-ASN-Test.mmdb \
https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-ASN-Test.mmdb
curl \
-L \
-o ${PWD}/tests/geoip/GeoLite2-City-Test.mmdb \
https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-City-Test.mmdb
curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-ASN-Test.mmdb -o ${PWD}/tests/GeoLite2-ASN-Test.mmdb
curl -L https://raw.githubusercontent.com/maxmind/MaxMind-DB/refs/heads/main/test-data/GeoLite2-City-Test.mmdb -o ${PWD}/tests/GeoLite2-City-Test.mmdb
bump: ## Bump authentik version. Usage: make bump version=20xx.xx.xx
ifndef version
$(error Usage: make bump version=20xx.xx.xx )
endif
$(eval current_version := $(shell cat ${PWD}/internal/constants/VERSION))
$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' ${PWD}/pyproject.toml
$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' ${PWD}/pyproject.toml ${PWD}/Cargo.toml
$(SED_INPLACE) 's/^VERSION = ".*"/VERSION = "$(version)"/' ${PWD}/authentik/__init__.py
$(SED_INPLACE) "s/version = \"${current_version}\"/version = \"$(version)\"" ${PWD}/Cargo.toml ${PWD}/Cargo.lock
$(MAKE) gen-build gen-compose aws-cfn
$(SED_INPLACE) "s/\"${current_version}\"/\"$(version)\"/" ${PWD}/package.json ${PWD}/package-lock.json ${PWD}/web/package.json ${PWD}/web/package-lock.json
echo -n $(version) > ${PWD}/internal/constants/VERSION
@@ -197,7 +201,7 @@ gen-changelog: ## (Release) generate the changelog based from the commits since
gen-diff: ## (Release) generate the changelog diff between the current schema and the last version
$(eval last_version := $(shell git tag --list 'version/*' --sort 'version:refname' | grep -vE 'rc\d+$$' | tail -1))
git show ${last_version}:schema.yml > schema-old.yml
docker compose -f scripts/compose.yml run --rm --user "${UID}:${GID}" diff \
docker compose -f scripts/api/compose.yml run --rm --user "${UID}:${GID}" diff \
--markdown \
/local/diff.md \
/local/schema-old.yml \
@@ -207,26 +211,51 @@ gen-diff: ## (Release) generate the changelog diff between the current schema a
$(SED_INPLACE) 's/}/&#125;/g' diff.md
npx prettier --write diff.md
gen-client-go: ## Build and install the authentik API for Golang
$(UV) run make -C "${PWD}/packages/client-go" build
gen-clean-ts: ## Remove generated API client for TypeScript
rm -rf ${PWD}/${GEN_API_TS}/
rm -rf ${PWD}/web/node_modules/@goauthentik/api/
gen-client-rust: ## Build and install the authentik API for Rust
$(UV) run make -C "${PWD}/packages/client-rust" build version=${NPM_VERSION}
make lint-fix-rust
gen-clean-py: ## Remove generated API client for Python
rm -rf ${PWD}/${GEN_API_PY}
gen-client-ts: ## Build and install the authentik API for Typescript into the authentik UI Application
make -C "${PWD}/packages/client-ts" build
npm --prefix web install
gen-clean-go: ## Remove generated API client for Go
rm -rf ${PWD}/${GEN_API_GO}
_gen-clients: gen-client-go gen-client-rust gen-client-ts
gen-clients: ## Build and install API clients used by authentik
$(MAKE) _gen-clients -j
gen-clean: gen-clean-ts gen-clean-go gen-clean-py ## Remove generated API clients
gen: gen-build gen-clients ## Build and install API schema and clients used by authentik
gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescript into the authentik UI Application
docker compose -f scripts/api/compose.yml run --rm --user "${UID}:${GID}" gen \
generate \
-i /local/schema.yml \
-g typescript-fetch \
-o /local/${GEN_API_TS} \
-c /local/scripts/api/ts-config.yaml \
--additional-properties=npmVersion=${NPM_VERSION} \
--git-repo-id authentik \
--git-user-id goauthentik
cd ${PWD}/${GEN_API_TS} && npm i
cd ${PWD}/${GEN_API_TS} && npm link
cd ${PWD}/web && npm link @goauthentik/api
gen-client-py: gen-clean-py ## Build and install the authentik API for Python
mkdir -p ${PWD}/${GEN_API_PY}
git clone --depth 1 https://github.com/goauthentik/client-python.git ${PWD}/${GEN_API_PY}
cp ${PWD}/schema.yml ${PWD}/${GEN_API_PY}
make -C ${PWD}/${GEN_API_PY} build version=${NPM_VERSION}
gen-client-go: gen-clean-go ## Build and install the authentik API for Golang
mkdir -p ${PWD}/${GEN_API_GO}
git clone --depth 1 https://github.com/goauthentik/client-go.git ${PWD}/${GEN_API_GO}
cp ${PWD}/schema.yml ${PWD}/${GEN_API_GO}
make -C ${PWD}/${GEN_API_GO} build version=${NPM_VERSION}
go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO}
gen-dev-config: ## Generate a local development config file
$(UV) run scripts/generate_config.py
gen: gen-build gen-client-ts
#########################
## Node.js
#########################
@@ -295,7 +324,7 @@ docs-api-build:
npm run --prefix website -w api build
docs-api-watch: ## Build and watch the API documentation
npm run --prefix website -w api generate
npm run --prefix website -w api build:api
npm run --prefix website -w api start
docs-api-clean: ## Clean generated API documentation
@@ -306,6 +335,7 @@ docs-api-clean: ## Clean generated API documentation
#########################
docker: ## Build a docker image of the current source tree
mkdir -p ${GEN_API_TS}
DOCKER_BUILDKIT=1 docker build . -f lifecycle/container/Dockerfile --progress plain --tag ${DOCKER_IMAGE}
test-docker:
@@ -341,19 +371,18 @@ ci-lint-pending-migrations: ci--meta-debug
$(UV) run ak makemigrations --check
ci-lint-cargo-deny: ci--meta-debug
$(CARGO) deny --locked --workspace check --config "${PWD}/.cargo/deny.toml"
$(CARGO) deny --locked --workspace check --config .cargo/deny.toml
ci-lint-cargo-machete: ci--meta-debug
$(CARGO) machete
ci-lint-rustfmt: ci--meta-debug
$(CARGO) +nightly fmt --all --check -- --config-path "${PWD}/.cargo/rustfmt.toml"
$(CARGO) +nightly fmt --all --check -- --config-path .cargo/rustfmt.toml
ci-lint-clippy: ci--meta-debug
$(CARGO) clippy --workspace -- -D warnings
$(CARGO) clippy -- -D warnings
ci-test: ci--meta-debug
$(UV) run coverage run manage.py test --keepdb --parallel auto authentik
$(UV) run coverage combine
$(UV) run coverage run manage.py test --keepdb authentik
$(UV) run coverage report
$(UV) run coverage xml

View File

@@ -18,10 +18,10 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni
(.x being the latest patch release for each version)
| Version | Supported |
| --------- | --------- |
| 2025.12.x | ✅ |
| 2026.2.x | ✅ |
| Version | Supported |
| ---------- | ---------- |
| 2025.12.x | ✅ |
| 2026.2.x | ✅ |
## Reporting a Vulnerability
@@ -60,40 +60,6 @@ authentik reserves the right to reclassify CVSS as necessary. To determine sever
| 7.0 8.9 | High |
| 9.0 10.0 | Critical |
## Intended functionality
The following capabilities are part of intentional system design and should not be reported as security vulnerabilities:
- Expressions (property mappings/policies/prompts) can execute arbitrary Python code without safeguards.
This is expected behavior. Any user with permission to create or modify objects containing expression fields can write code that is executed within authentik. If a vulnerability allows a user without the required permissions to write or modify code and have it executed, that would be a valid security report.
However, the fact that expressions are executed as part of normal operations is not considered a privilege escalation or security vulnerability.
- Blueprints can access all files on the filesystem.
This access is intentional to allow legitimate configuration and deployment tasks. It does not represent a security problem by itself.
- Importing blueprints allows arbitrary modification of application objects.
This is intended functionality. This behavior reflects the privileged design of blueprint imports. It is "exploitable" when importing blueprints from untrusted sources without reviewing the blueprint beforehand. However, any method to create, modify or execute blueprints without the required permissions would be a valid security report.
- Flow imports may contain objects other than flows (such as policies, users, groups, etc.)
This is expected behavior as flow imports are blueprint files.
- Prompt HTML is not escaped.
Prompts intentionally allow raw HTML, including script tags, so they can be used to create interactive or customized user interface elements. Because of this, scripts within prompts may affect or interact with the surrounding page as designed.
- Open redirects that do not include tokens or other sensitive information are not considered a security vulnerability.
Redirects that only change navigation flow and do not expose session tokens, API keys, or other confidential data are considered acceptable and do not require reporting.
- Outgoing network requests are not filtered.
The destinations of outgoing network requests (HTTP, TCP, etc.) made by authentik to configurable endpoints through objects such as OAuth Sources, SSO Providers, and others are not validated. Depending on your threat model, these requests should be restricted at the network level using appropriate firewall or network policies.
## Disclosure process
1. Report from Github or Issue is reported via Email as listed above.

View File

@@ -8,8 +8,8 @@ from rest_framework.response import Response
from rest_framework.viewsets import ViewSet
from authentik.core.api.utils import PassiveSerializer
from authentik.lib.api import Models
from authentik.lib.utils.reflection import get_apps
from authentik.policies.event_matcher.models import model_choices
class AppSerializer(PassiveSerializer):
@@ -42,6 +42,6 @@ class ModelViewSet(ViewSet):
def list(self, request: Request) -> Response:
"""Read-only view list all installed models"""
data = []
for name, label in Models.choices:
for name, label in model_choices():
data.append({"name": name, "label": label})
return Response(AppSerializer(data, many=True).data)

View File

@@ -106,7 +106,6 @@ class Backend:
self,
name: str,
request: HttpRequest | None = None,
use_cache: bool = True,
) -> dict[str, str] | None:
"""
Get URLs for each theme variant when filename contains %(theme)s.
@@ -122,7 +121,7 @@ class Backend:
return None
return {
theme: self.file_url(substitute_theme(name, theme), request, use_cache=use_cache)
theme: self.file_url(substitute_theme(name, theme), request, use_cache=True)
for theme in get_valid_themes()
}

View File

@@ -92,6 +92,7 @@ class FileBackend(ManageableBackend):
"nbf": now() - timedelta(seconds=15),
},
key=sha256(f"{settings.SECRET_KEY}:{self.usage}".encode()).hexdigest(),
# Must match crates/authentik-server/src/static.rs
algorithm="HS256",
)
url = f"{prefix}/files/{path}?token={token}"

View File

@@ -51,7 +51,6 @@ class PassthroughBackend(Backend):
self,
name: str,
request: HttpRequest | None = None,
use_cache: bool = True,
) -> dict[str, str] | None:
"""Support themed URLs for external URLs with %(theme)s placeholder.

View File

@@ -1,7 +1,7 @@
from collections.abc import Generator, Iterator
from contextlib import contextmanager
from tempfile import SpooledTemporaryFile
from urllib.parse import urlsplit, urlunsplit
from urllib.parse import urlsplit
import boto3
from botocore.config import Config
@@ -164,19 +164,16 @@ class S3Backend(ManageableBackend):
)
def _file_url(name: str, request: HttpRequest | None) -> str:
client = self.client
params = {
"Bucket": self.bucket_name,
"Key": f"{self.base_path}/{name}",
}
operation_name = "GetObject"
operation_model = client.meta.service_model.operation_model(operation_name)
request_dict = client._convert_to_request_dict(
params,
operation_model,
endpoint_url=client.meta.endpoint_url,
context={"is_presign_request": True},
url = self.client.generate_presigned_url(
"get_object",
Params=params,
ExpiresIn=expires_in,
HttpMethod="GET",
)
# Support custom domain for S3-compatible storage (so not AWS)
@@ -186,8 +183,9 @@ class S3Backend(ManageableBackend):
CONFIG.get(f"storage.{self.name}.custom_domain", None),
)
if custom_domain:
parsed = urlsplit(url)
scheme = "https" if use_https else "http"
path = request_dict["url_path"]
path = parsed.path
# When using path-style addressing, the presigned URL contains the bucket
# name in the path (e.g., /bucket-name/key). Since custom_domain must
@@ -202,22 +200,9 @@ class S3Backend(ManageableBackend):
if not path.startswith("/"):
path = f"/{path}"
custom_base = urlsplit(f"{scheme}://{custom_domain}")
url = f"{scheme}://{custom_domain}{path}?{parsed.query}"
# Sign the final public URL instead of signing the internal S3 endpoint and
# rewriting it afterwards. Presigned SigV4 URLs include the host header in the
# canonical request, so post-sign host changes break strict backends like RustFS.
public_path = f"{custom_base.path.rstrip('/')}{path}" if custom_base.path else path
request_dict["url_path"] = public_path
request_dict["url"] = urlunsplit(
(custom_base.scheme, custom_base.netloc, public_path, "", "")
)
return client._request_signer.generate_presigned_url(
request_dict,
operation_name,
expires_in=expires_in,
)
return url
if use_cache:
return self._cache_get_or_set(name, request, _file_url, expires_in)

View File

@@ -1,5 +1,4 @@
from unittest import skipUnless
from urllib.parse import parse_qs, urlsplit
from botocore.exceptions import UnsupportedSignatureVersionError
from django.test import TestCase
@@ -169,44 +168,6 @@ class TestS3Backend(FileTestS3BackendMixin, TestCase):
f"URL: {url}",
)
@CONFIG.patch("storage.s3.secure_urls", False)
@CONFIG.patch("storage.s3.addressing_style", "path")
def test_file_url_custom_domain_resigns_for_custom_host(self):
"""Test presigned URLs are signed for the custom domain host.
Host-changing custom domains must produce a signature query string for
the public host, not reuse the internal endpoint signature.
"""
bucket_name = self.media_s3_bucket_name
key_name = "application-icons/test.svg"
custom_domain = f"files.example.test:8020/{bucket_name}"
endpoint_signed_url = self.media_s3_backend.client.generate_presigned_url(
"get_object",
Params={
"Bucket": bucket_name,
"Key": f"{self.media_s3_backend.base_path}/{key_name}",
},
ExpiresIn=900,
HttpMethod="GET",
)
with CONFIG.patch("storage.media.s3.custom_domain", custom_domain):
custom_url = self.media_s3_backend.file_url(key_name, use_cache=False)
endpoint_parts = urlsplit(endpoint_signed_url)
custom_parts = urlsplit(custom_url)
self.assertEqual(custom_parts.scheme, "http")
self.assertEqual(custom_parts.netloc, "files.example.test:8020")
self.assertEqual(parse_qs(custom_parts.query)["X-Amz-SignedHeaders"], ["host"])
self.assertNotEqual(
custom_parts.query,
endpoint_parts.query,
"Custom-domain URLs must be signed for the public host, not reuse the endpoint "
"signature query string.",
)
def test_themed_urls_without_theme_variable(self):
"""Test themed_urls returns None when filename has no %(theme)s"""
result = self.media_s3_backend.themed_urls("logo.png")

View File

@@ -74,10 +74,6 @@ class FileManager:
) -> str:
"""
Get URL for accessing the file.
Set ``use_cache=False`` when the caller needs a fresh signed URL instead
of a cached one, for example when serializing flow/login payloads that
may be refreshed after the previous JWT has expired.
"""
if not name:
return ""
@@ -87,7 +83,7 @@ class FileManager:
for backend in self.backends:
if backend.supports_file(name):
return backend.file_url(name, request, use_cache=use_cache)
return backend.file_url(name, request)
LOGGER.warning(f"Could not find file backend for file: {name}")
return ""
@@ -96,14 +92,10 @@ class FileManager:
self,
name: str | None,
request: HttpRequest | Request | None = None,
use_cache: bool = True,
) -> dict[str, str] | None:
"""
Get URLs for each theme variant when filename contains %(theme)s.
``use_cache`` has the same semantics as ``file_url()`` and allows
callers to force regeneration of expiring signed URLs.
Returns dict mapping theme to URL if %(theme)s present, None otherwise.
"""
if not name:
@@ -114,7 +106,7 @@ class FileManager:
for backend in self.backends:
if backend.supports_file(name):
return backend.themed_urls(name, request, use_cache=use_cache)
return backend.themed_urls(name, request)
return None

View File

@@ -1,7 +1,6 @@
"""Test file service layer"""
from unittest import skipUnless
from unittest.mock import Mock
from urllib.parse import urlparse
from django.http import HttpRequest
@@ -54,19 +53,6 @@ class TestResolveFileUrlBasic(TestCase):
result = manager.file_url("/static/authentik/sources/icon.svg")
self.assertEqual(result, "/static/authentik/sources/icon.svg")
def test_file_url_forwards_use_cache(self):
    """file_url must pass ``use_cache`` through to the matching backend."""
    fake_backend = Mock()
    fake_backend.supports_file.return_value = True
    fake_backend.file_url.return_value = "/files/media/public/test.png?token=fresh"
    manager = FileManager(FileUsage.MEDIA)
    manager.backends = [fake_backend]
    url = manager.file_url("test.png", use_cache=False)
    self.assertEqual(url, "/files/media/public/test.png?token=fresh")
    fake_backend.file_url.assert_called_once_with("test.png", None, use_cache=False)
class TestResolveFileUrlFileBackend(FileTestFileBackendMixin, TestCase):
def test_resolve_storage_file(self):

View File

@@ -106,14 +106,14 @@ class TokenAuthentication(BaseAuthentication):
if not auth_credentials:
return None
# first, check traditional tokens
key_token = Token.objects.filter(
key_token = Token.filter_not_expired(
key=auth_credentials, intent=TokenIntents.INTENT_API
).first()
if key_token:
CTX_AUTH_VIA.set("api_token")
return key_token.user, key_token
# then try to auth via JWT
jwt_token = AccessToken.objects.filter(
jwt_token = AccessToken.filter_not_expired(
token=auth_credentials, _scope__icontains=SCOPE_AUTHENTIK_API
).first()
if jwt_token:

View File

@@ -1,18 +1,10 @@
"""Pagination which includes total pages and current page"""
from typing import TYPE_CHECKING
from drf_spectacular.plumbing import build_object_type
from rest_framework import pagination
from rest_framework.response import Response
from authentik.api.search.ql import QLSearch
from authentik.api.v3.schema.pagination import PAGINATION
from authentik.api.v3.schema.search import AUTOCOMPLETE_SCHEMA
if TYPE_CHECKING:
from django.db.models import QuerySet
from rest_framework.request import Request
from authentik.api.v3.schema.response import PAGINATION
class Pagination(pagination.PageNumberPagination):
@@ -21,14 +13,14 @@ class Pagination(pagination.PageNumberPagination):
page_query_param = "page"
page_size_query_param = "page_size"
def get_page_size(self, request: Request) -> int:
def get_page_size(self, request):
if self.page_size_query_param in request.query_params:
page_size = super().get_page_size(request)
if page_size is not None:
return min(super().get_page_size(request), request.tenant.pagination_max_page_size)
return request.tenant.pagination_default_page_size
def get_paginated_response(self, data) -> Response:
def get_paginated_response(self, data):
previous_page_number = 0
if self.page.has_previous():
previous_page_number = self.page.previous_page_number()
@@ -47,33 +39,16 @@ class Pagination(pagination.PageNumberPagination):
"end_index": self.page.end_index(),
},
"results": data,
"autocomplete": self.get_autocomplete(),
}
)
def paginate_queryset(self, queryset: QuerySet, request: Request, view=None):
    # Remember the view so get_autocomplete() can inspect it later.
    self.view = view
    return super().paginate_queryset(queryset, request, view)
def get_autocomplete(self):
    """Build QL autocomplete introspection data for views that declare QL fields."""
    ql_schema = QLSearch().get_schema(self.request, self.view)
    if not hasattr(self.view, "get_ql_fields"):
        return {}
    from authentik.api.search.schema import AKQLSchemaSerializer

    model = self.page.paginator.object_list.model
    return AKQLSchemaSerializer().serialize(ql_schema(model))
def get_paginated_response_schema(self, schema):
return build_object_type(
properties={
"pagination": PAGINATION.ref,
"results": schema,
"autocomplete": AUTOCOMPLETE_SCHEMA.ref,
},
required=["pagination", "results", "autocomplete"],
required=["pagination", "results"],
)

103
authentik/api/schema.py Normal file
View File

@@ -0,0 +1,103 @@
"""Error Response schema, from https://github.com/axnsan12/drf-yasg/issues/224"""
from collections.abc import Callable
from typing import Any
from drf_spectacular.generators import SchemaGenerator
from drf_spectacular.plumbing import ResolvedComponent
from drf_spectacular.renderers import OpenApiJsonRenderer
from drf_spectacular.settings import spectacular_settings
from structlog.stdlib import get_logger
from authentik.api.apps import AuthentikAPIConfig
from authentik.api.v3.schema.query import QUERY_PARAMS
from authentik.api.v3.schema.response import (
GENERIC_ERROR,
GENERIC_ERROR_RESPONSE,
PAGINATION,
VALIDATION_ERROR,
VALIDATION_ERROR_RESPONSE,
)
LOGGER = get_logger()
def preprocess_schema_exclude_non_api(endpoints: list[tuple[str, Any, Any, Callable]], **kwargs):
    """Filter out all API Views which are not mounted under /api"""
    prefix = "/" + AuthentikAPIConfig.mountpoint
    kept = []
    for endpoint in endpoints:
        # endpoint is (path, path_regex, method, callback); only the path matters here
        if endpoint[0].startswith(prefix):
            kept.append(endpoint)
    return kept
def postprocess_schema_register(
    result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
    """Register custom schema components"""
    LOGGER.debug("Registering custom schemas")
    # Hand-built components plus every shared query parameter.
    components = (
        PAGINATION,
        GENERIC_ERROR,
        GENERIC_ERROR_RESPONSE,
        VALIDATION_ERROR,
        VALIDATION_ERROR_RESPONSE,
        *QUERY_PARAMS.values(),
    )
    for component in components:
        generator.registry.register_on_missing(component)
    return result
def postprocess_schema_responses(
    result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
    """Attach default 400/403 error responses to every operation.

    Also marks ``PromptChallengeResponseRequest`` as accepting arbitrary extra
    properties, since that serializer takes dynamic keys
    (workaround for authentik/stages/prompt/stage.py).
    """
    LOGGER.debug("Adding default error responses")
    for path in result["paths"].values():
        for method in path.values():
            # Only fill in defaults; explicitly declared responses win.
            method["responses"].setdefault("400", VALIDATION_ERROR_RESPONSE.ref)
            method["responses"].setdefault("403", GENERIC_ERROR_RESPONSE.ref)
    result["components"] = generator.registry.build(spectacular_settings.APPEND_COMPONENTS)
    # Direct dict lookup instead of a linear scan over every component name.
    schemas = result["components"]["schemas"]
    if "PromptChallengeResponseRequest" in schemas:
        schemas["PromptChallengeResponseRequest"]["additionalProperties"] = {}
    return result
def postprocess_schema_query_params(
    result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
    """Optimize pagination parameters, instead of redeclaring parameters for each endpoint
    declare them globally and refer to them"""
    LOGGER.debug("Deduplicating query parameters")
    for path in result["paths"].values():
        for method in path.values():
            parameters = method.get("parameters", [])
            for idx, param in enumerate(parameters):
                shared = QUERY_PARAMS.get(param["name"])
                if shared is not None:
                    # Swap the inline declaration for a $ref to the shared component.
                    parameters[idx] = shared.ref
    return result
def postprocess_schema_remove_unused(
    result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
    """Remove unused components"""
    # To check whether a schema is used, render everything to JSON and substring
    # check that; less efficient than walking the tree but a lot simpler, with
    # no possibility that we miss something.
    rendered = OpenApiJsonRenderer().render(result, renderer_context={}).decode()
    removed = 0
    for name in result["components"][ResolvedComponent.SCHEMA]:
        if rendered.count(f"#/components/{ResolvedComponent.SCHEMA}/{name}") >= 1:
            continue
        del generator.registry[(name, ResolvedComponent.SCHEMA)]
        removed += 1
    LOGGER.debug("Removing unused components", count=removed)
    result["components"] = generator.registry.build(spectacular_settings.APPEND_COMPONENTS)
    return result

View File

@@ -1,75 +0,0 @@
"""Error Response schema, from https://github.com/axnsan12/drf-yasg/issues/224"""
from collections.abc import Callable
from typing import Any
from drf_spectacular.contrib.django_filters import (
DjangoFilterExtension as BaseDjangoFilterExtension,
)
from drf_spectacular.generators import SchemaGenerator
from drf_spectacular.plumbing import (
ResolvedComponent,
follow_field_source,
)
from drf_spectacular.renderers import OpenApiJsonRenderer
from drf_spectacular.settings import spectacular_settings
from structlog.stdlib import get_logger
from authentik.api.apps import AuthentikAPIConfig
LOGGER = get_logger()
def preprocess_schema_exclude_non_api(endpoints: list[tuple[str, Any, Any, Callable]], **kwargs):
    """Filter out all API Views which are not mounted under /api"""
    # Hoist the prefix; each tuple is (path, path_regex, method, callback).
    api_prefix = "/" + AuthentikAPIConfig.mountpoint
    return [endpoint for endpoint in endpoints if endpoint[0].startswith(api_prefix)]
def postprocess_schema_remove_unused(
    result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
    """Remove unused components"""
    # Render the schema to JSON and substring-search for $ref usages; less
    # efficient than walking the tree but much simpler and misses nothing.
    rendered = OpenApiJsonRenderer().render(result, renderer_context={}).decode()
    unused = [
        key
        for key in result["components"][ResolvedComponent.SCHEMA]
        if rendered.count(f"#/components/{ResolvedComponent.SCHEMA}/{key}") == 0
    ]
    for key in unused:
        del generator.registry[(key, ResolvedComponent.SCHEMA)]
    LOGGER.debug("Removing unused components", count=len(unused))
    result["components"] = generator.registry.build(spectacular_settings.APPEND_COMPONENTS)
    return result
class DjangoFilterExtension(BaseDjangoFilterExtension):
    """Filter extension avoiding double-appended ``to_field_name`` segments.

    From https://github.com/netbox-community/netbox/pull/21521:
    Overrides drf-spectacular's DjangoFilterExtension to fix a regression in v0.29.0 where
    _get_model_field() incorrectly double-appends to_field_name when field_name already ends
    with that value (e.g. field_name='tags__slug', to_field_name='slug' produces the invalid
    path ['tags', 'slug', 'slug']). This caused hundreds of spurious warnings during schema
    generation.
    See: https://github.com/netbox-community/netbox/issues/20787
    https://github.com/tfranzel/drf-spectacular/issues/1475
    """

    priority = 1

    def _get_model_field(self, filter_field, model):
        field_name = filter_field.field_name
        if not field_name:
            return None
        source_path = field_name.split("__")
        target_field = filter_field.extra.get("to_field_name")
        # Only append to_field_name when it is not already the final segment.
        if target_field is not None and source_path[-1] != target_field:
            source_path.append(target_field)
        return follow_field_source(model, source_path, emit_warnings=False)

View File

@@ -1,287 +0,0 @@
"""Error Response schema, from https://github.com/axnsan12/drf-yasg/issues/224"""
import functools
import inspect
import re
from collections import defaultdict
from enum import Enum
from django.db.models import Choices
from django.utils.translation import get_language
from drf_spectacular.drainage import error, warn
from drf_spectacular.hooks import postprocess_schema_enum_id_removal
from drf_spectacular.plumbing import (
ResolvedComponent,
deep_import_string,
list_hash,
safe_ref,
)
from drf_spectacular.settings import spectacular_settings
from inflection import camelize
from structlog.stdlib import get_logger
LOGGER = get_logger()
# See https://github.com/tfranzel/drf-spectacular/blob/master/drf_spectacular/hooks.py
# and https://github.com/tfranzel/drf-spectacular/issues/520
def postprocess_schema_enums(result, generator, **kwargs):  # noqa: PLR0912, PLR0915
    """
    simple replacement of Enum/Choices that globally share the same name and have
    the same choices. Aids client generation to not generate a separate enum for
    every occurrence. only takes effect when replacement is guaranteed to be correct.
    """

    def is_enum_prop(prop_schema):
        # True for a plain enum property or an array whose items are an enum.
        return (
            "enum" in prop_schema
            or prop_schema.get("type") == "array"
            and "enum" in prop_schema.get("items", {})
        )

    def iter_field_schemas():
        # Yields (component_name, properties-dict) pairs for every location in
        # the document that may hold an enum property.
        def iter_prop_containers(schema, component_name=None):
            if not component_name:
                for _component_name, _schema in schema.items():
                    # Strip the Patched/...Request wrappers so split variants of
                    # the same serializer share one component name.
                    if spectacular_settings.COMPONENT_SPLIT_PATCH:
                        _component_name = re.sub("^Patched(.+)", r"\1", _component_name)
                    if spectacular_settings.COMPONENT_SPLIT_REQUEST:
                        _component_name = re.sub("(.+)Request$", r"\1", _component_name)
                    yield from iter_prop_containers(_schema, _component_name)
            elif isinstance(schema, list):
                for item in schema:
                    yield from iter_prop_containers(item, component_name)
            elif isinstance(schema, dict):
                if schema.get("properties"):
                    yield component_name, schema["properties"]
                yield from iter_prop_containers(schema.get("oneOf", []), component_name)
                yield from iter_prop_containers(schema.get("allOf", []), component_name)
                yield from iter_prop_containers(schema.get("anyOf", []), component_name)

        def iter_path_parameters():
            # Enum query/path parameters are treated like fields with an empty
            # component name.
            for path in result.get("paths", {}).values():
                for operation in path.values():
                    for parameter in operation.get("parameters", []):
                        parameter_schema = parameter.get("schema", {})
                        if is_enum_prop(parameter_schema):
                            # Move description into enum schema
                            if "description" in parameter:
                                parameter_schema["description"] = parameter.pop("description")
                            if "name" not in parameter:
                                continue
                            yield "", {parameter["name"]: parameter_schema}

        component_schemas = result.get("components", {}).get("schemas", {})
        yield from iter_prop_containers(component_schemas)
        yield from iter_path_parameters()

    def create_enum_component(name, schema):
        # Register (if missing) and return a named schema component for an enum.
        component = ResolvedComponent(
            name=name,
            type=ResolvedComponent.SCHEMA,
            schema=schema,
            object=name,
        )
        generator.registry.register_on_missing(component)
        return component

    def extract_hash(schema):
        if "x-spec-enum-id" in schema:
            # try to use the injected enum hash first as it generated from (name, value) tuples,
            # which prevents collisions on choice sets only differing in labels not values.
            return schema["x-spec-enum-id"]
        else:
            # fall back to actual list hashing when we encounter enums not generated by us.
            # remove blank/null entry for hashing. will be reconstructed in the last step
            return list_hash([(i, i) for i in schema["enum"] if i not in ("", None)])

    # hash -> canonical name mapping from ENUM_NAME_OVERRIDES settings.
    overrides = load_enum_name_overrides()

    prop_hash_mapping = defaultdict(set)
    hash_name_mapping = defaultdict(set)
    # collect all enums, their names and choice sets
    for component_name, props in iter_field_schemas():
        for prop_name, prop_schema in props.items():
            _prop_schema = prop_schema
            if prop_schema.get("type") == "array":
                _prop_schema = prop_schema.get("items", {})
            if "enum" not in _prop_schema:
                continue
            prop_enum_cleaned_hash = extract_hash(_prop_schema)
            prop_hash_mapping[prop_name].add(prop_enum_cleaned_hash)
            hash_name_mapping[prop_enum_cleaned_hash].add((component_name, prop_name))

    # get the suffix to be used for enums from settings
    enum_suffix = spectacular_settings.ENUM_SUFFIX
    # traverse all enum properties and generate a name for the choice set. naming collisions
    # are resolved and a warning is emitted. giving a choice set multiple names is technically
    # correct but potentially unwanted. also emit a warning there to make the user aware.
    enum_name_mapping = {}
    for prop_name, prop_hash_set in prop_hash_mapping.items():
        for prop_hash in prop_hash_set:
            if prop_hash in overrides:
                enum_name = overrides[prop_hash]
            elif len(prop_hash_set) == 1:
                # prop_name has been used exclusively for one choice set (best case)
                enum_name = f"{camelize(prop_name)}{enum_suffix}"
            elif len(hash_name_mapping[prop_hash]) == 1:
                # prop_name has multiple choice sets, but each one limited to one component only
                component_name, _ = next(iter(hash_name_mapping[prop_hash]))
                enum_name = f"{camelize(component_name)}{camelize(prop_name)}{enum_suffix}"
            else:
                enum_name = f"{camelize(prop_name)}{prop_hash[:3].capitalize()}{enum_suffix}"
                warn(
                    f"enum naming encountered a non-optimally resolvable collision for fields "
                    f'named "{prop_name}". The same name has been used for multiple choice sets '
                    f'in multiple components. The collision was resolved with "{enum_name}". '
                    f"add an entry to ENUM_NAME_OVERRIDES to fix the naming."
                )
            if enum_name_mapping.get(prop_hash, enum_name) != enum_name:
                warn(
                    f"encountered multiple names for the same choice set ({enum_name}). This "
                    f"may be unwanted even though the generated schema is technically correct. "
                    f"Add an entry to ENUM_NAME_OVERRIDES to fix the naming."
                )
                del enum_name_mapping[prop_hash]
            else:
                enum_name_mapping[prop_hash] = enum_name
            # (hash, prop_name) keys allow resolution when a hash maps to
            # multiple names and the hash-only entry was deleted above.
            enum_name_mapping[(prop_hash, prop_name)] = enum_name

    # replace all enum occurrences with a enum schema component. cut out the
    # enum, replace it with a reference and add a corresponding component.
    for _, props in iter_field_schemas():
        for prop_name, _prop_schema in props.items():
            prop_schema = _prop_schema
            is_array = prop_schema.get("type") == "array"
            if is_array:
                prop_schema = prop_schema.get("items", {})
            if "enum" not in prop_schema:
                continue
            prop_enum_original_list = prop_schema["enum"]
            prop_schema["enum"] = [i for i in prop_schema["enum"] if i not in ["", None]]
            prop_hash = extract_hash(prop_schema)
            # when choice sets are reused under multiple names, the generated name cannot be
            # resolved from the hash alone. fall back to prop_name and hash for resolution.
            enum_name = enum_name_mapping.get(prop_hash) or enum_name_mapping[prop_hash, prop_name]

            # split property into remaining property and enum component parts
            enum_schema = {k: v for k, v in prop_schema.items() if k in ["type", "enum"]}
            prop_schema = {
                k: v for k, v in prop_schema.items() if k not in ["type", "enum", "x-spec-enum-id"]
            }
            # separate actual description from name-value tuples
            if spectacular_settings.ENUM_GENERATE_CHOICE_DESCRIPTION:
                if prop_schema.get("description", "").startswith("*"):
                    enum_schema["description"] = prop_schema.pop("description")
                elif "\n\n*" in prop_schema.get("description", ""):
                    _, _, post = prop_schema["description"].partition("\n\n*")
                    enum_schema["description"] = "*" + post

            components = [create_enum_component(enum_name, schema=enum_schema)]
            if spectacular_settings.ENUM_ADD_EXPLICIT_BLANK_NULL_CHOICE:
                if "" in prop_enum_original_list:
                    components.append(
                        create_enum_component(f"Blank{enum_suffix}", schema={"enum": [""]})
                    )
                if None in prop_enum_original_list:
                    if spectacular_settings.OAS_VERSION.startswith("3.1"):
                        components.append(
                            create_enum_component(f"Null{enum_suffix}", schema={"type": "null"})
                        )
                    else:
                        components.append(
                            create_enum_component(f"Null{enum_suffix}", schema={"enum": [None]})
                        )
                    # undo OAS 3.1 type list NULL construction as we cover
                    # this in a separate component already
                    if spectacular_settings.OAS_VERSION.startswith("3.1") and isinstance(
                        enum_schema["type"], list
                    ):
                        enum_schema["type"] = [t for t in enum_schema["type"] if t != "null"][0]

            if len(components) == 1:
                prop_schema.update(components[0].ref)
            else:
                prop_schema.update({"oneOf": [c.ref for c in components]})

            patch_target = props[prop_name]  # noqa: PLR1733
            if is_array:
                patch_target = patch_target["items"]
            # Replace existing schema information with reference
            patch_target.clear()
            patch_target.update(safe_ref(prop_schema))

    # sort again with additional components
    result["components"] = generator.registry.build(spectacular_settings.APPEND_COMPONENTS)
    # remove remaining ids that were not part of this hook (operation parameters mainly)
    postprocess_schema_enum_id_removal(result, generator)
    return result
# Fixed version of `load_enum_name_overrides()` with a LRU cache based on language
# *and* enum overrides.
# Without this, API generation breaks if there is more than 1 API present (such as in split APIs)
# Original source: drf-spectacular/drf_spectacular/plumbing.py
def load_enum_name_overrides():
    """Compute a cache key from language + overrides, then delegate to the cached loader."""
    parts = [get_language() or ""]
    for override_key, override_value in sorted(spectacular_settings.ENUM_NAME_OVERRIDES.items()):
        parts.append(f"{override_key}:{override_value}")
    return _load_enum_name_overrides(";".join(parts))
# Original source: drf-spectacular/drf_spectacular/plumbing.py
# Only change: cache_key argument instead of language.
@functools.lru_cache
def _load_enum_name_overrides(cache_key):
    """Build the choice-set-hash -> override-name mapping from ENUM_NAME_OVERRIDES.

    ``cache_key`` only serves as the lru_cache key (language + overrides); the
    actual data is always read from ``spectacular_settings.ENUM_NAME_OVERRIDES``.
    """
    overrides = {}
    for name, _choices in spectacular_settings.ENUM_NAME_OVERRIDES.items():
        choices = _choices
        # Overrides may be given as a dotted import path string.
        if isinstance(choices, str):
            choices = deep_import_string(choices)
        if not choices:
            warn(
                f"unable to load choice override for {name} from ENUM_NAME_OVERRIDES. "
                f"please check module path string."
            )
            continue
        # Django Choices classes and plain Enums are unwrapped to (value, label) pairs.
        if inspect.isclass(choices) and issubclass(choices, Choices):
            choices = choices.choices
        if inspect.isclass(choices) and issubclass(choices, Enum):
            choices = [(c.value, c.name) for c in choices]
        normalized_choices = []
        for choice in choices:
            # Allow None values in the simple values list case
            if isinstance(choice, str) or choice is None:
                # TODO warning
                normalized_choices.append((choice, choice))  # simple choice list
            elif isinstance(choice[1], (list, tuple)):
                normalized_choices.extend(choice[1])  # categorized nested choices
            else:
                normalized_choices.append(choice)  # normal 2-tuple form
        # Get all of choice values that should be used in the hash, blank and
        # None values get excluded in the post-processing hook for enum overrides,
        # so we do the same here to ensure the hashes match
        hashable_values = [
            (value, label) for value, label in normalized_choices if value not in ["", None]
        ]
        overrides[list_hash(hashable_values)] = name
    # Two override entries hashing identically collapse into one dict key.
    if len(spectacular_settings.ENUM_NAME_OVERRIDES) != len(overrides):
        error(
            "ENUM_NAME_OVERRIDES has duplication issues. Encountered multiple names "
            "for the same choice set. Enum naming might be unexpected."
        )
    return overrides

View File

@@ -1,32 +0,0 @@
from drf_spectacular.plumbing import (
ResolvedComponent,
build_basic_type,
build_object_type,
)
from drf_spectacular.types import OpenApiTypes
# Field names of the shared ``pagination`` object; every one is a number.
_PAGINATION_FIELD_NAMES = (
    "next",
    "previous",
    "count",
    "current",
    "total_pages",
    "start_index",
    "end_index",
)

# Reusable schema component describing the pagination metadata attached to
# paginated list responses.
PAGINATION = ResolvedComponent(
    name="Pagination",
    type=ResolvedComponent.SCHEMA,
    object="Pagination",
    schema=build_object_type(
        properties={
            field: build_basic_type(OpenApiTypes.NUMBER) for field in _PAGINATION_FIELD_NAMES
        },
        required=list(_PAGINATION_FIELD_NAMES),
    ),
)

View File

@@ -1,17 +1,10 @@
from typing import Any
from django.utils.translation import gettext_lazy as _
from drf_spectacular.generators import SchemaGenerator
from drf_spectacular.plumbing import (
ResolvedComponent,
build_basic_type,
build_parameter_type,
)
from drf_spectacular.types import OpenApiTypes
from structlog.stdlib import get_logger
LOGGER = get_logger()
QUERY_PARAMS = {
"ordering": ResolvedComponent(
@@ -70,18 +63,3 @@ QUERY_PARAMS = {
),
),
}
def postprocess_schema_query_params(
result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
"""Optimize pagination parameters, instead of redeclaring parameters for each endpoint
declare them globally and refer to them"""
LOGGER.debug("Deduplicating query parameters")
for path in result["paths"].values():
for method in path.values():
for idx, param in enumerate(method.get("parameters", [])):
if param["name"] not in QUERY_PARAMS:
continue
method["parameters"][idx] = QUERY_PARAMS[param["name"]].ref
return result

View File

@@ -1,22 +1,12 @@
from typing import Any
from django.utils.translation import gettext_lazy as _
from drf_spectacular.generators import SchemaGenerator
from drf_spectacular.plumbing import (
ResolvedComponent,
build_array_type,
build_basic_type,
build_object_type,
)
from drf_spectacular.settings import spectacular_settings
from drf_spectacular.types import OpenApiTypes
from rest_framework.settings import api_settings
from structlog.stdlib import get_logger
from authentik.api.v3.schema.pagination import PAGINATION
from authentik.api.v3.schema.query import QUERY_PARAMS
LOGGER = get_logger()
GENERIC_ERROR = ResolvedComponent(
name="GenericError",
@@ -67,40 +57,28 @@ VALIDATION_ERROR_RESPONSE = ResolvedComponent(
"description": "",
},
)
def postprocess_schema_register(
    result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
    """Register custom schema components"""
    LOGGER.debug("Registering custom schemas")
    registry = generator.registry
    registry.register_on_missing(PAGINATION)
    registry.register_on_missing(GENERIC_ERROR)
    registry.register_on_missing(GENERIC_ERROR_RESPONSE)
    registry.register_on_missing(VALIDATION_ERROR)
    registry.register_on_missing(VALIDATION_ERROR_RESPONSE)
    # Shared query parameters are components too.
    for query_param in QUERY_PARAMS.values():
        registry.register_on_missing(query_param)
    return result
def postprocess_schema_responses(
    result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
    """Attach default 400/403 error responses to every operation.

    Also marks ``PromptChallengeResponseRequest`` as accepting arbitrary extra
    properties, since that serializer takes dynamic keys
    (workaround for authentik/stages/prompt/stage.py).
    """
    LOGGER.debug("Adding default error responses")
    for path in result["paths"].values():
        for method in path.values():
            # Only fill in defaults; explicitly declared responses win.
            method["responses"].setdefault("400", VALIDATION_ERROR_RESPONSE.ref)
            method["responses"].setdefault("403", GENERIC_ERROR_RESPONSE.ref)
    result["components"] = generator.registry.build(spectacular_settings.APPEND_COMPONENTS)
    # Direct dict lookup instead of a linear scan over every component name.
    schemas = result["components"]["schemas"]
    if "PromptChallengeResponseRequest" in schemas:
        schemas["PromptChallengeResponseRequest"]["additionalProperties"] = {}
    return result
# Schema for the ``pagination`` wrapper object on list endpoints; all fields
# are numeric and required.
_pagination_properties = {
    name: build_basic_type(OpenApiTypes.NUMBER)
    for name in (
        "next",
        "previous",
        "count",
        "current",
        "total_pages",
        "start_index",
        "end_index",
    )
}
PAGINATION = ResolvedComponent(
    name="Pagination",
    type=ResolvedComponent.SCHEMA,
    object="Pagination",
    schema=build_object_type(
        properties=_pagination_properties,
        required=list(_pagination_properties),
    ),
)

View File

@@ -1,20 +0,0 @@
from typing import TYPE_CHECKING
from drf_spectacular.plumbing import ResolvedComponent, build_object_type
if TYPE_CHECKING:
from drf_spectacular.generators import SchemaGenerator
# Free-form object: autocomplete payloads are open-ended key/value maps.
AUTOCOMPLETE_SCHEMA = ResolvedComponent(
    name="Autocomplete",
    type=ResolvedComponent.SCHEMA,
    object="Autocomplete",
    schema=build_object_type(additionalProperties={}),
)
def postprocess_schema_search_autocomplete(result, generator: SchemaGenerator, **kwargs):
    """Ensure the Autocomplete component is registered in the generated schema."""
    generator.registry.register_on_missing(AUTOCOMPLETE_SCHEMA)
    return result

View File

@@ -1,60 +1,24 @@
"""Serializer mixin for managed models"""
from typing import cast
from django.conf import settings
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema, inline_serializer
from rest_framework.decorators import action
from rest_framework.exceptions import PermissionDenied, ValidationError
from rest_framework.fields import (
BooleanField,
CharField,
DateTimeField,
FileField,
)
from rest_framework.parsers import MultiPartParser
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField, DateTimeField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.viewsets import ModelViewSet
from authentik.api.validation import validate
from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.v1.common import Blueprint
from authentik.blueprints.v1.importer import Importer
from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.blueprints.v1.tasks import apply_blueprint, blueprints_find_dict
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer
from authentik.core.models import User
from authentik.events.logs import LogEventSerializer
from authentik.rbac.decorators import permission_required
def get_blueprints():
    """Return discovered blueprint files: inline in DEBUG, via the task worker otherwise."""
    if not settings.DEBUG:
        return blueprints_find_dict.send().get_result(block=True)
    return blueprints_find_dict()
class BlueprintUploadSerializer(PassiveSerializer):
    """Serializer to upload file"""

    # Both optional; the view decides which one it requires.
    file = FileField(required=False)
    path = CharField(required=False)

    def validate_path(self, path: str) -> str:
        """Ensure the path (if set) specified is retrievable"""
        if not path:
            return path
        known_paths = {entry["path"] for entry in get_blueprints()}
        if path not in known_paths:
            raise ValidationError(_("Blueprint file does not exist"))
        return path
class ManagedSerializer:
"""Managed Serializer"""
@@ -75,7 +39,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
"""Ensure the path (if set) specified is retrievable"""
if path == "" or path.startswith(OCI_PREFIX):
return path
files: list[dict] = get_blueprints()
files: list[dict] = blueprints_find_dict.send().get_result(block=True)
if path not in [file["path"] for file in files]:
raise ValidationError(_("Blueprint file does not exist"))
return path
@@ -124,33 +88,6 @@ class BlueprintInstanceSerializer(ModelSerializer):
}
def check_blueprint_perms(blueprint: Blueprint, user: User, explicit_action: str | None = None):
    """Check for individual permissions for each model in a blueprint.

    Args:
        blueprint: parsed blueprint whose entries are checked.
        user: user whose permissions are evaluated.
        explicit_action: when set, check only ``<action>_<model>`` instead of
            the default add/change/delete triple.

    Raises:
        PermissionDenied: keyed by the offending entry's id, on the first
            missing permission.
    """
    for entry in blueprint.entries:
        full_model = entry.get_model(blueprint)
        app, __, model = full_model.partition(".")
        if explicit_action:
            perms = [f"{app}.{explicit_action}_{model}"]
        else:
            perms = [
                f"{app}.add_{model}",
                f"{app}.change_{model}",
                f"{app}.delete_{model}",
            ]
        for perm in perms:
            if not user.has_perm(perm):
                raise PermissionDenied(
                    {
                        # Translate the template first, then interpolate, so the
                        # untranslated msgid matches the translation catalog
                        # (previously format_map ran before gettext, defeating
                        # translation lookup).
                        entry.id: _("User lacks permission to create {model}").format_map(
                            {"model": full_model}
                        )
                    }
                )
class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
"""Blueprint instances"""
@@ -160,12 +97,6 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
filterset_fields = ["name", "path"]
ordering = ["name"]
class BlueprintImportResultSerializer(PassiveSerializer):
    """Logs of an attempted blueprint import"""

    # Structured log events emitted while validating/applying the blueprint.
    logs = LogEventSerializer(many=True, read_only=True)
    # Whether validation (and, if attempted, application) succeeded.
    success = BooleanField(read_only=True)
@extend_schema(
responses={
200: ListSerializer(
@@ -184,7 +115,7 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
@action(detail=False, pagination_class=None, filter_backends=[])
def available(self, request: Request) -> Response:
"""Get blueprints"""
files: list[dict] = get_blueprints()
files: list[dict] = blueprints_find_dict.send().get_result(block=True)
return Response(files)
@permission_required("authentik_blueprints.view_blueprintinstance")
@@ -200,53 +131,3 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
blueprint = self.get_object()
apply_blueprint.send_with_options(args=(blueprint.pk,), rel_obj=blueprint)
return self.retrieve(request, *args, **kwargs)
@extend_schema(
    request={"multipart/form-data": BlueprintUploadSerializer},
    responses={
        204: BlueprintImportResultSerializer,
        400: BlueprintImportResultSerializer,
    },
)
@action(url_path="import", detail=False, methods=["POST"], parser_classes=(MultiPartParser,))
@validate(
    BlueprintUploadSerializer,
)
def import_(self, request: Request, body: BlueprintUploadSerializer) -> Response:
    """Import blueprint from .yaml file and apply it once, without creating an instance"""
    # Blueprint source: either an uploaded file or a path resolvable by
    # BlueprintInstance; exactly one must be provided.
    string_contents = ""
    if body.validated_data.get("file"):
        file = cast(InMemoryUploadedFile, body.validated_data["file"])
        string_contents = file.read().decode()
    elif body.validated_data.get("path"):
        string_contents = BlueprintInstance(
            path=body.validated_data.get("path")
        ).retrieve_file()
    else:
        raise ValidationError("Either path or file must be set")
    importer = Importer.from_string(string_contents)
    # Object-level permission check for every model the blueprint would touch
    check_blueprint_perms(importer.blueprint, request.user)
    valid, logs = importer.validate()
    # Seed the response serializer with an empty/failed payload, validate it once
    # (raise_exception) to prove the shape, then mutate initial_data in place and
    # re-validate. NOTE(review): mutating initial_data after is_valid() is an
    # unusual DRF pattern — the later is_valid() calls refresh validated state
    # without raising; confirm this is intentional before restructuring.
    import_response = self.BlueprintImportResultSerializer(
        data={
            "logs": [],
            "success": False,
        }
    )
    import_response.is_valid(raise_exception=True)
    import_response.initial_data["logs"] = [LogEventSerializer(log).data for log in logs]
    import_response.initial_data["success"] = valid
    import_response.is_valid()
    # All outcomes return HTTP 200 with the `success` flag carrying the result,
    # even though the schema above advertises 204/400 — presumably deliberate so
    # clients always receive the logs; verify against API consumers.
    if not valid:
        return Response(data=import_response.initial_data, status=200)
    successful = importer.apply()
    import_response.initial_data["success"] = successful
    import_response.is_valid()
    if not successful:
        return Response(data=import_response.initial_data, status=200)
    return Response(data=import_response.initial_data, status=200)

View File

@@ -3,6 +3,7 @@
import traceback
from collections.abc import Callable
from importlib import import_module
from inspect import ismethod
from django.apps import AppConfig
from django.conf import settings
@@ -71,19 +72,12 @@ class ManagedAppConfig(AppConfig):
def _reconcile(self, prefix: str) -> None:
for meth_name in dir(self):
# Check the attribute on the class to avoid evaluating @property descriptors.
# Using getattr(self, ...) on a @property would evaluate it, which can trigger
# expensive side effects (e.g. tenant_schedule_specs iterating all providers
# and running PolicyEngine queries for every user).
class_attr = getattr(type(self), meth_name, None)
if class_attr is None or isinstance(class_attr, property):
meth = getattr(self, meth_name)
if not ismethod(meth):
continue
if not callable(class_attr):
continue
category = getattr(class_attr, "_authentik_managed_reconcile", None)
category = getattr(meth, "_authentik_managed_reconcile", None)
if category != prefix:
continue
meth = getattr(self, meth_name)
name = meth_name.replace(prefix, "")
try:
self.logger.debug("Starting reconciler", name=name)

View File

@@ -1,8 +1,5 @@
"""Apply blueprint from commandline"""
from argparse import ArgumentParser
from sys import exit as sys_exit
from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger
@@ -29,8 +26,8 @@ class Command(BaseCommand):
self.stderr.write("Blueprint invalid")
for log in logs:
self.stderr.write(f"\t{log.logger}: {log.event}: {log.attributes}")
sys_exit(1)
raise RuntimeError("Blueprint invalid")
importer.apply()
def add_arguments(self, parser: ArgumentParser):
def add_arguments(self, parser):
parser.add_argument("blueprints", nargs="+", type=str)

View File

@@ -101,23 +101,13 @@ class Brand(SerializerModel):
"""Get themed URLs for branding_favicon if it contains %(theme)s"""
return get_file_manager(FileUsage.MEDIA).themed_urls(self.branding_favicon)
def branding_default_flow_background_url(self, request=None, use_cache: bool = True) -> str:
def branding_default_flow_background_url(self) -> str:
"""Get branding_default_flow_background URL"""
return get_file_manager(FileUsage.MEDIA).file_url(
self.branding_default_flow_background,
request,
use_cache=use_cache,
)
return get_file_manager(FileUsage.MEDIA).file_url(self.branding_default_flow_background)
def branding_default_flow_background_themed_urls(
self, request=None, use_cache: bool = True
) -> dict[str, str] | None:
def branding_default_flow_background_themed_urls(self) -> dict[str, str] | None:
"""Get themed URLs for branding_default_flow_background if it contains %(theme)s"""
return get_file_manager(FileUsage.MEDIA).themed_urls(
self.branding_default_flow_background,
request,
use_cache=use_cache,
)
return get_file_manager(FileUsage.MEDIA).themed_urls(self.branding_default_flow_background)
@property
def serializer(self) -> type[Serializer]:

View File

@@ -66,5 +66,4 @@ def context_processor(request: HttpRequest) -> dict[str, Any]:
"footer_links": tenant.footer_links,
"html_meta": {**get_http_meta()},
"version": authentik_full_version(),
"csp_nonce": request.request_id,
}

View File

@@ -5,7 +5,6 @@ from django.utils.translation import gettext_lazy as _
GRANT_TYPE_AUTHORIZATION_CODE = "authorization_code"
GRANT_TYPE_IMPLICIT = "implicit"
GRANT_TYPE_HYBRID = "hybrid"
GRANT_TYPE_REFRESH_TOKEN = "refresh_token" # nosec
GRANT_TYPE_CLIENT_CREDENTIALS = "client_credentials"
GRANT_TYPE_PASSWORD = "password" # nosec
@@ -22,9 +21,6 @@ PROMPT_CONSENT = "consent"
PROMPT_LOGIN = "login"
PLAN_CONTEXT_OIDC_LOGOUT_IFRAME_SESSIONS = "goauthentik.io/providers/oauth2/iframe_sessions"
PLAN_CONTEXT_POST_LOGOUT_REDIRECT_URI = "goauthentik.io/providers/oauth2/post_logout_redirect_uri"
OAUTH2_BINDING = "redirect"
SCOPE_OPENID = "openid"
SCOPE_OPENID_PROFILE = "profile"
@@ -41,9 +37,6 @@ TOKEN_TYPE = "Bearer" # nosec
SCOPE_AUTHENTIK_API = "goauthentik.io/api"
# URI schemes that are forbidden for redirect URIs
FORBIDDEN_URI_SCHEMES = {"javascript", "data", "vbscript"}
# Read/write full user (including email)
SCOPE_GITHUB_USER = "user"
# Read user (without email)

View File

@@ -4,7 +4,7 @@ from collections.abc import Iterator
from copy import copy
from django.core.cache import cache
from django.db.models import Case, Q, QuerySet
from django.db.models import Case, QuerySet
from django.db.models.expressions import When
from django.shortcuts import get_object_or_404
from django.utils.translation import gettext as _
@@ -25,7 +25,6 @@ from authentik.core.api.providers import ProviderSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.users import UserSerializer
from authentik.core.api.utils import ModelSerializer, ThemedUrlsSerializer
from authentik.core.apps import AppAccessWithoutBindings
from authentik.core.models import Application, User
from authentik.events.logs import LogEventSerializer, capture_logs
from authentik.policies.api.exec import PolicyTestResultSerializer
@@ -36,13 +35,9 @@ from authentik.rbac.filters import ObjectFilter
LOGGER = get_logger()
def user_app_cache_key(
user_pk: str, page_number: int | None = None, only_with_launch_url: bool = False
) -> str:
def user_app_cache_key(user_pk: str, page_number: int | None = None) -> str:
"""Cache key where application list for user is saved"""
key = f"{CACHE_PREFIX}app_access/{user_pk}"
if only_with_launch_url:
key += "/launch"
if page_number:
key += f"/{page_number}"
return key
@@ -52,12 +47,7 @@ class ApplicationSerializer(ModelSerializer):
"""Application Serializer"""
launch_url = SerializerMethodField()
provider_obj = ProviderSerializer(
source="get_provider",
required=False,
read_only=True,
allow_null=True,
)
provider_obj = ProviderSerializer(source="get_provider", required=False, read_only=True)
backchannel_providers_obj = ProviderSerializer(
source="backchannel_providers", required=False, read_only=True, many=True
)
@@ -173,7 +163,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
request.user = user
for application in paginated_apps:
engine = PolicyEngine(application, request.user, request)
engine.empty_result = AppAccessWithoutBindings.get()
engine.build()
if engine.passing:
applications.append(application)
@@ -231,7 +220,6 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
if not for_user:
raise ValidationError({"for_user": "User not found"})
engine = PolicyEngine(application, for_user, request)
engine.empty_result = AppAccessWithoutBindings.get()
engine.use_cache = False
with capture_logs() as logs:
engine.build()
@@ -278,19 +266,11 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
if superuser_full_list and request.user.is_superuser:
return super().list(request)
only_with_launch_url = (
str(request.query_params.get("only_with_launch_url", "false")).lower()
) == "true"
only_with_launch_url = str(
request.query_params.get("only_with_launch_url", "false")
).lower()
queryset = self._filter_queryset_for_list(self.get_queryset())
if only_with_launch_url:
# Pre-filter at DB level to skip expensive per-app policy evaluation
# for apps that can never appear in the launcher:
# - No meta_launch_url AND no provider: no possible launch URL
# - meta_launch_url="blank://blank": documented convention to hide from launcher
queryset = queryset.exclude(
Q(meta_launch_url="", provider__isnull=True) | Q(meta_launch_url="blank://blank")
)
paginator: Pagination = self.paginator
paginated_apps = paginator.paginate_queryset(queryset, request)
@@ -307,6 +287,7 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
except ValueError as exc:
raise ValidationError from exc
allowed_applications = self._get_allowed_applications(paginated_apps, user=for_user)
allowed_applications = self._expand_applications(allowed_applications)
serializer = self.get_serializer(allowed_applications, many=True)
return self.get_paginated_response(serializer.data)
@@ -316,26 +297,19 @@ class ApplicationViewSet(UsedByMixin, ModelViewSet):
allowed_applications = self._get_allowed_applications(paginated_apps)
if should_cache:
allowed_applications = cache.get(
user_app_cache_key(
self.request.user.pk, paginator.page.number, only_with_launch_url
)
user_app_cache_key(self.request.user.pk, paginator.page.number)
)
if allowed_applications:
# Re-fetch cached applications since pickled instances lose prefetched
# relationships, causing N+1 queries during serialization
allowed_applications = self._expand_applications(allowed_applications)
else:
if not allowed_applications:
LOGGER.debug("Caching allowed application list", page=paginator.page.number)
allowed_applications = self._get_allowed_applications(paginated_apps)
cache.set(
user_app_cache_key(
self.request.user.pk, paginator.page.number, only_with_launch_url
),
user_app_cache_key(self.request.user.pk, paginator.page.number),
allowed_applications,
timeout=86400,
)
allowed_applications = self._expand_applications(allowed_applications)
if only_with_launch_url:
if only_with_launch_url == "true":
allowed_applications = self._filter_applications_with_launch_url(allowed_applications)
serializer = self.get_serializer(allowed_applications, many=True)

View File

@@ -7,7 +7,6 @@ from django.http import Http404
from django.utils.translation import gettext as _
from django_filters.filters import CharFilter, ModelMultipleChoiceFilter
from django_filters.filterset import FilterSet
from djangoql.schema import BoolField, StrField
from drf_spectacular.utils import (
OpenApiParameter,
OpenApiResponse,
@@ -26,9 +25,6 @@ from rest_framework.serializers import ListSerializer, ValidationError
from rest_framework.viewsets import ModelViewSet
from authentik.api.authentication import TokenAuthentication
from authentik.api.search.fields import (
JSONSearchField,
)
from authentik.api.validation import validate
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import JSONDictField, ModelSerializer, PassiveSerializer
@@ -269,6 +265,12 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
]
def get_ql_fields(self):
from djangoql.schema import BoolField, StrField
from authentik.enterprise.search.fields import (
JSONSearchField,
)
return [
StrField(Group, "name"),
BoolField(Group, "is_superuser", nullable=True),

View File

@@ -124,7 +124,7 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
"""Token Viewset"""
lookup_field = "identifier"
queryset = Token.objects.including_expired().all()
queryset = Token.objects.all()
serializer_class = TokenSerializer
search_fields = [
"identifier",

View File

@@ -2,8 +2,9 @@
from django.apps import apps
from django.db.models import Model
from django.utils.translation import gettext as _
from drf_spectacular.utils import PolymorphicProxySerializer, extend_schema, extend_schema_field
from rest_framework.exceptions import ValidationError
from rest_framework.exceptions import PermissionDenied, ValidationError
from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, ListField
from rest_framework.permissions import IsAuthenticated
from rest_framework.request import Request
@@ -12,7 +13,6 @@ from rest_framework.views import APIView
from yaml import ScalarNode
from authentik.api.validation import validate
from authentik.blueprints.api import check_blueprint_perms
from authentik.blueprints.v1.common import (
Blueprint,
BlueprintEntry,
@@ -165,7 +165,21 @@ class TransactionalApplicationView(APIView):
def put(self, request: Request, body: TransactionApplicationSerializer) -> Response:
"""Convert data into a blueprint, validate it and apply it"""
blueprint: Blueprint = body.validated_data
check_blueprint_perms(blueprint, request.user, explicit_action="add")
for entry in blueprint.entries:
full_model = entry.get_model(blueprint)
app, __, model = full_model.partition(".")
if not request.user.has_perm(f"{app}.add_{model}"):
raise PermissionDenied(
{
entry.id: _(
"User lacks permission to create {model}".format_map(
{
"model": full_model,
}
)
)
}
)
importer = Importer(blueprint, {})
applied = importer.apply()
response = {"applied": False, "logs": []}

View File

@@ -22,7 +22,6 @@ from django_filters.filters import (
UUIDFilter,
)
from django_filters.filterset import FilterSet
from djangoql.schema import BoolField, StrField
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import (
OpenApiParameter,
@@ -56,10 +55,6 @@ from rest_framework.viewsets import ModelViewSet
from structlog.stdlib import get_logger
from authentik.api.authentication import TokenAuthentication
from authentik.api.search.fields import (
ChoiceSearchField,
JSONSearchField,
)
from authentik.api.validation import validate
from authentik.blueprints.v1.importer import SERIALIZER_CONTEXT_BLUEPRINT
from authentik.brands.models import Brand
@@ -529,6 +524,13 @@ class UserViewSet(
]
def get_ql_fields(self):
from djangoql.schema import BoolField, StrField
from authentik.enterprise.search.fields import (
ChoiceSearchField,
JSONSearchField,
)
return [
StrField(User, "username"),
StrField(User, "name"),

View File

@@ -1,26 +1,7 @@
"""authentik core app config"""
from django.utils.translation import gettext_lazy as _
from authentik.blueprints.apps import ManagedAppConfig
from authentik.tasks.schedules.common import ScheduleSpec
from authentik.tenants.flags import Flag
class Setup(Flag[bool], key="setup"):
default = False
visibility = "system"
class AppAccessWithoutBindings(Flag[bool], key="core_default_app_access"):
default = True
visibility = "none"
description = _(
"Configure if applications without any policy/group/user bindings "
"should be accessible to any user."
)
class AuthentikCoreConfig(ManagedAppConfig):
@@ -32,10 +13,6 @@ class AuthentikCoreConfig(ManagedAppConfig):
mountpoint = ""
default = True
def import_related(self):
super().import_related()
self.import_module("authentik.core.setup.signals")
@ManagedAppConfig.reconcile_tenant
def source_inbuilt(self):
"""Reconcile inbuilt source"""

View File

@@ -81,7 +81,7 @@ class TokenBackend(InbuiltBackend):
User().set_password(password, request=request)
return None
tokens = Token.objects.filter(
tokens = Token.filter_not_expired(
user=user, key=password, intent=TokenIntents.INTENT_APP_PASSWORD
)
if not tokens.exists():

View File

@@ -1,61 +0,0 @@
# Generated by Django 5.2.13 on 2026-04-21 18:49
from django.apps.registry import Apps
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db import migrations
def check_is_already_setup(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    """Heuristically decide whether this install has already completed initial setup.

    Returns True when any of three signals is present: a prior version in the
    upgrade history, an OOBE flow that already locked itself down, or real user
    accounts beyond the built-in ones. Returns False otherwise.
    """
    from django.conf import settings

    from authentik.flows.models import FlowAuthenticationRequirement

    VersionHistory = apps.get_model("authentik_admin", "VersionHistory")
    Flow = apps.get_model("authentik_flows", "Flow")
    User = apps.get_model("authentik_core", "User")
    db_alias = schema_editor.connection.alias
    # Upgrading from a previous version
    if not settings.TEST and VersionHistory.objects.using(db_alias).count() > 1:
        return True
    # OOBE flow sets itself to this authentication requirement once finished
    if (
        Flow.objects.using(db_alias)
        .filter(
            slug="initial-setup", authentication=FlowAuthenticationRequirement.REQUIRE_SUPERUSER
        )
        .exists()
    ):
        return True
    # non-akadmin and non-guardian anonymous user exist
    if (
        User.objects.using(db_alias)
        .exclude(username="akadmin")
        .exclude(username="AnonymousUser")
        .exists()
    ):
        return True
    return False
def update_setup_flag(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    """Migration forward op: persist the Setup flag on the current tenant.

    If the heuristic says this install was already set up, flip the tenant's
    Setup flag to True so the first-run flow is not shown again.
    """
    from authentik.core.apps import Setup
    from authentik.tenants.utils import get_current_tenant

    is_already_setup = check_is_already_setup(apps, schema_editor)
    if is_already_setup:
        tenant = get_current_tenant()
        tenant.flags[Setup().key] = True
        tenant.save()
class Migration(migrations.Migration):
    """Data migration: mark already-configured installs as set up (no reverse op)."""

    dependencies = [
        ("authentik_core", "0057_remove_user_groups_remove_user_user_permissions_and_more"),
        # 0024_flow_authentication adds the `authentication` field.
        ("authentik_flows", "0024_flow_authentication"),
    ]

    operations = [migrations.RunPython(update_setup_flag, migrations.RunPython.noop)]

View File

@@ -2,7 +2,7 @@
import re
import traceback
from datetime import datetime
from datetime import datetime, timedelta
from enum import StrEnum
from hashlib import sha256
from typing import Any, Self
@@ -16,7 +16,7 @@ from django.contrib.auth.models import UserManager as DjangoUserManager
from django.contrib.sessions.base_session import AbstractBaseSession
from django.core.validators import validate_slug
from django.db import models
from django.db.models import Manager, Q, QuerySet, options
from django.db.models import Q, QuerySet, options
from django.http import HttpRequest
from django.utils.functional import cached_property
from django.utils.timezone import now
@@ -45,7 +45,6 @@ from authentik.lib.models import (
SerializerModel,
)
from authentik.lib.utils.inheritance import get_deepest_child
from authentik.lib.utils.reflection import class_to_path
from authentik.lib.utils.time import timedelta_from_string
from authentik.policies.models import PolicyBindingModel
from authentik.rbac.models import Role
@@ -518,7 +517,7 @@ class User(SerializerModel, AttributesMixin, AbstractUser):
@property
def ak_groups(self):
"""This is a proxy for a renamed, deprecated field."""
from authentik.events.models import Event
from authentik.events.models import Event, EventAction
deprecation = "authentik.core.models.User.ak_groups"
replacement = "authentik.core.models.User.groups"
@@ -545,9 +544,21 @@ class User(SerializerModel, AttributesMixin, AbstractUser):
cause=cause,
stacktrace=stacktrace,
)
Event.log_deprecation(
deprecation, message=message_event, cause=cause, replacement=replacement
)
if not Event.filter_not_expired(
action=EventAction.CONFIGURATION_WARNING,
context__deprecation=deprecation,
context__cause=cause,
).exists():
event = Event.new(
EventAction.CONFIGURATION_WARNING,
deprecation=deprecation,
replacement=replacement,
message=message_event,
cause=cause,
)
event.expires = datetime.now() + timedelta(days=30)
event.save()
return self.groups
def set_password(self, raw_password, signal=True, sender=None, request=None):
@@ -790,21 +801,17 @@ class Application(SerializerModel, PolicyBindingModel):
def get_provider(self) -> Provider | None:
"""Get casted provider instance. Needs Application queryset with_provider"""
if hasattr(self, "_cached_provider"):
return self._cached_provider
if not self.provider:
self._cached_provider = None
return None
self._cached_provider = get_deepest_child(self.provider)
return self._cached_provider
return get_deepest_child(self.provider)
def backchannel_provider_for[T: Provider](self, provider_type: type[T], **kwargs) -> T | None:
"""Get Backchannel provider for a specific type"""
provider: BackchannelProvider | None = self.backchannel_providers.filter(
providers = self.backchannel_providers.filter(
**{f"{provider_type._meta.model_name}__isnull": False},
**kwargs,
).first()
return getattr(provider, provider_type._meta.model_name) if provider else None
)
return getattr(providers.first(), provider_type._meta.model_name)
def __str__(self):
return str(self.name)
@@ -955,34 +962,21 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
objects = InheritanceManager()
def get_icon_url(self, request=None, use_cache: bool = True) -> str | None:
"""Get the URL to the source icon."""
if not self.icon:
return None
return get_file_manager(FileUsage.MEDIA).file_url(self.icon, request, use_cache=use_cache)
@property
def icon_url(self) -> str | None:
"""Get the URL to the source icon"""
return self.get_icon_url()
def get_icon_themed_urls(
self,
request=None,
use_cache: bool = True,
) -> dict[str, str] | None:
"""Get themed URLs for icon if it contains %(theme)s."""
if not self.icon:
return None
return get_file_manager(FileUsage.MEDIA).themed_urls(
self.icon,
request,
use_cache=use_cache,
)
return get_file_manager(FileUsage.MEDIA).file_url(self.icon)
@property
def icon_themed_urls(self) -> dict[str, str] | None:
return self.get_icon_themed_urls()
"""Get themed URLs for icon if it contains %(theme)s"""
if not self.icon:
return None
return get_file_manager(FileUsage.MEDIA).themed_urls(self.icon)
def get_user_path(self) -> str:
"""Get user path, fallback to default for formatting errors"""
@@ -1102,24 +1096,12 @@ class GroupSourceConnection(SerializerModel, CreatedUpdatedModel):
unique_together = (("group", "source"),)
class ExpiringManager(Manager):
"""Manager for expiring objects which filters out expired objects by default"""
def get_queryset(self):
return QuerySet(self.model, using=self._db).exclude(expires__lt=now(), expiring=True)
def including_expired(self):
return QuerySet(self.model, using=self._db)
class ExpiringModel(models.Model):
"""Base Model which can expire, and is automatically cleaned up."""
expires = models.DateTimeField(default=None, null=True)
expiring = models.BooleanField(default=True)
objects = ExpiringManager()
class Meta:
abstract = True
indexes = [
@@ -1143,23 +1125,7 @@ class ExpiringModel(models.Model):
def filter_not_expired(cls, **kwargs) -> QuerySet[Self]:
"""Filer for tokens which are not expired yet or are not expiring,
and match filters in `kwargs`"""
from authentik.events.models import Event
deprecation_id = f"{class_to_path(cls)}.filter_not_expired"
Event.log_deprecation(
deprecation_id,
message=(
".filter_not_expired() is deprecated as the default lookup now excludes "
"expired objects."
),
)
for obj in (
cls.objects.including_expired()
.filter(**kwargs)
.filter(Q(expires__lt=now(), expiring=True))
):
for obj in cls.objects.filter(**kwargs).filter(Q(expires__lt=now(), expiring=True)):
obj.delete()
return cls.objects.filter(**kwargs)

View File

@@ -72,7 +72,6 @@ class SessionStore(SessionBase):
# and their descriptors fail to initialize (e.g., missing storage)
# TypeError - can happen with incompatible pickled objects
# If any of these happen, just return an empty dictionary (an empty session)
LOGGER.warning("Failed to decode session data", exc_info=True)
pass
return {}

View File

@@ -1,38 +0,0 @@
from os import getenv
from django.dispatch import receiver
from structlog.stdlib import get_logger
from authentik.blueprints.models import BlueprintInstance
from authentik.blueprints.v1.importer import Importer
from authentik.core.apps import Setup
from authentik.root.signals import post_startup
from authentik.tenants.models import Tenant
BOOTSTRAP_BLUEPRINT = "system/bootstrap.yaml"
LOGGER = get_logger()
@receiver(post_startup)
def post_startup_setup_bootstrap(sender, **_):
    """Apply the bootstrap blueprint on startup for tenants not yet set up.

    Only runs when AUTHENTIK_BOOTSTRAP_PASSWORD or AUTHENTIK_BOOTSTRAP_TOKEN is
    set; marks each processed tenant as set up afterwards.
    """
    if not getenv("AUTHENTIK_BOOTSTRAP_PASSWORD") and not getenv("AUTHENTIK_BOOTSTRAP_TOKEN"):
        return
    LOGGER.info("Configuring authentik through bootstrap environment variables")
    content = BlueprintInstance(path=BOOTSTRAP_BLUEPRINT).retrieve()
    # If we have bootstrap credentials set, run bootstrap tasks outside of main server
    # sync, so that we can sure the first start actually has working bootstrap
    # credentials
    for tenant in Tenant.objects.filter(ready=True):
        if Setup.get(tenant=tenant):
            LOGGER.info("Tenant is already setup, skipping", tenant=tenant.schema_name)
            continue
        with tenant:
            importer = Importer.from_string(content)
            valid, logs = importer.validate()
            if not valid:
                LOGGER.warning("Blueprint invalid", tenant=tenant.schema_name)
                for log in logs:
                    log.log()
            # NOTE(review): as written, an invalid blueprint is only logged and
            # apply() still runs — confirm whether a `continue` was intended here.
            importer.apply()
            Setup.set(True, tenant=tenant)

View File

@@ -1,80 +0,0 @@
from functools import lru_cache
from http import HTTPMethod, HTTPStatus
from django.contrib.staticfiles import finders
from django.db import transaction
from django.http import HttpRequest, HttpResponse
from django.shortcuts import redirect
from django.urls import reverse
from django.views import View
from structlog.stdlib import get_logger
from authentik.blueprints.models import BlueprintInstance
from authentik.core.apps import Setup
from authentik.flows.models import Flow, FlowAuthenticationRequirement, in_memory_stage
from authentik.flows.planner import FlowPlanner
from authentik.flows.stage import StageView
LOGGER = get_logger()
FLOW_CONTEXT_START_BY = "goauthentik.io/core/setup/started-by"
@lru_cache
def read_static(path: str) -> str | None:
    """Return the text contents of a static file, or None when it cannot be located.

    Results are memoized per path for the process lifetime via ``lru_cache``.
    """
    located = finders.find(path)
    if located:
        with open(located, encoding="utf8") as handle:
            return handle.read()
    return None
class SetupView(View):
    """Entry point for the initial-setup (OOBE) flow.

    Redirects away once setup is complete; otherwise plans and launches the
    "initial-setup" flow, appending a post-setup stage that finalizes state.
    """

    # Slug of the flow that drives first-time setup
    setup_flow_slug = "initial-setup"

    def dispatch(self, request: HttpRequest, *args, **kwargs):
        # HEAD is exempt so health probes below can still report readiness
        if request.method != HTTPMethod.HEAD and Setup.get():
            return redirect(reverse("authentik_core:root-redirect"))
        return super().dispatch(request, *args, **kwargs)

    def head(self, request: HttpRequest, *args, **kwargs):
        """Readiness probe: 200 only while setup is pending AND the flow exists."""
        if Setup.get():
            return HttpResponse(status=HTTPStatus.SERVICE_UNAVAILABLE)
        if not Flow.objects.filter(slug=self.setup_flow_slug).exists():
            return HttpResponse(status=HTTPStatus.SERVICE_UNAVAILABLE)
        return HttpResponse(status=HTTPStatus.OK)

    def get(self, request: HttpRequest):
        """Plan the setup flow and redirect into it (503 page while worker seeds it)."""
        flow = Flow.objects.filter(slug=self.setup_flow_slug).first()
        if not flow:
            LOGGER.info("Setup flow does not exist yet, waiting for worker to finish")
            return HttpResponse(
                read_static("dist/standalone/loading/startup.html"),
                status=HTTPStatus.SERVICE_UNAVAILABLE,
            )
        planner = FlowPlanner(flow)
        plan = planner.plan(request, {FLOW_CONTEXT_START_BY: "setup"})
        # Finalization (flag set, OOBE lockdown) happens in PostSetupStageView
        plan.append_stage(in_memory_stage(PostSetupStageView))
        return plan.to_redirect(request, flow)
class PostSetupStageView(StageView):
    """Run post-setup tasks"""

    def post(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
        """Wrapper when this stage gets hit with a post request"""
        return self.get(request, *args, **kwargs)

    # Fix: parameter was misspelled `requeset`; renamed to the conventional
    # `request` (callers invoke this positionally via Django dispatch).
    def get(self, request: HttpRequest, *args, **kwargs):
        """Finalize setup atomically: set the flag, disable OOBE blueprints,
        and lock the setup flow behind superuser authentication."""
        with transaction.atomic():
            # Remember we're setup
            Setup.set(True)
            # Disable OOBE Blueprints
            BlueprintInstance.objects.filter(
                **{"metadata__labels__blueprints.goauthentik.io/system-oobe": "true"}
            ).update(enabled=False)
            # Make flow inaccessible
            Flow.objects.filter(slug="initial-setup").update(
                authentication=FlowAuthenticationRequirement.REQUIRE_SUPERUSER
            )
        return self.executor.stage_ok()

View File

@@ -24,8 +24,7 @@ from authentik.root.ws.consumer import build_device_group
# Arguments: user: User, password: str
password_changed = Signal()
# Arguments: credentials: dict[str, any], request: HttpRequest,
# stage: Stage, context: dict[str, any]
# Arguments: credentials: dict[str, any], request: HttpRequest, stage: Stage
login_failed = Signal()
LOGGER = get_logger()

View File

@@ -27,10 +27,7 @@ def clean_expired_models():
for cls in ExpiringModel.__subclasses__():
cls: ExpiringModel
objects = (
cls.objects.including_expired()
.all()
.exclude(expiring=False)
.exclude(expiring=True, expires__gt=now())
cls.objects.all().exclude(expiring=False).exclude(expiring=True, expires__gt=now())
)
amount = objects.count()
for obj in chunked_queryset(objects):

View File

@@ -1,7 +1,7 @@
{% load i18n %}
{% get_current_language as LANGUAGE_CODE %}
<script data-id="authentik-config" nonce="{{ csp_nonce }}">
<script data-id="authentik-config">
"use strict";
window.authentik = {

View File

@@ -14,7 +14,6 @@
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
{# Darkreader breaks the site regardless of theme as its not compatible with webcomponents, and we default to a dark theme based on preferred colour-scheme #}
<meta name="darkreader-lock">
<script nonce="{{ csp_nonce }}">window.litNonce = "{{ csp_nonce }}";</script>
<title>{% block title %}{% trans title|default:brand.branding_title %}{% endblock %}</title>
<link rel="icon" href="{{ brand.branding_favicon_url }}">
<link rel="shortcut icon" href="{{ brand.branding_favicon_url }}">
@@ -28,7 +27,7 @@
{% include "base/theme.html" %}
<style data-id="brand-css" nonce="{{ csp_nonce }}">{{ brand_css }}</style>
<style data-id="brand-css">{{ brand_css }}</style>
<script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script>
{% block head %}
{% endblock %}

View File

@@ -9,7 +9,7 @@
<meta name="color-scheme" content="light" />
<meta name="theme-color" content="#ffffff">
{% else %}
<script data-id="theme-script" nonce="{{ csp_nonce }}">
<script data-id="theme-script">
"use strict";
(function () {
@@ -22,7 +22,7 @@
if (!(["auto", "light", "dark"].includes(locallyStoredTheme))) {
locallyStoredTheme = null;
}
const initialThemeChoice =
new URLSearchParams(window.location.search).get("theme") || locallyStoredTheme;

View File

@@ -10,7 +10,7 @@
{% endblock %}
{% block head %}
<style data-id="static-styles" nonce="{{ csp_nonce }}">
<style data-id="static-styles">
:root {
--ak-global--background-image: url("{{ request.brand.branding_default_flow_background_url }}");
}

View File

@@ -4,7 +4,6 @@ from django.test import TestCase
from django.urls import reverse
from authentik.brands.models import Brand
from authentik.core.apps import Setup
from authentik.core.models import Application, UserTypes
from authentik.core.tests.utils import create_test_brand, create_test_user
@@ -13,7 +12,6 @@ class TestInterfaceRedirects(TestCase):
"""Test RootRedirectView and BrandDefaultRedirectView redirect logic by user type"""
def setUp(self):
Setup.set(True)
self.app = Application.objects.create(name="test-app", slug="test-app")
self.brand: Brand = create_test_brand(default_application=self.app)

View File

@@ -2,7 +2,6 @@
from collections.abc import Callable
from datetime import timedelta
from unittest.mock import patch
from django.test import RequestFactory, TestCase
from django.utils.timezone import now
@@ -10,9 +9,6 @@ from freezegun import freeze_time
from guardian.shortcuts import get_anonymous_user
from authentik.core.models import Provider, Source, Token
from authentik.events.models import Event, EventAction
from authentik.flows.models import Flow
from authentik.lib.generators import generate_id
from authentik.lib.utils.reflection import all_subclasses
@@ -33,74 +29,6 @@ class TestModels(TestCase):
freeze.tick(timedelta(seconds=1))
self.assertFalse(token.is_expired)
def test_filter_not_expired_warning(self):
"""Test filter_not_expired's deprecation message"""
id = generate_id()
Token.objects.create(
expires=now() - timedelta(hours=1),
expiring=True,
user=get_anonymous_user(),
identifier=id,
)
self.assertFalse(Token.filter_not_expired(identifier=id).exists())
event = Event.objects.filter(action=EventAction.CONFIGURATION_WARNING).first()
self.assertIsNotNone(event)
self.assertEqual(
event.context["deprecation"], "authentik.core.models.Token.filter_not_expired"
)
@patch("authentik.core.models.get_file_manager")
def test_source_icon_url_can_bypass_cache(self, get_file_manager):
request = RequestFactory().get("/")
manager = get_file_manager.return_value
manager.file_url.return_value = "/files/media/public/source-icons/icon.svg?token=fresh"
source = Source(icon="source-icons/icon.svg")
self.assertEqual(
source.get_icon_url(request, use_cache=False),
"/files/media/public/source-icons/icon.svg?token=fresh",
)
manager.file_url.assert_called_once_with(
"source-icons/icon.svg",
request,
use_cache=False,
)
@patch("authentik.flows.models.get_file_manager")
def test_flow_background_urls_can_bypass_cache(self, get_file_manager):
request = RequestFactory().get("/")
manager = get_file_manager.return_value
manager.file_url.return_value = "/files/media/public/background.svg?token=fresh"
manager.themed_urls.return_value = {
"light": "/files/media/public/background-light.svg?token=fresh",
"dark": "/files/media/public/background-dark.svg?token=fresh",
}
flow = Flow(background="background-%(theme)s.svg")
self.assertEqual(
flow.background_url(request, use_cache=False),
"/files/media/public/background.svg?token=fresh",
)
self.assertEqual(
flow.background_themed_urls(request, use_cache=False),
{
"light": "/files/media/public/background-light.svg?token=fresh",
"dark": "/files/media/public/background-dark.svg?token=fresh",
},
)
manager.file_url.assert_called_once_with(
"background-%(theme)s.svg",
request,
use_cache=False,
)
manager.themed_urls.assert_called_once_with(
"background-%(theme)s.svg",
request,
use_cache=False,
)
def source_tester_factory(test_model: type[Source]) -> Callable:
"""Test source"""

View File

@@ -1,156 +0,0 @@
from http import HTTPStatus
from os import environ
from django.urls import reverse
from authentik.blueprints.tests import apply_blueprint
from authentik.core.apps import Setup
from authentik.core.models import Token, TokenIntents, User
from authentik.flows.models import Flow
from authentik.flows.tests import FlowTestCase
from authentik.lib.generators import generate_id
from authentik.root.signals import post_startup, pre_startup
from authentik.tenants.flags import patch_flag
class TestSetup(FlowTestCase):
def tearDown(self):
environ.pop("AUTHENTIK_BOOTSTRAP_PASSWORD", None)
environ.pop("AUTHENTIK_BOOTSTRAP_TOKEN", None)
@patch_flag(Setup, True)
def test_setup(self):
"""Test existing instance"""
res = self.client.get(reverse("authentik_core:root-redirect"))
self.assertEqual(res.status_code, HTTPStatus.FOUND)
self.assertRedirects(
res,
reverse("authentik_flows:default-authentication") + "?next=/",
fetch_redirect_response=False,
)
res = self.client.head(reverse("authentik_core:setup"))
self.assertEqual(res.status_code, HTTPStatus.SERVICE_UNAVAILABLE)
res = self.client.get(reverse("authentik_core:setup"))
self.assertEqual(res.status_code, HTTPStatus.FOUND)
self.assertRedirects(
res,
reverse("authentik_core:root-redirect"),
fetch_redirect_response=False,
)
@patch_flag(Setup, False)
def test_not_setup_no_flow(self):
"""Test case on initial startup; setup flag is not set and oobe flow does
not exist yet"""
Flow.objects.filter(slug="initial-setup").delete()
res = self.client.get(reverse("authentik_core:root-redirect"))
self.assertEqual(res.status_code, HTTPStatus.FOUND)
self.assertRedirects(res, reverse("authentik_core:setup"), fetch_redirect_response=False)
# Flow does not exist, hence 503
res = self.client.get(reverse("authentik_core:setup"))
self.assertEqual(res.status_code, HTTPStatus.SERVICE_UNAVAILABLE)
res = self.client.head(reverse("authentik_core:setup"))
self.assertEqual(res.status_code, HTTPStatus.SERVICE_UNAVAILABLE)
@patch_flag(Setup, False)
@apply_blueprint("default/flow-oobe.yaml")
def test_not_setup(self):
"""Test case for when worker comes up, and has created flow"""
res = self.client.get(reverse("authentik_core:root-redirect"))
self.assertEqual(res.status_code, HTTPStatus.FOUND)
self.assertRedirects(res, reverse("authentik_core:setup"), fetch_redirect_response=False)
# Flow does not exist, hence 503
res = self.client.head(reverse("authentik_core:setup"))
self.assertEqual(res.status_code, HTTPStatus.OK)
res = self.client.get(reverse("authentik_core:setup"))
self.assertEqual(res.status_code, HTTPStatus.FOUND)
self.assertRedirects(
res,
reverse("authentik_core:if-flow", kwargs={"flow_slug": "initial-setup"}),
fetch_redirect_response=False,
)
@apply_blueprint("default/flow-oobe.yaml")
@apply_blueprint("system/bootstrap.yaml")
def test_setup_flow_full(self):
"""Test full setup flow"""
Setup.set(False)
res = self.client.get(reverse("authentik_core:setup"))
self.assertEqual(res.status_code, HTTPStatus.FOUND)
self.assertRedirects(
res,
reverse("authentik_core:if-flow", kwargs={"flow_slug": "initial-setup"}),
fetch_redirect_response=False,
)
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": "initial-setup"}),
)
self.assertEqual(res.status_code, HTTPStatus.OK)
self.assertStageResponse(res, component="ak-stage-prompt")
pw = generate_id()
res = self.client.post(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": "initial-setup"}),
{
"email": f"{generate_id()}@t.goauthentik.io",
"password": pw,
"password_repeat": pw,
"component": "ak-stage-prompt",
},
)
self.assertEqual(res.status_code, HTTPStatus.FOUND)
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": "initial-setup"}),
)
self.assertEqual(res.status_code, HTTPStatus.FOUND)
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": "initial-setup"}),
)
self.assertEqual(res.status_code, HTTPStatus.FOUND)
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": "initial-setup"}),
)
self.assertEqual(res.status_code, HTTPStatus.OK)
self.assertTrue(Setup.get())
user = User.objects.get(username="akadmin")
self.assertTrue(user.check_password(pw))
@patch_flag(Setup, False)
@apply_blueprint("default/flow-oobe.yaml")
@apply_blueprint("system/bootstrap.yaml")
def test_setup_flow_direct(self):
"""Test setup flow, directly accessing the flow"""
res = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": "initial-setup"})
)
self.assertStageResponse(
res,
component="ak-stage-access-denied",
error_message="Access the authentik setup by navigating to http://testserver/",
)
def test_setup_bootstrap_env(self):
"""Test setup with env vars"""
User.objects.filter(username="akadmin").delete()
Setup.set(False)
environ["AUTHENTIK_BOOTSTRAP_PASSWORD"] = generate_id()
environ["AUTHENTIK_BOOTSTRAP_TOKEN"] = generate_id()
pre_startup.send(sender=self)
post_startup.send(sender=self)
self.assertTrue(Setup.get())
user = User.objects.get(username="akadmin")
self.assertTrue(user.check_password(environ["AUTHENTIK_BOOTSTRAP_PASSWORD"]))
token = Token.objects.filter(identifier="authentik-bootstrap-token").first()
self.assertEqual(token.intent, TokenIntents.INTENT_API)
self.assertEqual(token.key, environ["AUTHENTIK_BOOTSTRAP_TOKEN"])

View File

@@ -173,7 +173,7 @@ class TestTokenAPI(APITestCase):
def test_list(self):
"""Test Token List (Test normal authentication)"""
Token.objects.including_expired().all().delete()
Token.objects.all().delete()
token_should: Token = Token.objects.create(
identifier="test", expiring=False, user=self.user
)
@@ -185,7 +185,7 @@ class TestTokenAPI(APITestCase):
def test_list_with_permission(self):
"""Test Token List (Test with `view_token` permission)"""
Token.objects.including_expired().all().delete()
Token.objects.all().delete()
token_should: Token = Token.objects.create(
identifier="test", expiring=False, user=self.user
)

View File

@@ -1,9 +1,6 @@
"""Test token auth"""
from datetime import timedelta
from django.test import TestCase
from django.utils.timezone import now
from authentik.core.auth import TokenBackend
from authentik.core.models import Token, TokenIntents, User
@@ -31,15 +28,6 @@ class TestTokenAuth(TestCase):
TokenBackend().authenticate(self.request, "test-user", self.token.key), self.user
)
def test_token_auth_expired(self):
"""Test auth with token"""
self.token.expiring = True
self.token.expires = now() - timedelta(hours=1)
self.token.save()
self.assertEqual(
TokenBackend().authenticate(self.request, "test-user", self.token.key), None
)
def test_token_auth_none(self):
"""Test auth with token (non-existent user)"""
self.assertIsNone(

View File

@@ -1,6 +1,7 @@
"""authentik URL Configuration"""
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.urls import path
from authentik.core.api.application_entitlements import ApplicationEntitlementViewSet
@@ -18,7 +19,6 @@ from authentik.core.api.sources import (
from authentik.core.api.tokens import TokenViewSet
from authentik.core.api.transactional_applications import TransactionalApplicationView
from authentik.core.api.users import UserViewSet
from authentik.core.setup.views import SetupView
from authentik.core.views.apps import RedirectToAppLaunch
from authentik.core.views.debug import AccessDeniedView
from authentik.core.views.interface import (
@@ -35,7 +35,7 @@ from authentik.tenants.channels import TenantsAwareMiddleware
urlpatterns = [
path(
"",
RootRedirectView.as_view(),
login_required(RootRedirectView.as_view()),
name="root-redirect",
),
path(
@@ -62,11 +62,6 @@ urlpatterns = [
FlowInterfaceView.as_view(),
name="if-flow",
),
path(
"setup",
SetupView.as_view(),
name="setup",
),
# Fallback for WS
path("ws/outpost/<uuid:pk>/", InterfaceView.as_view(template_name="if/admin.html")),
path(

View File

@@ -3,7 +3,6 @@
from json import dumps
from typing import Any
from django.contrib.auth.mixins import AccessMixin
from django.http import HttpRequest
from django.http.response import HttpResponse
from django.shortcuts import redirect
@@ -15,13 +14,12 @@ from authentik.admin.tasks import LOCAL_VERSION
from authentik.api.v3.config import ConfigView
from authentik.brands.api import CurrentBrandSerializer
from authentik.brands.models import Brand
from authentik.core.apps import Setup
from authentik.core.models import UserTypes
from authentik.lib.config import CONFIG
from authentik.policies.denied import AccessDeniedResponse
class RootRedirectView(AccessMixin, RedirectView):
class RootRedirectView(RedirectView):
"""Root redirect view, redirect to brand's default application if set"""
pattern_name = "authentik_core:if-user"
@@ -42,10 +40,6 @@ class RootRedirectView(AccessMixin, RedirectView):
return None
def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
if not Setup.get():
return redirect("authentik_core:setup")
if not request.user.is_authenticated:
return self.handle_no_permission()
if redirect_response := RootRedirectView().redirect_to_app(request):
return redirect_response
return super().dispatch(request, *args, **kwargs)

View File

@@ -114,16 +114,15 @@ def certificate_discovery():
discovered = 0
for file in glob(CONFIG.get("cert_discovery_dir") + "/**", recursive=True):
path = Path(file)
if not path.exists() or path.is_dir():
if not path.exists():
continue
if path.is_dir():
continue
# For certbot setups, we want to ignore archive.
if "archive" in file:
continue
# Handle additionalOutputFormats from cert-manager gracefully
if path.name in ["ca.crt", "tls-combined.pem", "key.der"]:
continue
# Support certbot & kubernetes.io/tls directory structure
if path.name in ["fullchain.pem", "privkey.pem", "tls.crt", "tls.key"]:
# Support certbot's directory structure
if path.name in ["fullchain.pem", "privkey.pem"]:
cert_name = path.parent.name
else:
cert_name = path.name.replace(path.suffix, "")

View File

@@ -355,16 +355,6 @@ class TestCrypto(APITestCase):
subject_alt_names=[],
validity_days=3,
)
name3 = generate_id()
builder3 = CertificateBuilder(name3)
with self.assertRaises(ValueError):
builder3.save()
builder3.build(
subject_alt_names=[],
validity_days=3,
)
with TemporaryDirectory() as temp_dir:
with open(f"{temp_dir}/foo.pem", "w+", encoding="utf-8") as _cert:
_cert.write(builder.certificate)
@@ -375,8 +365,6 @@ class TestCrypto(APITestCase):
_cert.write(builder2.certificate)
with open(f"{temp_dir}/foo.bar/privkey.pem", "w+", encoding="utf-8") as _key:
_key.write(builder2.private_key)
with open(f"{temp_dir}/tls-combined.pem", "w+", encoding="utf-8") as _cert:
_cert.write(builder3.certificate)
with CONFIG.patch("cert_discovery_dir", temp_dir):
certificate_discovery.send()
keypair: CertificateKeyPair = CertificateKeyPair.objects.filter(
@@ -388,9 +376,6 @@ class TestCrypto(APITestCase):
self.assertTrue(
CertificateKeyPair.objects.filter(managed=MANAGED_DISCOVERED % "foo.bar").exists()
)
self.assertFalse(
CertificateKeyPair.objects.filter(managed=MANAGED_DISCOVERED % "tls-combined").exists()
)
def test_discovery_updating_same_private_key(self):
"""Test certificate discovery updating certs with matching private keys"""

View File

@@ -97,7 +97,7 @@ class DeviceViewSet(
def summary(self, request: Request) -> Response:
delta = now() - timedelta(hours=24)
unreachable = (
Device.objects.all()
Device.filter_not_expired()
.annotate(
latest_snapshot=Subquery(
DeviceFactSnapshot.objects.filter(connection__device=OuterRef("pk"))
@@ -110,7 +110,7 @@ class DeviceViewSet(
.count()
)
data = {
"total_count": Device.objects.all().count(),
"total_count": Device.filter_not_expired().count(),
"unreachable_count": unreachable,
# Currently not supported
"outdated_agent_count": 0,

View File

@@ -65,9 +65,7 @@ class AgentConnectorSerializer(ConnectorSerializer):
class MDMConfigSerializer(PassiveSerializer):
platform = ChoiceField(choices=OSFamily.choices)
enrollment_token = PrimaryKeyRelatedField(
queryset=EnrollmentToken.objects.including_expired().all()
)
enrollment_token = PrimaryKeyRelatedField(queryset=EnrollmentToken.objects.all())
def validate_platform(self, platform: OSFamily) -> OSFamily:
if platform not in [OSFamily.iOS, OSFamily.macOS, OSFamily.windows]:
@@ -138,7 +136,7 @@ class AgentConnectorViewSet(
device=device,
connector=token.connector,
)
DeviceToken.objects.including_expired().filter(device=connection).delete()
DeviceToken.objects.filter(device=connection).delete()
token = DeviceToken.objects.create(device=connection, expiring=False)
return Response(
{

View File

@@ -18,10 +18,7 @@ from authentik.rbac.decorators import permission_required
class EnrollmentTokenSerializer(ModelSerializer):
device_group_obj = DeviceAccessGroupSerializer(
source="device_group",
read_only=True,
required=False,
allow_null=True,
source="device_group", read_only=True, required=False
)
def __init__(self, *args, **kwargs) -> None:

View File

@@ -34,11 +34,9 @@ class AgentEnrollmentAuth(BaseAuthentication):
def authenticate(self, request: Request) -> tuple[User, Any] | None:
auth = get_authorization_header(request)
key = validate_auth(auth)
token = EnrollmentToken.objects.filter(key=key).first()
token = EnrollmentToken.filter_not_expired(key=key).first()
if not token:
raise PermissionDenied()
if not token.connector.enabled:
raise PermissionDenied()
CTX_AUTH_VIA.set("endpoint_token_enrollment")
return (DeviceUser(), token)
@@ -50,11 +48,9 @@ class AgentAuth(BaseAuthentication):
key = validate_auth(auth, format="bearer+agent")
if not key:
return None
device_token = DeviceToken.objects.filter(key=key).first()
device_token = DeviceToken.filter_not_expired(key=key).first()
if not device_token:
raise PermissionDenied()
if not device_token.device.connector.enabled:
raise PermissionDenied()
if device_token.device.device.is_expired:
raise PermissionDenied()
CTX_AUTH_VIA.set("endpoint_token")
@@ -91,7 +87,7 @@ class DeviceAuthFedAuthentication(BaseAuthentication):
if not raw_token:
LOGGER.warning("Missing token")
return None
device = Device.objects.filter(name=request.query_params.get("device")).first()
device = Device.filter_not_expired(name=request.query_params.get("device")).first()
if not device:
LOGGER.warning("Couldn't find device")
return None

View File

@@ -138,7 +138,13 @@ class AgentConnectorController(BaseController[AgentConnector]):
"AllowDeviceIdentifiersInAttestation": True,
"AuthenticationMethod": "UserSecureEnclaveKey",
"EnableAuthorization": True,
"EnableCreateUserAtLogin": True,
"FileVaultPolicy": ["RequireAuthentication"],
"LoginPolicy": ["RequireAuthentication"],
"NewUserAuthorizationMode": "Standard",
"UnlockPolicy": ["RequireAuthentication"],
"UseSharedDeviceKeys": True,
"UserAuthorizationMode": "Standard",
},
},
],

View File

@@ -1,54 +0,0 @@
# Generated by Django 5.2.12 on 2026-03-06 14:38
import django.db.models.deletion
import uuid
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"authentik_endpoints_connectors_agent",
"0004_agentconnector_challenge_idle_timeout_and_more",
),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name="AppleIndependentSecureEnclave",
fields=[
("created", models.DateTimeField(auto_now_add=True)),
("last_updated", models.DateTimeField(auto_now=True)),
(
"name",
models.CharField(
help_text="The human-readable name of this device.", max_length=64
),
),
(
"confirmed",
models.BooleanField(default=True, help_text="Is this device ready for use?"),
),
("last_used", models.DateTimeField(null=True)),
("uuid", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
("apple_secure_enclave_key", models.TextField()),
("apple_enclave_key_id", models.TextField()),
("device_type", models.TextField()),
(
"user",
models.ForeignKey(
help_text="The user that this device belongs to.",
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"verbose_name": "Apple Independent Secure Enclave",
"verbose_name_plural": "Apple Independent Secure Enclaves",
},
),
]

View File

@@ -19,7 +19,6 @@ from authentik.flows.stage import StageView
from authentik.lib.generators import generate_key
from authentik.lib.models import InternallyManagedMixin, SerializerModel
from authentik.lib.utils.time import timedelta_string_validator
from authentik.stages.authenticator.models import Device as Authenticator
if TYPE_CHECKING:
from authentik.endpoints.connectors.agent.controller import AgentConnectorController
@@ -173,17 +172,3 @@ class AppleNonce(InternallyManagedMixin, ExpiringModel):
class Meta(ExpiringModel.Meta):
verbose_name = _("Apple Nonce")
verbose_name_plural = _("Apple Nonces")
class AppleIndependentSecureEnclave(Authenticator):
"""A device-independent secure enclave key, used by Tap-to-login"""
uuid = models.UUIDField(primary_key=True, default=uuid4)
apple_secure_enclave_key = models.TextField()
apple_enclave_key_id = models.TextField()
device_type = models.TextField()
class Meta:
verbose_name = _("Apple Independent Secure Enclave")
verbose_name_plural = _("Apple Independent Secure Enclaves")

View File

@@ -53,11 +53,11 @@ class EndpointAgentChallengeResponse(ChallengeResponse):
except PyJWTError as exc:
self.stage.logger.warning("Could not parse response", exc=exc)
raise ValidationError("Invalid challenge response") from None
device = Device.objects.filter(identifier=raw["iss"]).first()
device = Device.filter_not_expired(identifier=raw["iss"]).first()
if not device:
self.stage.logger.warning("Could not find device for challenge")
raise ValidationError("Invalid challenge response")
for token in DeviceToken.objects.filter(
for token in DeviceToken.filter_not_expired(
device__device=device,
device__connector=self.stage.executor.current_stage.connector,
).values_list("key", flat=True):

Some files were not shown because too many files have changed in this diff Show More