Compare commits

..

6 Commits

Author SHA1 Message Date
Teffen Ellis
d4a8703329 Fix theming. 2026-03-11 03:53:02 +01:00
Teffen Ellis
3db9450651 Fix invokers, lazy load. 2026-03-11 03:53:01 +01:00
Teffen Ellis
f55e53ea6a Fix url parameters, wizards. 2026-03-11 03:53:01 +01:00
Teffen Ellis
c2261d2528 Fix portal elements not using dialog scope. 2026-03-11 03:53:01 +01:00
Teffen Ellis
f5a772abbc Flesh out lazy modal. 2026-03-11 03:53:01 +01:00
Teffen Ellis
7bfd40f555 Flesh out ak-modal, about modal. 2026-03-11 03:53:00 +01:00
276 changed files with 4439 additions and 15936 deletions

View File

@@ -1,2 +0,0 @@
[build]
rustflags = ["--cfg", "tokio_unstable"]

View File

@@ -1,47 +0,0 @@
[licenses]
allow = [
"Apache-2.0 WITH LLVM-exception",
"Apache-2.0",
"BSD-3-Clause",
"CC0-1.0",
"CDLA-Permissive-2.0",
"ISC",
"MIT",
"MPL-2.0",
"OpenSSL",
"Unicode-3.0",
"Zlib",
]
[licenses.private]
ignore = true
[bans]
multiple-versions = "allow"
wildcards = "deny"
[bans.workspace-dependencies]
duplicates = "deny"
include-path-dependencies = true
unused = "deny"
# No non-FIPS compliant dependencies
[[bans.deny]]
name = "native-tls"
[[bans.deny]]
name = "openssl"
[[bans.deny]]
name = "openssl-sys"
[[bans.deny]]
name = "ring"
[[bans.features]]
allow = [
"alloc",
"aws-lc-sys",
"default",
"fips",
"prebuilt-nasm",
"ring-io",
"ring-sig-verify",
]
name = "aws-lc-rs"
exact = true

View File

@@ -1,16 +0,0 @@
comment_width = 100
format_code_in_doc_comments = true
format_strings = true
group_imports = "StdExternalCrate"
hex_literal_case = "Lower"
imports_granularity = "Crate"
max_width = 100
newline_style = "Unix"
normalize_comments = true
normalize_doc_attributes = true
reorder_impl_items = true
style_edition = "2024"
use_field_init_shorthand = true
use_try_shorthand = true
where_single_line = true
wrap_comments = true

View File

@@ -4,7 +4,7 @@ description: "Setup authentik testing environment"
inputs:
dependencies:
description: "List of dependencies to setup"
default: "system,python,rust,node,go,runtime"
default: "system,python,node,go,runtime"
postgresql_version:
description: "Optional postgresql image tag"
default: "16"
@@ -22,7 +22,7 @@ runs:
sudo rm -rf /usr/local/lib/android
- name: Install uv
if: ${{ contains(inputs.dependencies, 'python') }}
uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v5
uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # v5
with:
enable-cache: true
- name: Setup python
@@ -34,20 +34,6 @@ runs:
if: ${{ contains(inputs.dependencies, 'python') }}
shell: bash
run: uv sync --all-extras --dev --frozen
- name: Setup rust (stable)
if: ${{ contains(inputs.dependencies, 'rust') && !contains(inputs.dependencies, 'rust-nightly') }}
uses: actions-rust-lang/setup-rust-toolchain@a0b538fa0b742a6aa35d6e2c169b4bd06d225a98 # v1
- name: Setup rust (nightly)
if: ${{ contains(inputs.dependencies, 'rust-nightly') }}
uses: actions-rust-lang/setup-rust-toolchain@a0b538fa0b742a6aa35d6e2c169b4bd06d225a98 # v1
with:
toolchain: nightly
components: rustfmt
- name: Setup rust dependencies
if: ${{ contains(inputs.dependencies, 'rust') }}
uses: taiki-e/install-action@64c5c20c872907b6f7cd50994ac189e7274160f2 # v2
with:
tool: cargo-deny cargo-machete cargo-llvm-cov nextest
- name: Setup node (web)
if: ${{ contains(inputs.dependencies, 'node') }}
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v4

View File

@@ -2,22 +2,18 @@ name: "Process test results"
description: Convert test results to JUnit, add them to GitHub Actions and codecov
inputs:
files:
description: Comma-separated explicit list of files to upload
flags:
description: Codecov flags
runs:
using: "composite"
steps:
- uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
- uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5
with:
files: ${{ inputs.files }}
flags: ${{ inputs.flags }}
use_oidc: true
- uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
- uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5
with:
files: ${{ inputs.files }}
flags: ${{ inputs.flags }}
use_oidc: true
report_type: test_results

View File

@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}

View File

@@ -41,7 +41,7 @@ jobs:
- working-directory: website/
name: Install Dependencies
run: npm ci
- uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v4
- uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v4
with:
path: |
${{ github.workspace }}/website/api/.docusaurus
@@ -67,7 +67,7 @@ jobs:
- build
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v5
- uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v5
with:
name: api-docs
path: website/api/build

View File

@@ -16,7 +16,6 @@ env:
POSTGRES_DB: authentik
POSTGRES_USER: authentik
POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
RUSTFLAGS: "-Dwarnings"
permissions:
# Needed for checkout
@@ -29,36 +28,20 @@ jobs:
strategy:
fail-fast: false
matrix:
include:
- job: bandit
deps: python
- job: black
deps: python
- job: spellcheck
deps: node
- job: pending-migrations
deps: python,runtime
- job: ruff
deps: python
- job: mypy
deps: python
- job: cargo-deny
deps: rust
- job: cargo-machete
deps: rust
- job: clippy
deps: rust
- job: rustfmt
deps: rust-nightly
job:
- bandit
- black
- spellcheck
- pending-migrations
- ruff
- mypy
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
uses: ./.github/actions/setup
with:
dependencies: ${{ matrix.deps }}
- name: run job
run: make ci-lint-${{ matrix.job }}
run: uv run make ci-${{ matrix.job }}
test-gen-build:
runs-on: ubuntu-latest
steps:
@@ -144,7 +127,6 @@ jobs:
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
PROMETHEUS_MULTIPROC_DIR: /tmp
run: |
uv run make ci-test
- uses: ./.github/actions/test-results
@@ -174,7 +156,6 @@ jobs:
CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}
CI_RUN_ID: ${{ matrix.run_id }}
CI_TOTAL_RUNS: "5"
PROMETHEUS_MULTIPROC_DIR: /tmp
run: |
uv run make ci-test
- uses: ./.github/actions/test-results
@@ -191,8 +172,6 @@ jobs:
- name: Create k8s Kind Cluster
uses: helm/kind-action@ef37e7f390d99f746eb8b610417061a60e82a6cc # v1.14.0
- name: run integration
env:
PROMETHEUS_MULTIPROC_DIR: /tmp
run: |
uv run coverage run manage.py test tests/integration
uv run coverage xml
@@ -236,7 +215,7 @@ jobs:
run: |
docker compose -f tests/e2e/compose.yml up -d --quiet-pull
- id: cache-web
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v4
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v4
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
@@ -249,8 +228,6 @@ jobs:
npm run build
npm run build:sfe
- name: run e2e
env:
PROMETHEUS_MULTIPROC_DIR: /tmp
run: |
uv run coverage run manage.py test ${{ matrix.job.glob }}
uv run coverage xml
@@ -281,7 +258,7 @@ jobs:
run: |
docker compose -f tests/openid_conformance/compose.yml up -d --quiet-pull
- id: cache-web
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v4
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v4
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
@@ -294,8 +271,6 @@ jobs:
npm run build
npm run build:sfe
- name: run conformance
env:
PROMETHEUS_MULTIPROC_DIR: /tmp
run: |
uv run coverage run manage.py test ${{ matrix.job.glob }}
uv run coverage xml
@@ -308,29 +283,6 @@ jobs:
with:
name: conformance-certification-${{ matrix.job.name }}
path: tests/openid_conformance/exports/
test-rust:
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
uses: ./.github/actions/setup
with:
dependencies: rust
- name: run tests
run: |
cargo llvm-cov --no-report nextest --workspace
cargo llvm-cov report --codecov --output-path target/llvm-cov-target/rust.json
- uses: ./.github/actions/test-results
if: ${{ always() }}
with:
files: target/llvm-cov-target/rust.json
flags: rust
- if: ${{ !cancelled() }}
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: test-rust
path: target/llvm-cov-target/rust.json
ci-core-mark:
if: always()
needs:

View File

@@ -29,7 +29,7 @@ jobs:
github.event.pull_request.head.repo.full_name == github.repository)
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
@@ -38,7 +38,7 @@ jobs:
token: ${{ steps.generate_token.outputs.token }}
- name: Compress images
id: compress
uses: calibreapp/image-actions@03c976c29803442fc4040a9de5509669e7759b81 # main
uses: calibreapp/image-actions@d9c8ee5c3dc52ae4622c82ead88d658f4b16b65f # main
with:
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
compressOnly: ${{ github.event_name != 'pull_request' }}

View File

@@ -16,7 +16,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}

View File

@@ -10,7 +10,7 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
if: ${{ env.GH_APP_ID != '' }}
with:
app-id: ${{ secrets.GH_APP_ID }}

View File

@@ -16,7 +16,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}

View File

@@ -29,7 +29,7 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
@@ -57,7 +57,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}

View File

@@ -180,7 +180,7 @@ jobs:
export CGO_ENABLED=0
go build -tags=outpost_static_embed -v -o ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }} ./cmd/${{ matrix.type }}
- name: Upload binaries to release
uses: svenstaro/upload-release-action@29e53e917877a24fad85510ded594ab3c9ca12de # v2
uses: svenstaro/upload-release-action@b98a3b12e86552593f3e4e577ca8a62aa2f3f22b # v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: ./authentik-outpost-${{ matrix.type }}_${{ matrix.goos }}_${{ matrix.goarch }}

View File

@@ -67,7 +67,7 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
@@ -96,7 +96,7 @@ jobs:
git tag "version/${{ inputs.version }}" HEAD -m "version/${{ inputs.version }}"
git push --follow-tags
- name: Create Release
uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2.6.1
uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2.5.0
with:
token: "${{ steps.app-token.outputs.token }}"
tag_name: "version/${{ inputs.version }}"
@@ -115,7 +115,7 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
@@ -157,7 +157,7 @@ jobs:
steps:
- id: app-token
name: Generate app token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}

View File

@@ -15,7 +15,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: generate_token
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}

View File

@@ -21,7 +21,7 @@ jobs:
steps:
- id: generate_token
if: ${{ github.event_name != 'pull_request' }}
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v2
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}

18
.gitignore vendored
View File

@@ -195,24 +195,6 @@ pyvenv.cfg
pip-selfcheck.json
# End of https://www.gitignore.io/api/python,django
# Created by https://www.toptal.com/developers/gitignore/api/rust
# Edit at https://www.toptal.com/developers/gitignore?templates=rust
### Rust ###
# Generated by Cargo
# will have compiled files and executables
debug/
target/
# These are backup files generated by rustfmt
**/*.rs.bk
# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
# End of https://www.toptal.com/developers/gitignore/api/rust
/static/
local.env.yml

View File

@@ -3,7 +3,6 @@
# Backend
authentik/ @goauthentik/backend
blueprints/ @goauthentik/backend
src/ @goauthentik/backend
cmd/ @goauthentik/backend
internal/ @goauthentik/backend
lifecycle/ @goauthentik/backend
@@ -12,12 +11,8 @@ scripts/ @goauthentik/backend
tests/ @goauthentik/backend
pyproject.toml @goauthentik/backend
uv.lock @goauthentik/backend
Cargo.toml @goauthentik/backend
Cargo.lock @goauthentik/backend
go.mod @goauthentik/backend
go.sum @goauthentik/backend
.config/ @goauthentik/backend
rust-toolchain.toml @goauthentik/backend
# Infrastructure
.github/ @goauthentik/infrastructure
lifecycle/aws/ @goauthentik/infrastructure

5181
Cargo.lock generated

File diff suppressed because it is too large. [Load Diff]

View File

@@ -1,296 +0,0 @@
[workspace]
members = [".", "website/scripts/docsmg"]
resolver = "3"
[workspace.package]
authors = ["authentik Team <hello@goauthentik.io>"]
edition = "2024"
readme = "README.md"
homepage = "https://goauthentik.io"
repository = "https://github.com/goauthentik/authentik.git"
license-file = "LICENSE"
publish = false
[workspace.dependencies]
arc-swap = "1.8.2"
argh = "0.1.17"
async-trait = "0.1.89"
aws-lc-rs = { version = "1.16.1", features = ["fips"] }
axum = { version = "0.8.8", features = ["http2", "macros", "ws"] }
axum-server = { version = "0.8.0", features = ["tls-rustls-no-provider"] }
bytes = "1.11.1"
chrono = "0.4.44"
clap = { version = "4.5.59", features = ["derive", "env"] }
client-ip = { version = "0.2.1", features = ["forwarded-header"] }
color-eyre = "0.6.5"
colored = "3.1.1"
config = { version = "0.15.19", default-features = false, features = [
"yaml",
"async",
] }
console-subscriber = "0.5.0"
dotenvy = "0.15.7"
durstr = "0.4.0"
eyre = "0.6.12"
forwarded-header-value = "0.1.1"
futures = "0.3.32"
glob = "0.3.3"
http-body-util = "0.1.3"
hyper = "1.8.1"
hyper-unix-socket = "0.3.0"
hyper-util = "0.1.20"
ipnet = { version = "2.12.0", features = ["serde"] }
# See https://github.com/mladedav/json-subscriber/pull/23
json-subscriber = { git = "https://github.com/rissson/json-subscriber.git", rev = "950ad7cb887a0a14fd5cb8afb8e76db1f456c032" }
jsonwebtoken = { version = "10.3.0", default-features = false, features = [
"aws_lc_rs",
] }
metrics = "0.24.3"
metrics-exporter-prometheus = { version = "0.18.1", default-features = false }
nix = { version = "0.31.2", features = ["hostname", "signal"] }
notify = "8.2.0"
pem = "3.0.6"
pin-project-lite = "0.2.17"
pyo3 = "0.28.2"
percent-encoding = "2.3.2"
rcgen = { version = "0.14.7", default-features = false, features = [
"aws_lc_rs",
"fips",
] }
regex = "1.12.3"
rustls = { version = "0.23.37", features = ["fips"] }
sentry = { version = "0.47.0", default-features = false, features = [
"backtrace",
"contexts",
"debug-images",
"panic",
"rustls",
"reqwest",
"tower",
"tracing",
] }
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
sqlx = { version = "0.8.6", default-features = false, features = [
"runtime-tokio",
"tls-rustls-aws-lc-rs",
"postgres",
"derive",
"macros",
"uuid",
"chrono",
"ipnet",
"json",
] }
time = "0.3.47"
thiserror = "2.0.18"
tokio = { version = "1.50.0", features = ["full"] }
tokio-rustls = "0.26.4"
tokio-tungstenite = "0.28.0"
tokio-util = "0.7.18"
tower = "0.5.3"
tower-http = { version = "0.6.8", features = [
"compression-br",
"compression-deflate",
"compression-gzip",
"compression-zstd",
"fs",
"timeout",
] }
tower-service = "0.3.3"
tracing = "0.1.44"
tracing-error = "0.2.1"
tracing-subscriber = { version = "0.3.22", features = [
"env-filter",
"json",
"tracing-log",
] }
url = "2.5.8"
uuid = { version = "1.22.0", features = ["v4"] }
[profile.dev.package.backtrace]
opt-level = 3
[profile.release]
lto = true
debug = 2
[workspace.lints.rust]
ambiguous_negative_literals = "warn"
closure_returning_async_block = "warn"
macro_use_extern_crate = "deny"
# must_not_suspend = "deny", unstable see https://github.com/rust-lang/rust/issues/83310
non_ascii_idents = "deny"
redundant_imports = "warn"
semicolon_in_expressions_from_macros = "warn"
trivial_casts = "warn"
trivial_numeric_casts = "warn"
unit_bindings = "warn"
unreachable_pub = "warn"
unsafe_code = "deny"
unused_extern_crates = "warn"
unused_import_braces = "warn"
unused_lifetimes = "warn"
unused_macro_rules = "warn"
unused_qualifications = "warn"
[workspace.lints.rustdoc]
unescaped_backticks = "warn"
[workspace.lints.clippy]
### enable all lints
cargo = { priority = -1, level = "warn" }
complexity = { priority = -1, level = "warn" }
correctness = { priority = -1, level = "warn" }
nursery = { priority = -1, level = "warn" }
pedantic = { priority = -1, level = "warn" }
perf = { priority = -1, level = "warn" }
# Those are too restrictive and disabled by default, however we enable some below
# restriction = { priority = -1, level = "warn" }
style = { priority = -1, level = "warn" }
suspicious = { priority = -1, level = "warn" }
### and disable the ones we don't want
### cargo group
multiple_crate_versions = "allow"
### pedantic group
redundant_closure_for_method_calls = "allow"
struct_field_names = "allow"
too_many_lines = "allow"
### nursery
missing_const_for_fn = "allow"
redundant_pub_crate = "allow"
option_if_let_else = "allow"
### restriction group
allow_attributes = "warn"
allow_attributes_without_reason = "warn"
as_conversions = "warn"
as_pointer_underscore = "warn"
as_underscore = "warn"
assertions_on_result_states = "warn"
clone_on_ref_ptr = "warn"
create_dir = "warn"
dbg_macro = "warn"
default_numeric_fallback = "warn"
disallowed_script_idents = "warn"
empty_drop = "warn"
empty_enum_variants_with_brackets = "warn"
empty_structs_with_brackets = "warn"
error_impl_error = "warn"
exit = "warn"
filetype_is_file = "warn"
float_cmp_const = "warn"
fn_to_numeric_cast_any = "warn"
get_unwrap = "warn"
if_then_some_else_none = "warn"
impl_trait_in_params = "warn"
infinite_loop = "warn"
lossy_float_literal = "warn"
map_with_unused_argument_over_ranges = "warn"
mem_forget = "warn"
missing_asserts_for_indexing = "warn"
missing_trait_methods = "warn"
mixed_read_write_in_expression = "warn"
mutex_atomic = "warn"
mutex_integer = "warn"
needless_raw_strings = "warn"
non_zero_suggestions = "warn"
panic_in_result_fn = "warn"
pathbuf_init_then_push = "warn"
print_stdout = "warn"
rc_buffer = "warn"
redundant_test_prefix = "warn"
redundant_type_annotations = "warn"
ref_patterns = "warn"
renamed_function_params = "warn"
rest_pat_in_fully_bound_structs = "warn"
return_and_then = "warn"
same_name_method = "warn"
semicolon_inside_block = "warn"
str_to_string = "warn"
string_add = "warn"
suspicious_xor_used_as_pow = "warn"
tests_outside_test_module = "warn"
todo = "warn"
try_err = "warn"
undocumented_unsafe_blocks = "warn"
unimplemented = "warn"
unnecessary_safety_comment = "warn"
unnecessary_safety_doc = "warn"
unnecessary_self_imports = "warn"
unneeded_field_pattern = "warn"
unseparated_literal_suffix = "warn"
unused_result_ok = "warn"
unused_trait_names = "warn"
unwrap_in_result = "warn"
unwrap_used = "warn"
verbose_file_reads = "warn"
[package]
name = "authentik"
version = "2026.5.0-rc1"
authors.workspace = true
edition.workspace = true
readme.workspace = true
homepage.workspace = true
repository.workspace = true
license-file.workspace = true
publish.workspace = true
[features]
default = ["core", "proxy"]
proxy = []
core = ["proxy", "dep:sqlx", "dep:pyo3"]
[dependencies]
arc-swap.workspace = true
argh.workspace = true
async-trait.workspace = true
aws-lc-rs.workspace = true
axum-server.workspace = true
axum.workspace = true
client-ip.workspace = true
color-eyre.workspace = true
config.workspace = true
console-subscriber.workspace = true
durstr.workspace = true
eyre.workspace = true
forwarded-header-value.workspace = true
futures.workspace = true
glob.workspace = true
http-body-util.workspace = true
hyper-unix-socket.workspace = true
hyper-util.workspace = true
hyper.workspace = true
ipnet.workspace = true
json-subscriber.workspace = true
jsonwebtoken.workspace = true
metrics.workspace = true
metrics-exporter-prometheus.workspace = true
nix.workspace = true
notify.workspace = true
pem.workspace = true
percent-encoding.workspace = true
pin-project-lite.workspace = true
pyo3 = { workspace = true, optional = true }
rcgen.workspace = true
rustls.workspace = true
sentry.workspace = true
serde.workspace = true
serde_json.workspace = true
sqlx = { workspace = true, optional = true }
thiserror.workspace = true
time.workspace = true
tokio-rustls.workspace = true
tokio-tungstenite.workspace = true
tokio-util.workspace = true
tokio.workspace = true
tower-http.workspace = true
tower.workspace = true
tracing-error.workspace = true
tracing-subscriber.workspace = true
tracing.workspace = true
url.workspace = true
uuid.workspace = true
[lints]
workspace = true

View File

@@ -23,7 +23,6 @@ BREW_LDFLAGS :=
BREW_CPPFLAGS :=
BREW_PKG_CONFIG_PATH :=
CARGO := cargo
UV := uv
# For macOS users, add the libxml2 installed from brew libxmlsec1 to the build path
@@ -70,26 +69,22 @@ help: ## Show this help
sort
@echo ""
go-test: ## Run the golang tests
go-test:
go test -timeout 0 -v -race -cover ./...
rust-test: ## Run the Rust tests
$(CARGO) nextest run --workspace
test: ## Run the server tests and produce a coverage report (locally)
$(UV) run coverage run manage.py test --keepdb $(or $(filter-out $@,$(MAKECMDGOALS)),authentik)
$(UV) run coverage html
$(UV) run coverage report
lint-fix: ## Lint and automatically fix errors in the python source code. Reports spelling errors.
lint-fix: lint-spellcheck ## Lint and automatically fix errors in the python source code. Reports spelling errors.
$(UV) run black $(PY_SOURCES)
$(UV) run ruff check --fix $(PY_SOURCES)
$(CARGO) +nightly fmt --all -- --config-path .cargo/rustfmt.toml
lint-spellcheck: ## Reports spelling errors.
npm run lint:spellcheck
lint: ci-lint-bandit ci-lint-mypy ci-lint-cargo-deny ci-lint-cargo-machete ## Lint the python and golang sources
lint: ci-bandit ci-mypy ## Lint the python and golang sources
golangci-lint run -v
core-install:
@@ -110,24 +105,12 @@ i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that requir
aws-cfn:
cd lifecycle/aws && npm i && $(UV) run npm run aws-cfn
run: ## Run the authentik server and worker, without auto reloading
$(UV) run ak allinone
run-watch: ## Run the authentik server and worker, with auto reloading
$(UV) run watchexec --on-busy-update=restart --stop-signal=SIGINT --exts py,rs --no-meta --notify -- ak allinone
run-server: ## Run the authentik server, without auto reloading
run-server: ## Run the main authentik server process
$(UV) run ak server
run-server-watch: ## Run the authentik server, with auto reloading
$(UV) run watchexec --on-busy-update=restart --stop-signal=SIGINT --exts py,rs --no-meta --notify -- ak server
run-worker: ## Run the authentik worker, without auto reloading
run-worker: ## Run the main authentik worker process
$(UV) run ak worker
run-worker-watch: ## Run the authentik worker, with auto reloading
$(UV) run watchexec --on-busy-update=restart --stop-signal=SIGINT --exts py,rs --no-meta --notify -- ak worker
core-i18n-extract:
$(UV) run ak makemessages \
--add-location file \
@@ -166,7 +149,7 @@ ifndef version
$(error Usage: make bump version=20xx.xx.xx )
endif
$(eval current_version := $(shell cat ${PWD}/internal/constants/VERSION))
$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' ${PWD}/pyproject.toml ${PWD}/Cargo.toml
$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' ${PWD}/pyproject.toml
$(SED_INPLACE) 's/^VERSION = ".*"/VERSION = "$(version)"/' ${PWD}/authentik/__init__.py
$(MAKE) gen-build gen-compose aws-cfn
$(SED_INPLACE) "s/\"${current_version}\"/\"$(version)\"/" ${PWD}/package.json ${PWD}/package-lock.json ${PWD}/web/package.json ${PWD}/web/package-lock.json
@@ -348,40 +331,27 @@ test-docker:
# which makes the YAML File a lot smaller
ci--meta-debug:
$(UV) run python -V || echo "No python installed"
$(CARGO) --version || echo "No rust installed"
node --version || echo "No node installed"
$(UV) run python -V
node --version
ci-lint-mypy: ci--meta-debug
ci-mypy: ci--meta-debug
$(UV) run mypy --strict $(PY_SOURCES)
ci-lint-black: ci--meta-debug
ci-black: ci--meta-debug
$(UV) run black --check $(PY_SOURCES)
ci-lint-ruff: ci--meta-debug
ci-ruff: ci--meta-debug
$(UV) run ruff check $(PY_SOURCES)
ci-lint-spellcheck: ci--meta-debug
ci-spellcheck: ci--meta-debug
npm run lint:spellcheck
ci-lint-bandit: ci--meta-debug
ci-bandit: ci--meta-debug
$(UV) run bandit -c pyproject.toml -r $(PY_SOURCES) -iii
ci-lint-pending-migrations: ci--meta-debug
ci-pending-migrations: ci--meta-debug
$(UV) run ak makemigrations --check
ci-lint-cargo-deny: ci--meta-debug
$(CARGO) deny --locked --workspace check --config .cargo/deny.toml
ci-lint-cargo-machete: ci--meta-debug
$(CARGO) machete
ci-lint-rustfmt: ci--meta-debug
$(CARGO) +nightly fmt --all --check -- --config-path .cargo/rustfmt.toml
ci-lint-clippy: ci--meta-debug
$(CARGO) clippy -- -D warnings
ci-test: ci--meta-debug
$(UV) run coverage run manage.py test --keepdb authentik
$(UV) run coverage report

View File

@@ -92,7 +92,6 @@ class FileBackend(ManageableBackend):
"nbf": now() - timedelta(seconds=15),
},
key=sha256(f"{settings.SECRET_KEY}:{self.usage}".encode()).hexdigest(),
# Must match crates/authentik-server/src/static.rs
algorithm="HS256",
)
url = f"{prefix}/files/{path}?token={token}"

View File

@@ -1,5 +1,7 @@
"""Apply blueprint from commandline"""
from sys import exit as sys_exit
from django.core.management.base import BaseCommand, no_translations
from structlog.stdlib import get_logger
@@ -26,7 +28,7 @@ class Command(BaseCommand):
self.stderr.write("Blueprint invalid")
for log in logs:
self.stderr.write(f"\t{log.logger}: {log.event}: {log.attributes}")
raise RuntimeError("Blueprint invalid")
sys_exit(1)
importer.apply()
def add_arguments(self, parser):

View File

@@ -1115,11 +1115,7 @@ class ExpiringModel(models.Model):
default the object is deleted. This is less efficient compared
to bulk deleting objects, but classes like Token() need to change
values instead of being deleted."""
try:
return self.delete(*args, **kwargs)
except self.DoesNotExist:
# Object has already been deleted, so this should be fine
return None
return self.delete(*args, **kwargs)
@classmethod
def filter_not_expired(cls, **kwargs) -> QuerySet[Self]:

View File

@@ -1,101 +0,0 @@
"""Test interface view redirect behavior by user type"""
from django.test import TestCase
from django.urls import reverse
from authentik.brands.models import Brand
from authentik.core.models import Application, UserTypes
from authentik.core.tests.utils import create_test_brand, create_test_user
class TestInterfaceRedirects(TestCase):
"""Test RootRedirectView and BrandDefaultRedirectView redirect logic by user type"""
def setUp(self):
self.app = Application.objects.create(name="test-app", slug="test-app")
self.brand: Brand = create_test_brand(default_application=self.app)
def _assert_redirects_to_app(self, url_name: str, user_type: UserTypes):
user = create_test_user(type=user_type)
self.client.force_login(user)
response = self.client.get(reverse(f"authentik_core:{url_name}"))
self.assertRedirects(
response,
reverse(
"authentik_core:application-launch", kwargs={"application_slug": self.app.slug}
),
fetch_redirect_response=False,
)
def _assert_no_redirect(self, url_name: str, user_type: UserTypes):
"""Internal users should not be redirected away."""
user = create_test_user(type=user_type)
self.client.force_login(user)
response = self.client.get(reverse(f"authentik_core:{url_name}"))
# Internal users get a 200 (rendered template) or redirect to if-user, not to the app
app_url = reverse(
"authentik_core:application-launch", kwargs={"application_slug": self.app.slug}
)
self.assertNotEqual(response.get("Location"), app_url)
# --- RootRedirectView ---
def test_root_redirect_external_user(self):
"""External users are redirected to the default app from root"""
self._assert_redirects_to_app("root-redirect", UserTypes.EXTERNAL)
def test_root_redirect_service_account(self):
"""Service accounts are redirected to the default app from root"""
self._assert_redirects_to_app("root-redirect", UserTypes.SERVICE_ACCOUNT)
def test_root_redirect_internal_service_account(self):
"""Internal service accounts are redirected to the default app from root"""
self._assert_redirects_to_app("root-redirect", UserTypes.INTERNAL_SERVICE_ACCOUNT)
def test_root_redirect_internal_user(self):
"""Internal users are NOT redirected to the app from root"""
self._assert_no_redirect("root-redirect", UserTypes.INTERNAL)
# --- BrandDefaultRedirectView (if/user/) ---
def test_if_user_external_user(self):
"""External users are redirected to the default app from if/user/"""
self._assert_redirects_to_app("if-user", UserTypes.EXTERNAL)
def test_if_user_service_account(self):
"""Service accounts are redirected to the default app from if/user/"""
self._assert_redirects_to_app("if-user", UserTypes.SERVICE_ACCOUNT)
def test_if_user_internal_service_account(self):
"""Internal service accounts are redirected to the default app from if/user/"""
self._assert_redirects_to_app("if-user", UserTypes.INTERNAL_SERVICE_ACCOUNT)
def test_if_user_internal_user(self):
"""Internal users are NOT redirected to the app from if/user/"""
self._assert_no_redirect("if-user", UserTypes.INTERNAL)
# --- BrandDefaultRedirectView (if/admin/) ---
def test_if_admin_service_account(self):
"""Service accounts are redirected to the default app from if/admin/"""
self._assert_redirects_to_app("if-admin", UserTypes.SERVICE_ACCOUNT)
def test_if_admin_internal_service_account(self):
"""Internal service accounts are redirected to the default app from if/admin/"""
self._assert_redirects_to_app("if-admin", UserTypes.INTERNAL_SERVICE_ACCOUNT)
def test_if_admin_internal_user(self):
"""Internal users are NOT redirected to the app from if/admin/"""
self._assert_no_redirect("if-admin", UserTypes.INTERNAL)
# --- No default app set ---
def test_service_account_no_default_app_access_denied(self):
"""Service accounts get access denied when no default app is configured"""
self.brand.default_application = None
self.brand.save()
user = create_test_user(type=UserTypes.SERVICE_ACCOUNT)
self.client.force_login(user)
response = self.client.get(reverse("authentik_core:if-user"))
self.assertEqual(response.status_code, 200)
self.assertIn(b"Interface can only be accessed by internal users", response.content)

View File

@@ -26,11 +26,7 @@ class RootRedirectView(RedirectView):
query_string = True
def redirect_to_app(self, request: HttpRequest):
if request.user.is_authenticated and request.user.type in (
UserTypes.EXTERNAL,
UserTypes.SERVICE_ACCOUNT,
UserTypes.INTERNAL_SERVICE_ACCOUNT,
):
if request.user.is_authenticated and request.user.type == UserTypes.EXTERNAL:
brand: Brand = request.brand
if brand.default_application:
return redirect(
@@ -66,11 +62,7 @@ class BrandDefaultRedirectView(InterfaceView):
"""By default redirect to default app"""
def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
if request.user.is_authenticated and request.user.type in (
UserTypes.EXTERNAL,
UserTypes.SERVICE_ACCOUNT,
UserTypes.INTERNAL_SERVICE_ACCOUNT,
):
if request.user.is_authenticated and request.user.type == UserTypes.EXTERNAL:
brand: Brand = request.brand
if brand.default_application:
return redirect(

View File

@@ -44,6 +44,3 @@ class BaseController[T: "Connector"]:
def stage_view_authentication(self) -> StageView | None:
return None
def sync_endpoints(self):
raise NotImplementedError

View File

@@ -162,11 +162,8 @@ class Connector(ScheduledModel, SerializerModel):
@property
def schedule_specs(self) -> list[ScheduleSpec]:
from authentik.endpoints.controller import Capabilities
from authentik.endpoints.tasks import endpoints_sync
if Capabilities.ENROLL_AUTOMATIC_API not in self.controller(self).capabilities():
return []
return [
ScheduleSpec(
actor=endpoints_sync,

View File

@@ -21,7 +21,7 @@ def endpoints_sync(connector_pk: Any):
return
controller = connector.controller
ctrl = controller(connector)
if Capabilities.ENROLL_AUTOMATIC_API not in ctrl.capabilities():
if Capabilities.AUTOMATIC_API not in ctrl.capabilities():
return
LOGGER.info("Syncing connector", connector=connector.name)
ctrl.sync_endpoints()

View File

@@ -1,35 +0,0 @@
from unittest.mock import PropertyMock, patch
from rest_framework.test import APITestCase
from authentik.endpoints.controller import BaseController, Capabilities
from authentik.endpoints.models import Connector
from authentik.endpoints.tasks import endpoints_sync
from authentik.lib.generators import generate_id
class TestEndpointTasks(APITestCase):
def test_agent_sync(self):
class controller(BaseController):
def capabilities(self):
return [Capabilities.ENROLL_AUTOMATIC_API]
def sync_endpoints(self):
pass
with patch.object(Connector, "controller", PropertyMock(return_value=controller)):
connector = Connector.objects.create(name=generate_id())
self.assertEqual(len(connector.schedule_specs), 1)
endpoints_sync.send(connector.pk).get_result(block=True)
def test_agent_no_sync(self):
class controller(BaseController):
def capabilities(self):
return []
with patch.object(Connector, "controller", PropertyMock(return_value=controller)):
connector = Connector.objects.create(name=generate_id())
self.assertEqual(len(connector.schedule_specs), 0)
endpoints_sync.send(connector.pk).get_result(block=True)

View File

@@ -63,7 +63,6 @@ class NotificationTransportSerializer(ModelSerializer):
"mode",
"mode_verbose",
"webhook_url",
"webhook_ca",
"webhook_mapping_body",
"webhook_mapping_headers",
"email_subject_prefix",

View File

@@ -1,26 +0,0 @@
# Generated by Django 5.2.12 on 2026-03-10 10:40
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("authentik_crypto", "0006_certificatekeypair_cert_expiry_and_more"),
("authentik_events", "0016_alter_event_action"),
]
operations = [
migrations.AddField(
model_name="notificationtransport",
name="webhook_ca",
field=models.ForeignKey(
default=None,
help_text="When set, the selected ceritifcate is used to validate the certificate of the webhook server.",
null=True,
on_delete=django.db.models.deletion.SET_DEFAULT,
to="authentik_crypto.certificatekeypair",
),
),
]

View File

@@ -28,7 +28,6 @@ from authentik.core.middleware import (
SESSION_KEY_IMPERSONATE_USER,
)
from authentik.core.models import ExpiringModel, Group, PropertyMapping, User
from authentik.crypto.models import CertificateKeyPair
from authentik.events.context_processors.base import get_context_processors
from authentik.events.utils import (
cleanse_dict,
@@ -42,7 +41,6 @@ from authentik.lib.sentry import SentryIgnoredException
from authentik.lib.utils.errors import exception_to_dict
from authentik.lib.utils.http import get_http_session
from authentik.lib.utils.time import timedelta_from_string
from authentik.outposts.docker_tls import DockerInlineTLS
from authentik.policies.models import PolicyBindingModel
from authentik.root.middleware import ClientIPMiddleware
from authentik.root.ws.consumer import build_user_group
@@ -328,16 +326,6 @@ class NotificationTransport(TasksModel, SerializerModel):
email_template = models.TextField(default=EmailTemplates.EVENT_NOTIFICATION)
webhook_url = models.TextField(blank=True, validators=[DomainlessURLValidator()])
webhook_ca = models.ForeignKey(
CertificateKeyPair,
null=True,
default=None,
on_delete=models.SET_DEFAULT,
help_text=_(
"When set, the selected ceritifcate is used to "
"validate the certificate of the webhook server."
),
)
webhook_mapping_body = models.ForeignKey(
"NotificationWebhookMapping",
on_delete=models.SET_DEFAULT,
@@ -421,29 +409,21 @@ class NotificationTransport(TasksModel, SerializerModel):
notification=notification,
)
)
def send(**kwargs):
try:
response = get_http_session().post(
self.webhook_url,
json=default_body,
headers=headers,
**kwargs,
)
response.raise_for_status()
except RequestException as exc:
raise NotificationTransportError(
exc.response.text if exc.response else str(exc)
) from exc
return [
response.status_code,
response.text,
]
if self.webhook_ca:
with DockerInlineTLS(self.webhook_ca, authentication_kp=None) as tls:
return send(verify=tls.ca_cert)
return send()
try:
response = get_http_session().post(
self.webhook_url,
json=default_body,
headers=headers,
)
response.raise_for_status()
except RequestException as exc:
raise NotificationTransportError(
exc.response.text if exc.response else str(exc)
) from exc
return [
response.status_code,
response.text,
]
def send_webhook_slack(self, notification: Notification) -> list[str]:
"""Send notification to slack or slack-compatible endpoints"""

View File

@@ -10,7 +10,6 @@ from requests_mock import Mocker
from authentik import authentik_full_version
from authentik.core.tests.utils import create_test_admin_user
from authentik.crypto.models import CertificateKeyPair
from authentik.events.api.notification_transports import NotificationTransportSerializer
from authentik.events.models import (
Event,
@@ -62,37 +61,6 @@ class TestEventTransports(TestCase):
},
)
def test_transport_webhook_ca_invalid_unset(self):
"""Test webhook transport"""
transport: NotificationTransport = NotificationTransport.objects.create(
name=generate_id(),
mode=TransportMode.WEBHOOK,
webhook_url="https://localhost:1234/test",
)
with Mocker() as mocker:
mocker.post("https://localhost:1234/test")
transport.send(self.notification)
self.assertEqual(mocker.call_count, 1)
self.assertTrue(mocker.request_history[0].verify)
def test_transport_webhook_ca(self):
"""Test webhook transport"""
kp = CertificateKeyPair.objects.create(
name=generate_id(),
certificate_data="foo",
)
transport: NotificationTransport = NotificationTransport.objects.create(
name=generate_id(),
mode=TransportMode.WEBHOOK,
webhook_url="https://localhost:1234/test",
webhook_ca=kp,
)
with Mocker() as mocker:
mocker.post("https://localhost:1234/test")
transport.send(self.notification)
self.assertEqual(mocker.call_count, 1)
self.assertIsNotNone(mocker.request_history[0].verify)
def test_transport_webhook_mapping(self):
"""Test webhook transport with custom mapping"""
mapping_body = NotificationWebhookMapping.objects.create(

View File

@@ -27,10 +27,8 @@
"layout": "{{ flow.layout }}",
};
</script>
{% endblock %}
{% block interface_stylesheet %}
<link rel="stylesheet" type="text/css" href="{% versioned_script 'dist/styles/flow-%v.css' %}" />
<link rel="stylesheet" type="text/css" href="{% versioned_script 'dist/styles/static-%v.css' %}" />
{% endblock %}
{% block head %}

View File

@@ -342,10 +342,10 @@ def django_db_config(config: ConfigLoader | None = None) -> dict:
"default": {
"ENGINE": "psqlextra.backend",
"HOST": config.get("postgresql.host"),
"PORT": config.get("postgresql.port"),
"NAME": config.get("postgresql.name"),
"USER": config.get("postgresql.user"),
"PASSWORD": config.get("postgresql.password"),
"NAME": config.get("postgresql.name"),
"PORT": config.get("postgresql.port"),
"OPTIONS": {
"sslmode": config.get("postgresql.sslmode"),
"sslrootcert": config.get("postgresql.sslrootcert"),
@@ -423,5 +423,4 @@ if __name__ == "__main__":
if len(argv) < 2: # noqa: PLR2004
print(dumps(CONFIG.raw, indent=4, cls=AttrEncoder))
else:
for arg in argv[1:]:
print(CONFIG.get(arg))
print(CONFIG.get(argv[-1]))

View File

@@ -17,13 +17,11 @@
postgresql:
host: localhost
port: 5432
user: authentik
password: "env://POSTGRES_PASSWORD"
name: authentik
user: authentik
port: 5432
password: "env://POSTGRES_PASSWORD"
sslmode: disable
conn_max_age: 60
conn_health_checks: false
use_pool: False
test:
name: test_authentik
@@ -34,18 +32,12 @@ postgresql:
# host: replica1.example.com
listen:
http:
- "[::]:9000"
https:
- "[::]:9443"
ldap:
- "[::]:3389"
ldaps:
- "[::]:6636"
radius:
- "[::]:1812"
metrics:
- "[::]:9300"
http: 0.0.0.0:9000
https: 0.0.0.0:9443
ldap: 0.0.0.0:3389
ldaps: 0.0.0.0:6636
radius: 0.0.0.0:1812
metrics: 0.0.0.0:9300
debug: 0.0.0.0:9900
debug_py: 0.0.0.0:9901
trusted_proxy_cidrs:
@@ -74,19 +66,6 @@ log_level: info
log:
http_headers:
- User-Agent
rust_log:
"console_subscriber": info
"h2": info
"hyper_util": warn
"mio": info
"notify": info
"reqwest": info
"runtime": info
"rustls": info
"sqlx": info
"sqlx_postgres": info
"tokio": info
"tungstenite": info
sessions:
unauthenticated_age: days=1
@@ -158,7 +137,8 @@ tenants:
blueprints_dir: /blueprints
web:
workers: 2
# No default here as it's set dynamically
# workers: 2
threads: 4
path: /
timeout_http_read_header: 5s
@@ -203,5 +183,3 @@ storage:
# backend: file # or s3
# file: {}
# s3: {}
skip_migrations: false

View File

@@ -41,7 +41,7 @@ def structlog_configure():
add_process_id,
add_tenant_information,
structlog.stdlib.PositionalArgumentsFormatter(),
structlog.processors.TimeStamper(fmt="iso", utc=True),
structlog.processors.TimeStamper(fmt="iso", utc=False),
structlog.processors.StackInfoRenderer(),
structlog.processors.ExceptionRenderer(
structlog.tracebacks.ExceptionDictTransformer(show_locals=CONFIG.get_bool("debug"))

View File

@@ -27,12 +27,6 @@ class DockerInlineTLS:
self.authentication_kp = authentication_kp
self._paths = []
def __enter__(self):
return self.write()
def __exit__(self, exc_type, exc, tb):
self.cleanup()
def write_file(self, name: str, contents: str) -> str:
"""Wrapper for mkstemp that uses fdopen"""
path = Path(gettempdir(), name)

View File

@@ -163,5 +163,4 @@ def outpost_pre_delete_cleanup(sender, instance: Outpost, **_):
@receiver(pre_delete, sender=AuthenticatedSession)
def outpost_logout_revoke(sender: type[AuthenticatedSession], instance: AuthenticatedSession, **_):
"""Catch logout by expiring sessions being deleted"""
if Outpost.objects.exists():
outpost_session_end.send(instance.session.session_key)
outpost_session_end.send(instance.session.session_key)

View File

@@ -7,6 +7,7 @@ For example: The 'dummy' policy is available at `authentik.policies.dummy`.
from prometheus_client import Gauge, Histogram
from authentik.blueprints.apps import ManagedAppConfig
from authentik.tenants.flags import Flag
GAUGE_POLICIES_CACHED = Gauge(
"authentik_policies_cached",
@@ -31,6 +32,12 @@ HIST_POLICIES_EXECUTION_TIME = Histogram(
)
class BufferedPolicyAccessViewFlag(Flag[bool], key="policies_buffered_access_view"):
default = False
visibility = "public"
class AuthentikPoliciesConfig(ManagedAppConfig):
"""authentik policies app config"""

View File

@@ -1,19 +1,29 @@
from django.http import Http404, HttpResponse
from django.test import TestCase
from django.urls import reverse
from authentik.blueprints.tests import apply_blueprint
from authentik.core.models import Application, Group, Provider
from authentik.core.tests.utils import (
RequestFactory,
create_test_brand,
create_test_flow,
create_test_user,
)
from authentik.flows.models import Flow, FlowDesignation
from authentik.flows.planner import FlowPlan
from authentik.flows.views.executor import SESSION_KEY_PLAN
from authentik.lib.generators import generate_id
from authentik.policies.apps import BufferedPolicyAccessViewFlag
from authentik.policies.models import PolicyBinding
from authentik.policies.views import (
QS_BUFFER_ID,
SESSION_KEY_BUFFER,
BufferedPolicyAccessView,
BufferView,
PolicyAccessView,
)
from authentik.tenants.flags import patch_flag
class TestPolicyViews(TestCase):
@@ -114,3 +124,71 @@ class TestPolicyViews(TestCase):
res = TestView.as_view()(req)
self.assertEqual(res.status_code, 302)
self.assertEqual(res.url, "/if/flow/default-authentication-flow/?next=%2F")
@patch_flag(BufferedPolicyAccessViewFlag, True)
def test_pav_buffer(self):
"""Test simple policy access view"""
provider = Provider.objects.create(
name=generate_id(),
)
app = Application.objects.create(name=generate_id(), slug=generate_id(), provider=provider)
flow = create_test_flow(FlowDesignation.AUTHENTICATION)
class TestView(BufferedPolicyAccessView):
def resolve_provider_application(self):
self.provider = provider
self.application = app
def get(self, *args, **kwargs):
return HttpResponse("foo")
req = self.factory.get("/")
req.session[SESSION_KEY_PLAN] = FlowPlan(flow.pk)
req.session.save()
res = TestView.as_view()(req)
self.assertEqual(res.status_code, 302)
self.assertTrue(res.url.startswith(reverse("authentik_policies:buffer")))
@patch_flag(BufferedPolicyAccessViewFlag, True)
@apply_blueprint("default/flow-default-authentication-flow.yaml")
def test_pav_buffer_skip(self):
"""Test simple policy access view (skip buffer)"""
provider = Provider.objects.create(
name=generate_id(),
)
app = Application.objects.create(name=generate_id(), slug=generate_id(), provider=provider)
flow = Flow.objects.get(slug="default-authentication-flow")
class TestView(BufferedPolicyAccessView):
def resolve_provider_application(self):
self.provider = provider
self.application = app
def get(self, *args, **kwargs):
return HttpResponse("foo")
req = self.factory.get("/?skip_buffer=true")
req.brand = create_test_brand(flow_authentication=flow)
req.session[SESSION_KEY_PLAN] = FlowPlan(flow.pk)
req.session.save()
res = TestView.as_view()(req)
self.assertEqual(res.status_code, 302)
self.assertTrue(
res.url.startswith(reverse("authentik_core:if-flow", kwargs={"flow_slug": flow.slug}))
)
def test_buffer(self):
"""Test buffer view"""
uid = generate_id()
req = self.factory.get(f"/?{QS_BUFFER_ID}={uid}")
ts = generate_id()
req.session[SESSION_KEY_BUFFER % uid] = {
"method": "get",
"body": {},
"url": f"/{ts}",
}
req.session.save()
res = BufferView.as_view()(req)
self.assertEqual(res.status_code, 200)
self.assertIn(ts, res.render().content.decode())

View File

@@ -1,10 +1,12 @@
"""authentik access helper classes"""
from typing import Any
from uuid import uuid4
from django.contrib import messages
from django.contrib.auth.mixins import AccessMixin
from django.http import Http404, HttpRequest, HttpResponse, QueryDict
from django.shortcuts import redirect
from django.urls import reverse
from django.utils.http import urlencode
from django.utils.translation import gettext as _
@@ -17,13 +19,16 @@ from authentik.flows.models import Flow, FlowDesignation
from authentik.flows.planner import (
PLAN_CONTEXT_APPLICATION,
PLAN_CONTEXT_POST,
FlowPlan,
FlowPlanner,
)
from authentik.flows.views.executor import (
SESSION_KEY_PLAN,
SESSION_KEY_POST,
ToDefaultFlow,
)
from authentik.lib.sentry import SentryIgnoredException
from authentik.policies.apps import BufferedPolicyAccessViewFlag
from authentik.policies.denied import AccessDeniedResponse
from authentik.policies.engine import PolicyEngine
from authentik.policies.models import PolicyBindingModel
@@ -189,3 +194,39 @@ class BufferView(TemplateView):
kwargs["check_auth_url"] = reverse("authentik_api:user-me")
kwargs["continue_url"] = url_with_qs(buffer["url"], **{QS_BUFFER_ID: buf_id})
return super().get_context_data(**kwargs)
class BufferedPolicyAccessView(PolicyAccessView):
"""PolicyAccessView which buffers access requests in case the user is not logged in"""
def handle_no_permission(self):
plan: FlowPlan | None = self.request.session.get(SESSION_KEY_PLAN)
if plan:
flow = Flow.objects.filter(pk=plan.flow_pk).first()
if not flow or flow.designation != FlowDesignation.AUTHENTICATION:
LOGGER.debug("Not buffering request, no flow or flow not for authentication")
return super().handle_no_permission()
if not plan:
LOGGER.debug("Not buffering request, no flow plan active")
return super().handle_no_permission()
if not BufferedPolicyAccessViewFlag.get():
return super().handle_no_permission()
if self.request.GET.get(QS_SKIP_BUFFER):
LOGGER.debug("Not buffering request, explicit skip")
return super().handle_no_permission()
buffer_id = str(uuid4())
LOGGER.debug("Buffering access request", bf_id=buffer_id)
self.request.session[SESSION_KEY_BUFFER % buffer_id] = {
"body": self.request.POST,
"url": self.request.build_absolute_uri(self.request.get_full_path()),
"method": self.request.method.lower(),
}
return redirect(
url_with_qs(reverse("authentik_policies:buffer"), **{QS_BUFFER_ID: buffer_id})
)
def dispatch(self, request, *args, **kwargs):
response = super().dispatch(request, *args, **kwargs)
if QS_BUFFER_ID in self.request.GET:
self.request.session.pop(SESSION_KEY_BUFFER % self.request.GET[QS_BUFFER_ID], None)
return response

View File

@@ -15,7 +15,7 @@ from authentik.common.oauth.constants import (
SCOPE_OPENID_PROFILE,
TOKEN_TYPE,
)
from authentik.core.models import USERNAME_MAX_LENGTH, Application, Group, User
from authentik.core.models import Application, Group
from authentik.core.tests.utils import create_test_cert, create_test_flow
from authentik.lib.generators import generate_id
from authentik.policies.models import PolicyBinding
@@ -27,7 +27,7 @@ from authentik.providers.oauth2.models import (
)
from authentik.providers.oauth2.tests.utils import OAuthTestCase
from authentik.providers.oauth2.views.jwks import JWKSView
from authentik.sources.oauth.models import OAuthSource, OAuthSourcePropertyMapping
from authentik.sources.oauth.models import OAuthSource
class TestTokenClientCredentialsJWTSource(OAuthTestCase):
@@ -220,10 +220,6 @@ class TestTokenClientCredentialsJWTSource(OAuthTestCase):
},
)
self.assertEqual(response.status_code, 200)
user = User.objects.filter(username=f"{self.provider.name}-foo").first()
self.assertIsNotNone(user)
body = loads(response.content.decode())
self.assertEqual(body["token_type"], TOKEN_TYPE)
_, alg = self.provider.jwt_key
@@ -237,54 +233,3 @@ class TestTokenClientCredentialsJWTSource(OAuthTestCase):
jwt["given_name"], "Autogenerated user from application test (client credentials JWT)"
)
self.assertEqual(jwt["preferred_username"], "test-foo")
def test_successful_mapping(self):
"""test successful"""
test_username = ("mapped-foo" + ("a" * 150))[:USERNAME_MAX_LENGTH]
mapping = OAuthSourcePropertyMapping.objects.create(
name="test-mapping",
expression="""return {
"email": oauth_userinfo.get("email"),
"name": oauth_userinfo.get("name"),
"username": oauth_userinfo.get("username"),
}""",
)
self.source.user_property_mappings.add(mapping)
token = self.helper_provider.encode(
{
"sub": "foo",
"email": "test-user@example.com",
"name": "Mapped Test User",
"username": "mapped-foo" + ("a" * 150),
"exp": datetime.now() + timedelta(hours=2),
}
)
response = self.client.post(
reverse("authentik_providers_oauth2:token"),
{
"grant_type": GRANT_TYPE_CLIENT_CREDENTIALS,
"scope": f"{SCOPE_OPENID} {SCOPE_OPENID_EMAIL} {SCOPE_OPENID_PROFILE}",
"client_id": self.provider.client_id,
"client_assertion_type": "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
"client_assertion": token,
},
)
self.assertEqual(response.status_code, 200)
user = User.objects.filter(username=test_username).first()
self.assertIsNotNone(user)
body = loads(response.content.decode())
self.assertEqual(body["token_type"], TOKEN_TYPE)
key_obj, alg = self.provider.jwt_key
jwt = decode(
body["access_token"],
key=key_obj.public_key(),
algorithms=[alg],
audience=self.provider.client_id,
)
self.assertEqual(jwt["email"], "test-user@example.com")
self.assertEqual(jwt["given_name"], "Mapped Test User")
self.assertEqual(jwt["preferred_username"], test_username)

View File

@@ -45,7 +45,7 @@ from authentik.flows.stage import PLAN_CONTEXT_PENDING_USER_IDENTIFIER, StageVie
from authentik.lib.utils.time import timedelta_from_string
from authentik.lib.views import bad_request_message
from authentik.policies.types import PolicyRequest
from authentik.policies.views import PolicyAccessView, RequestValidationError
from authentik.policies.views import BufferedPolicyAccessView, RequestValidationError
from authentik.providers.oauth2.errors import (
AuthorizeError,
ClientIdError,
@@ -338,7 +338,7 @@ class OAuthAuthorizationParams:
return code
class AuthorizationFlowInitView(PolicyAccessView):
class AuthorizationFlowInitView(BufferedPolicyAccessView):
"""OAuth2 Flow initializer, checks access to application and starts flow"""
params: OAuthAuthorizationParams

View File

@@ -45,7 +45,6 @@ from authentik.core.models import (
User,
UserTypes,
)
from authentik.core.sources.mapper import SourceMapper
from authentik.events.middleware import audit_ignore
from authentik.events.models import Event, EventAction
from authentik.events.signals import get_login_event
@@ -477,7 +476,7 @@ class TokenParams:
self.__check_policy_access(app, request, oauth_jwt=token)
if not provider:
self.__create_user_from_jwt(token, app, source, request)
self.__create_user_from_jwt(token, app, source)
method_args = {
"jwt": token,
@@ -531,30 +530,18 @@ class TokenParams:
raise TokenError("invalid_grant")
self.device_code = code
def __create_user_from_jwt(
self, token: dict[str, Any], app: Application, source: OAuthSource, request: HttpRequest
):
def __create_user_from_jwt(self, token: dict[str, Any], app: Application, source: OAuthSource):
"""Create user from JWT"""
with audit_ignore():
# Run the JWT payload through the core mapping engine
mapped = SourceMapper(source).build_object_properties(
User, request=request, info=token, oauth_userinfo=token
)
self.user, created = User.objects.update_or_create(
username=mapped.get("username", f"{self.provider.name}-{token.get('sub')}")[
:USERNAME_MAX_LENGTH
],
username=f"{self.provider.name}-{token.get('sub')}",
defaults={
"last_login": timezone.now(),
"name": mapped.get(
"name",
f"Autogenerated user from application {app.name} (client credentials JWT)",
"name": (
f"Autogenerated user from application {app.name} (client credentials JWT)"
),
"email": mapped.get("email", ""),
"path": source.get_user_path(),
"type": UserTypes.SERVICE_ACCOUNT,
"attributes": mapped.get("attributes", {}),
},
)
self.user.attributes[USER_ATTRIBUTE_GENERATED] = True

View File

@@ -0,0 +1,13 @@
"""Proxy provider signals"""
from django.db.models.signals import pre_delete
from django.dispatch import receiver
from authentik.core.models import AuthenticatedSession
from authentik.providers.proxy.tasks import proxy_on_logout
@receiver(pre_delete, sender=AuthenticatedSession)
def logout_proxy_revoke(sender: type[AuthenticatedSession], instance: AuthenticatedSession, **_):
"""Catch logout by expiring sessions being deleted"""
proxy_on_logout.send(instance.session.session_key)

View File

@@ -0,0 +1,25 @@
"""proxy provider tasks"""
from channels.layers import get_channel_layer
from django.utils.translation import gettext_lazy as _
from dramatiq.actor import actor
from authentik.outposts.consumer import build_outpost_group
from authentik.outposts.models import Outpost, OutpostType
from authentik.providers.oauth2.id_token import hash_session_key
@actor(description=_("Terminate session on Proxy outpost."))
def proxy_on_logout(session_id: str):
layer = get_channel_layer()
hashed_session_id = hash_session_key(session_id)
for outpost in Outpost.objects.filter(type=OutpostType.PROXY):
group = build_outpost_group(outpost.pk)
layer.group_send_blocking(
group,
{
"type": "event.provider.specific",
"sub_type": "logout",
"session_id": hashed_session_id,
},
)

View File

@@ -18,14 +18,14 @@ from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, FlowPlanner
from authentik.flows.stage import RedirectStage
from authentik.lib.utils.time import timedelta_from_string
from authentik.policies.engine import PolicyEngine
from authentik.policies.views import PolicyAccessView
from authentik.policies.views import BufferedPolicyAccessView
from authentik.providers.rac.models import ConnectionToken, Endpoint, RACProvider
from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
PLAN_CONNECTION_SETTINGS = "connection_settings"
class RACStartView(PolicyAccessView):
class RACStartView(BufferedPolicyAccessView):
"""Start a RAC connection by checking access and creating a connection token"""
endpoint: Endpoint

View File

@@ -15,7 +15,7 @@ from authentik.flows.models import in_memory_stage
from authentik.flows.planner import PLAN_CONTEXT_APPLICATION, PLAN_CONTEXT_SSO, FlowPlanner
from authentik.flows.views.executor import SESSION_KEY_POST
from authentik.lib.views import bad_request_message
from authentik.policies.views import PolicyAccessView
from authentik.policies.views import BufferedPolicyAccessView
from authentik.providers.saml.exceptions import CannotHandleAssertion
from authentik.providers.saml.models import SAMLBindings, SAMLProvider
from authentik.providers.saml.processors.authn_request_parser import AuthNRequestParser
@@ -35,7 +35,7 @@ from authentik.stages.consent.stage import (
LOGGER = get_logger()
class SAMLSSOView(PolicyAccessView):
class SAMLSSOView(BufferedPolicyAccessView):
"""SAML SSO Base View, which plans a flow and injects our final stage.
Calls get/post handler."""
@@ -88,7 +88,7 @@ class SAMLSSOView(PolicyAccessView):
def post(self, request: HttpRequest, application_slug: str) -> HttpResponse:
"""GET and POST use the same handler, but we can't
override .dispatch easily because PolicyAccessView's dispatch"""
override .dispatch easily because BufferedPolicyAccessView's dispatch"""
return self.get(request, application_slug)

View File

@@ -7,6 +7,7 @@ from django.db import transaction
from django.utils.http import urlencode
from orjson import dumps
from pydantic import ValidationError
from pydanticscim.group import GroupMember
from authentik.core.models import Group
from authentik.lib.merge import MERGE_LIST_UNIQUE
@@ -24,7 +25,6 @@ from authentik.providers.scim.clients.exceptions import (
)
from authentik.providers.scim.clients.schema import (
SCIM_GROUP_SCHEMA,
GroupMember,
PatchOp,
PatchOperation,
PatchRequest,
@@ -111,7 +111,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
raise exc
groups = self._request(
"GET",
f"/Groups?{urlencode({'filter': f'displayName eq "{group.name}"'})}",
f"/Groups?{urlencode({'filter': f'displayName eq \"{group.name}\"'})}",
)
groups_res = groups.get("Resources", [])
if len(groups_res) < 1:
@@ -321,12 +321,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
PatchOperation(
op=PatchOp.add,
path="members",
value=[
GroupMember(value=x).model_dump(
mode="json",
exclude_unset=True,
)
],
value=[{"value": x}],
)
for x in users_to_add
],
@@ -334,12 +329,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
PatchOperation(
op=PatchOp.remove,
path="members",
value=[
GroupMember(value=x).model_dump(
mode="json",
exclude_unset=True,
)
],
value=[{"value": x}],
)
for x in users_to_remove
],
@@ -362,12 +352,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
PatchOperation(
op=PatchOp.add,
path="members",
value=[
GroupMember(value=x).model_dump(
mode="json",
exclude_unset=True,
)
],
value=[{"value": x}],
)
for x in user_ids
],
@@ -390,12 +375,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
PatchOperation(
op=PatchOp.remove,
path="members",
value=[
GroupMember(value=x).model_dump(
mode="json",
exclude_unset=True,
)
],
value=[{"value": x}],
)
for x in user_ids
],

View File

@@ -7,7 +7,6 @@ from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from pydantic import AnyUrl, BaseModel, ConfigDict, Field, model_validator
from pydanticscim.group import Group as BaseGroup
from pydanticscim.group import GroupMember as BaseGroupMember
from pydanticscim.responses import PatchOperation as BasePatchOperation
from pydanticscim.responses import PatchRequest as BasePatchRequest
from pydanticscim.responses import SCIMError as BaseSCIMError
@@ -161,13 +160,6 @@ class Group(BaseGroup):
schemas: list[str] = [SCIM_GROUP_SCHEMA]
externalId: str | None = None
meta: dict | None = None
members: list[GroupMember] | None = Field(None, description="A list of members of the Group.")
class GroupMember(BaseGroupMember):
"""Modified GroupMember that allows extra fields"""
model_config = ConfigDict(extra="allow")
class Bulk(BaseBulk):

View File

@@ -339,9 +339,6 @@ class LoggingMiddleware:
def log(self, request: HttpRequest, status_code: int, runtime: int, **kwargs):
"""Log request"""
# Those are logged by the server above
if request.path in ("/-/metrics/", "/-/health/ready/"):
return
for header in self.headers_to_log:
header_value = request.headers.get(header)
if not header_value:

View File

@@ -1,21 +1,37 @@
"""Metrics view"""
from hmac import compare_digest
from pathlib import Path
from tempfile import gettempdir
from django.conf import settings
from django.db import connections
from django.db.utils import OperationalError
from django.dispatch import Signal
from django.http import HttpRequest, HttpResponse
from django.views import View
from django_prometheus.exports import ExportToDjangoView
monitoring_set = Signal()
class MetricsView(View):
"""View for metrics monitoring_set signal, accessed by the authentik router"""
"""Wrapper around ExportToDjangoView with authentication, accessed by the authentik router"""
def __init__(self, **kwargs):
_tmp = Path(gettempdir())
with open(_tmp / "authentik-core-metrics.key") as _f:
self.monitoring_key = _f.read()
def get(self, request: HttpRequest) -> HttpResponse:
"""Check for HTTP-Basic auth"""
auth_header = request.META.get("HTTP_AUTHORIZATION", "")
auth_type, _, given_credentials = auth_header.partition(" ")
authed = auth_type == "Bearer" and compare_digest(given_credentials, self.monitoring_key)
if not authed and not settings.DEBUG:
return HttpResponse(status=401)
monitoring_set.send_robust(self)
return HttpResponse(status=204)
return ExportToDjangoView(request)
class LiveView(View):

View File

@@ -186,7 +186,6 @@ SPECTACULAR_SETTINGS = {
"SAMLBindingsEnum": "authentik.providers.saml.models.SAMLBindings",
"UserTypeEnum": "authentik.core.models.UserTypes",
"UserVerificationEnum": "authentik.stages.authenticator_webauthn.models.UserVerification",
"WebAuthnHintEnum": "authentik.stages.authenticator_webauthn.models.WebAuthnHint",
"SCIMAuthenticationModeEnum": "authentik.providers.scim.models.SCIMAuthenticationMode",
"PKCEMethodEnum": "authentik.sources.oauth.models.PKCEMethod",
"DeviceFactsOSFamily": "authentik.endpoints.facts.OSFamily",
@@ -440,6 +439,8 @@ DRAMATIQ = {
("authentik.tasks.middleware.TaskLogMiddleware", {}),
("authentik.tasks.middleware.LoggingMiddleware", {}),
("authentik.tasks.middleware.DescriptionMiddleware", {}),
("authentik.tasks.middleware.WorkerHealthcheckMiddleware", {}),
("authentik.tasks.middleware.WorkerStatusMiddleware", {}),
(
"authentik.tasks.middleware.MetricsMiddleware",
{

View File

@@ -14,12 +14,12 @@ class TestRoot(TransactionTestCase):
def setUp(self):
_tmp = Path(gettempdir())
self.token = token_urlsafe(32)
with open(_tmp / "authentik-metrics-gunicorn.key", "w") as _f:
with open(_tmp / "authentik-core-metrics.key", "w") as _f:
_f.write(self.token)
def tearDown(self):
_tmp = Path(gettempdir())
(_tmp / "authentik-metrics-gunicorn.key").unlink()
(_tmp / "authentik-core-metrics.key").unlink()
def test_monitoring_error(self):
"""Test monitoring without any credentials"""

View File

@@ -36,7 +36,6 @@ class AuthenticatorValidateStageSerializer(StageSerializer):
"configuration_stages",
"last_auth_threshold",
"webauthn_user_verification",
"webauthn_hints",
"webauthn_allowed_device_types",
"webauthn_allowed_device_types_obj",
]

View File

@@ -80,10 +80,7 @@ def get_webauthn_challenge_without_user(
authentication_options.challenge
)
options_dict = options_to_json_dict(authentication_options)
if stage.webauthn_hints:
options_dict["hints"] = list(stage.webauthn_hints)
return options_dict
return options_to_json_dict(authentication_options)
def get_webauthn_challenge(
@@ -112,10 +109,7 @@ def get_webauthn_challenge(
authentication_options.challenge
)
options_dict = options_to_json_dict(authentication_options)
if stage.webauthn_hints:
options_dict["hints"] = list(stage.webauthn_hints)
return options_dict
return options_to_json_dict(authentication_options)
def select_challenge(request: HttpRequest, device: Device):

View File

@@ -1,33 +0,0 @@
# Generated by Django 5.2.11 on 2026-03-04 02:30
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"authentik_stages_authenticator_validate",
"0014_alter_authenticatorvalidatestage_device_classes",
),
]
operations = [
migrations.AddField(
model_name="authenticatorvalidatestage",
name="webauthn_hints",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(
choices=[
("security-key", "Security Key"),
("client-device", "Client Device"),
("hybrid", "Hybrid"),
]
),
blank=True,
default=list,
size=None,
),
),
]

View File

@@ -8,7 +8,7 @@ from rest_framework.serializers import BaseSerializer
from authentik.flows.models import NotConfiguredAction, Stage
from authentik.lib.utils.time import timedelta_string_validator
from authentik.stages.authenticator_webauthn.models import UserVerification, WebAuthnHint
from authentik.stages.authenticator_webauthn.models import UserVerification
class DeviceClasses(models.TextChoices):
@@ -73,11 +73,6 @@ class AuthenticatorValidateStage(Stage):
choices=UserVerification.choices,
default=UserVerification.PREFERRED,
)
webauthn_hints = ArrayField(
models.TextField(choices=WebAuthnHint.choices),
default=list,
blank=True,
)
webauthn_allowed_device_types = models.ManyToManyField(
"authentik_stages_authenticator_webauthn.WebAuthnDeviceType", blank=True
)

View File

@@ -28,7 +28,6 @@ from authentik.stages.authenticator_webauthn.models import (
UserVerification,
WebAuthnDevice,
WebAuthnDeviceType,
WebAuthnHint,
)
from authentik.stages.authenticator_webauthn.stage import PLAN_CONTEXT_WEBAUTHN_CHALLENGE
from authentik.stages.authenticator_webauthn.tasks import webauthn_mds_import
@@ -257,105 +256,6 @@ class AuthenticatorValidateStageWebAuthnTests(FlowTestCase):
self.assertEqual(challenge["timeout"], 60000)
self.assertEqual(challenge["userVerification"], "preferred")
def test_device_challenge_webauthn_with_hints(self):
"""Test that webauthn hints are included in authentication challenge"""
request = self.request_factory.get("/")
request.user = self.user
webauthn_device = WebAuthnDevice.objects.create(
user=self.user,
public_key=bytes_to_base64url(b"qwerqwerqre"),
credential_id=bytes_to_base64url(b"foobarbaz"),
sign_count=0,
rp_id=generate_id(),
)
stage = AuthenticatorValidateStage.objects.create(
name=generate_id(),
last_auth_threshold="milliseconds=0",
not_configured_action=NotConfiguredAction.CONFIGURE,
device_classes=[DeviceClasses.WEBAUTHN],
webauthn_user_verification=UserVerification.PREFERRED,
webauthn_hints=[WebAuthnHint.CLIENT_DEVICE, WebAuthnHint.HYBRID],
)
plan = FlowPlan("")
stage_view = AuthenticatorValidateStageView(
FlowExecutorView(flow=None, current_stage=stage, plan=plan), request=request
)
challenge = get_challenge_for_device(stage_view, stage, webauthn_device)
self.assertEqual(challenge["hints"], ["client-device", "hybrid"])
def test_device_challenge_webauthn_no_hints(self):
"""Test that hints key is absent when no hints configured"""
request = self.request_factory.get("/")
request.user = self.user
webauthn_device = WebAuthnDevice.objects.create(
user=self.user,
public_key=bytes_to_base64url(b"qwerqwerqre"),
credential_id=bytes_to_base64url(b"foobarbaz"),
sign_count=0,
rp_id=generate_id(),
)
stage = AuthenticatorValidateStage.objects.create(
name=generate_id(),
last_auth_threshold="milliseconds=0",
not_configured_action=NotConfiguredAction.CONFIGURE,
device_classes=[DeviceClasses.WEBAUTHN],
webauthn_user_verification=UserVerification.PREFERRED,
)
plan = FlowPlan("")
stage_view = AuthenticatorValidateStageView(
FlowExecutorView(flow=None, current_stage=stage, plan=plan), request=request
)
challenge = get_challenge_for_device(stage_view, stage, webauthn_device)
self.assertNotIn("hints", challenge)
def test_get_challenge_userless_with_hints(self):
"""Test that hints are included in userless/passwordless challenge"""
request = self.request_factory.get("/")
stage = AuthenticatorValidateStage.objects.create(
name=generate_id(),
webauthn_user_verification=UserVerification.PREFERRED,
webauthn_hints=[WebAuthnHint.SECURITY_KEY, WebAuthnHint.CLIENT_DEVICE],
)
plan = FlowPlan("")
stage_view = AuthenticatorValidateStageView(
FlowExecutorView(flow=None, current_stage=stage, plan=plan), request=request
)
challenge = get_webauthn_challenge_without_user(stage_view, stage)
self.assertEqual(challenge["hints"], ["security-key", "client-device"])
def test_device_challenge_webauthn_hints_order_preserved(self):
"""Test that hint order is preserved in authentication challenge"""
request = self.request_factory.get("/")
request.user = self.user
webauthn_device = WebAuthnDevice.objects.create(
user=self.user,
public_key=bytes_to_base64url(b"qwerqwerqre"),
credential_id=bytes_to_base64url(b"foobarbaz"),
sign_count=0,
rp_id=generate_id(),
)
stage = AuthenticatorValidateStage.objects.create(
name=generate_id(),
last_auth_threshold="milliseconds=0",
not_configured_action=NotConfiguredAction.CONFIGURE,
device_classes=[DeviceClasses.WEBAUTHN],
webauthn_user_verification=UserVerification.PREFERRED,
webauthn_hints=[
WebAuthnHint.HYBRID,
WebAuthnHint.SECURITY_KEY,
WebAuthnHint.CLIENT_DEVICE,
],
)
plan = FlowPlan("")
stage_view = AuthenticatorValidateStageView(
FlowExecutorView(flow=None, current_stage=stage, plan=plan), request=request
)
challenge = get_challenge_for_device(stage_view, stage, webauthn_device)
self.assertEqual(challenge["hints"], ["hybrid", "security-key", "client-device"])
def test_validate_challenge_unrestricted(self):
"""Test webauthn authentication (unrestricted webauthn device)"""
webauthn_mds_import.send(force=True).get_result()

View File

@@ -23,7 +23,6 @@ class AuthenticatorWebAuthnStageSerializer(StageSerializer):
"user_verification",
"authenticator_attachment",
"resident_key_requirement",
"hints",
"device_type_restrictions",
"device_type_restrictions_obj",
"max_attempts",
@@ -35,14 +34,6 @@ class AuthenticatorWebAuthnStageViewSet(UsedByMixin, ModelViewSet):
queryset = AuthenticatorWebAuthnStage.objects.all()
serializer_class = AuthenticatorWebAuthnStageSerializer
filterset_fields = [
"name",
"configure_flow",
"user_verification",
"authenticator_attachment",
"resident_key_requirement",
"device_type_restrictions",
"max_attempts",
]
filterset_fields = "__all__"
ordering = ["name"]
search_fields = ["name"]

View File

@@ -191,8 +191,5 @@
"name": "Sticky Password Manager",
"icon_dark": "data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9Im5vIj8+CjxzdmcKICAgdmlld0JveD0iMCAwIDEwOCAxMDgiCiAgIHdpZHRoPSIxMDgiCiAgIGhlaWdodD0iMTA4IgogICB2ZXJzaW9uPSIxLjEiCiAgIGlkPSJzdmc1IgogICB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciCiAgIHhtbG5zOnN2Zz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciPgogIDx0aXRsZT5TdGlja3kgUGFzc3dvcmQgTWFuYWdlcjwvdGl0bGU+CiAgPGRlZnMKICAgICBpZD0iZGVmczUiIC8+CiAgPGNpcmNsZQogICAgIHN0eWxlPSJmaWxsOiNmZmZmZmY7c3Ryb2tlLXdpZHRoOjEuNzYyMDEiCiAgICAgaWQ9InBhdGg2IgogICAgIGN4PSI1NCIKICAgICBjeT0iNTQiCiAgICAgcj0iNTQiIC8+CiAgPGcKICAgICBpZD0iZzYiCiAgICAgdHJhbnNmb3JtPSJtYXRyaXgoMS4zNDMzMjQzLDAsMCwxLjM0MzMyNDMsLTE4LjU0MDYwNywtMTguNTI3NTk0KSI+CiAgICA8cGF0aAogICAgICAgZD0ibSA2NC4zNSw1My4xMSAtOS41LC05LjUgYyAtMC40OCwtMC40OCAtMS4yNiwtMC40OCAtMS43NSwwIGwgLTkuNSw5LjUgYyAtMC40OCwwLjQ4IC0wLjQ4LDEuMjYgMCwxLjc0IGwgOS41LDkuNSBjIDAuMjQsMC4yNCAwLjU2LDAuMzYgMC44NywwLjM2IDAuMzEsMCAwLjYzLC0wLjEyIDAuODcsLTAuMzYgbCA5LjUsLTkuNSBjIDAuMjMsLTAuMjMgMC4zNiwtMC41NCAwLjM2LC0wLjg3IDAsLTAuMzMgLTAuMTMsLTAuNjQgLTAuMzYsLTAuODcgeiIKICAgICAgIGZpbGw9IiMwMDAwMDAiCiAgICAgICBmaWxsLXJ1bGU9ImV2ZW5vZGQiCiAgICAgICBzdHJva2U9IiMwMDAwMDAiCiAgICAgICBzdHJva2Utd2lkdGg9IjMuNjIiCiAgICAgICBzdHJva2Utb3BhY2l0eT0iMCIKICAgICAgIGlkPSJwYXRoMSIgLz4KICAgIDxwYXRoCiAgICAgICBkPSJtIDUwLjExLDY3LjM0IC05LjUsLTkuNSBjIC0wLjQ4LC0wLjQ4IC0xLjI2LC0wLjQ4IC0xLjc0LDAgbCAtOS40OSw5LjUgYyAtMC40OCwwLjQ4IC0wLjQ4LDEuMjYgMCwxLjc1IGwgOS40OSw5LjUgYyAwLjI0LDAuMjQgMC41NiwwLjM2IDAuODcsMC4zNiAwLjMxLDAgMC42MywtMC4xMiAwLjg3LC0wLjM2IGwgOS41LC05LjUgYyAwLjIzLC0wLjI0IDAuMzYsLTAuNTQgMC4zNiwtMC44NyAwLC0wLjMzIC0wLjEzLC0wLjY0IC0wLjM2LC0wLjg3IgogICAgICAgZmlsbD0iIzAwYTllMCIKICAgICAgIGZpbGwtcnVsZT0iZXZlbm9kZCIKICAgICAgIHN0cm9rZT0iIzAwMDAwMCIKICAgICAgIHN0cm9rZS13aWR0aD0iMy42MiIKICAgICAgIHN0cm9rZS1vcGFjaXR5PSIwIgogICAgICAgaWQ9InBhdGgyIiAvPgogICAgPHBhdGgKICAgICAgIGQ9Im0gNzguNTcsMzguODggLTkuNSwtOS40OSBjIC0wLjQ4LC0wLjQ4IC0xLjI3LC0wLjQ4IC0xLjc0LDAgbCAtOS41LDkuNSBjIC0wLjQ4LDAuNDggLTAuNDgsMS4yNiAwLDEuNzQgbCA5LjUsOS41IGMg
MC4yNCwwLjI0IDAuNTUsMC4zNiAwLjg3LDAuMzYgMC4zMSwwIDAuNjMsLTAuMTIgMC44NywtMC4zNiBsIDkuNSwtOS41IGMgMC4yMywtMC4yNCAwLjM2LC0wLjU0IDAuMzYsLTAuODcgMCwtMC4zMyAtMC4xMywtMC42NCAtMC4zNiwtMC44NyIKICAgICAgIGZpbGw9IiNkNjE4MTgiCiAgICAgICBmaWxsLXJ1bGU9ImV2ZW5vZGQiCiAgICAgICBzdHJva2U9IiMwMDAwMDAiCiAgICAgICBzdHJva2Utd2lkdGg9IjMuNjIiCiAgICAgICBzdHJva2Utb3BhY2l0eT0iMCIKICAgICAgIGlkPSJwYXRoMyIgLz4KICAgIDxwYXRoCiAgICAgICBkPSJtIDUwLjEsMzguODYgLTkuNSwtOS41IGMgLTAuNDgsLTAuNDggLTEuMjcsLTAuNDggLTEuNzQsMCBsIC05LjUsOS41IGMgLTAuNDgsMC40OCAtMC40OCwxLjI2IDAsMS43NSBsIDkuNSw5LjUgYyAwLjI0LDAuMjQgMC41NSwwLjM2IDAuODcsMC4zNiAwLjMyLDAgMC42MywtMC4xMiAwLjg3LC0wLjM2IGwgOS40OSwtOS41IGMgMC4yMywtMC4yMyAwLjM3LC0wLjU0IDAuMzcsLTAuODggMCwtMC4zMyAtMC4xMywtMC42NCAtMC4zNywtMC44NyIKICAgICAgIGZpbGw9IiM3YWI4MDAiCiAgICAgICBmaWxsLXJ1bGU9ImV2ZW5vZGQiCiAgICAgICBzdHJva2U9IiMwMDAwMDAiCiAgICAgICBzdHJva2Utd2lkdGg9IjMuNjIiCiAgICAgICBzdHJva2Utb3BhY2l0eT0iMCIKICAgICAgIGlkPSJwYXRoNCIgLz4KICAgIDxwYXRoCiAgICAgICBkPSJtIDc4LjY0LDY3LjM5IC05LjUsLTkuNDkgYyAtMC40OCwtMC40OCAtMS4yNywtMC40OCAtMS43NSwwIGwgLTkuNDksOS40OSBjIC0wLjQ4LDAuNDggLTAuNDgsMS4yNiAwLDEuNzUgbCA5LjQ5LDkuNSBjIDAuMjQsMC4yNCAwLjU1LDAuMzYgMC44NywwLjM2IDAuMzIsMCAwLjYzLC0wLjEyIDAuODcsLTAuMzYgbCA5LjUsLTkuNSBjIDAuMjMsLTAuMjMgMC4zNiwtMC41NCAwLjM2LC0wLjg3IDAsLTAuMzMgLTAuMTMsLTAuNjQgLTAuMzYsLTAuODgiCiAgICAgICBmaWxsPSIjMDA0NmFkIgogICAgICAgZmlsbC1ydWxlPSJldmVub2RkIgogICAgICAgc3Ryb2tlPSIjMDAwMDAwIgogICAgICAgc3Ryb2tlLXdpZHRoPSIzLjYyIgogICAgICAgc3Ryb2tlLW9wYWNpdHk9IjAiCiAgICAgICBpZD0icGF0aDUiIC8+CiAgPC9nPgo8L3N2Zz4K",
"icon_light": "data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9Im5vIj8+CjxzdmcKICAgdmlld0JveD0iMCAwIDEwOCAxMDgiCiAgIHdpZHRoPSIxMDgiCiAgIGhlaWdodD0iMTA4IgogICB2ZXJzaW9uPSIxLjEiCiAgIGlkPSJzdmc1IgogICB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciCiAgIHhtbG5zOnN2Zz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciPgogIDx0aXRsZT5TdGlja3kgUGFzc3dvcmQgTWFuYWdlcjwvdGl0bGU+CiAgPGRlZnMKICAgICBpZD0iZGVmczUiIC8+CiAgPGNpcmNsZQogICAgIHN0eWxlPSJmaWxsOiNmZmZmZmY7c3Ryb2tlLXdpZHRoOjEuNzYyMDEiCiAgICAgaWQ9InBhdGg2IgogICAgIGN4PSI1NCIKICAgICBjeT0iNTQiCiAgICAgcj0iNTQiIC8+CiAgPGcKICAgICBpZD0iZzYiCiAgICAgdHJhbnNmb3JtPSJtYXRyaXgoMS4zNDMzMjQzLDAsMCwxLjM0MzMyNDMsLTE4LjU0MDYwNywtMTguNTI3NTk0KSI+CiAgICA8cGF0aAogICAgICAgZD0ibSA2NC4zNSw1My4xMSAtOS41LC05LjUgYyAtMC40OCwtMC40OCAtMS4yNiwtMC40OCAtMS43NSwwIGwgLTkuNSw5LjUgYyAtMC40OCwwLjQ4IC0wLjQ4LDEuMjYgMCwxLjc0IGwgOS41LDkuNSBjIDAuMjQsMC4yNCAwLjU2LDAuMzYgMC44NywwLjM2IDAuMzEsMCAwLjYzLC0wLjEyIDAuODcsLTAuMzYgbCA5LjUsLTkuNSBjIDAuMjMsLTAuMjMgMC4zNiwtMC41NCAwLjM2LC0wLjg3IDAsLTAuMzMgLTAuMTMsLTAuNjQgLTAuMzYsLTAuODcgeiIKICAgICAgIGZpbGw9IiMwMDAwMDAiCiAgICAgICBmaWxsLXJ1bGU9ImV2ZW5vZGQiCiAgICAgICBzdHJva2U9IiMwMDAwMDAiCiAgICAgICBzdHJva2Utd2lkdGg9IjMuNjIiCiAgICAgICBzdHJva2Utb3BhY2l0eT0iMCIKICAgICAgIGlkPSJwYXRoMSIgLz4KICAgIDxwYXRoCiAgICAgICBkPSJtIDUwLjExLDY3LjM0IC05LjUsLTkuNSBjIC0wLjQ4LC0wLjQ4IC0xLjI2LC0wLjQ4IC0xLjc0LDAgbCAtOS40OSw5LjUgYyAtMC40OCwwLjQ4IC0wLjQ4LDEuMjYgMCwxLjc1IGwgOS40OSw5LjUgYyAwLjI0LDAuMjQgMC41NiwwLjM2IDAuODcsMC4zNiAwLjMxLDAgMC42MywtMC4xMiAwLjg3LC0wLjM2IGwgOS41LC05LjUgYyAwLjIzLC0wLjI0IDAuMzYsLTAuNTQgMC4zNiwtMC44NyAwLC0wLjMzIC0wLjEzLC0wLjY0IC0wLjM2LC0wLjg3IgogICAgICAgZmlsbD0iIzAwYTllMCIKICAgICAgIGZpbGwtcnVsZT0iZXZlbm9kZCIKICAgICAgIHN0cm9rZT0iIzAwMDAwMCIKICAgICAgIHN0cm9rZS13aWR0aD0iMy42MiIKICAgICAgIHN0cm9rZS1vcGFjaXR5PSIwIgogICAgICAgaWQ9InBhdGgyIiAvPgogICAgPHBhdGgKICAgICAgIGQ9Im0gNzguNTcsMzguODggLTkuNSwtOS40OSBjIC0wLjQ4LC0wLjQ4IC0xLjI3LC0wLjQ4IC0xLjc0LDAgbCAtOS41LDkuNSBjIC0wLjQ4LDAuNDggLTAuNDgsMS4yNiAwLDEuNzQgbCA5LjUsOS41IGM
gMC4yNCwwLjI0IDAuNTUsMC4zNiAwLjg3LDAuMzYgMC4zMSwwIDAuNjMsLTAuMTIgMC44NywtMC4zNiBsIDkuNSwtOS41IGMgMC4yMywtMC4yNCAwLjM2LC0wLjU0IDAuMzYsLTAuODcgMCwtMC4zMyAtMC4xMywtMC42NCAtMC4zNiwtMC44NyIKICAgICAgIGZpbGw9IiNkNjE4MTgiCiAgICAgICBmaWxsLXJ1bGU9ImV2ZW5vZGQiCiAgICAgICBzdHJva2U9IiMwMDAwMDAiCiAgICAgICBzdHJva2Utd2lkdGg9IjMuNjIiCiAgICAgICBzdHJva2Utb3BhY2l0eT0iMCIKICAgICAgIGlkPSJwYXRoMyIgLz4KICAgIDxwYXRoCiAgICAgICBkPSJtIDUwLjEsMzguODYgLTkuNSwtOS41IGMgLTAuNDgsLTAuNDggLTEuMjcsLTAuNDggLTEuNzQsMCBsIC05LjUsOS41IGMgLTAuNDgsMC40OCAtMC40OCwxLjI2IDAsMS43NSBsIDkuNSw5LjUgYyAwLjI0LDAuMjQgMC41NSwwLjM2IDAuODcsMC4zNiAwLjMyLDAgMC42MywtMC4xMiAwLjg3LC0wLjM2IGwgOS40OSwtOS41IGMgMC4yMywtMC4yMyAwLjM3LC0wLjU0IDAuMzcsLTAuODggMCwtMC4zMyAtMC4xMywtMC42NCAtMC4zNywtMC44NyIKICAgICAgIGZpbGw9IiM3YWI4MDAiCiAgICAgICBmaWxsLXJ1bGU9ImV2ZW5vZGQiCiAgICAgICBzdHJva2U9IiMwMDAwMDAiCiAgICAgICBzdHJva2Utd2lkdGg9IjMuNjIiCiAgICAgICBzdHJva2Utb3BhY2l0eT0iMCIKICAgICAgIGlkPSJwYXRoNCIgLz4KICAgIDxwYXRoCiAgICAgICBkPSJtIDc4LjY0LDY3LjM5IC05LjUsLTkuNDkgYyAtMC40OCwtMC40OCAtMS4yNywtMC40OCAtMS43NSwwIGwgLTkuNDksOS40OSBjIC0wLjQ4LDAuNDggLTAuNDgsMS4yNiAwLDEuNzUgbCA5LjQ5LDkuNSBjIDAuMjQsMC4yNCAwLjU1LDAuMzYgMC44NywwLjM2IDAuMzIsMCAwLjYzLC0wLjEyIDAuODcsLTAuMzYgbCA5LjUsLTkuNSBjIDAuMjMsLTAuMjMgMC4zNiwtMC41NCAwLjM2LC0wLjg3IDAsLTAuMzMgLTAuMTMsLTAuNjQgLTAuMzYsLTAuODgiCiAgICAgICBmaWxsPSIjMDA0NmFkIgogICAgICAgZmlsbC1ydWxlPSJldmVub2RkIgogICAgICAgc3Ryb2tlPSIjMDAwMDAwIgogICAgICAgc3Ryb2tlLXdpZHRoPSIzLjYyIgogICAgICAgc3Ryb2tlLW9wYWNpdHk9IjAiCiAgICAgICBpZD0icGF0aDUiIC8+CiAgPC9nPgo8L3N2Zz4K"
},
"70617373-7761-6c6c-6669-646f32303236": {
"name": "Passwall"
}
}

File diff suppressed because one or more lines are too long

View File

@@ -1,33 +0,0 @@
# Generated by Django 5.2.11 on 2026-03-04 02:30
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"authentik_stages_authenticator_webauthn",
"0014_alter_authenticatorwebauthnstage_friendly_name",
),
]
operations = [
migrations.AddField(
model_name="authenticatorwebauthnstage",
name="hints",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.TextField(
choices=[
("security-key", "Security Key"),
("client-device", "Client Device"),
("hybrid", "Hybrid"),
]
),
blank=True,
default=list,
size=None,
),
),
]

View File

@@ -1,7 +1,6 @@
"""WebAuthn stage"""
from django.contrib.auth import get_user_model
from django.contrib.postgres.fields.array import ArrayField
from django.db import models
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
@@ -68,24 +67,6 @@ class AuthenticatorAttachment(models.TextChoices):
CROSS_PLATFORM = "cross-platform"
class WebAuthnHint(models.TextChoices):
"""Hints to guide the browser in prioritizing the preferred authenticator during
WebAuthn registration and authentication. Unlike authenticatorAttachment, hints are
advisory and browsers may ignore them.
Members:
`SECURITY_KEY`: A portable FIDO2 authenticator, like a YubiKey
`CLIENT_DEVICE`: The device WebAuthn is being called on, like TouchID or Windows Hello
`HYBRID`: A platform authenticator on a mobile device, accessed via QR code
https://w3c.github.io/webauthn/#enumdef-publickeycredentialhint
"""
SECURITY_KEY = "security-key"
CLIENT_DEVICE = "client-device"
HYBRID = "hybrid"
class AuthenticatorWebAuthnStage(ConfigurableStage, FriendlyNamedStage, Stage):
"""Setup WebAuthn-based authentication for the user."""
@@ -101,12 +82,6 @@ class AuthenticatorWebAuthnStage(ConfigurableStage, FriendlyNamedStage, Stage):
choices=AuthenticatorAttachment.choices, default=None, null=True
)
hints = ArrayField(
models.TextField(choices=WebAuthnHint.choices),
default=list,
blank=True,
)
device_type_restrictions = models.ManyToManyField("WebAuthnDeviceType", blank=True)
max_attempts = models.PositiveIntegerField(default=0)

View File

@@ -16,7 +16,6 @@ from webauthn.helpers.structs import (
AuthenticatorAttachment,
AuthenticatorSelectionCriteria,
PublicKeyCredentialCreationOptions,
PublicKeyCredentialHint,
ResidentKeyRequirement,
UserVerificationRequirement,
)
@@ -128,20 +127,6 @@ class AuthenticatorWebAuthnStageView(ChallengeStageView):
if authenticator_attachment:
authenticator_attachment = AuthenticatorAttachment(str(authenticator_attachment))
hints = [PublicKeyCredentialHint(h) for h in stage.hints] or None
# For compatibility with older user agents that don't support hints,
# auto-infer authenticatorAttachment from hints when not explicitly set.
# https://w3c.github.io/webauthn/#enum-hints
if hints and not authenticator_attachment:
hint_values = set(stage.hints)
cross_platform = {"security-key", "hybrid"}
platform = {"client-device"}
if hint_values <= cross_platform:
authenticator_attachment = AuthenticatorAttachment.CROSS_PLATFORM
elif hint_values <= platform:
authenticator_attachment = AuthenticatorAttachment.PLATFORM
registration_options: PublicKeyCredentialCreationOptions = generate_registration_options(
rp_id=get_rp_id(self.request),
rp_name=self.request.brand.branding_title,
@@ -154,7 +139,6 @@ class AuthenticatorWebAuthnStageView(ChallengeStageView):
authenticator_attachment=authenticator_attachment,
),
attestation=AttestationConveyancePreference.DIRECT,
hints=hints,
)
self.executor.plan.context[PLAN_CONTEXT_WEBAUTHN_CHALLENGE] = registration_options.challenge

View File

@@ -17,7 +17,6 @@ from authentik.stages.authenticator_webauthn.models import (
AuthenticatorWebAuthnStage,
WebAuthnDevice,
WebAuthnDeviceType,
WebAuthnHint,
)
from authentik.stages.authenticator_webauthn.stage import PLAN_CONTEXT_WEBAUTHN_CHALLENGE
from authentik.stages.authenticator_webauthn.tasks import webauthn_mds_import
@@ -303,145 +302,6 @@ class TestAuthenticatorWebAuthnStage(FlowTestCase):
self.assertStageRedirects(response, reverse("authentik_core:root-redirect"))
self.assertTrue(WebAuthnDevice.objects.filter(user=self.user).exists())
def test_registration_options_with_hints(self):
"""Test that hints are included in registration options"""
self.stage.hints = [WebAuthnHint.CLIENT_DEVICE, WebAuthnHint.SECURITY_KEY]
self.stage.save()
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
session = self.client.session
session[SESSION_KEY_PLAN] = plan
session.save()
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
)
self.assertEqual(response.status_code, 200)
registration = response.json()["registration"]
self.assertEqual(registration["hints"], ["client-device", "security-key"])
def test_registration_options_hints_empty(self):
"""Test that no hints key is present when hints are empty"""
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
session = self.client.session
session[SESSION_KEY_PLAN] = plan
session.save()
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
)
self.assertEqual(response.status_code, 200)
registration = response.json()["registration"]
self.assertNotIn("hints", registration)
def test_registration_options_hints_infer_attachment_cross_platform(self):
"""Test that authenticatorAttachment is auto-inferred as cross-platform
from security-key/hybrid hints for backwards compatibility"""
self.stage.hints = [WebAuthnHint.SECURITY_KEY]
self.stage.authenticator_attachment = None
self.stage.save()
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
session = self.client.session
session[SESSION_KEY_PLAN] = plan
session.save()
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
)
self.assertEqual(response.status_code, 200)
registration = response.json()["registration"]
self.assertEqual(
registration["authenticatorSelection"]["authenticatorAttachment"], "cross-platform"
)
def test_registration_options_hints_infer_attachment_platform(self):
"""Test that authenticatorAttachment is auto-inferred as platform
from client-device hint for backwards compatibility"""
self.stage.hints = [WebAuthnHint.CLIENT_DEVICE]
self.stage.authenticator_attachment = None
self.stage.save()
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
session = self.client.session
session[SESSION_KEY_PLAN] = plan
session.save()
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
)
self.assertEqual(response.status_code, 200)
registration = response.json()["registration"]
self.assertEqual(
registration["authenticatorSelection"]["authenticatorAttachment"], "platform"
)
def test_registration_options_hints_no_infer_when_attachment_set(self):
"""Test that authenticatorAttachment is NOT overridden when explicitly set"""
self.stage.hints = [WebAuthnHint.SECURITY_KEY]
self.stage.authenticator_attachment = "platform"
self.stage.save()
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
session = self.client.session
session[SESSION_KEY_PLAN] = plan
session.save()
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
)
self.assertEqual(response.status_code, 200)
registration = response.json()["registration"]
self.assertEqual(
registration["authenticatorSelection"]["authenticatorAttachment"], "platform"
)
def test_registration_options_hints_no_infer_mixed(self):
"""Test that authenticatorAttachment is NOT inferred when hints are mixed"""
self.stage.hints = [WebAuthnHint.SECURITY_KEY, WebAuthnHint.CLIENT_DEVICE]
self.stage.authenticator_attachment = None
self.stage.save()
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
session = self.client.session
session[SESSION_KEY_PLAN] = plan
session.save()
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
)
self.assertEqual(response.status_code, 200)
registration = response.json()["registration"]
self.assertNotIn("authenticatorAttachment", registration["authenticatorSelection"])
def test_registration_options_hints_order_preserved(self):
"""Test that hint order is preserved (first hint = highest priority)"""
self.stage.hints = [
WebAuthnHint.HYBRID,
WebAuthnHint.CLIENT_DEVICE,
WebAuthnHint.SECURITY_KEY,
]
self.stage.save()
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
session = self.client.session
session[SESSION_KEY_PLAN] = plan
session.save()
response = self.client.get(
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
)
self.assertEqual(response.status_code, 200)
registration = response.json()["registration"]
self.assertEqual(registration["hints"], ["hybrid", "client-device", "security-key"])
def test_register_max_retries(self):
"""Test registration (exceeding max retries)"""
self.stage.max_attempts = 2

View File

@@ -1,6 +1,5 @@
from datetime import timedelta
from django.utils.timezone import now
import pglock
from django.utils.timezone import now, timedelta
from drf_spectacular.utils import extend_schema, inline_serializer
from packaging.version import parse
from rest_framework.fields import BooleanField, CharField
@@ -32,13 +31,18 @@ class WorkerView(APIView):
def get(self, request: Request) -> Response:
response = []
our_version = parse(authentik_full_version())
for status in WorkerStatus.objects.filter(last_seen__gt=now() - timedelta(seconds=45)):
version_matching = parse(status.version) == our_version
response.append(
{
"worker_id": f"{status.pk}@{status.hostname}",
"version": status.version,
"version_matching": version_matching,
}
)
for status in WorkerStatus.objects.filter(last_seen__gt=now() - timedelta(minutes=2)):
lock_id = f"goauthentik.io/worker/status/{status.pk}"
with pglock.advisory(lock_id, timeout=0, side_effect=pglock.Return) as acquired:
# The worker doesn't hold the lock, it isn't running
if acquired:
continue
version_matching = parse(status.version) == our_version
response.append(
{
"worker_id": f"{status.pk}@{status.hostname}",
"version": status.version,
"version_matching": version_matching,
}
)
return Response(response)

View File

@@ -1,23 +1,42 @@
import socket
from collections.abc import Callable
from http.server import BaseHTTPRequestHandler
from threading import Event as TEvent
from threading import Thread, current_thread
from typing import Any, cast
from django.db import OperationalError
import pglock
from django.db import OperationalError, connections, transaction
from django.utils.timezone import now
from django_dramatiq_postgres.middleware import (
CurrentTask as BaseCurrentTask,
)
from django_dramatiq_postgres.middleware import (
HTTPServer,
HTTPServerThread,
)
from django_dramatiq_postgres.middleware import (
MetricsMiddleware as BaseMetricsMiddleware,
)
from django_dramatiq_postgres.middleware import (
_MetricsHandler as BaseMetricsHandler,
)
from dramatiq import Worker
from dramatiq.broker import Broker
from dramatiq.message import Message
from dramatiq.middleware import Middleware
from psycopg.errors import Error
from setproctitle import setthreadtitle
from structlog.stdlib import get_logger
from authentik import authentik_full_version
from authentik.events.models import Event, EventAction
from authentik.lib.config import CONFIG
from authentik.lib.sentry import should_ignore_exception
from authentik.lib.utils.reflection import class_to_path
from authentik.root.monitoring import monitoring_set
from authentik.root.signals import post_startup, pre_startup, startup
from authentik.tasks.models import Task, TaskLog, TaskStatus
from authentik.tasks.models import Task, TaskLog, TaskStatus, WorkerStatus
from authentik.tenants.models import Tenant
from authentik.tenants.utils import get_current_tenant
@@ -174,15 +193,148 @@ class DescriptionMiddleware(Middleware):
return {"description"}
class _healthcheck_handler(BaseHTTPRequestHandler):
def log_request(self, code="-", size="-"):
HEALTHCHECK_LOGGER.info(
self.path,
method=self.command,
status=code,
)
def log_error(self, format, *args):
HEALTHCHECK_LOGGER.warning(format, *args)
def do_HEAD(self):
try:
for db_conn in connections.all():
# Force connection reload
db_conn.connect()
_ = db_conn.cursor()
self.send_response(200)
except DB_ERRORS: # pragma: no cover
self.send_response(503)
self.send_header("Content-Type", "text/plain; charset=utf-8")
self.send_header("Content-Length", "0")
self.end_headers()
do_GET = do_HEAD
class WorkerHealthcheckMiddleware(Middleware):
thread: HTTPServerThread | None
def __init__(self):
host, _, port = CONFIG.get("listen.http").rpartition(":")
try:
port = int(port)
except ValueError:
LOGGER.error(f"Invalid port entered: {port}")
self.host, self.port = host, port
def after_worker_boot(self, broker: Broker, worker: Worker):
self.thread = HTTPServerThread(
target=WorkerHealthcheckMiddleware.run, args=(self.host, self.port)
)
self.thread.start()
def before_worker_shutdown(self, broker: Broker, worker: Worker):
server = self.thread.server
if server:
server.shutdown()
LOGGER.debug("Stopping WorkerHealthcheckMiddleware")
self.thread.join()
@staticmethod
def run(addr: str, port: int):
setthreadtitle("authentik Worker Healthcheck server")
try:
server = HTTPServer((addr, port), _healthcheck_handler)
thread = cast(HTTPServerThread, current_thread())
thread.server = server
server.serve_forever()
except OSError as exc:
get_logger(__name__, type(WorkerHealthcheckMiddleware)).warning(
"Port is already in use, not starting healthcheck server",
exc=exc,
)
class WorkerStatusMiddleware(Middleware):
thread: Thread | None
thread_event: TEvent | None
def after_worker_boot(self, broker: Broker, worker: Worker):
self.thread_event = TEvent()
self.thread = Thread(target=WorkerStatusMiddleware.run, args=(self.thread_event,))
self.thread.start()
def before_worker_shutdown(self, broker: Broker, worker: Worker):
self.thread_event.set()
LOGGER.debug("Stopping WorkerStatusMiddleware")
self.thread.join()
@staticmethod
def run(event: TEvent):
setthreadtitle("authentik Worker status")
with transaction.atomic():
hostname = socket.gethostname()
WorkerStatus.objects.filter(hostname=hostname).delete()
status, _ = WorkerStatus.objects.update_or_create(
hostname=hostname,
version=authentik_full_version(),
)
while not event.is_set():
try:
WorkerStatusMiddleware.keep(event, status)
except DB_ERRORS: # pragma: no cover
event.wait(10)
try:
connections.close_all()
except DB_ERRORS:
pass
@staticmethod
def keep(event: TEvent, status: WorkerStatus):
lock_id = f"goauthentik.io/worker/status/{status.pk}"
with pglock.advisory(lock_id, side_effect=pglock.Raise):
while not event.is_set():
status.refresh_from_db()
old_last_seen = status.last_seen
status.last_seen = now()
if old_last_seen != status.last_seen:
status.save(update_fields=("last_seen",))
event.wait(30)
class _MetricsHandler(BaseMetricsHandler):
def do_GET(self) -> None:
monitoring_set.send_robust(self)
return super().do_GET()
class MetricsMiddleware(BaseMetricsMiddleware):
thread: HTTPServerThread | None
handler_class = _MetricsHandler
@property
def forks(self):
def forks(self) -> list[Callable[[], None]]:
return []
def before_worker_boot(self, broker: Broker, worker: Any) -> None:
from prometheus_client import values
from prometheus_client.values import MultiProcessValue
def after_worker_boot(self, broker: Broker, worker: Worker):
addr, _, port = CONFIG.get("listen.metrics").rpartition(":")
values.ValueClass = MultiProcessValue(lambda: worker.worker_id)
try:
port = int(port)
except ValueError:
LOGGER.error(f"Invalid port entered: {port}")
self.thread = HTTPServerThread(target=MetricsMiddleware.run, args=(addr, port))
self.thread.start()
return super().before_worker_boot(broker, worker)
def before_worker_shutdown(self, broker: Broker, worker: Worker):
server = self.thread.server
if server:
server.shutdown()
LOGGER.debug("Stopping MetricsMiddleware")
self.thread.join()

View File

@@ -1,6 +1,4 @@
from datetime import timedelta
from django.utils.timezone import now
from django.utils.timezone import now, timedelta
from django.utils.translation import gettext_lazy as _
from dramatiq import actor

View File

@@ -10,6 +10,7 @@ from dramatiq.results.middleware import Results
from dramatiq.worker import Worker, _ConsumerThread, _WorkerThread
from authentik.tasks.broker import PostgresBroker
from authentik.tasks.middleware import WorkerHealthcheckMiddleware
TESTING_QUEUE = "testing"
@@ -17,7 +18,6 @@ TESTING_QUEUE = "testing"
class TestWorker(Worker):
def __init__(self, broker: Broker):
super().__init__(broker=broker)
self.worker_id = 1000
self.work_queue = PriorityQueue()
self.consumers = {
TESTING_QUEUE: _ConsumerThread(
@@ -82,6 +82,8 @@ def use_test_broker():
middleware: Middleware = import_string(middleware_class)(
**middleware_kwargs,
)
if isinstance(middleware, WorkerHealthcheckMiddleware):
middleware.port = 9102
if isinstance(middleware, Retries):
middleware.max_retries = 0
if isinstance(middleware, Results):

View File

@@ -5,5 +5,5 @@ from authentik.tasks.api.workers import WorkerView
api_urlpatterns = [
("tasks/tasks", TaskViewSet),
path("tasks/workers/", WorkerView.as_view(), name="tasks_workers"),
path("tasks/workers", WorkerView.as_view(), name="tasks_workers"),
]

View File

@@ -8236,12 +8236,6 @@
"type": "string",
"title": "Webhook url"
},
"webhook_ca": {
"type": "string",
"format": "uuid",
"title": "Webhook ca",
"description": "When set, the selected ceritifcate is used to validate the certificate of the webhook server."
},
"webhook_mapping_body": {
"type": "string",
"format": "uuid",
@@ -14734,19 +14728,6 @@
"title": "Webauthn user verification",
"description": "Enforce user verification for WebAuthn devices."
},
"webauthn_hints": {
"type": "array",
"items": {
"type": "string",
"enum": [
"security-key",
"client-device",
"hybrid"
],
"title": "Webauthn hints"
},
"title": "Webauthn hints"
},
"webauthn_allowed_device_types": {
"type": "array",
"items": {
@@ -14832,19 +14813,6 @@
],
"title": "Resident key requirement"
},
"hints": {
"type": "array",
"items": {
"type": "string",
"enum": [
"security-key",
"client-device",
"hybrid"
],
"title": "Hints"
},
"title": "Hints"
},
"device_type_restrictions": {
"type": "array",
"items": {

View File

@@ -1,9 +1,7 @@
package main
import (
"context"
"fmt"
"net"
"net/http"
"os"
"path"
@@ -14,8 +12,7 @@ import (
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"goauthentik.io/internal/config"
utils "goauthentik.io/internal/utils/web"
"goauthentik.io/internal/web"
"goauthentik.io/internal/utils/web"
)
var workerPidFile = path.Join(os.TempDir(), "authentik-worker.pid")
@@ -47,15 +44,9 @@ func init() {
func checkServer() int {
h := &http.Client{
Transport: utils.NewUserAgentTransport("goauthentik.io/healthcheck",
&http.Transport{
DialContext: func(_ context.Context, _, _ string) (net.Conn, error) {
return net.Dial("unix", path.Join(os.TempDir(), web.SocketName))
},
},
),
Transport: web.NewUserAgentTransport("goauthentik.io/healthcheck", http.DefaultTransport),
}
url := fmt.Sprintf("http://localhost%s-/health/live/", config.Get().Web.Path)
url := fmt.Sprintf("http://%s%s-/health/live/", config.Get().Listen.HTTP, config.Get().Web.Path)
res, err := h.Head(url)
if err != nil {
log.WithError(err).Warning("failed to send healthcheck request")

View File

@@ -4,7 +4,6 @@ import (
"fmt"
"net/http"
"net/url"
"os"
"time"
"github.com/getsentry/sentry-go"
@@ -52,10 +51,9 @@ var rootCmd = &cobra.Command{
ex := common.Init()
defer common.Defer()
u := url.URL{
Scheme: "unix",
Host: fmt.Sprintf("%s/%s", os.TempDir(), web.SocketName),
Path: config.Get().Web.Path,
u, err := url.Parse(fmt.Sprintf("http://%s%s", config.Get().Listen.HTTP, config.Get().Web.Path))
if err != nil {
panic(err)
}
ws := web.NewWebServer()
@@ -72,13 +70,13 @@ var rootCmd = &cobra.Command{
},
}
func attemptProxyStart(ws *web.WebServer, u url.URL) {
func attemptProxyStart(ws *web.WebServer, u *url.URL) {
maxTries := 100
attempt := 0
l := log.WithField("logger", "authentik.server")
for {
l.Debug("attempting to init outpost")
ac := ak.NewAPIController(u, config.Get().SecretKey)
ac := ak.NewAPIController(*u, config.Get().SecretKey)
if ac == nil {
attempt += 1
time.Sleep(1 * time.Second)

View File

@@ -12,12 +12,7 @@
},
"reporters": [
"default",
[
"@cspell/cspell-json-reporter",
{
"outFile": "./cspell-report.json"
}
]
["@cspell/cspell-json-reporter", { "outFile": "./cspell-report.json" }]
],
"dictionaryDefinitions": [
{
@@ -37,16 +32,12 @@
"path": "./locale/en/dictionaries/python.txt",
"addWords": true
},
{
"name": "en-x-authentik-rust",
"path": "./locale/en/dictionaries/rust.txt",
"addWords": true
},
{
"name": "en-x-authentik-golang",
"path": "./locale/en/dictionaries/golang.txt",
"addWords": true
},
{
"name": "en-x-authentik-people",
"path": "./locale/en/dictionaries/people.txt",
@@ -90,10 +81,7 @@
{
"name": "ConfSuffix",
"description": "Variables with `conf` or `config` suffix",
"pattern": [
"\\w+(conf|config)\\b",
"\\b(conf|config)\\w+"
]
"pattern": ["\\w+(conf|config)\\b", "\\b(conf|config)\\w+"]
}
],
"ignoreRegExpList": [
@@ -131,6 +119,7 @@
"\\w+l?ified\\b",
// "ifying" suffix, e.g. "stringifying", "classifying".
"\\w+l?ifying\\b",
"SpellCheckerIgnoreInDocSetting",
"EncodedURI",
"Urls",
@@ -146,11 +135,7 @@
"languageSettings": [
{
"languageId": "markdown,mdx",
"dictionaries": [
"en-x-authentik-python",
"en-x-authentik-rust",
"en-x-authentik-golang"
],
"dictionaries": ["en-x-authentik-python", "en-x-authentik-golang"],
"ignoreRegExpList": [
// Fenced code blocks
"/^\\s*```[\\s\\S]*?^\\s*```/gm",
@@ -161,6 +146,7 @@
},
{
"languageId": "typescript,javascript,typescriptreact,javascriptreact,mdx,astro",
"ignoreRegExpList": [
// Event handlers e.g. onClick, onmouseover
"\\bon\\w+\\b",
@@ -180,33 +166,18 @@
},
{
"languageId": "python",
"dictionaries": [
"en-x-authentik-python"
],
"includeRegExpList": [
"comments"
]
},
{
"languageId": "rust",
"dictionaries": [
"en-x-authentik-rust"
]
"dictionaries": ["en-x-authentik-python"],
"includeRegExpList": ["comments"]
},
{
"languageId": "go",
"dictionaries": [
"en-x-authentik-golang"
]
"dictionaries": ["en-x-authentik-golang"]
},
{
"languageId": "makefile,toml,yaml",
"dictionaries": [
"en-x-authentik-python",
"en-x-authentik-rust",
"en-x-authentik-golang"
]
"languageId": "makefile",
"dictionaries": ["en-x-authentik-python", "en-x-authentik-golang"]
},
{
"languageId": "css,scss",
"ignoreRegExpList": [
@@ -217,15 +188,20 @@
],
"ignorePaths": [
//#region i18n
"{cspell.*,cSpell.*,.cspell.*,cspell.config.*}", // CSpell configuration files
"cspell-report.{json,html,txt}", // CSpell report files
"dictionaries", // Custom dictionary files
"ignore.txt", // Custom ignore list files
"./locale", // Locale files (Django, CSpell)
"web/xliff", // XLIFF translation files
"web/src/locales", // Generated TypeScript locale
//#endregion
//#region Monorepo
"CODEOWNERS", // GitHub code owners file
"LICENSE", // License file
".gitignore", // Git ignore file
@@ -248,7 +224,9 @@
"fixtures", // Test fixtures
"tests/e2e/**/*.php", // PHP fixtures
"compose.yml", // Docker Compose files
//#region JavaScript/TypeScript
".eslintignore", // ESLint ignore file
".prettierignore", // Prettier ignore file
".yarn", // Yarn cache and configuration
@@ -261,6 +239,7 @@
"*.min.{js,css}", // Minified JS and CSS files
"*.min.{js,css}.map", // Source maps for minified files
//#region Python
"pyproject.toml",
"unittest.xml", // Pytest output
".venv", // Python virtual environment
@@ -269,25 +248,37 @@
"blueprints",
"mds",
//#endregion
//#region Rust
"./target", // Rust compilation artifacts
//#endregion
//#region Docusaurus
"*.api.mdx", // Generated API docs
".docusaurus/**", // Cache
"./{docs,website}/build", // Topic docs build output
"./{docs,website}/**/build", // Workspaces output
//#endregion
//#region Golang
"go.mod", // Go module file
"go.sum", // Go module file
"htmlcov", // Coverage HTML output
"coverage.txt", // Coverage text output
//#endregion
//#region Media
"./data", // Media files
"./media", // Legacy media files
"*.{png,jpg,pdf,svg}" // Binary files
//#endregion
],
"useGitignore": true,

6
go.mod
View File

@@ -9,7 +9,7 @@ require (
github.com/coreos/go-oidc/v3 v3.17.0
github.com/getsentry/sentry-go v0.43.0
github.com/go-http-utils/etag v0.0.0-20161124023236-513ea8f21eb1
github.com/go-ldap/ldap/v3 v3.4.13
github.com/go-ldap/ldap/v3 v3.4.12
github.com/go-openapi/runtime v0.29.3
github.com/golang-jwt/jwt/v5 v5.3.1
github.com/google/uuid v1.6.0
@@ -30,7 +30,7 @@ require (
github.com/spf13/cobra v1.10.2
github.com/stretchr/testify v1.11.1
github.com/wwt/guac v1.3.2
goauthentik.io/api/v3 v3.2026020.17-0.20260317190750-6ec0d12b221b
goauthentik.io/api/v3 v3.2026020.17-0.20260309103029-7c71e7d5673a
golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab
golang.org/x/oauth2 v0.36.0
golang.org/x/sync v0.20.0
@@ -41,7 +41,7 @@ require (
)
require (
github.com/Azure/go-ntlmssp v0.1.0 // indirect
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect

14
go.sum
View File

@@ -2,8 +2,8 @@ beryju.io/ldap v0.1.0 h1:rPjGE3qR1Klbvn9N+iECWdzt/tK87XHgz8W5wZJg9B8=
beryju.io/ldap v0.1.0/go.mod h1:sOrYV+ZlDTDu/IvIiEiuAaXzjcpMBE+XXr4V+NJ0pWI=
beryju.io/radius-eap v0.1.0 h1:5M3HwkzH3nIEBcKDA2z5+sb4nCY3WdKL/SDDKTBvoqw=
beryju.io/radius-eap v0.1.0/go.mod h1:yYtO59iyoLNEepdyp1gZ0i1tGdjPbrR2M/v5yOz7Fkc=
github.com/Azure/go-ntlmssp v0.1.0 h1:DjFo6YtWzNqNvQdrwEyr/e4nhU3vRiwenz5QX7sFz+A=
github.com/Azure/go-ntlmssp v0.1.0/go.mod h1:NYqdhxd/8aAct/s4qSYZEerdPuH1liG2/X9DiVTbhpk=
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 h1:mFRzDkZVAjdal+s7s0MwaRv9igoPqLRdzOLzw/8Xvq8=
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU=
github.com/alexbrainman/sspi v0.0.0-20250919150558-7d374ff0d59e h1:4dAU9FXIyQktpoUAgOJK3OTFc/xug0PCXYCqU0FgDKI=
github.com/alexbrainman/sspi v0.0.0-20250919150558-7d374ff0d59e/go.mod h1:cEWa1LVoE5KvSD9ONXsZrj0z6KqySlCCNKHlLzbqAt4=
github.com/avast/retry-go/v4 v4.7.0 h1:yjDs35SlGvKwRNSykujfjdMxMhMQQM0TnIjJaHB+Zio=
@@ -34,8 +34,8 @@ github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a h1:v6zMvHuY9
github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a/go.mod h1:I79BieaU4fxrw4LMXby6q5OS9XnoR9UIKLOzDFjUmuw=
github.com/go-jose/go-jose/v4 v4.1.3 h1:CVLmWDhDVRa6Mi/IgCgaopNosCaHz7zrMeF9MlZRkrs=
github.com/go-jose/go-jose/v4 v4.1.3/go.mod h1:x4oUasVrzR7071A4TnHLGSPpNOm2a21K9Kf04k1rs08=
github.com/go-ldap/ldap/v3 v3.4.13 h1:+x1nG9h+MZN7h/lUi5Q3UZ0fJ1GyDQYbPvbuH38baDQ=
github.com/go-ldap/ldap/v3 v3.4.13/go.mod h1:LxsGZV6vbaK0sIvYfsv47rfh4ca0JXokCoKjZxsszv0=
github.com/go-ldap/ldap/v3 v3.4.12 h1:1b81mv7MagXZ7+1r7cLTWmyuTqVqdwbtJSjC0DAp9s4=
github.com/go-ldap/ldap/v3 v3.4.12/go.mod h1:+SPAGcTtOfmGsCb3h1RFiq4xpp4N636G75OEace8lNo=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
@@ -213,8 +213,10 @@ go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
goauthentik.io/api/v3 v3.2026020.17-0.20260317190750-6ec0d12b221b h1:p+iDEXjvC15pC1VscaR59Vud9/c/xeNeTFmlv4arkNI=
goauthentik.io/api/v3 v3.2026020.17-0.20260317190750-6ec0d12b221b/go.mod h1:uYa+yGMglhJy8ymyUQ8KQiJjOb3UZTuPQ24Ot2s9BCo=
goauthentik.io/api/v3 v3.2026020.17-0.20260304104333-840924fe52c4 h1:zjmi1QNVQPABt0Yx5hws1lXR3tuTI23Ae7MwXffbP/s=
goauthentik.io/api/v3 v3.2026020.17-0.20260304104333-840924fe52c4/go.mod h1:uYa+yGMglhJy8ymyUQ8KQiJjOb3UZTuPQ24Ot2s9BCo=
goauthentik.io/api/v3 v3.2026020.17-0.20260309103029-7c71e7d5673a h1:CipAaiYqzzyhQDO6xg3YfEC0saoyVCFFbUjRfAsJrxs=
goauthentik.io/api/v3 v3.2026020.17-0.20260309103029-7c71e7d5673a/go.mod h1:uYa+yGMglhJy8ymyUQ8KQiJjOb3UZTuPQ24Ot2s9BCo=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=

View File

@@ -50,12 +50,12 @@ type PostgreSQLConfig struct {
}
type ListenConfig struct {
HTTP []string `yaml:"http" env:"HTTP, overwrite"`
HTTPS []string `yaml:"https" env:"HTTPS, overwrite"`
LDAP []string `yaml:"ldap" env:"LDAP, overwrite"`
LDAPS []string `yaml:"ldaps" env:"LDAPS, overwrite"`
Radius []string `yaml:"radius" env:"RADIUS, overwrite"`
Metrics []string `yaml:"metrics" env:"METRICS, overwrite"`
HTTP string `yaml:"http" env:"HTTP, overwrite"`
HTTPS string `yaml:"https" env:"HTTPS, overwrite"`
LDAP string `yaml:"ldap" env:"LDAP, overwrite"`
LDAPS string `yaml:"ldaps" env:"LDAPS, overwrite"`
Radius string `yaml:"radius" env:"RADIUS, overwrite"`
Metrics string `yaml:"metrics" env:"METRICS, overwrite"`
Debug string `yaml:"debug" env:"DEBUG, overwrite"`
TrustedProxyCIDRs []string `yaml:"trusted_proxy_cidrs" env:"TRUSTED_PROXY_CIDRS, overwrite"`
}

View File

@@ -5,7 +5,6 @@ import (
"crypto/fips140"
"fmt"
"math/rand"
"net"
"net/http"
"net/url"
"os"
@@ -55,44 +54,19 @@ type APIController struct {
// NewAPIController initialise new API Controller instance from URL and API token
func NewAPIController(akURL url.URL, token string) *APIController {
rsp := sentry.StartSpan(context.Background(), "authentik.outposts.init")
log := log.WithField("logger", "authentik.outpost.ak-api-controller")
originalAkURL := akURL
var client http.Client
if akURL.Scheme == "unix" {
log.WithField("host", akURL.Host).WithField("path", akURL.Path).Debug("using unix socket")
socketPath := akURL.Host
client = http.Client{
Transport: web.NewUserAgentTransport(
constants.UserAgentOutpost(),
web.NewTracingTransport(
rsp.Context(),
&http.Transport{
DialContext: func(_ context.Context, _, _ string) (net.Conn, error) {
return net.Dial("unix", socketPath)
},
},
),
),
}
akURL.Scheme = "http"
akURL.Host = "localhost"
} else {
client = http.Client{
Transport: web.NewUserAgentTransport(
constants.UserAgentOutpost(),
web.NewTracingTransport(
rsp.Context(),
GetTLSTransport(),
),
),
}
}
apiConfig := api.NewConfiguration()
apiConfig.Host = akURL.Host
apiConfig.Scheme = akURL.Scheme
apiConfig.HTTPClient = &client
apiConfig.HTTPClient = &http.Client{
Transport: web.NewUserAgentTransport(
constants.UserAgentOutpost(),
web.NewTracingTransport(
rsp.Context(),
GetTLSTransport(),
),
),
}
apiConfig.Servers = api.ServerConfigurations{
{
URL: fmt.Sprintf("%sapi/v3", akURL.Path),
@@ -103,6 +77,8 @@ func NewAPIController(akURL url.URL, token string) *APIController {
// create the API client, with the transport
apiClient := api.NewAPIClient(apiConfig)
log := log.WithField("logger", "authentik.outpost.ak-api-controller")
// Because we don't know the outpost UUID, we simply do a list and pick the first
// The service account this token belongs to should only have access to a single outpost
outposts, _ := retry.DoWithData[*api.PaginatedOutpostList](
@@ -148,7 +124,7 @@ func NewAPIController(akURL url.URL, token string) *APIController {
}
ac.logger.WithField("embedded", ac.IsEmbedded()).Info("Outpost mode")
ac.logger.WithField("offset", ac.reloadOffset.String()).Debug("HA Reload offset")
err = ac.initEvent(originalAkURL, outpost.Pk)
err = ac.initEvent(akURL, outpost.Pk)
if err != nil {
go ac.recentEvents()
}

View File

@@ -5,7 +5,6 @@ import (
"crypto/tls"
"fmt"
"maps"
"net"
"net/http"
"net/url"
"strconv"
@@ -46,19 +45,9 @@ func (ac *APIController) initEvent(akURL url.URL, outpostUUID string) error {
dialer := websocket.Dialer{
Proxy: http.ProxyFromEnvironment,
HandshakeTimeout: 10 * time.Second,
}
if akURL.Scheme == "unix" {
ac.logger.WithField("host", akURL.Host).WithField("path", akURL.Path).Debug("websocket is using unix connection")
socketPath := akURL.Host
dialer.NetDialContext = func(ctx context.Context, _, _ string) (net.Conn, error) {
return (&net.Dialer{}).DialContext(ctx, "unix", socketPath)
}
akURL.Scheme = "http"
akURL.Host = "localhost"
} else {
dialer.TLSClientConfig = &tls.Config{
TLSClientConfig: &tls.Config{
InsecureSkipVerify: config.Get().AuthentikInsecure,
}
},
}
wsu := ac.getWebsocketURL(akURL, outpostUUID, query).String()

View File

@@ -1,16 +1,13 @@
package healthcheck
import (
"context"
"net"
"fmt"
"net/http"
"os"
"path"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"goauthentik.io/internal/config"
"goauthentik.io/internal/outpost/ak"
"goauthentik.io/internal/utils/web"
)
@@ -24,15 +21,9 @@ var Command = &cobra.Command{
func check() int {
h := &http.Client{
Transport: web.NewUserAgentTransport("goauthentik.io/healthcheck",
&http.Transport{
DialContext: func(_ context.Context, _, _ string) (net.Conn, error) {
return net.Dial("unix", path.Join(os.TempDir(), ak.MetricsSocketName))
},
},
),
Transport: web.NewUserAgentTransport("goauthentik.io/healthcheck", http.DefaultTransport),
}
url := "http://localhost/outpost.goauthentik.io/ping"
url := fmt.Sprintf("http://%s/outpost.goauthentik.io/ping", config.Get().Listen.Metrics)
res, err := h.Head(url)
if err != nil {
log.WithError(err).Warning("failed to send healthcheck request")

View File

@@ -1,22 +1,12 @@
package ak
import (
"net/http"
"os"
"path"
"github.com/gorilla/mux"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/prometheus/client_golang/prometheus/promhttp"
log "github.com/sirupsen/logrus"
"goauthentik.io/internal/utils/sentry"
"goauthentik.io/internal/utils/unix"
)
var (
MetricsSocketName = "authentik-metrics.sock"
OutpostInfo = promauto.NewGaugeVec(prometheus.GaugeOpts{
OutpostInfo = promauto.NewGaugeVec(prometheus.GaugeOpts{
Name: "authentik_outpost_info",
Help: "Outpost info",
}, []string{"outpost_name", "outpost_type", "uuid", "version", "build"})
@@ -29,43 +19,3 @@ var (
Help: "Connection status",
}, []string{"outpost_name", "outpost_type", "uuid"})
)
func MetricsRouter() *mux.Router {
m := mux.NewRouter()
m.Use(sentry.SentryNoSampleMiddleware)
m.HandleFunc("/outpost.goauthentik.io/ping", func(rw http.ResponseWriter, r *http.Request) {
rw.WriteHeader(204)
})
m.Path("/metrics").Handler(promhttp.Handler())
return m
}
func RunMetricsServer(listen string, router *mux.Router) {
l := log.WithField("logger", "authentik.outpost.metrics").WithField("listen", listen)
l.Info("Starting Metrics server")
err := http.ListenAndServe(listen, router)
if err != nil {
l.WithError(err).Warning("Failed to start metrics listener")
}
}
func RunMetricsUnix(router *mux.Router) {
socketPath := path.Join(os.TempDir(), MetricsSocketName)
l := log.WithField("logger", "authentik.outpost.metrics").WithField("listen", socketPath)
ln, err := unix.Listen(socketPath)
if err != nil {
l.WithError(err).Warning("failed to listen")
return
}
defer func() {
err := ln.Close()
if err != nil {
l.WithError(err).Warning("failed to close listener")
}
}()
l.WithField("listen", socketPath).Info("Starting Metrics server")
err = http.Serve(ln, router)
if err != nil {
l.WithError(err).Warning("Failed to start metrics listener")
}
}

View File

@@ -11,6 +11,7 @@ import (
"goauthentik.io/internal/config"
"goauthentik.io/internal/crypto"
"goauthentik.io/internal/outpost/ak"
"goauthentik.io/internal/outpost/ldap/metrics"
"goauthentik.io/internal/utils"
"beryju.io/ldap"
@@ -62,7 +63,9 @@ func (ls *LDAPServer) Type() string {
return "ldap"
}
func (ls *LDAPServer) StartLDAPServer(listen string) error {
func (ls *LDAPServer) StartLDAPServer() error {
listen := config.Get().Listen.LDAP
ln, err := net.Listen("tcp", listen)
if err != nil {
ls.log.WithField("listen", listen).WithError(err).Warning("Failed to listen (SSL)")
@@ -86,40 +89,26 @@ func (ls *LDAPServer) StartLDAPServer(listen string) error {
}
func (ls *LDAPServer) Start() error {
listenLdap := config.Get().Listen.LDAP
listenLdaps := config.Get().Listen.LDAPS
listenMetrics := config.Get().Listen.Metrics
metricsRouter := ak.MetricsRouter()
wg := sync.WaitGroup{}
wg.Add(len(listenLdap) + len(listenLdaps) + 1 + len(listenMetrics))
for _, listen := range listenLdap {
go func() {
defer wg.Done()
err := ls.StartLDAPServer(listen)
if err != nil {
panic(err)
}
}()
}
for _, listen := range listenLdaps {
go func() {
defer wg.Done()
err := ls.StartLDAPTLSServer(listen)
if err != nil {
panic(err)
}
}()
}
wg.Add(3)
go func() {
defer wg.Done()
ak.RunMetricsUnix(metricsRouter)
metrics.RunServer()
}()
go func() {
defer wg.Done()
err := ls.StartLDAPServer()
if err != nil {
panic(err)
}
}()
go func() {
defer wg.Done()
err := ls.StartLDAPTLSServer()
if err != nil {
panic(err)
}
}()
for _, listen := range listenMetrics {
go func() {
defer wg.Done()
ak.RunMetricsServer(listen, metricsRouter)
}()
}
wg.Wait()
return nil
}

View File

@@ -5,6 +5,7 @@ import (
"net"
"github.com/pires/go-proxyproto"
"goauthentik.io/internal/config"
"goauthentik.io/internal/utils"
)
@@ -36,7 +37,8 @@ func (ls *LDAPServer) getCertificates(info *tls.ClientHelloInfo) (*tls.Certifica
return ls.defaultCert, nil
}
func (ls *LDAPServer) StartLDAPTLSServer(listen string) error {
func (ls *LDAPServer) StartLDAPTLSServer() error {
listen := config.Get().Listen.LDAPS
tlsConfig := utils.GetTLSConfig()
tlsConfig.GetCertificate = ls.getCertificates

View File

@@ -1,8 +1,16 @@
package metrics
import (
"net/http"
log "github.com/sirupsen/logrus"
"goauthentik.io/internal/config"
"goauthentik.io/internal/utils/sentry"
"github.com/gorilla/mux"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/prometheus/client_golang/prometheus/promhttp"
)
var (
@@ -15,3 +23,19 @@ var (
Help: "Total number of rejected requests",
}, []string{"outpost_name", "type", "reason", "app"})
)
func RunServer() {
m := mux.NewRouter()
l := log.WithField("logger", "authentik.outpost.metrics")
m.Use(sentry.SentryNoSampleMiddleware)
m.HandleFunc("/outpost.goauthentik.io/ping", func(rw http.ResponseWriter, r *http.Request) {
rw.WriteHeader(204)
})
m.Path("/metrics").Handler(promhttp.Handler())
listen := config.Get().Listen.Metrics
l.WithField("listen", listen).Info("Starting Metrics server")
err := http.ListenAndServe(listen, m)
if err != nil {
l.WithError(err).Warning("Failed to start metrics listener")
}
}

View File

@@ -1,8 +1,16 @@
package metrics
import (
"net/http"
log "github.com/sirupsen/logrus"
"goauthentik.io/internal/config"
"goauthentik.io/internal/utils/sentry"
"github.com/gorilla/mux"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/prometheus/client_golang/prometheus/promhttp"
)
var (
@@ -15,3 +23,19 @@ var (
Help: "Proxy upstream response latencies in seconds",
}, []string{"outpost_name", "method", "scheme", "host", "upstream_host"})
)
func RunServer() {
m := mux.NewRouter()
l := log.WithField("logger", "authentik.outpost.metrics")
m.Use(sentry.SentryNoSampleMiddleware)
m.HandleFunc("/outpost.goauthentik.io/ping", func(rw http.ResponseWriter, r *http.Request) {
rw.WriteHeader(204)
})
m.Path("/metrics").Handler(promhttp.Handler())
listen := config.Get().Listen.Metrics
l.WithField("listen", listen).Info("Starting Metrics server")
err := http.ListenAndServe(listen, m)
if err != nil {
l.WithError(err).Warning("Failed to start metrics listener")
}
}

View File

@@ -18,6 +18,7 @@ import (
"goauthentik.io/internal/crypto"
"goauthentik.io/internal/outpost/ak"
"goauthentik.io/internal/outpost/proxyv2/application"
"goauthentik.io/internal/outpost/proxyv2/metrics"
"goauthentik.io/internal/utils"
sentryutils "goauthentik.io/internal/utils/sentry"
"goauthentik.io/internal/utils/web"
@@ -126,10 +127,11 @@ func (ps *ProxyServer) getCertificates(info *tls.ClientHelloInfo) (*tls.Certific
}
// ServeHTTP constructs a net.Listener and starts handling HTTP requests
func (ps *ProxyServer) ServeHTTP(listen string) {
listener, err := net.Listen("tcp", listen)
func (ps *ProxyServer) ServeHTTP() {
listenAddress := config.Get().Listen.HTTP
listener, err := net.Listen("tcp", listenAddress)
if err != nil {
ps.log.WithField("listen", listen).WithError(err).Warning("Failed to listen")
ps.log.WithField("listen", listenAddress).WithError(err).Warning("Failed to listen")
return
}
proxyListener := &proxyproto.Listener{Listener: listener, ConnPolicy: utils.GetProxyConnectionPolicy()}
@@ -140,17 +142,18 @@ func (ps *ProxyServer) ServeHTTP(listen string) {
}
}()
ps.log.WithField("listen", listen).Info("Starting HTTP server")
ps.log.WithField("listen", listenAddress).Info("Starting HTTP server")
ps.serve(proxyListener)
ps.log.WithField("listen", listen).Info("Stopping HTTP server")
ps.log.WithField("listen", listenAddress).Info("Stopping HTTP server")
}
// ServeHTTPS constructs a net.Listener and starts handling HTTPS requests
func (ps *ProxyServer) ServeHTTPS(listen string) {
func (ps *ProxyServer) ServeHTTPS() {
listenAddress := config.Get().Listen.HTTPS
tlsConfig := utils.GetTLSConfig()
tlsConfig.GetCertificate = ps.getCertificates
ln, err := net.Listen("tcp", listen)
ln, err := net.Listen("tcp", listenAddress)
if err != nil {
ps.log.WithError(err).Warning("Failed to listen (TLS)")
return
@@ -164,40 +167,26 @@ func (ps *ProxyServer) ServeHTTPS(listen string) {
}()
tlsListener := tls.NewListener(proxyListener, tlsConfig)
ps.log.WithField("listen", listen).Info("Starting HTTPS server")
ps.log.WithField("listen", listenAddress).Info("Starting HTTPS server")
ps.serve(tlsListener)
ps.log.WithField("listen", listen).Info("Stopping HTTPS server")
ps.log.WithField("listen", listenAddress).Info("Stopping HTTPS server")
}
func (ps *ProxyServer) Start() error {
listenHttp := config.Get().Listen.HTTP
listenHttps := config.Get().Listen.HTTPS
listenMetrics := config.Get().Listen.Metrics
metricsRouter := ak.MetricsRouter()
wg := sync.WaitGroup{}
wg.Add(len(listenHttp) + len(listenHttps) + 1 + len(listenMetrics))
for _, listen := range listenHttp {
go func() {
defer wg.Done()
ps.ServeHTTP(listen)
}()
}
for _, listen := range listenHttps {
go func() {
defer wg.Done()
ps.ServeHTTPS(listen)
}()
}
wg.Add(3)
go func() {
defer wg.Done()
ak.RunMetricsUnix(metricsRouter)
ps.ServeHTTP()
}()
go func() {
defer wg.Done()
ps.ServeHTTPS()
}()
go func() {
defer wg.Done()
metrics.RunServer()
}()
for _, listen := range listenMetrics {
go func() {
defer wg.Done()
ak.RunMetricsServer(listen, metricsRouter)
}()
}
return nil
}

View File

@@ -0,0 +1,28 @@
package metrics
import (
"net/http"
log "github.com/sirupsen/logrus"
"goauthentik.io/internal/config"
"goauthentik.io/internal/utils/sentry"
"github.com/gorilla/mux"
"github.com/prometheus/client_golang/prometheus/promhttp"
)
func RunServer() {
m := mux.NewRouter()
l := log.WithField("logger", "authentik.outpost.metrics")
m.Use(sentry.SentryNoSampleMiddleware)
m.HandleFunc("/outpost.goauthentik.io/ping", func(rw http.ResponseWriter, r *http.Request) {
rw.WriteHeader(204)
})
m.Path("/metrics").Handler(promhttp.Handler())
listen := config.Get().Listen.Metrics
l.WithField("listen", listen).Info("Starting Metrics server")
err := http.ListenAndServe(listen, m)
if err != nil {
l.WithError(err).Warning("Failed to start metrics listener")
}
}

View File

@@ -9,9 +9,9 @@ import (
log "github.com/sirupsen/logrus"
"github.com/wwt/guac"
"goauthentik.io/internal/config"
"goauthentik.io/internal/outpost/ak"
"goauthentik.io/internal/outpost/rac/connection"
"goauthentik.io/internal/outpost/rac/metrics"
)
type RACServer struct {
@@ -92,10 +92,12 @@ func (rs *RACServer) wsHandler(ctx context.Context, msg ak.Event) error {
}
func (rs *RACServer) Start() error {
listenMetrics := config.Get().Listen.Metrics
metricsRouter := ak.MetricsRouter()
wg := sync.WaitGroup{}
wg.Add(1 + 1 + len(listenMetrics))
wg.Add(2)
go func() {
defer wg.Done()
metrics.RunServer()
}()
go func() {
defer wg.Done()
err := rs.startGuac()
@@ -103,16 +105,6 @@ func (rs *RACServer) Start() error {
panic(err)
}
}()
go func() {
defer wg.Done()
ak.RunMetricsUnix(metricsRouter)
}()
for _, listen := range listenMetrics {
go func() {
defer wg.Done()
ak.RunMetricsServer(listen, metricsRouter)
}()
}
wg.Wait()
return nil
}

View File

@@ -1,8 +1,16 @@
package metrics
import (
"net/http"
log "github.com/sirupsen/logrus"
"goauthentik.io/internal/config"
"goauthentik.io/internal/utils/sentry"
"github.com/gorilla/mux"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/prometheus/client_golang/prometheus/promhttp"
)
var (
@@ -15,3 +23,19 @@ var (
Help: "Total number of rejected requests",
}, []string{"outpost_name", "reason", "app"})
)
func RunServer() {
m := mux.NewRouter()
l := log.WithField("logger", "authentik.outpost.metrics")
m.Use(sentry.SentryNoSampleMiddleware)
m.HandleFunc("/outpost.goauthentik.io/ping", func(rw http.ResponseWriter, r *http.Request) {
rw.WriteHeader(204)
})
m.Path("/metrics").Handler(promhttp.Handler())
listen := config.Get().Listen.Metrics
l.WithField("listen", listen).Info("Starting Metrics server")
err := http.ListenAndServe(listen, m)
if err != nil {
l.WithError(err).Warning("Failed to start metrics listener")
}
}

View File

@@ -10,7 +10,7 @@ import (
log "github.com/sirupsen/logrus"
"goauthentik.io/internal/config"
"goauthentik.io/internal/outpost/ak"
"golang.org/x/sync/errgroup"
"goauthentik.io/internal/outpost/radius/metrics"
"layeh.com/radius"
)
@@ -30,7 +30,7 @@ type ProviderInstance struct {
}
type RadiusServer struct {
s []*radius.PacketServer
s radius.PacketServer
log *log.Entry
ac *ak.APIController
cryptoStore *ak.CryptoStore
@@ -45,13 +45,10 @@ func NewServer(ac *ak.APIController) ak.Outpost {
providers: map[int32]*ProviderInstance{},
cryptoStore: ak.NewCryptoStore(ac.Client.CryptoAPI),
}
listenRadius := config.Get().Listen.Radius
for _, listen := range listenRadius {
rs.s = append(rs.s, &radius.PacketServer{
Handler: rs,
SecretSource: rs,
Addr: listen,
})
rs.s = radius.PacketServer{
Handler: rs,
SecretSource: rs,
Addr: config.Get().Listen.Radius,
}
return rs
}
@@ -98,44 +95,29 @@ func (rs *RadiusServer) RADIUSSecret(ctx context.Context, remoteAddr net.Addr) (
}
func (rs *RadiusServer) Start() error {
listenMetrics := config.Get().Listen.Metrics
metricsRouter := ak.MetricsRouter()
wg := sync.WaitGroup{}
wg.Add(len(rs.s) + 1 + len(listenMetrics))
for _, s := range rs.s {
go func() {
defer wg.Done()
rs.log.WithField("listen", s.Addr).Info("Starting radius server")
err := s.ListenAndServe()
if err != nil {
panic(err)
}
}()
}
wg.Add(2)
go func() {
defer wg.Done()
ak.RunMetricsUnix(metricsRouter)
metrics.RunServer()
}()
go func() {
defer wg.Done()
rs.log.WithField("listen", rs.s.Addr).Info("Starting radius server")
err := rs.s.ListenAndServe()
if err != nil {
panic(err)
}
}()
for _, listen := range listenMetrics {
go func() {
defer wg.Done()
ak.RunMetricsServer(listen, metricsRouter)
}()
}
wg.Wait()
return nil
}
func (rs *RadiusServer) Stop() error {
ctx, cancel := context.WithCancel(context.Background())
errs := new(errgroup.Group)
for _, s := range rs.s {
errs.Go(func() error {
return s.Shutdown(ctx)
})
}
err := rs.s.Shutdown(ctx)
cancel()
return errs.Wait()
return err
}
func (rs *RadiusServer) TimerFlowCacheExpiry(context.Context) {}

View File

@@ -1,43 +0,0 @@
package unix
import (
"net"
)
type Listener struct {
*net.UnixListener
}
type Conn struct {
net.Conn
}
func Listen(path string) (*Listener, error) {
addr, err := net.ResolveUnixAddr("unix", path)
if err != nil {
return nil, err
}
ln, err := net.ListenUnix("unix", addr)
if err != nil {
return nil, err
}
return &Listener{
ln,
}, nil
}
func (l *Listener) Accept() (net.Conn, error) {
c, err := l.UnixListener.Accept()
if err != nil {
return nil, err
}
return &Conn{c}, nil
}
func (c *Conn) LocalAddr() net.Addr {
return &net.TCPAddr{IP: net.IPv6loopback, Port: 0}
}
func (c *Conn) RemoteAddr() net.Addr {
return &net.TCPAddr{IP: net.IPv6loopback, Port: 0}
}

View File

@@ -19,7 +19,7 @@ var Requests = promauto.NewHistogramVec(prometheus.HistogramOpts{
Help: "API request latencies in seconds",
}, []string{"dest"})
func (ws *WebServer) runMetricsServer(listen string) {
func (ws *WebServer) runMetricsServer() {
l := log.WithField("logger", "authentik.router.metrics")
m := mux.NewRouter()
@@ -49,10 +49,10 @@ func (ws *WebServer) runMetricsServer(listen string) {
return
}
})
l.WithField("listen", listen).Info("Starting Metrics server")
err := http.ListenAndServe(listen, m)
l.WithField("listen", config.Get().Listen.Metrics).Info("Starting Metrics server")
err := http.ListenAndServe(config.Get().Listen.Metrics, m)
if err != nil {
l.WithError(err).Warning("Failed to start metrics server")
}
l.WithField("listen", listen).Info("Stopping Metrics server")
l.WithField("listen", config.Get().Listen.Metrics).Info("Stopping Metrics server")
}

View File

@@ -21,18 +21,17 @@ import (
"goauthentik.io/internal/config"
"goauthentik.io/internal/constants"
"goauthentik.io/internal/gounicorn"
"goauthentik.io/internal/outpost/ak"
"goauthentik.io/internal/outpost/proxyv2"
"goauthentik.io/internal/utils"
"goauthentik.io/internal/utils/unix"
"goauthentik.io/internal/utils/web"
"goauthentik.io/internal/web/brand_tls"
)
const (
SocketName = "authentik.sock"
IPCKeyFile = "authentik-core-ipc.key"
MetricsKeyFile = "authentik-core-metrics.key"
CoreSocketName = "authentik-core.sock"
UnixSocketName = "authentik-core.sock"
)
type WebServer struct {
@@ -65,7 +64,7 @@ func NewWebServer() *WebServer {
loggingHandler.Use(web.NewLoggingHandler(l, nil))
tmp := os.TempDir()
socketPath := path.Join(tmp, CoreSocketName)
socketPath := path.Join(tmp, UnixSocketName)
// create http client to talk to backend, normal client if we're in debug more
// and a client that connects to our socket when in non debug mode
@@ -141,8 +140,7 @@ func (ws *WebServer) prepareKeys() {
func (ws *WebServer) Start() {
ws.prepareKeys()
socketPath := path.Join(os.TempDir(), SocketName)
u, err := url.Parse(fmt.Sprintf("http://localhost%s", config.Get().Web.Path))
u, err := url.Parse(fmt.Sprintf("http://%s%s", config.Get().Listen.HTTP, config.Get().Web.Path))
if err != nil {
panic(err)
}
@@ -152,11 +150,7 @@ func (ws *WebServer) Start() {
apiConfig.HTTPClient = &http.Client{
Transport: web.NewUserAgentTransport(
constants.UserAgentIPC(),
&http.Transport{
DialContext: func(_ context.Context, _, _ string) (net.Conn, error) {
return net.Dial("unix", socketPath)
},
},
ak.GetTLSTransport(),
),
}
apiConfig.Servers = api.ServerConfigurations{
@@ -177,18 +171,10 @@ func (ws *WebServer) Start() {
go tw.Start()
})
for _, listen := range config.Get().Listen.Metrics {
go ws.runMetricsServer(listen)
}
go ws.runMetricsServer()
go ws.attemptStartBackend()
_ = os.Remove(socketPath)
go ws.listenUnix(socketPath)
for _, listen := range config.Get().Listen.HTTP {
go ws.listenPlain(listen)
}
for _, listen := range config.Get().Listen.HTTPS {
go ws.listenTLS(listen)
}
go ws.listenPlain()
go ws.listenTLS()
}
func (ws *WebServer) attemptStartBackend() {
@@ -239,41 +225,23 @@ func (ws *WebServer) Shutdown() {
ws.stop <- struct{}{}
}
func (ws *WebServer) listenUnix(listen string) {
ln, err := unix.Listen(listen)
func (ws *WebServer) listenPlain() {
ln, err := net.Listen("tcp", config.Get().Listen.HTTP)
if err != nil {
ws.log.WithField("listen", listen).WithError(err).Warning("failed to listen")
return
}
defer func() {
err := ln.Close()
if err != nil {
ws.log.WithField("listen", listen).WithError(err).Warning("failed to close listener")
}
}()
ws.log.WithField("listen", listen).Info("Starting HTTP server")
ws.serve(ln)
ws.log.WithField("listen", listen).Info("Stopping HTTP server")
}
func (ws *WebServer) listenPlain(listen string) {
ln, err := net.Listen("tcp", listen)
if err != nil {
ws.log.WithField("listen", listen).WithError(err).Warning("failed to listen")
ws.log.WithError(err).Warning("failed to listen")
return
}
proxyListener := &proxyproto.Listener{Listener: ln, ConnPolicy: utils.GetProxyConnectionPolicy()}
defer func() {
err := proxyListener.Close()
if err != nil {
ws.log.WithField("listen", listen).WithError(err).Warning("failed to close proxy listener")
ws.log.WithError(err).Warning("failed to close proxy listener")
}
}()
ws.log.WithField("listen", listen).Info("Starting HTTP server")
ws.log.WithField("listen", config.Get().Listen.HTTP).Info("Starting HTTP server")
ws.serve(proxyListener)
ws.log.WithField("listen", listen).Info("Stopping HTTP server")
ws.log.WithField("listen", config.Get().Listen.HTTP).Info("Stopping HTTP server")
}
func (ws *WebServer) serve(listener net.Listener) {

View File

@@ -6,6 +6,7 @@ import (
"github.com/pires/go-proxyproto"
"goauthentik.io/internal/config"
"goauthentik.io/internal/crypto"
"goauthentik.io/internal/utils"
"goauthentik.io/internal/utils/web"
@@ -47,13 +48,13 @@ func (ws *WebServer) GetCertificate() func(ch *tls.ClientHelloInfo) (*tls.Config
}
// ServeHTTPS constructs a net.Listener and starts handling HTTPS requests
func (ws *WebServer) listenTLS(listen string) {
func (ws *WebServer) listenTLS() {
tlsConfig := utils.GetTLSConfig()
tlsConfig.GetConfigForClient = ws.GetCertificate()
ln, err := net.Listen("tcp", listen)
ln, err := net.Listen("tcp", config.Get().Listen.HTTPS)
if err != nil {
ws.log.WithField("listen", listen).WithError(err).Warning("failed to listen (TLS)")
ws.log.WithError(err).Warning("failed to listen (TLS)")
return
}
proxyListener := &proxyproto.Listener{
@@ -70,7 +71,7 @@ func (ws *WebServer) listenTLS(listen string) {
}()
tlsListener := tls.NewListener(proxyListener, tlsConfig)
ws.log.WithField("listen", listen).Info("Starting HTTPS server")
ws.log.WithField("listen", config.Get().Listen.HTTPS).Info("Starting HTTPS server")
ws.serve(tlsListener)
ws.log.WithField("listen", listen).Info("Stopping HTTPS server")
ws.log.WithField("listen", config.Get().Listen.HTTPS).Info("Stopping HTTPS server")
}

View File

@@ -1,9 +1,13 @@
#!/usr/bin/env bash
#!/usr/bin/env -S bash
set -e -o pipefail
MODE_FILE="${TMPDIR}/authentik-mode"
if [[ -z "${PROMETHEUS_MULTIPROC_DIR}" ]]; then
export PROMETHEUS_MULTIPROC_DIR="${TMPDIR:-/tmp}/authentik_prometheus_tmp"
fi
function log {
printf '{"event": "%s", "level": "info", "logger": "bootstrap"}\n' "$@" >&2
printf '{"event": "%s", "level": "info", "logger": "bootstrap"}\n' "$@" >/dev/stderr
}
function wait_for_db {
@@ -11,18 +15,10 @@ function wait_for_db {
log "Bootstrap completed"
}
function run_authentik {
if [[ -x "$(command -v authentik)" ]]; then
echo authentik "$@"
else
echo cargo run -- "$@"
fi
}
function check_if_root_and_run {
function check_if_root {
if [[ $EUID -ne 0 ]]; then
log "Not running as root, disabling permission fixes"
exec $(run_authentik "$@")
exec $1
return
fi
SOCKET="/var/run/docker.sock"
@@ -30,19 +26,36 @@ function check_if_root_and_run {
if [[ -e "$SOCKET" ]]; then
# Get group ID of the docker socket, so we can create a matching group and
# add ourselves to it
DOCKER_GID="$(stat -c "%g" "${SOCKET}")"
DOCKER_GID=$(stat -c '%g' $SOCKET)
# Ensure group for the id exists
getent group "${DOCKER_GID}" || groupadd -f -g "${DOCKER_GID}" docker
usermod -a -G "${DOCKER_GID}" authentik
getent group $DOCKER_GID || groupadd -f -g $DOCKER_GID docker
usermod -a -G $DOCKER_GID authentik
# since the name of the group might not be docker, we need to lookup the group id
GROUP_NAME=$(getent group "${DOCKER_GID}" | sed 's/:/\n/g' | head -1)
GROUP_NAME=$(getent group $DOCKER_GID | sed 's/:/\n/g' | head -1)
GROUP="authentik:${GROUP_NAME}"
fi
# Fix permissions of certs and media
chown -R authentik:authentik /data /certs "${PROMETHEUS_MULTIPROC_DIR}"
chmod ug+rwx /data
chmod ug+rx /certs
exec chpst -u authentik:"${GROUP}" env HOME=/authentik $(run_authentik "$@")
exec chpst -u authentik:$GROUP env HOME=/authentik $1
}
function run_authentik {
if [[ -x "$(command -v authentik)" ]]; then
exec authentik $@
else
exec go run -v ./cmd/server/ $@
fi
}
function set_mode {
echo $1 >$MODE_FILE
trap cleanup EXIT
}
function cleanup {
rm -f ${MODE_FILE}
}
function prepare_debug {
@@ -59,31 +72,38 @@ function prepare_debug {
chown authentik:authentik /unittest.xml
}
if [[ -z "${PROMETHEUS_MULTIPROC_DIR}" ]]; then
export PROMETHEUS_MULTIPROC_DIR="${TMPDIR:-/tmp}/authentik_prometheus_tmp"
fi
mkdir -p "${PROMETHEUS_MULTIPROC_DIR}"
if [[ "$(python -m authentik.lib.config debugger 2>/dev/null)" == "True" ]]; then
prepare_debug
fi
if [[ "$1" == "bash" ]]; then
exec /usr/bin/env -S bash "$@"
elif [[ "$1" == "dump_config" ]]; then
shift 1
exec python -m authentik.lib.config "$@"
elif [[ "$1" == "debug" ]]; then
exec sleep infinity
if [[ "$1" == "server" ]]; then
set_mode "server"
run_authentik
elif [[ "$1" == "worker" ]]; then
set_mode "worker"
shift
# If we have bootstrap credentials set, run bootstrap tasks outside of main server
# sync, so that we can sure the first start actually has working bootstrap
# credentials
if [[ -n "${AUTHENTIK_BOOTSTRAP_PASSWORD}" || -n "${AUTHENTIK_BOOTSTRAP_TOKEN}" ]]; then
python -m manage apply_blueprint system/bootstrap.yaml || true
fi
check_if_root "python -m manage worker --pid-file ${TMPDIR}/authentik-worker.pid $@"
elif [[ "$1" == "bash" ]]; then
/bin/bash
elif [[ "$1" == "test-all" ]]; then
wait_for_db
prepare_debug
chmod 777 /root
check_if_root_and_run manage test authentik
elif [[ "$1" == "allinone" ]] || [[ "$1" == "server" ]] || [[ "$1" == "worker" ]] || [[ "$1" == "proxy" ]] || [[ "$1" == "manage" ]]; then
wait_for_db
check_if_root_and_run "$@"
check_if_root "python -m manage test authentik"
elif [[ "$1" == "healthcheck" ]]; then
run_authentik healthcheck $(cat $MODE_FILE)
elif [[ "$1" == "dump_config" ]]; then
shift
exec python -m authentik.lib.config $@
elif [[ "$1" == "debug" ]]; then
exec sleep infinity
else
wait_for_db
exec python -m manage "$@"
fi

View File

@@ -9,7 +9,7 @@
"version": "0.0.0",
"license": "MIT",
"devDependencies": {
"aws-cdk": "^2.1112.0",
"aws-cdk": "^2.1110.0",
"cross-env": "^10.1.0"
},
"engines": {
@@ -25,9 +25,9 @@
"license": "MIT"
},
"node_modules/aws-cdk": {
"version": "2.1112.0",
"resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.1112.0.tgz",
"integrity": "sha512-IYUbsd9tpBQRqEO2evWsG+p2ZNa6wG5/sJvmWaqo45V1ep8BW+mrf+jEpFLD9uDPXqRA57EZGVGils7QLbOhNA==",
"version": "2.1110.0",
"resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.1110.0.tgz",
"integrity": "sha512-t881rXhuHWbiCXf8nuzf81jyOzHCgX1DNiCD3COwVGpT6DYna2SjsrDbraenJM722Oc+2OOAAMpKNEtVNj7mEg==",
"dev": true,
"license": "Apache-2.0",
"bin": {

Some files were not shown because too many files have changed in this diff Show More