Mirror of https://github.com/goauthentik/authentik (synced 2026-05-07 07:32:23 +02:00)

Compare commits: web/bundle...custom_opt (11 commits)
Commits in this range (author and date columns were lost in the mirror):

eab28d0ad6
a1ee3ca278
d9db8ac044
c9c0674711
93c2bb7e9f
0bbadf3194
9f97a65094
c2a41940bc
f30120e88a
4582183da4
d8cf464f3c
@@ -4,7 +4,7 @@ SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
 GITHUB_OUTPUT=/dev/stdout \
 GITHUB_REF=ref \
 GITHUB_SHA=sha \
-IMAGE_NAME=ghcr.io/goauthentik/server,authentik/server \
+IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
 GITHUB_REPOSITORY=goauthentik/authentik \
 python $SCRIPT_DIR/push_vars.py

@@ -12,7 +12,7 @@ GITHUB_OUTPUT=/dev/stdout \
 GITHUB_OUTPUT=/dev/stdout \
 GITHUB_REF=ref \
 GITHUB_SHA=sha \
-IMAGE_NAME=ghcr.io/goauthentik/server,authentik/server \
+IMAGE_NAME=ghcr.io/goauthentik/server,beryju/authentik \
 GITHUB_REPOSITORY=goauthentik/authentik \
 DOCKER_USERNAME=foo \
 python $SCRIPT_DIR/push_vars.py
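For context, scripts driven this way write key=value pairs to the file named by GITHUB_OUTPUT; pointing that variable at /dev/stdout makes the smoke test print them instead. A minimal sketch of the mechanism — the output keys here are illustrative, not the actual contents of push_vars.py:

    # Sketch of a GITHUB_OUTPUT-style script; only the mechanism is shown,
    # the exact keys written by push_vars.py are an assumption.
    import os

    def set_output(key: str, value: str) -> None:
        # GITHUB_OUTPUT is a real file path in Actions runs; /dev/stdout in the test.
        with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as output:
            output.write(f"{key}={value}\n")

    set_output("sha", os.environ.get("GITHUB_SHA", ""))
    set_output("imageNames", os.environ.get("IMAGE_NAME", ""))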
@@ -1,6 +1,5 @@
----
-# Re-usable workflow for a single-architecture build
-name: Reusable - Single-arch Container build
+name: Single-arch Container build

 on:
   workflow_call:

@@ -1,6 +1,5 @@
----
-# Re-usable workflow for a multi-architecture build
-name: Reusable - Multi-arch container build
+name: Multi-arch container build

 on:
   workflow_call:
.github/workflows/api-py-publish.yml (vendored, 5 changes)

@@ -1,13 +1,10 @@
----
-name: API - Publish Python client
-
+name: authentik-api-py-publish
 on:
   push:
     branches: [main]
     paths:
       - "schema.yml"
   workflow_dispatch:

 jobs:
   build:
     if: ${{ github.repository != 'goauthentik/authentik-internal' }}
.github/workflows/api-ts-publish.yml (vendored, 5 changes)

@@ -1,13 +1,10 @@
----
-name: API - Publish Typescript client
-
+name: authentik-api-ts-publish
 on:
   push:
     branches: [main]
     paths:
       - "schema.yml"
   workflow_dispatch:

 jobs:
   build:
     if: ${{ github.repository != 'goauthentik/authentik-internal' }}
.github/workflows/ci-api-docs.yml (vendored, 5 changes)

@@ -1,5 +1,4 @@
----
-name: CI - API Docs
+name: authentik-ci-api-docs

 on:
   push:

@@ -67,7 +66,7 @@ jobs:
       - build
     steps:
       - uses: actions/checkout@v4
-      - uses: actions/download-artifact@v5
+      - uses: actions/download-artifact@v4
         with:
           name: api-docs
           path: website/api/build
.github/workflows/ci-aws-cfn.yml (vendored, 3 changes)

@@ -1,5 +1,4 @@
----
-name: CI - AWS cfn
+name: authentik-ci-aws-cfn

 on:
   push:
.github/workflows/ci-docs.yml (vendored, 3 changes)

@@ -1,5 +1,4 @@
----
-name: CI - Docs
+name: authentik-ci-docs

 on:
   push:
.github/workflows/ci-main-daily.yml (vendored, 2 changes)

@@ -1,5 +1,5 @@
 ---
-name: CI - Main daily
+name: authentik-ci-main-daily

 on:
   workflow_dispatch:
.github/workflows/ci-main.yml (vendored, 20 changes)

@@ -1,5 +1,5 @@
 ---
-name: CI - Main
+name: authentik-ci-main

 on:
   push:

@@ -17,12 +17,6 @@ env:
   POSTGRES_USER: authentik
   POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"

-permissions:
-  # Needed for checkout
-  contents: read
-  # Needed for codecov OIDC token
-  id-token: write
-
 jobs:
   lint:
     strategy:

@@ -142,13 +136,13 @@ jobs:
         uses: codecov/codecov-action@v5
         with:
           flags: unit
-          use_oidc: true
+          token: ${{ secrets.CODECOV_TOKEN }}
       - if: ${{ !cancelled() }}
         uses: codecov/test-results-action@v1
         with:
           flags: unit
           file: unittest.xml
-          use_oidc: true
+          token: ${{ secrets.CODECOV_TOKEN }}
   test-integration:
     runs-on: ubuntu-latest
     timeout-minutes: 30

@@ -166,13 +160,13 @@
         uses: codecov/codecov-action@v5
         with:
           flags: integration
-          use_oidc: true
+          token: ${{ secrets.CODECOV_TOKEN }}
       - if: ${{ !cancelled() }}
         uses: codecov/test-results-action@v1
         with:
           flags: integration
           file: unittest.xml
-          use_oidc: true
+          token: ${{ secrets.CODECOV_TOKEN }}
   test-e2e:
     name: test-e2e (${{ matrix.job.name }})
     runs-on: ubuntu-latest

@@ -225,13 +219,13 @@
         uses: codecov/codecov-action@v5
         with:
           flags: e2e
-          use_oidc: true
+          token: ${{ secrets.CODECOV_TOKEN }}
       - if: ${{ !cancelled() }}
         uses: codecov/test-results-action@v1
         with:
           flags: e2e
           file: unittest.xml
-          use_oidc: true
+          token: ${{ secrets.CODECOV_TOKEN }}
   ci-core-mark:
     if: always()
     needs:
.github/workflows/ci-outpost.yml (vendored, 2 changes)

@@ -1,5 +1,5 @@
 ---
-name: CI - Outpost
+name: authentik-ci-outpost

 on:
   push:
.github/workflows/ci-web.yml (vendored, 3 changes)

@@ -1,5 +1,4 @@
----
-name: CI - Web
+name: authentik-ci-web

 on:
   push:
@@ -1,5 +1,4 @@
----
-name: QA - CodeQL
+name: "CodeQL"

 on:
   push:
@@ -1,6 +1,4 @@
----
-name: Gen - Webauthn MDS
-
+name: authentik-gen-update-webauthn-mds
 on:
   workflow_dispatch:
   schedule:
@@ -1,7 +1,6 @@
 ---
 # See https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#force-deleting-cache-entries
-name: GH - Cleanup actions cache after PR is closed
-
+name: Cleanup cache after PR is closed
 on:
   pull_request:
     types:
@@ -1,5 +1,4 @@
----
-name: GH - GHCR retention
+name: ghcr-retention

 on:
   # schedule:
@@ -1,5 +1,5 @@
 ---
-name: Gen - Compress images
+name: authentik-compress-images

 on:
   push:
.github/workflows/packages-npm-publish.yml (vendored, 5 changes)

@@ -1,6 +1,4 @@
----
-name: Packages - Publish NPM packages
-
+name: authentik-packages-npm-publish
 on:
   push:
     branches: [main]

@@ -11,7 +9,6 @@ on:
       - packages/tsconfig/**
-      - packages/esbuild-plugin-live-reload/**
   workflow_dispatch:

 jobs:
   publish:
     if: ${{ github.repository != 'goauthentik/authentik-internal' }}
@@ -1,5 +1,4 @@
----
-name: CI - Source code docs
+name: authentik-publish-source-docs

 on:
   push:
.github/workflows/release-next-branch.yml (vendored, 3 changes)

@@ -1,5 +1,4 @@
----
-name: Release - Update next branch
+name: authentik-on-release-next-branch

 on:
   schedule:
.github/workflows/release-publish.yml (vendored, 16 changes)

@@ -1,5 +1,5 @@
 ---
-name: Release - On publish
+name: authentik-on-release

 on:
   release:

@@ -16,7 +16,7 @@ jobs:
       id-token: write
       attestations: write
     with:
-      image_name: ghcr.io/goauthentik/server,authentik/server
+      image_name: ghcr.io/goauthentik/server,beryju/authentik
       release: true
       registry_dockerhub: true
       registry_ghcr: true

@@ -38,7 +38,7 @@ jobs:
         uses: ./.github/actions/docker-push-variables
         id: ev
         env:
-          DOCKER_USERNAME: ${{ secrets.DOCKER_CORP_USERNAME }}
+          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
         with:
           image-name: ghcr.io/goauthentik/docs
       - name: Login to GitHub Container Registry

@@ -92,9 +92,9 @@ jobs:
         uses: ./.github/actions/docker-push-variables
         id: ev
         env:
-          DOCKER_USERNAME: ${{ secrets.DOCKER_CORP_USERNAME }}
+          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
         with:
-          image-name: ghcr.io/goauthentik/${{ matrix.type }},authentik/${{ matrix.type }}
+          image-name: ghcr.io/goauthentik/${{ matrix.type }},beryju/authentik-${{ matrix.type }}
       - name: make empty clients
         run: |
           mkdir -p ./gen-ts-api

@@ -102,8 +102,8 @@ jobs:
       - name: Docker Login Registry
         uses: docker/login-action@v3
         with:
-          username: ${{ secrets.DOCKER_CORP_USERNAME }}
-          password: ${{ secrets.DOCKER_CORP_PASSWORD }}
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
       - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
         with:

@@ -220,7 +220,7 @@ jobs:
         uses: ./.github/actions/docker-push-variables
         id: ev
         env:
-          DOCKER_USERNAME: ${{ secrets.DOCKER_CORP_USERNAME }}
+          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
         with:
           image-name: ghcr.io/goauthentik/server
       - name: Get static files from docker image
.github/workflows/release-tag.yml (vendored, 2 changes)

@@ -1,5 +1,5 @@
 ---
-name: Release - On tag
+name: authentik-on-tag

 on:
   push:
.github/workflows/repo-mirror-cleanup.yml (vendored, 3 changes)

@@ -1,5 +1,4 @@
----
-name: Repo - Cleanup internal mirror
+name: "authentik-repo-mirror-cleanup"

 on:
   workflow_dispatch:
.github/workflows/repo-mirror.yml (vendored, 3 changes)

@@ -1,5 +1,4 @@
----
-name: Repo - Mirror to internal
+name: "authentik-repo-mirror"

 on: [push, delete]
.github/workflows/repo-stale.yml (vendored, 3 changes)

@@ -1,5 +1,4 @@
----
-name: Repo - Mark and close stale issues
+name: "authentik-repo-stale"

 on:
   schedule:
@@ -1,6 +1,4 @@
----
-name: QA - Semgrep
-
+name: authentik-semgrep
 on:
   workflow_dispatch: {}
   pull_request: {}

@@ -9,11 +7,10 @@ on:
       - main
       - master
     paths:
-      - .github/workflows/qa-semgrep.yml
+      - .github/workflows/semgrep.yml
   schedule:
     # random HH:MM to avoid a load spike on GitHub Actions at 00:00
     - cron: '12 15 * * *'

 jobs:
   semgrep:
     name: semgrep/ci
.github/workflows/translation-advice.yml (vendored, 3 changes)

@@ -1,5 +1,4 @@
----
-name: Translation - Post advice
+name: authentik-translation-advice

 on:
   pull_request:
@@ -1,6 +1,5 @@
 ---
-name: Translation - Extract and compile
-
+name: authentik-translate-extract-compile
 on:
   schedule:
     - cron: "0 0 * * *" # every day at midnight
.github/workflows/translation-rename.yml (vendored, 3 changes)

@@ -1,7 +1,6 @@
 ---
 # Rename transifex pull requests to have a correct naming
 # Also enables auto squash-merge
-name: Translation - Auto-rename Transifex PRs
-
+name: authentik-translation-transifex-rename
 on:
   pull_request:
.gitignore (vendored, 5 changes)

@@ -100,6 +100,9 @@ ipython_config.py
 # pyenv
 .python-version

+# celery beat schedule file
+celerybeat-schedule
+
 # SageMath parsed files
 *.sage.py

@@ -163,6 +166,8 @@ dmypy.json

 # pyenv

+# celery beat schedule file
+
 # SageMath parsed files

 # Environments
CODEOWNERS (72 changes; most rows are whitespace realignment of the owner column, which the mirror flattened)

@@ -1,49 +1,43 @@
 # Fallback
-*                                   @goauthentik/backend @goauthentik/frontend
+* @goauthentik/backend @goauthentik/frontend
 # Backend
-authentik/                          @goauthentik/backend
-blueprints/                         @goauthentik/backend
-cmd/                                @goauthentik/backend
-internal/                           @goauthentik/backend
-lifecycle/                          @goauthentik/backend
-schemas/                            @goauthentik/backend
-scripts/                            @goauthentik/backend
-tests/                              @goauthentik/backend
-pyproject.toml                      @goauthentik/backend
-uv.lock                             @goauthentik/backend
-go.mod                              @goauthentik/backend
-go.sum                              @goauthentik/backend
+authentik/ @goauthentik/backend
+blueprints/ @goauthentik/backend
+cmd/ @goauthentik/backend
+internal/ @goauthentik/backend
+lifecycle/ @goauthentik/backend
+schemas/ @goauthentik/backend
+scripts/ @goauthentik/backend
+tests/ @goauthentik/backend
+pyproject.toml @goauthentik/backend
+uv.lock @goauthentik/backend
+go.mod @goauthentik/backend
+go.sum @goauthentik/backend
 # Infrastructure
-.github/                            @goauthentik/infrastructure
-lifecycle/aws/                      @goauthentik/infrastructure
-Dockerfile                          @goauthentik/infrastructure
-*Dockerfile                         @goauthentik/infrastructure
-.dockerignore                       @goauthentik/infrastructure
-docker-compose.yml                  @goauthentik/infrastructure
-Makefile                            @goauthentik/infrastructure
-.editorconfig                       @goauthentik/infrastructure
-CODEOWNERS                          @goauthentik/infrastructure
-# Backend packages
-packages/django-dramatiq-postgres   @goauthentik/backend
+.github/ @goauthentik/infrastructure
+lifecycle/aws/ @goauthentik/infrastructure
+Dockerfile @goauthentik/infrastructure
+*Dockerfile @goauthentik/infrastructure
+.dockerignore @goauthentik/infrastructure
+docker-compose.yml @goauthentik/infrastructure
+Makefile @goauthentik/infrastructure
+.editorconfig @goauthentik/infrastructure
+CODEOWNERS @goauthentik/infrastructure
 # Web packages
-packages/docusaurus-config          @goauthentik/frontend
-packages/esbuild-plugin-live-reload @goauthentik/frontend
-packages/eslint-config              @goauthentik/frontend
-packages/prettier-config            @goauthentik/frontend
-packages/tsconfig                   @goauthentik/frontend
+packages/ @goauthentik/frontend
 # Web
-web/                                @goauthentik/frontend
-tests/wdio/                         @goauthentik/frontend
+web/ @goauthentik/frontend
+tests/wdio/ @goauthentik/frontend
 # Locale
-locale/                             @goauthentik/backend @goauthentik/frontend
-web/xliff/                          @goauthentik/backend @goauthentik/frontend
+locale/ @goauthentik/backend @goauthentik/frontend
+web/xliff/ @goauthentik/backend @goauthentik/frontend
 # Docs & Website
-docs/                               @goauthentik/docs
+docs/ @goauthentik/docs
 # TODO Remove after moving website to docs
-website/                            @goauthentik/docs
-CODE_OF_CONDUCT.md                  @goauthentik/docs
+website/ @goauthentik/docs
+CODE_OF_CONDUCT.md @goauthentik/docs
 # Security
-SECURITY.md                         @goauthentik/security @goauthentik/docs
+SECURITY.md @goauthentik/security @goauthentik/docs
 # TODO Remove after moving website to docs
-website/security/                   @goauthentik/security @goauthentik/docs
-docs/security/                      @goauthentik/security @goauthentik/docs
+website/security/ @goauthentik/security @goauthentik/docs
+docs/security/ @goauthentik/security @goauthentik/docs
Dockerfile (19 changes)

@@ -76,7 +76,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
     /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

 # Stage 4: Download uv
-FROM ghcr.io/astral-sh/uv:0.8.6 AS uv
+FROM ghcr.io/astral-sh/uv:0.8.2 AS uv
 # Stage 5: Base python image
 FROM ghcr.io/goauthentik/fips-python:3.13.5-slim-bookworm-fips AS python-base

@@ -123,7 +123,6 @@ ENV UV_NO_BINARY_PACKAGE="cryptography lxml python-kadmin-rs xmlsec"

 RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \
     --mount=type=bind,target=uv.lock,src=uv.lock \
-    --mount=type=bind,target=packages,src=packages \
     --mount=type=cache,target=/root/.cache/uv \
     uv sync --frozen --no-install-project --no-dev

@@ -134,16 +133,11 @@ ARG VERSION
 ARG GIT_BUILD_HASH
 ENV GIT_BUILD_HASH=$GIT_BUILD_HASH

-LABEL org.opencontainers.image.authors="Authentik Security Inc." \
-    org.opencontainers.image.description="goauthentik.io Main server image, see https://goauthentik.io for more info." \
-    org.opencontainers.image.documentation="https://docs.goauthentik.io" \
-    org.opencontainers.image.licenses="https://github.com/goauthentik/authentik/blob/main/LICENSE" \
-    org.opencontainers.image.revision=${GIT_BUILD_HASH} \
-    org.opencontainers.image.source="https://github.com/goauthentik/authentik" \
-    org.opencontainers.image.title="authentik server image" \
-    org.opencontainers.image.url="https://goauthentik.io" \
-    org.opencontainers.image.vendor="Authentik Security Inc." \
-    org.opencontainers.image.version=${VERSION}
+LABEL org.opencontainers.image.url=https://goauthentik.io
+LABEL org.opencontainers.image.description="goauthentik.io Main server image, see https://goauthentik.io for more info."
+LABEL org.opencontainers.image.source=https://github.com/goauthentik/authentik
+LABEL org.opencontainers.image.version=${VERSION}
+LABEL org.opencontainers.image.revision=${GIT_BUILD_HASH}

 WORKDIR /

@@ -174,7 +168,6 @@ COPY ./blueprints /blueprints
 COPY ./lifecycle/ /lifecycle
 COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf
 COPY --from=go-builder /go/authentik /bin/authentik
-COPY ./packages/ /ak-root/packages
 COPY --from=python-deps /ak-root/.venv /ak-root/.venv
 COPY --from=node-builder /work/web/dist/ /web/dist/
 COPY --from=node-builder /work/web/authentik/ /web/authentik/
Makefile (7 changes)

@@ -6,7 +6,7 @@ PWD = $(shell pwd)
 UID = $(shell id -u)
 GID = $(shell id -g)
 NPM_VERSION = $(shell python -m scripts.generate_semver)
-PY_SOURCES = authentik packages tests scripts lifecycle .github
+PY_SOURCES = authentik tests scripts lifecycle .github
 DOCKER_IMAGE ?= "authentik:test"

 GEN_API_TS = gen-ts-api

@@ -59,12 +59,9 @@ i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation
 aws-cfn:
	cd lifecycle/aws && npm run aws-cfn

-run-server: ## Run the main authentik server process
+run: ## Run the main authentik server process
	uv run ak server

-run-worker: ## Run the main authentik worker process
-	uv run ak worker
-
 core-i18n-extract:
	uv run ak makemessages \
		--add-location file \
@@ -9,8 +9,8 @@
 [](https://github.com/goauthentik/authentik/actions/workflows/ci-outpost.yml)
 [](https://github.com/goauthentik/authentik/actions/workflows/ci-web.yml)
 [](https://codecov.io/gh/goauthentik/authentik)
-![](…)
-![](…)
+![](…)
+![](…)
 [](https://www.transifex.com/authentik/authentik/)

 ## What is authentik?
@@ -41,7 +41,7 @@ class VersionSerializer(PassiveSerializer):
             return __version__
         version_in_cache = cache.get(VERSION_CACHE_KEY)
         if not version_in_cache:  # pragma: no cover
-            update_latest_version.send()
+            update_latest_version.delay()
             return __version__
         return version_in_cache
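The pattern on both sides of this hunk is the same: on a cache miss the serializer kicks off the background refresh and immediately falls back to the running version, so the API never blocks on the upstream check. A minimal standalone sketch of that cache-aside idea, with illustrative names:

    # Cache-aside sketch: return a cached value, or trigger an async refresh
    # and fall back to a known-good local default. Names are illustrative.
    def get_latest_version(cache, refresh_task, local_version: str) -> str:
        cached = cache.get("authentik_latest_version")
        if not cached:
            refresh_task.delay()  # enqueue the refresh; don't wait for the worker
            return local_version  # serve the local version in the meantime
        return cached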
authentik/admin/api/workers.py (new file, 57 lines)

"""authentik administration overview"""

from socket import gethostname

from django.conf import settings
from drf_spectacular.utils import extend_schema, inline_serializer
from packaging.version import parse
from rest_framework.fields import BooleanField, CharField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView

from authentik import get_full_version
from authentik.rbac.permissions import HasPermission
from authentik.root.celery import CELERY_APP


class WorkerView(APIView):
    """Get currently connected worker count."""

    permission_classes = [HasPermission("authentik_rbac.view_system_info")]

    @extend_schema(
        responses=inline_serializer(
            "Worker",
            fields={
                "worker_id": CharField(),
                "version": CharField(),
                "version_matching": BooleanField(),
            },
            many=True,
        )
    )
    def get(self, request: Request) -> Response:
        """Get currently connected worker count."""
        raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5)
        our_version = parse(get_full_version())
        response = []
        for worker in raw:
            key = list(worker.keys())[0]
            version = worker[key].get("version")
            version_matching = False
            if version:
                version_matching = parse(version) == our_version
            response.append(
                {"worker_id": key, "version": version, "version_matching": version_matching}
            )
        # In debug we run with `task_always_eager`, so tasks are ran on the main process
        if settings.DEBUG:  # pragma: no cover
            response.append(
                {
                    "worker_id": f"authentik-debug@{gethostname()}",
                    "version": get_full_version(),
                    "version_matching": True,
                }
            )
        return Response(response)
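A quick way to exercise the new endpoint once a token with the authentik_rbac.view_system_info permission is available (URL path per the urls.py change below; host and token here are placeholders):

    # Hypothetical client call against a local instance; adjust host and token.
    import requests

    response = requests.get(
        "https://authentik.local/api/v3/admin/workers/",
        headers={"Authorization": "Bearer <token>"},
        timeout=5,
    )
    for worker in response.json():
        print(worker["worker_id"], worker["version"], worker["version_matching"])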
@@ -3,9 +3,6 @@
 from prometheus_client import Info

 from authentik.blueprints.apps import ManagedAppConfig
-from authentik.lib.config import CONFIG
-from authentik.lib.utils.time import fqdn_rand
-from authentik.tasks.schedules.common import ScheduleSpec

 PROM_INFO = Info("authentik_version", "Currently running authentik version")

@@ -33,15 +30,3 @@ class AuthentikAdminConfig(ManagedAppConfig):
         notification_version = notification.event.context["new_version"]
         if LOCAL_VERSION >= parse(notification_version):
             notification.delete()
-
-    @property
-    def global_schedule_specs(self) -> list[ScheduleSpec]:
-        from authentik.admin.tasks import update_latest_version
-
-        return [
-            ScheduleSpec(
-                actor=update_latest_version,
-                crontab=f"{fqdn_rand('admin_latest_version')} * * * *",
-                paused=CONFIG.get_bool("disable_update_check"),
-            ),
-        ]
authentik/admin/settings.py (new file, 15 lines)

"""authentik admin settings"""

from celery.schedules import crontab
from django_tenants.utils import get_public_schema_name

from authentik.lib.utils.time import fqdn_rand

CELERY_BEAT_SCHEDULE = {
    "admin_latest_version": {
        "task": "authentik.admin.tasks.update_latest_version",
        "schedule": crontab(minute=fqdn_rand("admin_latest_version"), hour="*"),
        "tenant_schemas": [get_public_schema_name()],
        "options": {"queue": "authentik_scheduled"},
    }
}
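Both sides of the migration use fqdn_rand for the same purpose: deriving a stable pseudo-random minute from the host's FQDN, so each install runs its hourly check at a different minute and the upstream version service isn't hit by every install at once. A sketch of that idea — the real implementation in authentik.lib.utils.time may differ:

    # Sketch: hash a stable host identifier into a minute offset (0-59),
    # spreading schedules across installs while staying stable per host.
    from hashlib import sha256
    from socket import getfqdn

    def fqdn_rand_sketch(seed: str, modulus: int = 60) -> int:
        digest = sha256(f"{getfqdn()}{seed}".encode()).hexdigest()
        return int(digest, 16) % modulus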
authentik/admin/signals.py (new file, 35 lines)

"""admin signals"""

from django.dispatch import receiver
from packaging.version import parse
from prometheus_client import Gauge

from authentik import get_full_version
from authentik.root.celery import CELERY_APP
from authentik.root.monitoring import monitoring_set

GAUGE_WORKERS = Gauge(
    "authentik_admin_workers",
    "Currently connected workers, their versions and if they are the same version as authentik",
    ["version", "version_matched"],
)


_version = parse(get_full_version())


@receiver(monitoring_set)
def monitoring_set_workers(sender, **kwargs):
    """Set worker gauge"""
    raw: list[dict[str, dict]] = CELERY_APP.control.ping(timeout=0.5)
    worker_version_count = {}
    for worker in raw:
        key = list(worker.keys())[0]
        version = worker[key].get("version")
        version_matching = False
        if version:
            version_matching = parse(version) == _version
        worker_version_count.setdefault(version, {"count": 0, "matching": version_matching})
        worker_version_count[version]["count"] += 1
    for version, stats in worker_version_count.items():
        GAUGE_WORKERS.labels(version, stats["matching"]).set(stats["count"])
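prometheus_client deduplicates series by label set, so the single gauge above yields one time series per distinct worker version. Standalone illustration of that behaviour:

    # One Gauge, one series per label combination.
    from prometheus_client import Gauge

    workers = Gauge("example_workers", "workers by version", ["version", "version_matched"])
    workers.labels("2025.6.1", "True").set(3)   # three up-to-date workers
    workers.labels("2025.4.0", "False").set(1)  # one stale worker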
@@ -2,8 +2,6 @@

 from django.core.cache import cache
 from django.utils.translation import gettext_lazy as _
-from django_dramatiq_postgres.middleware import CurrentTask
-from dramatiq import actor
 from packaging.version import parse
 from requests import RequestException
 from structlog.stdlib import get_logger

@@ -11,9 +9,10 @@ from structlog.stdlib import get_logger
 from authentik import __version__, get_build_hash
 from authentik.admin.apps import PROM_INFO
 from authentik.events.models import Event, EventAction
+from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
 from authentik.lib.config import CONFIG
 from authentik.lib.utils.http import get_http_session
-from authentik.tasks.models import Task
+from authentik.root.celery import CELERY_APP

 LOGGER = get_logger()
 VERSION_NULL = "0.0.0"

@@ -33,12 +32,13 @@ def _set_prom_info():
     )


-@actor(description=_("Update latest version info."))
-def update_latest_version():
-    self: Task = CurrentTask.get_task()
+@CELERY_APP.task(bind=True, base=SystemTask)
+@prefill_task
+def update_latest_version(self: SystemTask):
+    """Update latest version info"""
     if CONFIG.get_bool("disable_update_check"):
         cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
-        self.info("Version check disabled.")
+        self.set_status(TaskStatus.WARNING, "Version check disabled.")
         return
     try:
         response = get_http_session().get(

@@ -48,7 +48,7 @@
         data = response.json()
         upstream_version = data.get("stable", {}).get("version")
         cache.set(VERSION_CACHE_KEY, upstream_version, VERSION_CACHE_TIMEOUT)
-        self.info("Successfully updated latest Version")
+        self.set_status(TaskStatus.SUCCESSFUL, "Successfully updated latest Version")
         _set_prom_info()
         # Check if upstream version is newer than what we're running,
         # and if no event exists yet, create one.

@@ -71,7 +71,7 @@
         ).save()
     except (RequestException, IndexError) as exc:
         cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
-        raise exc
+        self.set_error(exc)


 _set_prom_info()
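This is the recurring shape of the whole compare: dramatiq actors become Celery tasks, and task state moves from the CurrentTask helper onto a bound SystemTask base class. Reduced to the dispatch API alone, the two sides look like this (broker and app setup omitted; only the public decorator/enqueue calls of each library are shown):

    # Dramatiq side: decorate with @dramatiq.actor, enqueue with .send()
    import dramatiq

    @dramatiq.actor
    def refresh():
        ...

    refresh.send()

    # Celery side: decorate with @app.task, enqueue with .delay()
    from celery import Celery

    app = Celery("example")

    @app.task
    def refresh_celery():
        ...

    refresh_celery.delay()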
@@ -29,6 +29,13 @@ class TestAdminAPI(TestCase):
         body = loads(response.content)
         self.assertEqual(body["version_current"], __version__)

+    def test_workers(self):
+        """Test Workers API"""
+        response = self.client.get(reverse("authentik_api:admin_workers"))
+        self.assertEqual(response.status_code, 200)
+        body = loads(response.content)
+        self.assertEqual(len(body), 0)
+
     def test_apps(self):
         """Test apps API"""
         response = self.client.get(reverse("authentik_api:apps-list"))
@@ -30,7 +30,7 @@ class TestAdminTasks(TestCase):
         """Test Update checker with valid response"""
         with Mocker() as mocker, CONFIG.patch("disable_update_check", False):
             mocker.get("https://version.goauthentik.io/version.json", json=RESPONSE_VALID)
-            update_latest_version.send()
+            update_latest_version.delay().get()
             self.assertEqual(cache.get(VERSION_CACHE_KEY), "99999999.9999999")
             self.assertTrue(
                 Event.objects.filter(

@@ -40,7 +40,7 @@
             ).exists()
             )
             # test that a consecutive check doesn't create a duplicate event
-            update_latest_version.send()
+            update_latest_version.delay().get()
             self.assertEqual(
                 len(
                     Event.objects.filter(

@@ -56,7 +56,7 @@
         """Test Update checker with invalid response"""
         with Mocker() as mocker:
             mocker.get("https://version.goauthentik.io/version.json", status_code=400)
-            update_latest_version.send()
+            update_latest_version.delay().get()
             self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0")
             self.assertFalse(
                 Event.objects.filter(

@@ -67,15 +67,14 @@
     def test_version_disabled(self):
         """Test Update checker while its disabled"""
         with CONFIG.patch("disable_update_check", True):
-            update_latest_version.send()
+            update_latest_version.delay().get()
             self.assertEqual(cache.get(VERSION_CACHE_KEY), "0.0.0")

     def test_clear_update_notifications(self):
         """Test clear of previous notification"""
         admin_config = apps.get_app_config("authentik_admin")
         Event.objects.create(
-            action=EventAction.UPDATE_AVAILABLE,
-            context={"new_version": "99999999.9999999.9999999"},
+            action=EventAction.UPDATE_AVAILABLE, context={"new_version": "99999999.9999999.9999999"}
         )
         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={"new_version": "1.1.1"})
         Event.objects.create(action=EventAction.UPDATE_AVAILABLE, context={})
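Calling .delay().get() inline only works here because Django test settings run Celery eagerly: with task_always_eager the task executes synchronously in-process and .get() returns immediately, no worker needed. A sketch of the relevant Celery settings (authentik's actual test settings are not shown in this compare):

    # Eager mode executes tasks synchronously in the calling process,
    # which is what lets tests call .delay().get() without a worker.
    from celery import Celery

    app = Celery("tests")
    app.conf.task_always_eager = True
    app.conf.task_eager_propagates = True  # re-raise task exceptions in the test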
@@ -6,11 +6,13 @@ from authentik.admin.api.meta import AppsViewSet, ModelViewSet
 from authentik.admin.api.system import SystemView
 from authentik.admin.api.version import VersionView
 from authentik.admin.api.version_history import VersionHistoryViewSet
+from authentik.admin.api.workers import WorkerView

 api_urlpatterns = [
     ("admin/apps", AppsViewSet, "apps"),
     ("admin/models", ModelViewSet, "models"),
     path("admin/version/", VersionView.as_view(), name="admin_version"),
     ("admin/version/history", VersionHistoryViewSet, "version_history"),
+    path("admin/workers/", WorkerView.as_view(), name="admin_workers"),
     path("admin/system/", SystemView.as_view(), name="admin_system"),
 ]
@@ -8,6 +8,8 @@ API Browser - {{ brand.branding_title }}

 {% block head %}
 <script src="{% versioned_script 'dist/standalone/api-browser/index-%v.js' %}" type="module"></script>
+<meta name="theme-color" content="#151515" media="(prefers-color-scheme: light)">
+<meta name="theme-color" content="#151515" media="(prefers-color-scheme: dark)">
 {% endblock %}

 {% block body %}
@@ -39,7 +39,7 @@ class BlueprintInstanceSerializer(ModelSerializer):
         """Ensure the path (if set) specified is retrievable"""
         if path == "" or path.startswith(OCI_PREFIX):
             return path
-        files: list[dict] = blueprints_find_dict.send().get_result(block=True)
+        files: list[dict] = blueprints_find_dict.delay().get()
         if path not in [file["path"] for file in files]:
             raise ValidationError(_("Blueprint file does not exist"))
         return path

@@ -115,7 +115,7 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
     @action(detail=False, pagination_class=None, filter_backends=[])
     def available(self, request: Request) -> Response:
         """Get blueprints"""
-        files: list[dict] = blueprints_find_dict.send().get_result(block=True)
+        files: list[dict] = blueprints_find_dict.delay().get()
         return Response(files)

     @permission_required("authentik_blueprints.view_blueprintinstance")

@@ -129,5 +129,5 @@ class BlueprintInstanceViewSet(UsedByMixin, ModelViewSet):
     def apply(self, request: Request, *args, **kwargs) -> Response:
         """Apply a blueprint"""
         blueprint = self.get_object()
-        apply_blueprint.send_with_options(args=(blueprint.pk,), rel_obj=blueprint)
+        apply_blueprint.delay(str(blueprint.pk)).get()
         return self.retrieve(request, *args, **kwargs)
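Note the behavioural equivalence being preserved: Celery's .delay().get() blocks the API request until a worker (or eager mode) returns the result, just as the dramatiq .send().get_result(block=True) it replaces did. The minimal form of that blocking round-trip, assuming a result backend is configured:

    # Blocking on a Celery result from request-handling code;
    # a result backend must be configured for .get() to work.
    result = blueprints_find_dict.delay()  # returns an AsyncResult
    files = result.get(timeout=10)         # waits for the worker's return value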
@@ -6,12 +6,9 @@ from inspect import ismethod

 from django.apps import AppConfig
 from django.db import DatabaseError, InternalError, ProgrammingError
-from dramatiq.broker import get_broker
 from structlog.stdlib import BoundLogger, get_logger

-from authentik.lib.utils.time import fqdn_rand
 from authentik.root.signals import startup
-from authentik.tasks.schedules.common import ScheduleSpec


 class ManagedAppConfig(AppConfig):

@@ -37,7 +34,7 @@ class ManagedAppConfig(AppConfig):

     def import_related(self):
         """Automatically import related modules which rely on just being imported
-        to register themselves (mainly django signals and tasks)"""
+        to register themselves (mainly django signals and celery tasks)"""

         def import_relative(rel_module: str):
             try:

@@ -83,16 +80,6 @@ class ManagedAppConfig(AppConfig):
         func._authentik_managed_reconcile = ManagedAppConfig.RECONCILE_GLOBAL_CATEGORY
         return func

-    @property
-    def tenant_schedule_specs(self) -> list[ScheduleSpec]:
-        """Get a list of schedule specs that must exist in each tenant"""
-        return []
-
-    @property
-    def global_schedule_specs(self) -> list[ScheduleSpec]:
-        """Get a list of schedule specs that must exist in the default tenant"""
-        return []
-
     def _reconcile_tenant(self) -> None:
         """reconcile ourselves for tenanted methods"""
         from authentik.tenants.models import Tenant

@@ -113,12 +100,8 @@ class ManagedAppConfig(AppConfig):
         """
         from django_tenants.utils import get_public_schema_name, schema_context

-        try:
-            with schema_context(get_public_schema_name()):
-                self._reconcile(self.RECONCILE_GLOBAL_CATEGORY)
-        except (DatabaseError, ProgrammingError, InternalError) as exc:
-            self.logger.debug("Failed to access database to run reconcile", exc=exc)
-            return
+        with schema_context(get_public_schema_name()):
+            self._reconcile(self.RECONCILE_GLOBAL_CATEGORY)


 class AuthentikBlueprintsConfig(ManagedAppConfig):

@@ -129,29 +112,19 @@ class AuthentikBlueprintsConfig(ManagedAppConfig):
     verbose_name = "authentik Blueprints"
     default = True

+    @ManagedAppConfig.reconcile_global
+    def load_blueprints_v1_tasks(self):
+        """Load v1 tasks"""
+        self.import_module("authentik.blueprints.v1.tasks")
+
+    @ManagedAppConfig.reconcile_tenant
+    def blueprints_discovery(self):
+        """Run blueprint discovery"""
+        from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints
+
+        blueprints_discovery.delay()
+        clear_failed_blueprints.delay()
+
     def import_models(self):
         super().import_models()
         self.import_module("authentik.blueprints.v1.meta.apply_blueprint")

-    @ManagedAppConfig.reconcile_global
-    def tasks_middlewares(self):
-        from authentik.blueprints.v1.tasks import BlueprintWatcherMiddleware
-
-        get_broker().add_middleware(BlueprintWatcherMiddleware())
-
-    @property
-    def tenant_schedule_specs(self) -> list[ScheduleSpec]:
-        from authentik.blueprints.v1.tasks import blueprints_discovery, clear_failed_blueprints
-
-        return [
-            ScheduleSpec(
-                actor=blueprints_discovery,
-                crontab=f"{fqdn_rand('blueprints_v1_discover')} * * * *",
-                send_on_startup=True,
-            ),
-            ScheduleSpec(
-                actor=clear_failed_blueprints,
-                crontab=f"{fqdn_rand('blueprints_v1_cleanup')} * * * *",
-                send_on_startup=True,
-            ),
-        ]
@@ -3,7 +3,6 @@
 from pathlib import Path
 from uuid import uuid4

-from django.contrib.contenttypes.fields import GenericRelation
 from django.contrib.postgres.fields import ArrayField
 from django.db import models
 from django.utils.translation import gettext_lazy as _

@@ -72,13 +71,6 @@ class BlueprintInstance(SerializerModel, ManagedModel, CreatedUpdatedModel):
     enabled = models.BooleanField(default=True)
     managed_models = ArrayField(models.TextField(), default=list)

-    # Manual link to tasks instead of using TasksModel because of loop imports
-    tasks = GenericRelation(
-        "authentik_tasks.Task",
-        content_type_field="rel_obj_content_type",
-        object_id_field="rel_obj_id",
-    )
-
     class Meta:
         verbose_name = _("Blueprint Instance")
         verbose_name_plural = _("Blueprint Instances")
authentik/blueprints/settings.py (new file, 18 lines)

"""blueprint Settings"""

from celery.schedules import crontab

from authentik.lib.utils.time import fqdn_rand

CELERY_BEAT_SCHEDULE = {
    "blueprints_v1_discover": {
        "task": "authentik.blueprints.v1.tasks.blueprints_discovery",
        "schedule": crontab(minute=fqdn_rand("blueprints_v1_discover"), hour="*"),
        "options": {"queue": "authentik_scheduled"},
    },
    "blueprints_v1_cleanup": {
        "task": "authentik.blueprints.v1.tasks.clear_failed_blueprints",
        "schedule": crontab(minute=fqdn_rand("blueprints_v1_cleanup"), hour="*"),
        "options": {"queue": "authentik_scheduled"},
    },
}
@@ -1,2 +0,0 @@ (file deleted)
-# Import all v1 tasks for auto task discovery
-from authentik.blueprints.v1.tasks import *  # noqa: F403
@@ -54,7 +54,7 @@ class TestBlueprintsV1Tasks(TransactionTestCase):
             file.seek(0)
             file_hash = sha512(file.read().encode()).hexdigest()
             file.flush()
-            blueprints_discovery.send()
+            blueprints_discovery()
             instance = BlueprintInstance.objects.filter(name=blueprint_id).first()
             self.assertEqual(instance.last_applied_hash, file_hash)
             self.assertEqual(

@@ -82,7 +82,7 @@
                 )
             )
             file.flush()
-            blueprints_discovery.send()
+            blueprints_discovery()
             blueprint = BlueprintInstance.objects.filter(name="foo").first()
             self.assertEqual(
                 blueprint.last_applied_hash,

@@ -107,7 +107,7 @@
                 )
             )
             file.flush()
-            blueprints_discovery.send()
+            blueprints_discovery()
             blueprint.refresh_from_db()
             self.assertEqual(
                 blueprint.last_applied_hash,
@@ -57,6 +57,7 @@ from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import (
     EndpointDeviceConnection,
 )
 from authentik.events.logs import LogEvent, capture_logs
+from authentik.events.models import SystemTask
 from authentik.events.utils import cleanse_dict
 from authentik.flows.models import FlowToken, Stage
 from authentik.lib.models import SerializerModel

@@ -76,7 +77,6 @@ from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
 from authentik.rbac.models import Role
 from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
 from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType
-from authentik.tasks.models import Task
 from authentik.tenants.models import Tenant

 # Context set when the serializer is created in a blueprint context

@@ -118,7 +118,7 @@ def excluded_models() -> list[type[Model]]:
         SCIMProviderGroup,
         SCIMProviderUser,
         Tenant,
-        Task,
+        SystemTask,
         ConnectionToken,
         AuthorizationCode,
         AccessToken,
@@ -44,7 +44,7 @@ class ApplyBlueprintMetaSerializer(PassiveSerializer):
             return MetaResult()
         LOGGER.debug("Applying blueprint from meta model", blueprint=self.blueprint_instance)

-        apply_blueprint(self.blueprint_instance.pk)
+        apply_blueprint(str(self.blueprint_instance.pk))
         return MetaResult()
@@ -4,17 +4,12 @@ from dataclasses import asdict, dataclass, field
 from hashlib import sha512
 from pathlib import Path
 from sys import platform
-from uuid import UUID

 from dacite.core import from_dict
-from django.conf import settings
 from django.db import DatabaseError, InternalError, ProgrammingError
 from django.utils.text import slugify
 from django.utils.timezone import now
 from django.utils.translation import gettext_lazy as _
-from django_dramatiq_postgres.middleware import CurrentTask, CurrentTaskNotFound
-from dramatiq.actor import actor
-from dramatiq.middleware import Middleware
 from structlog.stdlib import get_logger
 from watchdog.events import (
     FileCreatedEvent,

@@ -36,13 +31,15 @@ from authentik.blueprints.v1.importer import Importer
 from authentik.blueprints.v1.labels import LABEL_AUTHENTIK_INSTANTIATE
 from authentik.blueprints.v1.oci import OCI_PREFIX
 from authentik.events.logs import capture_logs
+from authentik.events.models import TaskStatus
+from authentik.events.system_tasks import SystemTask, prefill_task
 from authentik.events.utils import sanitize_dict
 from authentik.lib.config import CONFIG
-from authentik.tasks.models import Task
-from authentik.tasks.schedules.models import Schedule
+from authentik.root.celery import CELERY_APP
 from authentik.tenants.models import Tenant

 LOGGER = get_logger()
+_file_watcher_started = False


 @dataclass

@@ -56,21 +53,22 @@ class BlueprintFile:
     meta: BlueprintMetadata | None = field(default=None)


-class BlueprintWatcherMiddleware(Middleware):
-    def start_blueprint_watcher(self):
-        """Start blueprint watcher"""
-        observer = Observer()
-        kwargs = {}
-        if platform.startswith("linux"):
-            kwargs["event_filter"] = (FileCreatedEvent, FileModifiedEvent)
-        observer.schedule(
-            BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True, **kwargs
-        )
-        observer.start()
-
-    def after_worker_boot(self, broker, worker):
-        if not settings.TEST:
-            self.start_blueprint_watcher()
+def start_blueprint_watcher():
+    """Start blueprint watcher, if it's not running already."""
+    # This function might be called twice since it's called on celery startup
+    global _file_watcher_started  # noqa: PLW0603
+    if _file_watcher_started:
+        return
+    observer = Observer()
+    kwargs = {}
+    if platform.startswith("linux"):
+        kwargs["event_filter"] = (FileCreatedEvent, FileModifiedEvent)
+    observer.schedule(
+        BlueprintEventHandler(), CONFIG.get("blueprints_dir"), recursive=True, **kwargs
+    )
+    observer.start()
+    _file_watcher_started = True


 class BlueprintEventHandler(FileSystemEventHandler):

@@ -94,7 +92,7 @@ class BlueprintEventHandler(FileSystemEventHandler):
         LOGGER.debug("new blueprint file created, starting discovery")
         for tenant in Tenant.objects.filter(ready=True):
             with tenant:
-                Schedule.dispatch_by_actor(blueprints_discovery)
+                blueprints_discovery.delay()

     def on_modified(self, event: FileSystemEvent):
         """Process file modification"""

@@ -105,14 +103,14 @@
             with tenant:
                 for instance in BlueprintInstance.objects.filter(path=rel_path, enabled=True):
                     LOGGER.debug("modified blueprint file, starting apply", instance=instance)
-                    apply_blueprint.send_with_options(args=(instance.pk,), rel_obj=instance)
+                    apply_blueprint.delay(instance.pk.hex)


-@actor(
-    description=_("Find blueprints as `blueprints_find` does, but return a safe dict."),
+@CELERY_APP.task(
     throws=(DatabaseError, ProgrammingError, InternalError),
 )
 def blueprints_find_dict():
+    """Find blueprints as `blueprints_find` does, but return a safe dict"""
     blueprints = []
     for blueprint in blueprints_find():
         blueprints.append(sanitize_dict(asdict(blueprint)))

@@ -148,19 +146,21 @@ def blueprints_find() -> list[BlueprintFile]:
     return blueprints


-@actor(
-    description=_("Find blueprints and check if they need to be created in the database."),
-    throws=(DatabaseError, ProgrammingError, InternalError),
+@CELERY_APP.task(
+    throws=(DatabaseError, ProgrammingError, InternalError), base=SystemTask, bind=True
 )
-def blueprints_discovery(path: str | None = None):
-    self: Task = CurrentTask.get_task()
+@prefill_task
+def blueprints_discovery(self: SystemTask, path: str | None = None):
     """Find blueprints and check if they need to be created in the database"""
     count = 0
     for blueprint in blueprints_find():
         if path and blueprint.path != path:
             continue
         check_blueprint_v1_file(blueprint)
         count += 1
-    self.info(f"Successfully imported {count} files.")
+    self.set_status(
+        TaskStatus.SUCCESSFUL, _("Successfully imported {count} files.".format(count=count))
+    )


 def check_blueprint_v1_file(blueprint: BlueprintFile):

@@ -187,26 +187,22 @@ def check_blueprint_v1_file(blueprint: BlueprintFile):
     )
     if instance.last_applied_hash != blueprint.hash:
         LOGGER.info("Applying blueprint due to changed file", instance=instance, path=instance.path)
-        apply_blueprint.send_with_options(args=(instance.pk,), rel_obj=instance)
+        apply_blueprint.delay(str(instance.pk))


-@actor(description=_("Apply single blueprint."))
-def apply_blueprint(instance_pk: UUID):
-    try:
-        self: Task = CurrentTask.get_task()
-    except CurrentTaskNotFound:
-        self = Task()
-    self.set_uid(str(instance_pk))
+@CELERY_APP.task(
+    bind=True,
+    base=SystemTask,
+)
+def apply_blueprint(self: SystemTask, instance_pk: str):
+    """Apply single blueprint"""
+    self.save_on_success = False
     instance: BlueprintInstance | None = None
     try:
         instance: BlueprintInstance = BlueprintInstance.objects.filter(pk=instance_pk).first()
-        if not instance:
-            self.warning(f"Could not find blueprint {instance_pk}, skipping")
+        if not instance or not instance.enabled:
             return
         self.set_uid(slugify(instance.name))
-        if not instance.enabled:
-            self.info(f"Blueprint {instance.name} is disabled, skipping")
-            return
         blueprint_content = instance.retrieve()
         file_hash = sha512(blueprint_content.encode()).hexdigest()
         importer = Importer.from_string(blueprint_content, instance.context)

@@ -216,18 +212,19 @@
         if not valid:
             instance.status = BlueprintInstanceStatus.ERROR
             instance.save()
-            self.logs(logs)
+            self.set_status(TaskStatus.ERROR, *logs)
             return
         with capture_logs() as logs:
             applied = importer.apply()
             if not applied:
                 instance.status = BlueprintInstanceStatus.ERROR
                 instance.save()
-                self.logs(logs)
+                self.set_status(TaskStatus.ERROR, *logs)
                 return
         instance.status = BlueprintInstanceStatus.SUCCESSFUL
         instance.last_applied_hash = file_hash
         instance.last_applied = now()
+        self.set_status(TaskStatus.SUCCESSFUL)
     except (
         OSError,
         DatabaseError,

@@ -238,14 +235,15 @@
     ) as exc:
         if instance:
             instance.status = BlueprintInstanceStatus.ERROR
-        self.error(exc)
+        self.set_error(exc)
     finally:
         if instance:
             instance.save()


-@actor(description=_("Remove blueprints which couldn't be fetched."))
+@CELERY_APP.task()
 def clear_failed_blueprints():
+    """Remove blueprints which couldn't be fetched"""
     # Exclude OCI blueprints as those might be temporarily unavailable
     for blueprint in BlueprintInstance.objects.exclude(path__startswith=OCI_PREFIX):
        try:
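The replacement watcher trades dramatiq's after_worker_boot middleware hook for a module-level guard, a common pattern whenever a process-wide side effect may be triggered more than once. Generic form of the pattern:

    # Run-once guard for process-wide initialization; the module-level flag
    # makes repeated calls (e.g. from worker startup hooks) idempotent.
    _started = False

    def start_once(start) -> None:
        global _started  # noqa: PLW0603
        if _started:
            return
        start()
        _started = True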
@@ -9,7 +9,6 @@ class AuthentikBrandsConfig(ManagedAppConfig):
     name = "authentik.brands"
     label = "authentik_brands"
     verbose_name = "authentik Brands"
-    default = True
     mountpoints = {
         "authentik.brands.urls_root": "",
     }

@@ -43,6 +43,6 @@ def context_processor(request: HttpRequest) -> dict[str, Any]:
         "brand": brand,
         "brand_css": brand_css,
         "footer_links": tenant.footer_links,
-        "html_meta": {**get_http_meta()},
+        "html_meta": get_http_meta(),
         "version": get_full_version(),
     }
@@ -49,28 +49,11 @@ class GroupMemberSerializer(ModelSerializer):
     ]


-class GroupChildSerializer(ModelSerializer):
-    """Stripped down group serializer to show relevant children for groups"""
-
-    attributes = JSONDictField(required=False)
-
-    class Meta:
-        model = Group
-        fields = [
-            "pk",
-            "name",
-            "is_superuser",
-            "attributes",
-            "group_uuid",
-        ]
-
-
 class GroupSerializer(ModelSerializer):
     """Group Serializer"""

     attributes = JSONDictField(required=False)
     users_obj = SerializerMethodField(allow_null=True)
-    children_obj = SerializerMethodField(allow_null=True)
     roles_obj = ListSerializer(
         child=RoleSerializer(),
         read_only=True,

@@ -78,6 +61,7 @@ class GroupSerializer(ModelSerializer):
         required=False,
     )
     parent_name = CharField(source="parent.name", read_only=True, allow_null=True)
+
     num_pk = IntegerField(read_only=True)

     @property

@@ -87,25 +71,12 @@ class GroupSerializer(ModelSerializer):
             return True
         return str(request.query_params.get("include_users", "true")).lower() == "true"

-    @property
-    def _should_include_children(self) -> bool:
-        request: Request = self.context.get("request", None)
-        if not request:
-            return True
-        return str(request.query_params.get("include_children", "false")).lower() == "true"
-
     @extend_schema_field(GroupMemberSerializer(many=True))
     def get_users_obj(self, instance: Group) -> list[GroupMemberSerializer] | None:
         if not self._should_include_users:
             return None
         return GroupMemberSerializer(instance.users, many=True).data

-    @extend_schema_field(GroupChildSerializer(many=True))
-    def get_children_obj(self, instance: Group) -> list[GroupChildSerializer] | None:
-        if not self._should_include_children:
-            return None
-        return GroupChildSerializer(instance.children, many=True).data
-
     def validate_parent(self, parent: Group | None):
         """Validate group parent (if set), ensuring the parent isn't itself"""
         if not self.instance or not parent:

@@ -155,17 +126,11 @@ class GroupSerializer(ModelSerializer):
             "attributes",
             "roles",
             "roles_obj",
-            "children",
-            "children_obj",
         ]
         extra_kwargs = {
             "users": {
                 "default": list,
             },
-            "children": {
-                "required": False,
-                "default": list,
-            },
             # TODO: This field isn't unique on the database which is hard to backport
             # hence we just validate the uniqueness here
             "name": {"validators": [UniqueValidator(Group.objects.all())]},

@@ -238,15 +203,11 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
             Prefetch("users", queryset=User.objects.all().only("id"))
         )

-        if self.serializer_class(context={"request": self.request})._should_include_children:
-            base_qs = base_qs.prefetch_related("children")
-
         return base_qs

     @extend_schema(
         parameters=[
             OpenApiParameter("include_users", bool, default=True),
-            OpenApiParameter("include_children", bool, default=False),
         ]
     )
     def list(self, request, *args, **kwargs):

@@ -255,7 +216,6 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
     @extend_schema(
         parameters=[
             OpenApiParameter("include_users", bool, default=True),
-            OpenApiParameter("include_children", bool, default=False),
         ]
     )
     def retrieve(self, request, *args, **kwargs):
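The include_users switch that survives this change is a standard DRF trick: a SerializerMethodField consults a query parameter and returns None to suppress an expensive sub-payload. From the client side that looks like the following (host and token are placeholders):

    # Hypothetical client call: skip the heavy users_obj payload on large groups.
    import requests

    groups = requests.get(
        "https://authentik.local/api/v3/core/groups/",
        params={"include_users": "false"},
        headers={"Authorization": "Bearer <token>"},
        timeout=5,
    ).json()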
@@ -5,7 +5,7 @@ from json import loads
 from typing import Any

 from django.contrib.auth import update_session_auth_hash
-from django.contrib.auth.models import AnonymousUser, Permission
+from django.contrib.auth.models import Permission
 from django.db.transaction import atomic
 from django.db.utils import IntegrityError
 from django.urls import reverse_lazy

@@ -16,7 +16,6 @@ from django.utils.translation import gettext as _
 from django_filters.filters import (
     BooleanFilter,
     CharFilter,
-    IsoDateTimeFilter,
     ModelMultipleChoiceFilter,
     MultipleChoiceFilter,
     UUIDFilter,

@@ -154,8 +153,7 @@ class UserSerializer(ModelSerializer):
         if SERIALIZER_CONTEXT_BLUEPRINT in self.context:
             self.fields["password"] = CharField(required=False, allow_null=True)
         self.fields["permissions"] = ListField(
-            required=False,
-            child=ChoiceField(choices=get_permission_choices()),
+            required=False, child=ChoiceField(choices=get_permission_choices())
         )

     def create(self, validated_data: dict) -> User:

@@ -243,7 +241,6 @@ class UserSerializer(ModelSerializer):
             "type",
             "uuid",
             "password_change_date",
-            "last_updated",
         ]
         extra_kwargs = {
             "name": {"allow_blank": True},

@@ -270,10 +267,7 @@ class UserSelfSerializer(ModelSerializer):
         ListSerializer(
             child=inline_serializer(
                 "UserSelfGroups",
-                {
-                    "name": CharField(read_only=True),
-                    "pk": CharField(read_only=True),
-                },
+                {"name": CharField(read_only=True), "pk": CharField(read_only=True)},
             )
         )
     )

@@ -321,8 +315,7 @@ class UserSelfSerializer(ModelSerializer):

 class SessionUserSerializer(PassiveSerializer):
     """Response for the /user/me endpoint, returns the currently active user (as `user` property)
-    and, if this user is being impersonated, the original user in the `original` property.
-    """
+    and, if this user is being impersonated, the original user in the `original` property."""

     user = UserSelfSerializer()
     original = UserSelfSerializer(required=False)

@@ -338,14 +331,6 @@ class UsersFilter(FilterSet):
         method="filter_attributes",
     )

-    date_joined__lt = IsoDateTimeFilter(field_name="date_joined", lookup_expr="lt")
-    date_joined = IsoDateTimeFilter(field_name="date_joined")
-    date_joined__gt = IsoDateTimeFilter(field_name="date_joined", lookup_expr="gt")
-
-    last_updated__lt = IsoDateTimeFilter(field_name="last_updated", lookup_expr="lt")
-    last_updated = IsoDateTimeFilter(field_name="last_updated")
-    last_updated__gt = IsoDateTimeFilter(field_name="last_updated", lookup_expr="gt")
-
     is_superuser = BooleanFilter(field_name="ak_groups", method="filter_is_superuser")
     uuid = UUIDFilter(field_name="uuid")

@@ -391,8 +376,6 @@ class UsersFilter(FilterSet):
         fields = [
             "username",
             "email",
-            "date_joined",
-            "last_updated",
             "name",
             "is_active",
             "is_superuser",

@@ -407,18 +390,15 @@ class UserViewSet(UsedByMixin, ModelViewSet):
     """User Viewset"""

     queryset = User.objects.none()
-    ordering = ["username", "date_joined", "last_updated"]
+    ordering = ["username"]
     serializer_class = UserSerializer
     filterset_class = UsersFilter
-    search_fields = ["email", "name", "uuid", "username"]
+    search_fields = ["username", "name", "is_active", "email", "uuid", "attributes"]

     def get_ql_fields(self):
         from djangoql.schema import BoolField, StrField

-        from authentik.enterprise.search.fields import (
-            ChoiceSearchField,
-            JSONSearchField,
-        )
+        from authentik.enterprise.search.fields import ChoiceSearchField, JSONSearchField

         return [
             StrField(User, "username"),

@@ -455,7 +435,6 @@ class UserViewSet(UsedByMixin, ModelViewSet):
         user: User = self.get_object()
         planner = FlowPlanner(flow)
         planner.allow_empty_flows = True
-        self.request._request.user = AnonymousUser()
         try:
             plan = planner.plan(
                 self.request._request,

@@ -513,12 +492,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
             )
         },
     )
-    @action(
-        detail=False,
-        methods=["POST"],
-        pagination_class=None,
-        filter_backends=[],
-    )
+    @action(detail=False, methods=["POST"], pagination_class=None, filter_backends=[])
     def service_account(self, request: Request) -> Response:
         """Create a new user account that is marked as a service account"""
         username = request.data.get("name")

@@ -562,13 +536,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
             return Response(data={"non_field_errors": [str(exc)]}, status=400)

     @extend_schema(responses={200: SessionUserSerializer(many=False)})
-    @action(
-        url_path="me",
-        url_name="me",
-        detail=False,
-        pagination_class=None,
-        filter_backends=[],
-    )
+    @action(url_path="me", url_name="me", detail=False, pagination_class=None, filter_backends=[])
     def user_me(self, request: Request) -> Response:
         """Get information about current user"""
         context = {"request": request}

@@ -694,18 +662,14 @@ class UserViewSet(UsedByMixin, ModelViewSet):
         if not request.user.has_perm(
             "authentik_core.impersonate", user_to_be
         ) and not request.user.has_perm("authentik_core.impersonate"):
-            LOGGER.debug(
-                "User attempted to impersonate without permissions",
-                user=request.user,
-            )
+            LOGGER.debug("User attempted to impersonate without permissions", user=request.user)
             return Response(status=401)
         if user_to_be.pk == self.request.user.pk:
             LOGGER.debug("User attempted to impersonate themselves", user=request.user)
|
||||
return Response(status=401)
|
||||
if not reason and request.tenant.impersonation_require_reason:
|
||||
LOGGER.debug(
|
||||
"User attempted to impersonate without providing a reason",
|
||||
user=request.user,
|
||||
"User attempted to impersonate without providing a reason", user=request.user
|
||||
)
|
||||
return Response(status=401)
|
||||
|
||||
@@ -744,8 +708,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
|
||||
@extend_schema(
|
||||
responses={
|
||||
200: inline_serializer(
|
||||
"UserPathSerializer",
|
||||
{"paths": ListField(child=CharField(), read_only=True)},
|
||||
"UserPathSerializer", {"paths": ListField(child=CharField(), read_only=True)}
|
||||
)
|
||||
},
|
||||
parameters=[
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
"""authentik core app config"""
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
from authentik.blueprints.apps import ManagedAppConfig
|
||||
from authentik.tasks.schedules.common import ScheduleSpec
|
||||
|
||||
|
||||
class AuthentikCoreConfig(ManagedAppConfig):
|
||||
@@ -13,6 +14,14 @@ class AuthentikCoreConfig(ManagedAppConfig):
|
||||
mountpoint = ""
|
||||
default = True
|
||||
|
||||
@ManagedAppConfig.reconcile_global
|
||||
def debug_worker_hook(self):
|
||||
"""Dispatch startup tasks inline when debugging"""
|
||||
if settings.DEBUG:
|
||||
from authentik.root.celery import worker_ready_hook
|
||||
|
||||
worker_ready_hook()
|
||||
|
||||
@ManagedAppConfig.reconcile_tenant
|
||||
def source_inbuilt(self):
|
||||
"""Reconcile inbuilt source"""
|
||||
@@ -25,18 +34,3 @@ class AuthentikCoreConfig(ManagedAppConfig):
|
||||
},
|
||||
managed=Source.MANAGED_INBUILT,
|
||||
)
|
||||
|
||||
@property
|
||||
def tenant_schedule_specs(self) -> list[ScheduleSpec]:
|
||||
from authentik.core.tasks import clean_expired_models, clean_temporary_users
|
||||
|
||||
return [
|
||||
ScheduleSpec(
|
||||
actor=clean_expired_models,
|
||||
crontab="2-59/5 * * * *",
|
||||
),
|
||||
ScheduleSpec(
|
||||
actor=clean_temporary_users,
|
||||
crontab="9-59/5 * * * *",
|
||||
),
|
||||
]
|
||||
|
||||
21
authentik/core/management/commands/bootstrap_tasks.py
Normal file
21
authentik/core/management/commands/bootstrap_tasks.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Run bootstrap tasks"""
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django_tenants.utils import get_public_schema_name
|
||||
|
||||
from authentik.root.celery import _get_startup_tasks_all_tenants, _get_startup_tasks_default_tenant
|
||||
from authentik.tenants.models import Tenant
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""Run bootstrap tasks to ensure certain objects are created"""
|
||||
|
||||
def handle(self, **options):
|
||||
for task in _get_startup_tasks_default_tenant():
|
||||
with Tenant.objects.get(schema_name=get_public_schema_name()):
|
||||
task()
|
||||
|
||||
for task in _get_startup_tasks_all_tenants():
|
||||
for tenant in Tenant.objects.filter(ready=True):
|
||||
with tenant:
|
||||
task()
|
||||
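The new bootstrap_tasks command above runs every registered startup task once, first against the public schema and then against each tenant marked ready. Since it is a plain Django management command, it can also be invoked programmatically; a minimal usage sketch (call_command is standard Django, and a configured authentik settings module is assumed here):

from django.core.management import call_command

# Run all startup tasks once, e.g. from a deployment or init hook;
# equivalent to running the "bootstrap_tasks" command from the CLI.
call_command("bootstrap_tasks")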
47 authentik/core/management/commands/worker.py Normal file
@@ -0,0 +1,47 @@
"""Run worker"""

from sys import exit as sysexit
from tempfile import tempdir

from celery.apps.worker import Worker
from django.core.management.base import BaseCommand
from django.db import close_old_connections
from structlog.stdlib import get_logger

from authentik.lib.config import CONFIG
from authentik.lib.debug import start_debug_server
from authentik.root.celery import CELERY_APP

LOGGER = get_logger()


class Command(BaseCommand):
    """Run worker"""

    def add_arguments(self, parser):
        parser.add_argument(
            "-b",
            "--beat",
            action="store_false",
            help="When set, this worker will _not_ run Beat (scheduled) tasks",
        )

    def handle(self, **options):
        LOGGER.debug("Celery options", **options)
        close_old_connections()
        start_debug_server()
        worker: Worker = CELERY_APP.Worker(
            no_color=False,
            quiet=True,
            optimization="fair",
            autoscale=(CONFIG.get_int("worker.concurrency"), 1),
            task_events=True,
            beat=options.get("beat", True),
            schedule_filename=f"{tempdir}/celerybeat-schedule",
            queues=["authentik", "authentik_scheduled", "authentik_events"],
        )
        for task in CELERY_APP.tasks:
            LOGGER.debug("Registered task", task=task)

        worker.start()
        sysexit(worker.exitcode)
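One subtlety in the worker command above: because -b/--beat is declared with action="store_false", the embedded beat scheduler is enabled by default and passing the flag turns it off. A sketch of both invocations (call_command is standard Django; the option name mirrors the parser definition above):

from django.core.management import call_command

# Default: the worker runs with the embedded beat scheduler.
call_command("worker")
# Equivalent to passing -b / --beat on the CLI: worker only, no scheduled tasks.
call_command("worker", beat=False)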
@@ -1,27 +0,0 @@
# Generated by Django 5.1.11 on 2025-07-15 15:21

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("auth", "0012_alter_user_first_name_max_length"),
        ("authentik_core", "0049_alter_token_options"),
    ]

    operations = [
        migrations.AddField(
            model_name="user",
            name="last_updated",
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AddIndex(
            model_name="user",
            index=models.Index(fields=["last_updated"], name="authentik_c_last_up_ed7486_idx"),
        ),
        migrations.AddIndex(
            model_name="user",
            index=models.Index(fields=["date_joined"], name="authentik_c_date_jo_58c256_idx"),
        ),
    ]
@@ -274,8 +274,6 @@ class User(SerializerModel, GuardianUserMixin, AttributesMixin, AbstractUser):
    ak_groups = models.ManyToManyField("Group", related_name="users")
    password_change_date = models.DateTimeField(auto_now_add=True)

    last_updated = models.DateTimeField(auto_now=True)

    objects = UserManager()

    class Meta:
@@ -295,8 +293,6 @@ class User(SerializerModel, GuardianUserMixin, AttributesMixin, AbstractUser):
            models.Index(fields=["uuid"]),
            models.Index(fields=["path"]),
            models.Index(fields=["type"]),
            models.Index(fields=["date_joined"]),
            models.Index(fields=["last_updated"]),
        ]

    def __str__(self):

@@ -3,9 +3,6 @@
from datetime import datetime, timedelta

from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from django_dramatiq_postgres.middleware import CurrentTask
from dramatiq.actor import actor
from structlog.stdlib import get_logger

from authentik.core.models import (
@@ -14,14 +11,17 @@ from authentik.core.models import (
    ExpiringModel,
    User,
)
from authentik.tasks.models import Task
from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
from authentik.root.celery import CELERY_APP

LOGGER = get_logger()


@actor(description=_("Remove expired objects."))
def clean_expired_models():
    self: Task = CurrentTask.get_task()
@CELERY_APP.task(bind=True, base=SystemTask)
@prefill_task
def clean_expired_models(self: SystemTask):
    """Remove expired objects"""
    messages = []
    for cls in ExpiringModel.__subclasses__():
        cls: ExpiringModel
        objects = (
@@ -31,13 +31,16 @@ def clean_expired_models():
        for obj in objects:
            obj.expire_action()
        LOGGER.debug("Expired models", model=cls, amount=amount)
        self.info(f"Expired {amount} {cls._meta.verbose_name_plural}")
        messages.append(f"Expired {amount} {cls._meta.verbose_name_plural}")
    self.set_status(TaskStatus.SUCCESSFUL, *messages)


@actor(description=_("Remove temporary users created by SAML Sources."))
def clean_temporary_users():
    self: Task = CurrentTask.get_task()
@CELERY_APP.task(bind=True, base=SystemTask)
@prefill_task
def clean_temporary_users(self: SystemTask):
    """Remove temporary users created by SAML Sources"""
    _now = datetime.now()
    messages = []
    deleted_users = 0
    for user in User.objects.filter(**{f"attributes__{USER_ATTRIBUTE_GENERATED}": True}):
        if not user.attributes.get(USER_ATTRIBUTE_EXPIRES):
@@ -49,4 +52,5 @@ def clean_temporary_users():
            LOGGER.debug("User is expired and will be deleted.", user=user, delta=delta)
            user.delete()
            deleted_users += 1
    self.info(f"Successfully deleted {deleted_users} users.")
    messages.append(f"Successfully deleted {deleted_users} users.")
    self.set_status(TaskStatus.SUCCESSFUL, *messages)
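The tasks module above is representative of the wider change in this compare: one branch defines maintenance jobs as dramatiq actors that fetch their backing Task record from middleware state, the other as bound Celery tasks built on SystemTask. A condensed sketch of the two shapes with an invented task body (the imports follow the ones shown in the diff; treat this as illustrative, not as either branch's exact code):

# dramatiq shape: the Task record comes from CurrentTask middleware state
from django.utils.translation import gettext_lazy as _
from django_dramatiq_postgres.middleware import CurrentTask
from dramatiq.actor import actor

from authentik.tasks.models import Task


@actor(description=_("Example maintenance task."))
def example_task():
    self: Task = CurrentTask.get_task()
    self.info("did work")


# Celery shape: the task instance is bound as `self`, status is set explicitly
from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
from authentik.root.celery import CELERY_APP


@CELERY_APP.task(bind=True, base=SystemTask)
@prefill_task
def example_task_celery(self: SystemTask):
    """Example maintenance task"""
    self.set_status(TaskStatus.SUCCESSFUL, "did work")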
@@ -10,16 +10,13 @@
    <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
    {# Darkreader breaks the site regardless of theme as its not compatible with webcomponents, and we default to a dark theme based on preferred colour-scheme #}
    <meta name="darkreader-lock">
    <base href="{{ base_url_rel }}" />
    <title>{% block title %}{% trans title|default:brand.branding_title %}{% endblock %}</title>
    <link rel="icon" href="{{ brand.branding_favicon_url }}">
    <link rel="shortcut icon" href="{{ brand.branding_favicon_url }}">
    {% block head_before %}
    {% endblock %}

    {% include "base/theme.html" %}

    <link rel="stylesheet" type="text/css" href="{% static 'dist/authentik.css' %}">

    <style>{{ brand_css }}</style>
    <script src="{% versioned_script 'dist/poly-%v.js' %}" type="module"></script>
    <script src="{% versioned_script 'dist/standalone/loading/index-%v.js' %}" type="module"></script>

@@ -1,11 +0,0 @@
{% if ui_theme == "dark" %}
<meta name="color-scheme" content="dark" />
<meta name="theme-color" content="#18191a">
{% elif ui_theme == "light" %}
<meta name="color-scheme" content="light" />
<meta name="theme-color" content="#ffffff">
{% else %}
<meta name="color-scheme" content="light dark" />
<meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)">
<meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)">
{% endif %}
@@ -4,6 +4,8 @@

{% block head %}
<script src="{% versioned_script 'dist/admin/AdminInterface-%v.js' %}" type="module"></script>
<meta name="theme-color" content="#18191a" media="(prefers-color-scheme: dark)">
<meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)">
{% include "base/header_js.html" %}
{% endblock %}


@@ -4,6 +4,8 @@

{% block head %}
<script src="{% versioned_script 'dist/user/UserInterface-%v.js' %}" type="module"></script>
<meta name="theme-color" content="#1c1e21" media="(prefers-color-scheme: light)">
<meta name="theme-color" content="#1c1e21" media="(prefers-color-scheme: dark)">
{% include "base/header_js.html" %}
{% endblock %}


@@ -36,7 +36,7 @@ class TestTasks(APITestCase):
            expires=now(), user=get_anonymous_user(), intent=TokenIntents.INTENT_API
        )
        key = token.key
        clean_expired_models.send()
        clean_expired_models.delay().get()
        token.refresh_from_db()
        self.assertNotEqual(key, token.key)

@@ -50,5 +50,5 @@ class TestTasks(APITestCase):
                USER_ATTRIBUTE_EXPIRES: mktime(now().timetuple()),
            },
        )
        clean_temporary_users.send()
        clean_temporary_users.delay().get()
        self.assertFalse(User.objects.filter(username=username))

@@ -21,7 +21,7 @@ from authentik.core.tests.utils import (
    create_test_flow,
    create_test_user,
)
from authentik.flows.models import FlowAuthenticationRequirement, FlowDesignation
from authentik.flows.models import FlowDesignation
from authentik.lib.generators import generate_id, generate_key
from authentik.stages.email.models import EmailStage

@@ -103,11 +103,8 @@ class TestUsersAPI(APITestCase):
        self.assertTrue(self.admin.check_password(new_pw))

    def test_recovery(self):
        """Test user recovery link"""
        flow = create_test_flow(
            FlowDesignation.RECOVERY,
            authentication=FlowAuthenticationRequirement.REQUIRE_UNAUTHENTICATED,
        )
        """Test user recovery link (no recovery flow set)"""
        flow = create_test_flow(FlowDesignation.RECOVERY)
        brand: Brand = create_test_brand()
        brand.flow_recovery = flow
        brand.save()
@@ -390,72 +387,3 @@ class TestUsersAPI(APITestCase):
        self.assertFalse(
            AuthenticatedSession.objects.filter(session__session_key=session_id).exists()
        )

    def test_sort_by_last_updated(self):
        """Test API sorting by last_updated"""
        User.objects.all().delete()
        admin = create_test_admin_user()
        self.client.force_login(admin)

        user = create_test_user()
        admin.first_name = "Sample change"
        admin.last_name = "To trigger an update"
        admin.save()

        # Ascending
        response = self.client.get(
            reverse("authentik_api:user-list"),
            data={
                "ordering": "last_updated",
            },
        )
        self.assertEqual(response.status_code, 200)

        body = loads(response.content)
        self.assertEqual(len(body["results"]), 2)
        self.assertEqual(body["results"][0]["pk"], user.pk)

        # Descending
        response = self.client.get(
            reverse("authentik_api:user-list"),
            data={
                "ordering": "-last_updated",
            },
        )
        self.assertEqual(response.status_code, 200)

        body = loads(response.content)
        self.assertEqual(len(body["results"]), 2)
        self.assertEqual(body["results"][0]["pk"], admin.pk)

    def test_sort_by_date_joined(self):
        """Test API sorting by date_joined"""
        User.objects.all().delete()
        admin = create_test_admin_user()
        self.client.force_login(admin)

        user = create_test_user()

        response = self.client.get(
            reverse("authentik_api:user-list"),
            data={
                "ordering": "date_joined",
            },
        )
        self.assertEqual(response.status_code, 200)

        body = loads(response.content)
        self.assertEqual(len(body["results"]), 2)
        self.assertEqual(body["results"][0]["pk"], admin.pk)

        response = self.client.get(
            reverse("authentik_api:user-list"),
            data={
                "ordering": "-date_joined",
            },
        )
        self.assertEqual(response.status_code, 200)

        body = loads(response.content)
        self.assertEqual(len(body["results"]), 2)
        self.assertEqual(body["results"][0]["pk"], user.pk)

@@ -46,10 +46,8 @@ class InterfaceView(TemplateView):
    """Base interface view"""

    def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
        brand = CurrentBrandSerializer(self.request.brand)
        kwargs["config_json"] = dumps(ConfigView(request=Request(self.request)).get_config().data)
        kwargs["ui_theme"] = brand.data["ui_theme"]
        kwargs["brand_json"] = dumps(brand.data)
        kwargs["brand_json"] = dumps(CurrentBrandSerializer(self.request.brand).data)
        kwargs["version_family"] = f"{LOCAL_VERSION.major}.{LOCAL_VERSION.minor}"
        kwargs["version_subdomain"] = f"version-{LOCAL_VERSION.major}-{LOCAL_VERSION.minor}"
        kwargs["build"] = get_build_hash()

@@ -4,8 +4,6 @@ from datetime import UTC, datetime

from authentik.blueprints.apps import ManagedAppConfig
from authentik.lib.generators import generate_id
from authentik.lib.utils.time import fqdn_rand
from authentik.tasks.schedules.common import ScheduleSpec

MANAGED_KEY = "goauthentik.io/crypto/jwt-managed"

@@ -69,14 +67,3 @@ class AuthentikCryptoConfig(ManagedAppConfig):
                "key_data": builder.private_key,
            },
        )

    @property
    def tenant_schedule_specs(self) -> list[ScheduleSpec]:
        from authentik.crypto.tasks import certificate_discovery

        return [
            ScheduleSpec(
                actor=certificate_discovery,
                crontab=f"{fqdn_rand('crypto_certificate_discovery')} * * * *",
            ),
        ]

13 authentik/crypto/settings.py Normal file
@@ -0,0 +1,13 @@
"""Crypto task Settings"""

from celery.schedules import crontab

from authentik.lib.utils.time import fqdn_rand

CELERY_BEAT_SCHEDULE = {
    "crypto_certificate_discovery": {
        "task": "authentik.crypto.tasks.certificate_discovery",
        "schedule": crontab(minute=fqdn_rand("crypto_certificate_discovery"), hour="*"),
        "options": {"queue": "authentik_scheduled"},
    },
}
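The settings module above registers the discovery task with Celery beat. Judging by its use throughout this diff, fqdn_rand derives a stable pseudo-random minute from the installation's hostname so that many installs don't all fire at the same moment; that reading is an assumption, since fqdn_rand's implementation is not part of this diff. A sketch of how one entry resolves:

from celery.schedules import crontab

from authentik.lib.utils.time import fqdn_rand

# e.g. one install resolves to minute 17, another to minute 42; the resulting
# schedule is then equivalent to "17 * * * *", hourly at a host-specific minute.
schedule = crontab(minute=fqdn_rand("crypto_certificate_discovery"), hour="*")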
@@ -7,13 +7,13 @@ from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import load_pem_private_key
from cryptography.x509.base import load_pem_x509_certificate
from django.utils.translation import gettext_lazy as _
from django_dramatiq_postgres.middleware import CurrentTask
from dramatiq.actor import actor
from structlog.stdlib import get_logger

from authentik.crypto.models import CertificateKeyPair
from authentik.events.models import TaskStatus
from authentik.events.system_tasks import SystemTask, prefill_task
from authentik.lib.config import CONFIG
from authentik.tasks.models import Task
from authentik.root.celery import CELERY_APP

LOGGER = get_logger()

@@ -36,9 +36,10 @@ def ensure_certificate_valid(body: str):
    return body


@actor(description=_("Discover, import and update certificates from the filesystem."))
def certificate_discovery():
    self: Task = CurrentTask.get_task()
@CELERY_APP.task(bind=True, base=SystemTask)
@prefill_task
def certificate_discovery(self: SystemTask):
    """Discover, import and update certificates from the filesystem"""
    certs = {}
    private_keys = {}
    discovered = 0
@@ -83,4 +84,6 @@ def certificate_discovery():
            dirty = True
        if dirty:
            cert.save()
    self.info(f"Successfully imported {discovered} files.")
    self.set_status(
        TaskStatus.SUCCESSFUL, _("Successfully imported {count} files.".format(count=discovered))
    )

@@ -338,7 +338,7 @@ class TestCrypto(APITestCase):
        with open(f"{temp_dir}/foo.bar/privkey.pem", "w+", encoding="utf-8") as _key:
            _key.write(builder.private_key)
        with CONFIG.patch("cert_discovery_dir", temp_dir):
            certificate_discovery.send()
            certificate_discovery()
            keypair: CertificateKeyPair = CertificateKeyPair.objects.filter(
                managed=MANAGED_DISCOVERED % "foo"
            ).first()

@@ -3,8 +3,6 @@
from django.conf import settings

from authentik.blueprints.apps import ManagedAppConfig
from authentik.lib.utils.time import fqdn_rand
from authentik.tasks.schedules.common import ScheduleSpec


class EnterpriseConfig(ManagedAppConfig):
@@ -28,14 +26,3 @@ class AuthentikEnterpriseConfig(EnterpriseConfig):
        from authentik.enterprise.license import LicenseKey

        return LicenseKey.cached_summary().status.is_valid

    @property
    def tenant_schedule_specs(self) -> list[ScheduleSpec]:
        from authentik.enterprise.tasks import enterprise_update_usage

        return [
            ScheduleSpec(
                actor=enterprise_update_usage,
                crontab=f"{fqdn_rand('enterprise_update_usage')} */2 * * *",
            ),
        ]

@@ -55,7 +55,6 @@ class TestEnterpriseAudit(APITestCase):
        self.assertIsNotNone(event)
        self.assertIsNotNone(event.context["diff"])
        diff = event.context["diff"]
        diff.pop("last_updated")
        self.assertEqual(
            diff,
            {
@@ -117,7 +116,6 @@ class TestEnterpriseAudit(APITestCase):
        self.assertIsNotNone(event)
        self.assertIsNotNone(event.context["diff"])
        diff = event.context["diff"]
        diff.pop("last_updated")
        self.assertEqual(
            diff,
            {

@@ -1,8 +1,6 @@
"""authentik Unique Password policy app config"""

from authentik.enterprise.apps import EnterpriseConfig
from authentik.lib.utils.time import fqdn_rand
from authentik.tasks.schedules.common import ScheduleSpec


class AuthentikEnterprisePoliciesUniquePasswordConfig(EnterpriseConfig):
@@ -10,21 +8,3 @@ class AuthentikEnterprisePoliciesUniquePasswordConfig(EnterpriseConfig):
    label = "authentik_policies_unique_password"
    verbose_name = "authentik Enterprise.Policies.Unique Password"
    default = True

    @property
    def tenant_schedule_specs(self) -> list[ScheduleSpec]:
        from authentik.enterprise.policies.unique_password.tasks import (
            check_and_purge_password_history,
            trim_password_histories,
        )

        return [
            ScheduleSpec(
                actor=trim_password_histories,
                crontab=f"{fqdn_rand('policies_unique_password_trim')} */12 * * *",
            ),
            ScheduleSpec(
                actor=check_and_purge_password_history,
                crontab=f"{fqdn_rand('policies_unique_password_purge')} */24 * * *",
            ),
        ]

20 authentik/enterprise/policies/unique_password/settings.py Normal file
@@ -0,0 +1,20 @@
"""Unique Password Policy settings"""

from celery.schedules import crontab

from authentik.lib.utils.time import fqdn_rand

CELERY_BEAT_SCHEDULE = {
    "policies_unique_password_trim_history": {
        "task": "authentik.enterprise.policies.unique_password.tasks.trim_password_histories",
        "schedule": crontab(minute=fqdn_rand("policies_unique_password_trim"), hour="*/12"),
        "options": {"queue": "authentik_scheduled"},
    },
    "policies_unique_password_check_purge": {
        "task": (
            "authentik.enterprise.policies.unique_password.tasks.check_and_purge_password_history"
        ),
        "schedule": crontab(minute=fqdn_rand("policies_unique_password_purge"), hour="*/24"),
        "options": {"queue": "authentik_scheduled"},
    },
}
@@ -1,37 +1,35 @@
from django.db.models.aggregates import Count
from django.utils.translation import gettext_lazy as _
from django_dramatiq_postgres.middleware import CurrentTask
from dramatiq.actor import actor
from structlog import get_logger

from authentik.enterprise.policies.unique_password.models import (
    UniquePasswordPolicy,
    UserPasswordHistory,
)
from authentik.tasks.models import Task
from authentik.events.system_tasks import SystemTask, TaskStatus, prefill_task
from authentik.root.celery import CELERY_APP

LOGGER = get_logger()


@actor(
    description=_(
        "Check if any UniquePasswordPolicy exists, and if not, purge the password history table."
    )
)
def check_and_purge_password_history():
    self: Task = CurrentTask.get_task()

@CELERY_APP.task(bind=True, base=SystemTask)
@prefill_task
def check_and_purge_password_history(self: SystemTask):
    """Check if any UniquePasswordPolicy exists, and if not, purge the password history table.
    This is run on a schedule instead of being triggered by policy binding deletion.
    """
    if not UniquePasswordPolicy.objects.exists():
        UserPasswordHistory.objects.all().delete()
        LOGGER.debug("Purged UserPasswordHistory table as no policies are in use")
        self.info("Successfully purged UserPasswordHistory")
        self.set_status(TaskStatus.SUCCESSFUL, "Successfully purged UserPasswordHistory")
        return

    self.info("Not purging password histories, a unique password policy exists")
    self.set_status(
        TaskStatus.SUCCESSFUL, "Not purging password histories, a unique password policy exists"
    )


@actor(description=_("Remove user password history that are too old."))
def trim_password_histories():
@CELERY_APP.task(bind=True, base=SystemTask)
def trim_password_histories(self: SystemTask):
    """Removes rows from UserPasswordHistory older than
    the `n` most recent entries.

@@ -39,8 +37,6 @@ def trim_password_histories():
    UniquePasswordPolicy policies.
    """

    self: Task = CurrentTask.get_task()

    # No policy, we'll let the cleanup above do its thing
    if not UniquePasswordPolicy.objects.exists():
        return
@@ -67,4 +63,4 @@ def trim_password_histories():

    num_deleted, _ = UserPasswordHistory.objects.exclude(pk__in=all_pks_to_keep).delete()
    LOGGER.debug("Deleted stale password history records", count=num_deleted)
    self.info(f"Delete {num_deleted} stale password history records")
    self.set_status(TaskStatus.SUCCESSFUL, f"Delete {num_deleted} stale password history records")

@@ -76,7 +76,7 @@ class TestCheckAndPurgePasswordHistory(TestCase):
        self.assertTrue(UserPasswordHistory.objects.exists())

        # Run the task - should purge since no policy is in use
        check_and_purge_password_history.send()
        check_and_purge_password_history()

        # Verify the table is empty
        self.assertFalse(UserPasswordHistory.objects.exists())
@@ -99,7 +99,7 @@ class TestCheckAndPurgePasswordHistory(TestCase):
        self.assertTrue(UserPasswordHistory.objects.exists())

        # Run the task - should NOT purge since a policy is in use
        check_and_purge_password_history.send()
        check_and_purge_password_history()

        # Verify the entries still exist
        self.assertTrue(UserPasswordHistory.objects.exists())
@@ -142,7 +142,7 @@ class TestTrimPasswordHistory(TestCase):
            enabled=True,
            order=0,
        )
        trim_password_histories.send()
        trim_password_histories.delay()
        user_pwd_history_qs = UserPasswordHistory.objects.filter(user=self.user)
        self.assertEqual(len(user_pwd_history_qs), 1)

@@ -159,7 +159,7 @@ class TestTrimPasswordHistory(TestCase):
            enabled=False,
            order=0,
        )
        trim_password_histories.send()
        trim_password_histories.delay()
        self.assertTrue(UserPasswordHistory.objects.filter(user=self.user).exists())

    def test_trim_password_history_fewer_records_than_maximum_is_no_op(self):
@@ -174,5 +174,5 @@ class TestTrimPasswordHistory(TestCase):
            enabled=True,
            order=0,
        )
        trim_password_histories.send()
        trim_password_histories.delay()
        self.assertTrue(UserPasswordHistory.objects.filter(user=self.user).exists())

@@ -55,5 +55,5 @@ class GoogleWorkspaceProviderViewSet(OutgoingSyncProviderStatusMixin, UsedByMixi
    ]
    search_fields = ["name"]
    ordering = ["name"]
    sync_task = google_workspace_sync
    sync_single_task = google_workspace_sync
    sync_objects_task = google_workspace_sync_objects

@@ -7,7 +7,6 @@ from django.db import models
from django.db.models import QuerySet
from django.templatetags.static import static
from django.utils.translation import gettext_lazy as _
from dramatiq.actor import Actor
from google.oauth2.service_account import Credentials
from rest_framework.serializers import Serializer

@@ -111,12 +110,6 @@ class GoogleWorkspaceProvider(OutgoingSyncProvider, BackchannelProvider):
        help_text=_("Property mappings used for group creation/updating."),
    )

    @property
    def sync_actor(self) -> Actor:
        from authentik.enterprise.providers.google_workspace.tasks import google_workspace_sync

        return google_workspace_sync

    def client_for_model(
        self,
        model: type[User | Group | GoogleWorkspaceProviderUser | GoogleWorkspaceProviderGroup],

13 authentik/enterprise/providers/google_workspace/settings.py Normal file
@@ -0,0 +1,13 @@
"""Google workspace provider task Settings"""

from celery.schedules import crontab

from authentik.lib.utils.time import fqdn_rand

CELERY_BEAT_SCHEDULE = {
    "providers_google_workspace_sync": {
        "task": "authentik.enterprise.providers.google_workspace.tasks.google_workspace_sync_all",
        "schedule": crontab(minute=fqdn_rand("google_workspace_sync_all"), hour="*/4"),
        "options": {"queue": "authentik_scheduled"},
    },
}
@@ -2,13 +2,15 @@

from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
from authentik.enterprise.providers.google_workspace.tasks import (
    google_workspace_sync_direct_dispatch,
    google_workspace_sync_m2m_dispatch,
    google_workspace_sync,
    google_workspace_sync_direct,
    google_workspace_sync_m2m,
)
from authentik.lib.sync.outgoing.signals import register_signals

register_signals(
    GoogleWorkspaceProvider,
    task_sync_direct_dispatch=google_workspace_sync_direct_dispatch,
    task_sync_m2m_dispatch=google_workspace_sync_m2m_dispatch,
    task_sync_single=google_workspace_sync,
    task_sync_direct=google_workspace_sync_direct,
    task_sync_m2m=google_workspace_sync_m2m,
)

@@ -1,48 +1,37 @@
"""Google Provider tasks"""

from django.utils.translation import gettext_lazy as _
from dramatiq.actor import actor

from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProvider
from authentik.events.system_tasks import SystemTask
from authentik.lib.sync.outgoing.exceptions import TransientSyncException
from authentik.lib.sync.outgoing.tasks import SyncTasks
from authentik.root.celery import CELERY_APP

sync_tasks = SyncTasks(GoogleWorkspaceProvider)


@actor(description=_("Sync Google Workspace provider objects."))
@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
def google_workspace_sync_objects(*args, **kwargs):
    return sync_tasks.sync_objects(*args, **kwargs)


@actor(description=_("Full sync for Google Workspace provider."))
def google_workspace_sync(provider_pk: int, *args, **kwargs):
@CELERY_APP.task(
    base=SystemTask, bind=True, autoretry_for=(TransientSyncException,), retry_backoff=True
)
def google_workspace_sync(self, provider_pk: int, *args, **kwargs):
    """Run full sync for Google Workspace provider"""
    return sync_tasks.sync(provider_pk, google_workspace_sync_objects)
    return sync_tasks.sync_single(self, provider_pk, google_workspace_sync_objects)


@actor(description=_("Sync a direct object (user, group) for Google Workspace provider."))
@CELERY_APP.task()
def google_workspace_sync_all():
    return sync_tasks.sync_all(google_workspace_sync)


@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
def google_workspace_sync_direct(*args, **kwargs):
    return sync_tasks.sync_signal_direct(*args, **kwargs)


@actor(
    description=_(
        "Dispatch syncs for a direct object (user, group) for Google Workspace providers."
    )
)
def google_workspace_sync_direct_dispatch(*args, **kwargs):
    return sync_tasks.sync_signal_direct_dispatch(google_workspace_sync_direct, *args, **kwargs)


@actor(description=_("Sync a related object (memberships) for Google Workspace provider."))
@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
def google_workspace_sync_m2m(*args, **kwargs):
    return sync_tasks.sync_signal_m2m(*args, **kwargs)


@actor(
    description=_(
        "Dispatch syncs for a related object (memberships) for Google Workspace providers."
    )
)
def google_workspace_sync_m2m_dispatch(*args, **kwargs):
    return sync_tasks.sync_signal_m2m_dispatch(google_workspace_sync_m2m, *args, **kwargs)
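Both branches of the tasks module above route signal handling through the shared SyncTasks helper; the dramatiq branch adds the *_dispatch actors so a signal handler only enqueues a dispatcher, which then fans out per-object work. SyncTasks' internals are not shown in this diff, so the following is an assumed sketch of that fan-out pattern, with invented names:

from dramatiq.actor import actor


@actor()
def sync_one(pk: int):
    """Sync a single user/group (placeholder body for this sketch)."""


@actor()
def sync_dispatch(pks: list[int]):
    # Fan out: enqueue one message per affected object instead of
    # doing all of the work inside the signal handler itself.
    for pk in pks:
        sync_one.send(pk)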
@@ -324,7 +324,7 @@ class GoogleWorkspaceGroupTests(TestCase):
            "authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            google_workspace_sync.send(self.provider.pk).get_result()
            google_workspace_sync.delay(self.provider.pk).get()
            self.assertTrue(
                GoogleWorkspaceProviderGroup.objects.filter(
                    group=different_group, provider=self.provider

@@ -302,7 +302,7 @@ class GoogleWorkspaceUserTests(TestCase):
            "authentik.enterprise.providers.google_workspace.models.GoogleWorkspaceProvider.google_credentials",
            MagicMock(return_value={"developerKey": self.api_key, "http": http}),
        ):
            google_workspace_sync.send(self.provider.pk).get_result()
            google_workspace_sync.delay(self.provider.pk).get()
            self.assertTrue(
                GoogleWorkspaceProviderUser.objects.filter(
                    user=different_user, provider=self.provider

@@ -53,5 +53,5 @@ class MicrosoftEntraProviderViewSet(OutgoingSyncProviderStatusMixin, UsedByMixin
    ]
    search_fields = ["name"]
    ordering = ["name"]
    sync_task = microsoft_entra_sync
    sync_single_task = microsoft_entra_sync
    sync_objects_task = microsoft_entra_sync_objects

@@ -8,7 +8,6 @@ from django.db import models
from django.db.models import QuerySet
from django.templatetags.static import static
from django.utils.translation import gettext_lazy as _
from dramatiq.actor import Actor
from rest_framework.serializers import Serializer

from authentik.core.models import (
@@ -100,12 +99,6 @@ class MicrosoftEntraProvider(OutgoingSyncProvider, BackchannelProvider):
        help_text=_("Property mappings used for group creation/updating."),
    )

    @property
    def sync_actor(self) -> Actor:
        from authentik.enterprise.providers.microsoft_entra.tasks import microsoft_entra_sync

        return microsoft_entra_sync

    def client_for_model(
        self,
        model: type[User | Group | MicrosoftEntraProviderUser | MicrosoftEntraProviderGroup],

13 authentik/enterprise/providers/microsoft_entra/settings.py Normal file
@@ -0,0 +1,13 @@
"""Microsoft Entra provider task Settings"""

from celery.schedules import crontab

from authentik.lib.utils.time import fqdn_rand

CELERY_BEAT_SCHEDULE = {
    "providers_microsoft_entra_sync": {
        "task": "authentik.enterprise.providers.microsoft_entra.tasks.microsoft_entra_sync_all",
        "schedule": crontab(minute=fqdn_rand("microsoft_entra_sync_all"), hour="*/4"),
        "options": {"queue": "authentik_scheduled"},
    },
}
@@ -2,13 +2,15 @@

from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider
from authentik.enterprise.providers.microsoft_entra.tasks import (
    microsoft_entra_sync_direct_dispatch,
    microsoft_entra_sync_m2m_dispatch,
    microsoft_entra_sync,
    microsoft_entra_sync_direct,
    microsoft_entra_sync_m2m,
)
from authentik.lib.sync.outgoing.signals import register_signals

register_signals(
    MicrosoftEntraProvider,
    task_sync_direct_dispatch=microsoft_entra_sync_direct_dispatch,
    task_sync_m2m_dispatch=microsoft_entra_sync_m2m_dispatch,
    task_sync_single=microsoft_entra_sync,
    task_sync_direct=microsoft_entra_sync_direct,
    task_sync_m2m=microsoft_entra_sync_m2m,
)

@@ -1,46 +1,37 @@
"""Microsoft Entra Provider tasks"""

from django.utils.translation import gettext_lazy as _
from dramatiq.actor import actor

from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProvider
from authentik.events.system_tasks import SystemTask
from authentik.lib.sync.outgoing.exceptions import TransientSyncException
from authentik.lib.sync.outgoing.tasks import SyncTasks
from authentik.root.celery import CELERY_APP

sync_tasks = SyncTasks(MicrosoftEntraProvider)


@actor(description=_("Sync Microsoft Entra provider objects."))
@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
def microsoft_entra_sync_objects(*args, **kwargs):
    return sync_tasks.sync_objects(*args, **kwargs)


@actor(description=_("Full sync for Microsoft Entra provider."))
def microsoft_entra_sync(provider_pk: int, *args, **kwargs):
@CELERY_APP.task(
    base=SystemTask, bind=True, autoretry_for=(TransientSyncException,), retry_backoff=True
)
def microsoft_entra_sync(self, provider_pk: int, *args, **kwargs):
    """Run full sync for Microsoft Entra provider"""
    return sync_tasks.sync(provider_pk, microsoft_entra_sync_objects)
    return sync_tasks.sync_single(self, provider_pk, microsoft_entra_sync_objects)


@actor(description=_("Sync a direct object (user, group) for Microsoft Entra provider."))
@CELERY_APP.task()
def microsoft_entra_sync_all():
    return sync_tasks.sync_all(microsoft_entra_sync)


@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
def microsoft_entra_sync_direct(*args, **kwargs):
    return sync_tasks.sync_signal_direct(*args, **kwargs)


@actor(
    description=_("Dispatch syncs for a direct object (user, group) for Microsoft Entra providers.")
)
def microsoft_entra_sync_direct_dispatch(*args, **kwargs):
    return sync_tasks.sync_signal_direct_dispatch(microsoft_entra_sync_direct, *args, **kwargs)


@actor(description=_("Sync a related object (memberships) for Microsoft Entra provider."))
@CELERY_APP.task(autoretry_for=(TransientSyncException,), retry_backoff=True)
def microsoft_entra_sync_m2m(*args, **kwargs):
    return sync_tasks.sync_signal_m2m(*args, **kwargs)


@actor(
    description=_(
        "Dispatch syncs for a related object (memberships) for Microsoft Entra providers."
    )
)
def microsoft_entra_sync_m2m_dispatch(*args, **kwargs):
    return sync_tasks.sync_signal_m2m_dispatch(microsoft_entra_sync_m2m, *args, **kwargs)

@@ -252,13 +252,9 @@ class MicrosoftEntraGroupTests(TestCase):
            member_add.assert_called_once()
            self.assertEqual(
                member_add.call_args[0][0].odata_id,
                f"https://graph.microsoft.com/v1.0/directoryObjects/{
                    MicrosoftEntraProviderUser.objects.filter(
                f"https://graph.microsoft.com/v1.0/directoryObjects/{MicrosoftEntraProviderUser.objects.filter(
                        provider=self.provider,
                    )
                    .first()
                    .microsoft_id
                }",
                ).first().microsoft_id}",
            )

    def test_group_create_member_remove(self):
@@ -315,13 +311,9 @@ class MicrosoftEntraGroupTests(TestCase):
            member_add.assert_called_once()
            self.assertEqual(
                member_add.call_args[0][0].odata_id,
                f"https://graph.microsoft.com/v1.0/directoryObjects/{
                    MicrosoftEntraProviderUser.objects.filter(
                f"https://graph.microsoft.com/v1.0/directoryObjects/{MicrosoftEntraProviderUser.objects.filter(
                        provider=self.provider,
                    )
                    .first()
                    .microsoft_id
                }",
                ).first().microsoft_id}",
            )
            member_remove.assert_called_once()

@@ -421,7 +413,7 @@ class MicrosoftEntraGroupTests(TestCase):
                ),
            ) as group_list,
        ):
            microsoft_entra_sync.send(self.provider.pk).get_result()
            microsoft_entra_sync.delay(self.provider.pk).get()
            self.assertTrue(
                MicrosoftEntraProviderGroup.objects.filter(
                    group=different_group, provider=self.provider

@@ -397,7 +397,7 @@ class MicrosoftEntraUserTests(APITestCase):
                AsyncMock(return_value=GroupCollectionResponse(value=[])),
            ),
        ):
            microsoft_entra_sync.send(self.provider.pk).get_result()
            microsoft_entra_sync.delay(self.provider.pk).get()
            self.assertTrue(
                MicrosoftEntraProviderUser.objects.filter(
                    user=different_user, provider=self.provider

@@ -17,7 +17,6 @@ from authentik.crypto.models import CertificateKeyPair
from authentik.lib.models import CreatedUpdatedModel
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
from authentik.providers.oauth2.models import JWTAlgorithms, OAuth2Provider
from authentik.tasks.models import TasksModel


class EventTypes(models.TextChoices):
@@ -43,7 +42,7 @@ class SSFEventStatus(models.TextChoices):
    SENT = "sent"


class SSFProvider(TasksModel, BackchannelProvider):
class SSFProvider(BackchannelProvider):
    """Shared Signals Framework provider to allow applications to
    receive user events from authentik."""


@@ -18,7 +18,7 @@ from authentik.enterprise.providers.ssf.models import (
    EventTypes,
    SSFProvider,
)
from authentik.enterprise.providers.ssf.tasks import send_ssf_events
from authentik.enterprise.providers.ssf.tasks import send_ssf_event
from authentik.events.middleware import audit_ignore
from authentik.stages.authenticator.models import Device
from authentik.stages.authenticator_duo.models import DuoDevice
@@ -66,7 +66,7 @@ def ssf_user_session_delete_session_revoked(sender, instance: AuthenticatedSessi

    As this signal is also triggered with a regular logout, we can't be sure
    if the session has been deleted by an admin or by the user themselves."""
    send_ssf_events(
    send_ssf_event(
        EventTypes.CAEP_SESSION_REVOKED,
        {
            "initiating_entity": "user",
@@ -88,7 +88,7 @@ def ssf_user_session_delete_session_revoked(sender, instance: AuthenticatedSessi
@receiver(password_changed)
def ssf_password_changed_cred_change(sender, user: User, password: str | None, **_):
    """Credential change trigger (password changed)"""
    send_ssf_events(
    send_ssf_event(
        EventTypes.CAEP_CREDENTIAL_CHANGE,
        {
            "credential_type": "password",
@@ -126,7 +126,7 @@ def ssf_device_post_save(sender: type[Model], instance: Device, created: bool, *
    }
    if isinstance(instance, WebAuthnDevice) and instance.aaguid != UNKNOWN_DEVICE_TYPE_AAGUID:
        data["fido2_aaguid"] = instance.aaguid
    send_ssf_events(
    send_ssf_event(
        EventTypes.CAEP_CREDENTIAL_CHANGE,
        data,
        sub_id={
@@ -153,7 +153,7 @@ def ssf_device_post_delete(sender: type[Model], instance: Device, **_):
    }
    if isinstance(instance, WebAuthnDevice) and instance.aaguid != UNKNOWN_DEVICE_TYPE_AAGUID:
        data["fido2_aaguid"] = instance.aaguid
    send_ssf_events(
    send_ssf_event(
        EventTypes.CAEP_CREDENTIAL_CHANGE,
        data,
        sub_id={

@@ -1,11 +1,7 @@
from typing import Any
from uuid import UUID

from celery import group
from django.http import HttpRequest
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from django_dramatiq_postgres.middleware import CurrentTask
from dramatiq.actor import actor
from requests.exceptions import RequestException
from structlog.stdlib import get_logger

@@ -17,16 +13,19 @@ from authentik.enterprise.providers.ssf.models import (
    Stream,
    StreamEvent,
)
from authentik.events.logs import LogEvent
from authentik.events.models import TaskStatus
from authentik.events.system_tasks import SystemTask
from authentik.lib.utils.http import get_http_session
from authentik.lib.utils.time import timedelta_from_string
from authentik.policies.engine import PolicyEngine
from authentik.tasks.models import Task
from authentik.root.celery import CELERY_APP

session = get_http_session()
LOGGER = get_logger()


def send_ssf_events(
def send_ssf_event(
    event_type: EventTypes,
    data: dict,
    stream_filter: dict | None = None,
@@ -34,7 +33,7 @@ def send_ssf_events(
    **extra_data,
):
    """Wrapper to send an SSF event to multiple streams"""
    events_data = {}
    payload = []
    if not stream_filter:
        stream_filter = {}
    stream_filter["events_requested__contains"] = [event_type]
@@ -42,22 +41,16 @@ def send_ssf_events(
        extra_data.setdefault("txn", request.request_id)
    for stream in Stream.objects.filter(**stream_filter):
        event_data = stream.prepare_event_payload(event_type, data, **extra_data)
        events_data[stream.uuid] = event_data
    ssf_events_dispatch.send(events_data)
        payload.append((str(stream.uuid), event_data))
    return _send_ssf_event.delay(payload)


@actor(description=_("Dispatch SSF events."))
def ssf_events_dispatch(events_data: dict[str, dict[str, Any]]):
    for stream_uuid, event_data in events_data.items():
        stream = Stream.objects.filter(pk=stream_uuid).first()
        if not stream:
            continue
        send_ssf_event.send_with_options(args=(stream_uuid, event_data), rel_obj=stream.provider)


def _check_app_access(stream: Stream, event_data: dict) -> bool:
def _check_app_access(stream_uuid: str, event_data: dict) -> bool:
    """Check if event is related to user and if so, check
    if the user has access to the application"""
    stream = Stream.objects.filter(pk=stream_uuid).first()
    if not stream:
        return False
    # `event_data` is a dict version of a StreamEvent
    sub_id = event_data.get("payload", {}).get("sub_id", {})
    email = sub_id.get("user", {}).get("email", None)
@@ -72,22 +65,42 @@ def _check_app_access(stream: Stream, event_data: dict) -> bool:
    return engine.passing


@actor(description=_("Send an SSF event."))
def send_ssf_event(stream_uuid: UUID, event_data: dict[str, Any]):
    self: Task = CurrentTask.get_task()
@CELERY_APP.task()
def _send_ssf_event(event_data: list[tuple[str, dict]]):
    tasks = []
    for stream, data in event_data:
        if not _check_app_access(stream, data):
            continue
        event = StreamEvent.objects.create(**data)
        tasks.extend(send_single_ssf_event(stream, str(event.uuid)))
    main_task = group(*tasks)
    main_task()

    stream = Stream.objects.filter(pk=stream_uuid).first()

def send_single_ssf_event(stream_id: str, evt_id: str):
    stream = Stream.objects.filter(pk=stream_id).first()
    if not stream:
        return
    if not _check_app_access(stream, event_data):
    event = StreamEvent.objects.filter(pk=evt_id).first()
    if not event:
        return
    event = StreamEvent.objects.create(**event_data)
    self.set_uid(event.pk)
    if event.status == SSFEventStatus.SENT:
        return
    if stream.delivery_method != DeliveryMethods.RISC_PUSH:
        return
    if stream.delivery_method == DeliveryMethods.RISC_PUSH:
        return [ssf_push_event.si(str(event.pk))]
    return []


@CELERY_APP.task(bind=True, base=SystemTask)
def ssf_push_event(self: SystemTask, event_id: str):
    self.save_on_success = False
    event = StreamEvent.objects.filter(pk=event_id).first()
    if not event:
        return
    self.set_uid(event_id)
    if event.status == SSFEventStatus.SENT:
        self.set_status(TaskStatus.SUCCESSFUL)
        return
    try:
        response = session.post(
            event.stream.endpoint_url,
@@ -97,17 +110,26 @@ def send_ssf_event(stream_uuid: UUID, event_data: dict[str, Any]):
        response.raise_for_status()
        event.status = SSFEventStatus.SENT
        event.save()
        self.set_status(TaskStatus.SUCCESSFUL)
        return
    except RequestException as exc:
        LOGGER.warning("Failed to send SSF event", exc=exc)
        self.set_status(TaskStatus.ERROR)
        attrs = {}
        if exc.response:
            attrs["response"] = {
                "content": exc.response.text,
                "status": exc.response.status_code,
            }
        self.warning(exc)
        self.warning("Failed to send request", **attrs)
        self.set_error(
            exc,
            LogEvent(
                _("Failed to send request"),
                log_level="warning",
                logger=self.__name__,
                attributes=attrs,
            ),
        )
        # Re-up the expiry of the stream event
        event.expires = now() + timedelta_from_string(event.stream.provider.event_retention)
        event.status = SSFEventStatus.PENDING_FAILED
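In the Celery branch above, _send_ssf_event builds one immutable signature per deliverable event (ssf_push_event.si(...)) and runs them all as a group. For readers unfamiliar with that primitive, a minimal self-contained sketch (celery.group and .si() are standard Celery; the app, broker, and task here are invented for illustration):

from celery import Celery, group

app = Celery("sketch")  # assumption: a real broker URL would be configured


@app.task
def push(event_id: str):
    """Deliver one event (placeholder body for this sketch)."""


# .si() builds an immutable signature (no parent result is passed in);
# calling the group enqueues all signatures to run in parallel.
group(*[push.si(str(i)) for i in range(3)])()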
@@ -13,7 +13,7 @@ from authentik.enterprise.providers.ssf.models import (
    SSFProvider,
    Stream,
)
from authentik.enterprise.providers.ssf.tasks import send_ssf_events
from authentik.enterprise.providers.ssf.tasks import send_ssf_event
from authentik.enterprise.providers.ssf.views.base import SSFView

LOGGER = get_logger()
@@ -109,7 +109,7 @@ class StreamView(SSFView):
                "User does not have permission to create stream for this provider."
            )
        instance: Stream = stream.save(provider=self.provider)
        send_ssf_events(
        send_ssf_event(
            EventTypes.SET_VERIFICATION,
            {
                "state": None,

@@ -1,5 +1,17 @@
"""Enterprise additional settings"""

from celery.schedules import crontab

from authentik.lib.utils.time import fqdn_rand

CELERY_BEAT_SCHEDULE = {
    "enterprise_update_usage": {
        "task": "authentik.enterprise.tasks.enterprise_update_usage",
        "schedule": crontab(minute=fqdn_rand("enterprise_update_usage"), hour="*/2"),
        "options": {"queue": "authentik_scheduled"},
    }
}

TENANT_APPS = [
    "authentik.enterprise.audit",
    "authentik.enterprise.policies.unique_password",

@@ -10,7 +10,6 @@ from django.utils.timezone import get_current_timezone
from authentik.enterprise.license import CACHE_KEY_ENTERPRISE_LICENSE
from authentik.enterprise.models import License
from authentik.enterprise.tasks import enterprise_update_usage
from authentik.tasks.schedules.models import Schedule


@receiver(pre_save, sender=License)
@@ -27,7 +26,7 @@ def pre_save_license(sender: type[License], instance: License, **_):
def post_save_license(sender: type[License], instance: License, **_):
    """Trigger license usage calculation when license is saved"""
    cache.delete(CACHE_KEY_ENTERPRISE_LICENSE)
    Schedule.dispatch_by_actor(enterprise_update_usage)
    enterprise_update_usage.delay()


@receiver(post_delete, sender=License)
Some files were not shown because too many files have changed in this diff.