Mirror of https://github.com/goauthentik/authentik
Synced 2026-05-07 07:32:23 +02:00

Compare commits: flows/conc... → root/move-

3 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | b3b6d562a0 |  |
|  | 42e4011c1a |  |
|  | e791742587 |  |

267 .github/actions/cherry-pick/action.yml (vendored)
@@ -1,267 +0,0 @@
name: "Cherry-picker"
description: "Cherry-pick PRs based on their labels"

inputs:
  token:
    description: "GitHub Token"
    required: true
  git_user:
    description: "Git user for pushing the cherry-pick PR"
    required: true
  git_user_email:
    description: "Git user email for pushing the cherry-pick PR"
    required: true

runs:
  using: "composite"
  steps:
    - name: Check if workflow should run
      id: should_run
      shell: bash
      env:
        GITHUB_TOKEN: ${{ inputs.token }}
      run: |
        set -e -o pipefail
        # For issues events, check if it's actually a PR
        if [ "${{ github.event_name }}" = "issues" ]; then
          # Check if this issue is actually a PR
          PR_DATA=$(gh api repos/${{ github.repository }}/pulls/${{ github.event.issue.number }} 2>/dev/null || echo "null")
          if [ "$PR_DATA" = "null" ]; then
            echo "should_run=false" >> $GITHUB_OUTPUT
            echo "reason=not_a_pr" >> $GITHUB_OUTPUT
            echo "This is an issue, not a PR. Skipping."
            exit 0
          fi

          # Get PR data
          PR_MERGED=$(echo "$PR_DATA" | jq -r '.merged')
          PR_NUMBER="${{ github.event.issue.number }}"
          MERGE_COMMIT_SHA=$(echo "$PR_DATA" | jq -r '.merge_commit_sha')

          # Check if it's a backport label
          LABEL_NAME="${{ github.event.label.name }}"
          if [[ "$LABEL_NAME" =~ ^backport/(.+)$ ]]; then
            if [ "$PR_MERGED" = "true" ]; then
              echo "should_run=true" >> $GITHUB_OUTPUT
              echo "reason=label_added_to_merged_pr" >> $GITHUB_OUTPUT
              echo "pr_number=$PR_NUMBER" >> $GITHUB_OUTPUT
              echo "merge_commit_sha=$MERGE_COMMIT_SHA" >> $GITHUB_OUTPUT
              exit 0
            else
              echo "should_run=false" >> $GITHUB_OUTPUT
              echo "reason=label_added_to_open_pr" >> $GITHUB_OUTPUT
              echo "Backport label added to open PR. Will run after PR is merged."
              exit 0
            fi
          else
            echo "should_run=false" >> $GITHUB_OUTPUT
            echo "reason=non_backport_label" >> $GITHUB_OUTPUT
            exit 0
          fi
        fi

        # For pull_request and pull_request_target events
        PR_NUMBER="${{ github.event.pull_request.number }}"
        MERGE_COMMIT_SHA="${{ github.event.pull_request.merge_commit_sha }}"

        # Case 1: PR was just merged (closed + merged = true)
        if [ "${{ github.event.action }}" = "closed" ] && [ "${{ github.event.pull_request.merged }}" = "true" ]; then
          echo "should_run=true" >> $GITHUB_OUTPUT
          echo "reason=pr_merged" >> $GITHUB_OUTPUT
          echo "pr_number=$PR_NUMBER" >> $GITHUB_OUTPUT
          echo "merge_commit_sha=$MERGE_COMMIT_SHA" >> $GITHUB_OUTPUT
          exit 0
        fi

        # Case 2: Label was added
        if [ "${{ github.event.action }}" = "labeled" ]; then
          LABEL_NAME="${{ github.event.label.name }}"
          # Check if it's a backport label
          if [[ "$LABEL_NAME" =~ ^backport/(.+)$ ]]; then
            # Check if PR is already merged
            if [ "${{ github.event.pull_request.merged }}" = "true" ]; then
              echo "should_run=true" >> $GITHUB_OUTPUT
              echo "reason=label_added_to_merged_pr" >> $GITHUB_OUTPUT
              echo "pr_number=$PR_NUMBER" >> $GITHUB_OUTPUT
              echo "merge_commit_sha=$MERGE_COMMIT_SHA" >> $GITHUB_OUTPUT
              exit 0
            else
              echo "should_run=false" >> $GITHUB_OUTPUT
              echo "reason=label_added_to_open_pr" >> $GITHUB_OUTPUT
              echo "Backport label added to open PR. Will run after PR is merged."
              exit 0
            fi
          else
            echo "should_run=false" >> $GITHUB_OUTPUT
            echo "reason=non_backport_label" >> $GITHUB_OUTPUT
            exit 0
          fi
        fi

        echo "should_run=false" >> $GITHUB_OUTPUT
        echo "reason=unknown" >> $GITHUB_OUTPUT
    - name: Configure Git
      if: steps.should_run.outputs.should_run == 'true'
      shell: bash
      env:
        user: ${{ inputs.git_user }}
        email: ${{ inputs.git_user_email }}
      run: |
        git config --global user.name "${user}"
        git config --global user.email "${email}"
    - name: Get PR details and extract backport labels
      if: steps.should_run.outputs.should_run == 'true'
      id: pr_details
      shell: bash
      env:
        GITHUB_TOKEN: ${{ inputs.token }}
      run: |
        set -e -o pipefail
        PR_NUMBER="${{ steps.should_run.outputs.pr_number }}"

        # Get PR details
        PR_DATA=$(gh api repos/${{ github.repository }}/pulls/$PR_NUMBER)
        PR_TITLE=$(echo "$PR_DATA" | jq -r '.title')
        PR_AUTHOR=$(echo "$PR_DATA" | jq -r '.user.login')

        echo "pr_title=$PR_TITLE" >> $GITHUB_OUTPUT
        echo "pr_author=$PR_AUTHOR" >> $GITHUB_OUTPUT

        # Determine which labels to process
        if [ "${{ steps.should_run.outputs.reason }}" = "label_added_to_merged_pr" ]; then
          # Only process the specific label that was just added
          if [ "${{ github.event_name }}" = "issues" ]; then
            LABEL_NAME="${{ github.event.label.name }}"
          else
            LABEL_NAME="${{ github.event.label.name }}"
          fi

          if [[ "$LABEL_NAME" =~ ^backport/(.+)$ ]]; then
            echo "labels=$LABEL_NAME" >> $GITHUB_OUTPUT
          else
            echo "Label $LABEL_NAME does not match backport pattern"
            echo "labels=" >> $GITHUB_OUTPUT
          fi
        else
          # PR was just merged, process all backport labels
          LABELS=$(gh pr view $PR_NUMBER --json labels --jq '.labels[].name' | grep '^backport/' | tr '\n' ' ' || true)
          echo "labels=$LABELS" >> $GITHUB_OUTPUT
        fi
    - name: Cherry-pick to target branches
      if: steps.should_run.outputs.should_run == 'true' && steps.pr_details.outputs.labels != ''
      shell: bash
      env:
        GITHUB_TOKEN: ${{ inputs.token }}
      run: |
        set -e -o pipefail
        PR_NUMBER='${{ steps.should_run.outputs.pr_number }}'
        COMMIT_SHA='${{ steps.should_run.outputs.merge_commit_sha }}'
        PR_TITLE='${{ steps.pr_details.outputs.pr_title }}'
        PR_AUTHOR='${{ steps.pr_details.outputs.pr_author }}'
        LABELS='${{ steps.pr_details.outputs.labels }}'

        echo "Processing PR #$PR_NUMBER (reason: ${{ steps.should_run.outputs.reason }})"
        echo "Found backport labels: $LABELS"

        # Process each backport label
        for label in $LABELS; do
          if [[ "$label" =~ ^backport/(.+)$ ]]; then
            TARGET_BRANCH="${BASH_REMATCH[1]}"
            echo "Processing backport to branch: $TARGET_BRANCH"

            # Check if target branch exists
            if ! git ls-remote --heads origin "$TARGET_BRANCH" | grep -q "$TARGET_BRANCH"; then
              echo "❌ Target branch $TARGET_BRANCH does not exist, skipping"

              # Comment on the original PR about the missing branch
              gh pr comment $PR_NUMBER --body "⚠️ Cannot backport to \`$TARGET_BRANCH\`: branch does not exist."
              continue
            fi

            # Create a unique branch name for the cherry-pick
            CHERRY_PICK_BRANCH="cherry-pick/${PR_NUMBER}-to-${TARGET_BRANCH}"

            # Check if a cherry-pick PR already exists
            EXISTING_PR=$(gh pr list --head "$CHERRY_PICK_BRANCH" --json number --jq '.[0].number' 2>/dev/null || echo "")
            if [ -n "$EXISTING_PR" ]; then
              echo "⚠️ Cherry-pick PR already exists: #$EXISTING_PR"
              gh pr comment $PR_NUMBER --body "Cherry-pick to \`$TARGET_BRANCH\` already exists: #$EXISTING_PR"
              continue
            fi

            # Fetch and checkout target branch
            git fetch origin "$TARGET_BRANCH"
            git checkout -b "$CHERRY_PICK_BRANCH" "origin/$TARGET_BRANCH"

            # Attempt cherry-pick
            if git cherry-pick "$COMMIT_SHA"; then
              echo "✅ Cherry-pick successful for $TARGET_BRANCH"

              # Push the cherry-pick branch
              git push origin "$CHERRY_PICK_BRANCH"

              # Create PR for the cherry-pick
              CHERRY_PICK_TITLE="$PR_TITLE (cherry-pick #$PR_NUMBER to $TARGET_BRANCH)"
              CHERRY_PICK_BODY="Cherry-pick of #$PR_NUMBER to \`$TARGET_BRANCH\` branch.

              **Original PR:** #$PR_NUMBER
              **Original Author:** @$PR_AUTHOR
              **Cherry-picked commit:** $COMMIT_SHA"

              NEW_PR=$(gh pr create \
                --title "$CHERRY_PICK_TITLE" \
                --body "$CHERRY_PICK_BODY" \
                --base "$TARGET_BRANCH" \
                --head "$CHERRY_PICK_BRANCH" \
                --label "cherry-pick")

              echo "✅ Created cherry-pick PR $NEW_PR for $TARGET_BRANCH"

              # Comment on original PR
              gh pr comment $PR_NUMBER --body "🍒 Cherry-pick to \`$TARGET_BRANCH\` created: $NEW_PR"

            else
              echo "⚠️ Cherry-pick failed for $TARGET_BRANCH, creating conflict resolution PR"

              # Add conflicted files and commit
              git add .
              git commit -m "Cherry-pick #$PR_NUMBER to $TARGET_BRANCH (with conflicts)

              This cherry-pick has conflicts that need manual resolution.

              Original PR: #$PR_NUMBER
              Original commit: $COMMIT_SHA"

              # Push the branch with conflicts
              git push origin "$CHERRY_PICK_BRANCH"

              # Create PR with conflict notice
              CONFLICT_TITLE="$PR_TITLE (cherry-pick #$PR_NUMBER to $TARGET_BRANCH)"
              CONFLICT_BODY="⚠️ **This cherry-pick has conflicts that require manual resolution.**

              Cherry-pick of #$PR_NUMBER to \`$TARGET_BRANCH\` branch.

              **Original PR:** #$PR_NUMBER
              **Original Author:** @$PR_AUTHOR
              **Cherry-picked commit:** $COMMIT_SHA

              **Please resolve the conflicts in this PR before merging.**"

              NEW_PR=$(gh pr create \
                --title "$CONFLICT_TITLE" \
                --body "$CONFLICT_BODY" \
                --base "$TARGET_BRANCH" \
                --head "$CHERRY_PICK_BRANCH" \
                --label "cherry-pick")

              echo "⚠️ Created conflict resolution PR $NEW_PR for $TARGET_BRANCH"

              # Comment on original PR
              gh pr comment $PR_NUMBER --body "⚠️ Cherry-pick to \`$TARGET_BRANCH\` has conflicts: $NEW_PR"
            fi

            # Clean up - go back to main branch
            git checkout main
            git branch -D "$CHERRY_PICK_BRANCH" 2>/dev/null || true
          fi
        done
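For orientation only (not part of the repository), the labeling convention the removed action relied on can be illustrated with a short shell sketch; the label and PR number below are made-up examples:

# Hypothetical example: how a backport label maps to the target branch and
# the cherry-pick branch name used by the action above.
label="backport/version-2025.8"   # example label on a merged PR
pr_number=1234                    # example PR number
if [[ "$label" =~ ^backport/(.+)$ ]]; then
    target_branch="${BASH_REMATCH[1]}"                   # -> version-2025.8
    echo "cherry-pick/${pr_number}-to-${target_branch}"  # -> cherry-pick/1234-to-version-2025.8
fi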
@@ -2,28 +2,16 @@

import os
from json import dumps
from sys import exit as sysexit
from time import time

from authentik import authentik_version


def must_or_fail(input: str | None, error: str) -> str:
    if not input:
        print(f"::error::{error}")
        sysexit(1)
    return input


# Decide if we should push the image or not
should_push = True
if len(os.environ.get("DOCKER_USERNAME", "")) < 1:
    # Don't push if we don't have DOCKER_USERNAME, i.e. no secrets are available
    should_push = False
if (
    must_or_fail(os.environ.get("GITHUB_REPOSITORY"), "Repo required").lower()
    == "goauthentik/authentik-internal"
):
if os.environ.get("GITHUB_REPOSITORY").lower() == "goauthentik/authentik-internal":
    # Don't push on the internal repo
    should_push = False

@@ -32,16 +20,13 @@ if os.environ.get("GITHUB_HEAD_REF", "") != "":
    branch_name = os.environ["GITHUB_HEAD_REF"]
safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-").replace("'", "-")

image_names = must_or_fail(os.getenv("IMAGE_NAME"), "Image name required").split(",")
image_names = os.getenv("IMAGE_NAME").split(",")
image_arch = os.getenv("IMAGE_ARCH") or None

is_pull_request = bool(os.getenv("PR_HEAD_SHA"))
is_release = "dev" not in image_names[0]

sha = must_or_fail(
    os.environ["GITHUB_SHA"] if not is_pull_request else os.getenv("PR_HEAD_SHA"),
    "could not determine SHA",
)
sha = os.environ["GITHUB_SHA"] if not is_pull_request else os.getenv("PR_HEAD_SHA")

# 2042.1.0 or 2042.1.0-rc1
version = authentik_version()

@@ -73,7 +58,7 @@ else:
    image_main_tag = image_tags[0].split(":")[-1]


def get_attest_image_names(image_with_tags: list[str]) -> str:
def get_attest_image_names(image_with_tags: list[str]):
    """Attestation only for GHCR"""
    image_tags = []
    for image_name in set(name.split(":")[0] for name in image_with_tags):

@@ -97,6 +82,7 @@ if os.getenv("RELEASE", "false").lower() == "true":
    image_build_args = [f"VERSION={os.getenv('REF')}"]
else:
    image_build_args = [f"GIT_BUILD_HASH={sha}"]
image_build_args = "\n".join(image_build_args)

with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
    print(f"shouldPush={str(should_push).lower()}", file=_output)

@@ -109,4 +95,4 @@ with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
    print(f"imageMainTag={image_main_tag}", file=_output)
    print(f"imageMainName={image_tags[0]}", file=_output)
    print(f"cacheTo={cache_to}", file=_output)
    print(f"imageBuildArgs={"\n".join(image_build_args)}", file=_output)
    print(f"imageBuildArgs={image_build_args}", file=_output)
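As a reminder of the mechanism this script relies on (a sketch only; the step id `ev` mirrors how the workflows further down consume these values), anything appended to the file referenced by `$GITHUB_OUTPUT` becomes a step output that later steps read via `steps.<id>.outputs.<name>`:

# Minimal sketch of the GITHUB_OUTPUT mechanism (values are illustrative):
echo "shouldPush=true" >> "$GITHUB_OUTPUT"
echo "imageMainTag=gh-main" >> "$GITHUB_OUTPUT"
# A later step can then be gated with:
#   if: ${{ steps.ev.outputs.shouldPush == 'true' }}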
1 .github/actions/setup/docker-compose.yml (vendored)

@@ -3,7 +3,6 @@ services:
    image: docker.io/library/postgres:${PSQL_TAG:-16}
    volumes:
      - db-data:/var/lib/postgresql/data
    command: "-c log_statement=all"
    environment:
      POSTGRES_USER: authentik
      POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
28 .github/actions/test-results/action.yml (vendored)

@@ -1,28 +0,0 @@
name: "Process test results"
description: Convert test results to JUnit, add them to GitHub Actions and codecov

inputs:
  flags:
    description: Codecov flags

runs:
  using: "composite"
  steps:
    - uses: codecov/codecov-action@v5
      with:
        flags: ${{ inputs.flags }}
        use_oidc: true
    - uses: codecov/test-results-action@v1
      with:
        flags: ${{ inputs.flags }}
        file: unittest.xml
        use_oidc: true
    - name: PostgreSQL Logs
      shell: bash
      run: |
        if [[ $ACTIONS_RUNNER_DEBUG == 'true' || $ACTIONS_STEP_DEBUG == 'true' ]]; then
          docker stop setup-postgresql-1
          echo "::group::PostgreSQL Logs"
          docker logs setup-postgresql-1
          echo "::endgroup::"
        fi
2 .github/cherry-pick-bot.yml (vendored, new file)

@@ -0,0 +1,2 @@
enabled: true
preservePullRequestTitle: true
6 .github/dependabot.yml (vendored)

@@ -77,12 +77,6 @@ updates:
        goauthentik:
          patterns:
            - "@goauthentik/*"
        react:
          patterns:
            - "react"
            - "react-dom"
            - "@types/react"
            - "@types/react-dom"
  - package-ecosystem: npm
    directory: "/website"
    schedule:
@@ -72,13 +72,6 @@ jobs:
        run: |
          mkdir -p ./gen-ts-api
          mkdir -p ./gen-go-api
      - name: Setup node
        if: ${{ !inputs.release }}
        uses: actions/setup-node@v5
        uses: actions/setup-node@v4
        with:
          node-version-file: web/package.json
          cache: "npm"
          cache-dependency-path: web/package-lock.json
      - name: generate ts client
        if: ${{ !inputs.release }}
        run: make gen-client-ts

@@ -97,7 +90,7 @@ jobs:
          platforms: linux/${{ inputs.image_arch }}
          cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
          cache-to: ${{ steps.ev.outputs.cacheTo }}
      - uses: actions/attest-build-provenance@v3
      - uses: actions/attest-build-provenance@v2
        id: attest
        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
        with:
@@ -21,7 +21,7 @@ on:

jobs:
  build-server-amd64:
    uses: ./.github/workflows/_reusable-docker-build-single.yml
    uses: ./.github/workflows/_reusable-docker-build-single.yaml
    secrets: inherit
    with:
      image_name: ${{ inputs.image_name }}

@@ -31,7 +31,7 @@ jobs:
      registry_ghcr: ${{ inputs.registry_ghcr }}
      release: ${{ inputs.release }}
  build-server-arm64:
    uses: ./.github/workflows/_reusable-docker-build-single.yml
    uses: ./.github/workflows/_reusable-docker-build-single.yaml
    secrets: inherit
    with:
      image_name: ${{ inputs.image_name }}

@@ -97,7 +97,7 @@ jobs:
          sources: |
            ${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-amd64.outputs.image-digest }}
            ${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-arm64.outputs.image-digest }}
      - uses: actions/attest-build-provenance@v3
      - uses: actions/attest-build-provenance@v2
        id: attest
        with:
          subject-name: ${{ steps.ev.outputs.attestImageNames }}
68 .github/workflows/api-py-publish.yml (vendored, new file)

@@ -0,0 +1,68 @@
---
name: API - Publish Python client

on:
  push:
    branches: [main]
    paths:
      - "schema.yml"
  workflow_dispatch:

jobs:
  build:
    if: ${{ github.repository != 'goauthentik/authentik-internal' }}
    runs-on: ubuntu-latest
    permissions:
      id-token: write
    steps:
      - id: generate_token
        uses: tibdex/github-app-token@v2
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
      - uses: actions/checkout@v5
        with:
          token: ${{ steps.generate_token.outputs.token }}
      - name: Install poetry & deps
        shell: bash
        run: |
          pipx install poetry || true
          sudo apt-get update
          sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
      - name: Setup python and restore poetry
        uses: actions/setup-python@v5
        with:
          python-version-file: "pyproject.toml"
      - name: Generate API Client
        run: make gen-client-py
      - name: Publish package
        working-directory: gen-py-api/
        run: |
          poetry build
      - name: Publish package to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          packages-dir: gen-py-api/dist/
      # We can't easily upgrade the API client being used due to poetry being poetry
      # so we'll have to rely on dependabot
      # - name: Upgrade /
      #   run: |
      #     export VERSION=$(cd gen-py-api && poetry version -s)
      #     poetry add "authentik_client=$VERSION" --allow-prereleases --lock
      # - uses: peter-evans/create-pull-request@v6
      #   id: cpr
      #   with:
      #     token: ${{ steps.generate_token.outputs.token }}
      #     branch: update-root-api-client
      #     commit-message: "root: bump API Client version"
      #     title: "root: bump API Client version"
      #     body: "root: bump API Client version"
      #     delete-branch: true
      #     signoff: true
      #     # ID from https://api.github.com/users/authentik-automation[bot]
      #     author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
      # - uses: peter-evans/enable-pull-request-automerge@v3
      #   with:
      #     token: ${{ steps.generate_token.outputs.token }}
      #     pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
      #     merge-method: squash
2 .github/workflows/api-ts-publish.yml (vendored)

@@ -21,7 +21,7 @@ jobs:
      - uses: actions/checkout@v5
        with:
          token: ${{ steps.generate_token.outputs.token }}
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: web/package.json
          registry-url: "https://registry.npmjs.org"
4 .github/workflows/ci-api-docs.yml (vendored)

@@ -33,7 +33,7 @@ jobs:

    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: website/package.json
          cache: "npm"

@@ -71,7 +71,7 @@ jobs:
        with:
          name: api-docs
          path: website/api/build
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: website/package.json
          cache: "npm"
2 .github/workflows/ci-aws-cfn.yml (vendored)

@@ -24,7 +24,7 @@ jobs:
      - uses: actions/checkout@v5
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: lifecycle/aws/package.json
          cache: "npm"
6 .github/workflows/ci-docs.yml (vendored)

@@ -33,7 +33,7 @@ jobs:

    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: website/package.json
          cache: "npm"

@@ -49,7 +49,7 @@ jobs:

    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: website/package.json
          cache: "npm"

@@ -102,7 +102,7 @@ jobs:
          context: .
          cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache
          cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }}
      - uses: actions/attest-build-provenance@v3
      - uses: actions/attest-build-provenance@v2
        id: attest
        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
        with:
40 .github/workflows/ci-main.yml (vendored)

@@ -34,7 +34,6 @@ jobs:
          - codespell
          - pending-migrations
          - ruff
          - mypy
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5

@@ -113,10 +112,6 @@ jobs:
          CI_TOTAL_RUNS: "5"
        run: |
          uv run make ci-test
      - uses: ./.github/actions/test-results
        if: ${{ always() }}
        with:
          flags: unit-migrate
  test-unittest:
    name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
    runs-on: ubuntu-latest

@@ -143,10 +138,17 @@ jobs:
          CI_TOTAL_RUNS: "5"
        run: |
          uv run make ci-test
      - uses: ./.github/actions/test-results
        if: ${{ always() }}
      - if: ${{ always() }}
        uses: codecov/codecov-action@v5
        with:
          flags: unit
          use_oidc: true
      - if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          flags: unit
          file: unittest.xml
          use_oidc: true
  test-integration:
    runs-on: ubuntu-latest
    timeout-minutes: 30

@@ -160,10 +162,17 @@ jobs:
        run: |
          uv run coverage run manage.py test tests/integration
          uv run coverage xml
      - uses: ./.github/actions/test-results
        if: ${{ always() }}
      - if: ${{ always() }}
        uses: codecov/codecov-action@v5
        with:
          flags: integration
          use_oidc: true
      - if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          flags: integration
          file: unittest.xml
          use_oidc: true
  test-e2e:
    name: test-e2e (${{ matrix.job.name }})
    runs-on: ubuntu-latest

@@ -212,10 +221,17 @@ jobs:
        run: |
          uv run coverage run manage.py test ${{ matrix.job.glob }}
          uv run coverage xml
      - uses: ./.github/actions/test-results
        if: ${{ always() }}
      - if: ${{ always() }}
        uses: codecov/codecov-action@v5
        with:
          flags: e2e
          use_oidc: true
      - if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          flags: e2e
          file: unittest.xml
          use_oidc: true
  ci-core-mark:
    if: always()
    needs:

@@ -240,7 +256,7 @@ jobs:
      # Needed for checkout
      contents: read
    needs: ci-core-mark
    uses: ./.github/workflows/_reusable-docker-build.yml
    uses: ./.github/workflows/_reusable-docker-build.yaml
    secrets: inherit
    with:
      image_name: ${{ github.repository == 'goauthentik/authentik-internal' && 'ghcr.io/goauthentik/internal-server' || 'ghcr.io/goauthentik/dev-server' }}
10 .github/workflows/ci-outpost.yml (vendored)

@@ -17,7 +17,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-go@v6
      - uses: actions/setup-go@v5
        with:
          go-version-file: "go.mod"
      - name: Prepare and generate API

@@ -38,7 +38,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-go@v6
      - uses: actions/setup-go@v5
        with:
          go-version-file: "go.mod"
      - name: Setup authentik env

@@ -115,7 +115,7 @@ jobs:
          context: .
          cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache
          cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }}
      - uses: actions/attest-build-provenance@v3
      - uses: actions/attest-build-provenance@v2
        id: attest
        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
        with:

@@ -141,10 +141,10 @@ jobs:
      - uses: actions/checkout@v5
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - uses: actions/setup-go@v6
      - uses: actions/setup-go@v5
        with:
          go-version-file: "go.mod"
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: web/package.json
          cache: "npm"
6 .github/workflows/ci-web.yml (vendored)

@@ -32,7 +32,7 @@ jobs:
          project: web
    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: ${{ matrix.project }}/package.json
          cache: "npm"

@@ -49,7 +49,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: web/package.json
          cache: "npm"

@@ -77,7 +77,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: web/package.json
          cache: "npm"
36 .github/workflows/gh-cherry-pick.yml (vendored)

@@ -1,36 +0,0 @@
name: GH - Cherry-pick

on:
  pull_request_target:
    types: [closed, labeled]

jobs:
  cherry-pick:
    runs-on: ubuntu-latest
    steps:
      - id: app-token
        name: Generate app token
        uses: actions/create-github-app-token@v2
        if: ${{ env.GH_APP_ID != '' }}
        with:
          app-id: ${{ secrets.GH_APP_ID }}
          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
        env:
          GH_APP_ID: ${{ secrets.GH_APP_ID }}
      - uses: actions/checkout@v5
        if: ${{ steps.app-token.outcome != 'skipped' }}
        with:
          fetch-depth: 0
          token: "${{ steps.app-token.outputs.token }}"
      - id: get-user-id
        if: ${{ steps.app-token.outcome != 'skipped' }}
        name: Get GitHub app user ID
        run: echo "user-id=$(gh api "/users/${{ steps.app-token.outputs.app-slug }}[bot]" --jq .id)" >> "$GITHUB_OUTPUT"
        env:
          GH_TOKEN: "${{ steps.app-token.outputs.token }}"
      - uses: ./.github/actions/cherry-pick
        if: ${{ steps.app-token.outcome != 'skipped' }}
        with:
          token: ${{ steps.app-token.outputs.token }}
          git_user: ${{ steps.app-token.outputs.app-slug }}[bot]
          git_user_email: '${{ steps.get-user-id.outputs.user-id }}+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com'
4 .github/workflows/packages-npm-publish.yml (vendored)

@@ -29,13 +29,13 @@ jobs:
      - uses: actions/checkout@v5
        with:
          fetch-depth: 2
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: ${{ matrix.package }}/package.json
          registry-url: "https://registry.npmjs.org"
      - name: Get changed files
        id: changed-files
        uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62
        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c
        with:
          files: |
            ${{ matrix.package }}/package.json
3 .github/workflows/release-branch-off.yml (vendored)

@@ -43,13 +43,10 @@ jobs:
        with:
          dependencies: python
      - name: Create version branch
        env:
          GH_TOKEN: "${{ steps.app-token.outputs.token }}"
        run: |
          current_major_version="$(uv version --short | grep -oE "^[0-9]{4}\.[0-9]{1,2}")"
          git checkout -b "version-${current_major_version}"
          git push origin "version-${current_major_version}"
          gh label create "backport/version-${current_major_version}" --description "Add this label to PRs to backport changes to version-${current_major_version}" --color "fbca04"
  bump-version-pr:
    name: Open version bump PR
    needs:
14 .github/workflows/release-publish.yml (vendored)

@@ -7,7 +7,7 @@ on:

jobs:
  build-server:
    uses: ./.github/workflows/_reusable-docker-build.yml
    uses: ./.github/workflows/_reusable-docker-build.yaml
    secrets: inherit
    permissions:
      contents: read

@@ -58,7 +58,7 @@ jobs:
          push: true
          platforms: linux/amd64,linux/arm64
          context: .
      - uses: actions/attest-build-provenance@v3
      - uses: actions/attest-build-provenance@v2
        id: attest
        if: true
        with:

@@ -84,7 +84,7 @@ jobs:
          - rac
    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-go@v6
      - uses: actions/setup-go@v5
        with:
          go-version-file: "go.mod"
      - name: Set up QEMU

@@ -124,7 +124,7 @@ jobs:
          file: ${{ matrix.type }}.Dockerfile
          platforms: linux/amd64,linux/arm64
          context: .
      - uses: actions/attest-build-provenance@v3
      - uses: actions/attest-build-provenance@v2
        id: attest
        with:
          subject-name: ${{ steps.ev.outputs.attestImageNames }}

@@ -147,10 +147,10 @@ jobs:
        goarch: [amd64, arm64]
    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-go@v6
      - uses: actions/setup-go@v5
        with:
          go-version-file: "go.mod"
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: web/package.json
          cache: "npm"

@@ -187,7 +187,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: aws-actions/configure-aws-credentials@v5
      - uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: "arn:aws:iam::016170277896:role/github_goauthentik_authentik"
          aws-region: ${{ env.AWS_REGION }}
2 .github/workflows/repo-stale.yml (vendored)

@@ -20,7 +20,7 @@ jobs:
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
      - uses: actions/stale@v10
      - uses: actions/stale@v9
        with:
          repo-token: ${{ steps.generate_token.outputs.token }}
          days-before-stale: 60
4 .github/workflows/translation-advice.yml (vendored)

@@ -20,14 +20,14 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Find Comment
        uses: peter-evans/find-comment@v4
        uses: peter-evans/find-comment@v3
        id: fc
        with:
          issue-number: ${{ github.event.pull_request.number }}
          comment-author: "github-actions[bot]"
          body-includes: authentik translations instructions
      - name: Create or update comment
        uses: peter-evans/create-or-update-comment@v5
        uses: peter-evans/create-or-update-comment@v4
        with:
          comment-id: ${{ steps.fc.outputs.comment-id }}
          issue-number: ${{ github.event.pull_request.number }}
12 .vscode/settings.json (vendored)

@@ -1,16 +1,4 @@
{
    "[css]": {
        "editor.minimap.markSectionHeaderRegex": "#\\bregion\\s*(?<separator>-?)\\s*(?<label>.*)\\*/$"
    },
    "[makefile]": {
        "editor.minimap.markSectionHeaderRegex": "^#{25}\n##\\s\\s*(?<separator>-?)\\s*(?<label>[^\n]*)\n#{25}$"
    },
    "[dockerfile]": {
        "editor.minimap.markSectionHeaderRegex": "\\bStage\\s*\\d:(?<separator>-?)\\s*(?<label>.*)$"
    },
    "[jsonc]": {
        "editor.minimap.markSectionHeaderRegex": "#\\bregion\\s*(?<separator>-?)\\s*(?<label>.*)$"
    },
    "todo-tree.tree.showCountsInTree": true,
    "todo-tree.tree.showBadges": true,
    "yaml.customTags": [
@@ -24,7 +24,6 @@ Makefile @goauthentik/infrastructure
.editorconfig @goauthentik/infrastructure
CODEOWNERS @goauthentik/infrastructure
# Backend packages
packages/django-postgres-cache @goauthentik/backend
packages/django-dramatiq-postgres @goauthentik/backend
# Web packages
packages/docusaurus-config @goauthentik/frontend

@@ -34,12 +33,17 @@ packages/prettier-config @goauthentik/frontend
packages/tsconfig @goauthentik/frontend
# Web
web/ @goauthentik/frontend
tests/wdio/ @goauthentik/frontend
# Locale
locale/ @goauthentik/backend @goauthentik/frontend
web/xliff/ @goauthentik/backend @goauthentik/frontend
# Docs
# Docs & Website
docs/ @goauthentik/docs
# TODO Remove after moving website to docs
website/ @goauthentik/docs
CODE_OF_CONDUCT.md @goauthentik/docs
# Security
SECURITY.md @goauthentik/security @goauthentik/docs
# TODO Remove after moving website to docs
website/security/ @goauthentik/security @goauthentik/docs
docs/security/ @goauthentik/security @goauthentik/docs
@@ -1,4 +0,0 @@
# Contributing to authentik

Thanks for your interest in contributing! Please see our [contributing guide](https://docs.goauthentik.io/docs/developer-docs/?utm_source=github) for more information.

1 CONTRIBUTING.md (symbolic link)

@@ -0,0 +1 @@
website/docs/developer-docs/index.md
46 Dockerfile

@@ -26,7 +26,7 @@ RUN npm run build && \
    npm run build:sfe

# Stage 2: Build go proxy
FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.25.1-bookworm AS go-builder
FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.25-bookworm AS go-builder

ARG TARGETOS
ARG TARGETARCH

@@ -76,12 +76,12 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
    /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

# Stage 4: Download uv
FROM ghcr.io/astral-sh/uv:0.8.22 AS uv
FROM ghcr.io/astral-sh/uv:0.8.11 AS uv
# Stage 5: Base python image
FROM ghcr.io/goauthentik/fips-python:3.13.7-slim-trixie-fips AS python-base
FROM ghcr.io/goauthentik/fips-python:3.13.6-slim-bookworm-fips AS python-base

ENV VENV_PATH="/ak-root/.venv" \
    PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \
    PATH="/ak-root/lifecycle:/ak-root/venv/bin:$PATH" \
    UV_COMPILE_BYTECODE=1 \
    UV_LINK_MODE=copy \
    UV_NATIVE_TLS=1 \

@@ -119,11 +119,7 @@ RUN --mount=type=cache,id=apt-$TARGETARCH$TARGETVARIANT,sharing=locked,target=/v
    libltdl-dev && \
    curl https://sh.rustup.rs -sSf | sh -s -- -y

ENV UV_NO_BINARY_PACKAGE="cryptography lxml python-kadmin-rs xmlsec" \
    # https://github.com/rust-lang/rustup/issues/2949
    # Fixes issues where the rust version in the build cache is older than latest
    # and rustup tries to update it, which fails
    RUSTUP_PERMIT_COPY_RENAME="true"
ENV UV_NO_BINARY_PACKAGE="cryptography lxml python-kadmin-rs xmlsec"

RUN --mount=type=bind,target=pyproject.toml,src=pyproject.toml \
    --mount=type=bind,target=uv.lock,src=uv.lock \

@@ -149,8 +145,6 @@ LABEL org.opencontainers.image.authors="Authentik Security Inc." \
    org.opencontainers.image.vendor="Authentik Security Inc." \
    org.opencontainers.image.version=${VERSION}

WORKDIR /

# We cannot cache this layer otherwise we'll end up with a bigger image
RUN apt-get update && \
    apt-get upgrade -y && \

@@ -161,28 +155,26 @@ RUN apt-get update && \
    pip3 install --no-cache-dir --upgrade pip && \
    apt-get clean && \
    rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/ && \
    adduser --system --no-create-home --uid 1000 --group --home /authentik authentik && \
    adduser --system --no-create-home --uid 1000 --group --home /ak-root authentik && \
    mkdir -p /certs /media /blueprints && \
    mkdir -p /authentik/.ssh && \
    mkdir -p /ak-root && \
    chown authentik:authentik /certs /media /authentik/.ssh /ak-root
    mkdir -p /ak-root/authentik/.ssh && \
    chown authentik:authentik /certs /media /ak-root/authentik/.ssh /ak-root

COPY ./authentik/ /authentik
COPY ./pyproject.toml /
COPY ./uv.lock /
COPY ./schemas /schemas
COPY ./locale /locale
COPY ./tests /tests
COPY ./manage.py /
COPY ./authentik/ /ak-root/authentik
COPY ./pyproject.toml /ak-root/
COPY ./uv.lock /ak-root/
COPY ./schemas /ak-root/schemas
COPY ./locale /ak-root/locale
COPY ./tests /ak-root/tests
COPY ./manage.py /ak-root/
COPY ./blueprints /blueprints
COPY ./lifecycle/ /lifecycle
COPY ./lifecycle/ /ak-root/lifecycle
COPY ./authentik/sources/kerberos/krb5.conf /etc/krb5.conf
COPY --from=go-builder /go/authentik /bin/authentik
COPY ./packages/ /ak-root/packages
RUN ln -s /ak-root/packages /packages
COPY --from=python-deps /ak-root/.venv /ak-root/.venv
COPY --from=node-builder /work/web/dist/ /web/dist/
COPY --from=node-builder /work/web/authentik/ /web/authentik/
COPY --from=node-builder /work/web/dist/ /ak-root/web/dist/
COPY --from=node-builder /work/web/authentik/ /ak-root/web/authentik/
COPY --from=geoip /usr/share/GeoIP /geoip

USER 1000

@@ -194,4 +186,6 @@ ENV TMPDIR=/dev/shm/ \

HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 CMD [ "ak", "healthcheck" ]

WORKDIR /ak-root

ENTRYPOINT [ "dumb-init", "--", "ak" ]
74 Makefile

@@ -18,24 +18,7 @@ pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/
pg_name := $(shell uv run python -m authentik.lib.config postgresql.name 2>/dev/null)
redis_db := $(shell uv run python -m authentik.lib.config redis.db 2>/dev/null)

UNAME := $(shell uname)

# For macOS users, add the libxml2 installed from brew libxmlsec1 to the build path
# to prevent SAML-related tests from failing and ensure correct pip dependency compilation
ifeq ($(UNAME), Darwin)
# Only add for brew users who installed libxmlsec1
BREW_EXISTS := $(shell command -v brew 2> /dev/null)
ifdef BREW_EXISTS
LIBXML2_EXISTS := $(shell brew list libxml2 2> /dev/null)
ifdef LIBXML2_EXISTS
BREW_LDFLAGS := -L$(shell brew --prefix libxml2)/lib $(LDFLAGS)
BREW_CPPFLAGS := -I$(shell brew --prefix libxml2)/include $(CPPFLAGS)
BREW_PKG_CONFIG_PATH := $(shell brew --prefix libxml2)/lib/pkgconfig:$(PKG_CONFIG_PATH)
endif
endif
endif

all: lint-fix lint gen web test ## Lint, build, and test everything
all: lint-fix lint test gen web ## Lint, build, and test everything

HELP_WIDTH := $(shell grep -h '^[a-z][^ ]*:.*\#\#' $(MAKEFILE_LIST) 2>/dev/null | \
    cut -d':' -f1 | awk '{printf "%d\n", length}' | sort -rn | head -1)

@@ -67,14 +50,7 @@ lint: ## Lint the python and golang sources
    golangci-lint run -v

core-install:
ifdef LIBXML2_EXISTS
    # Clear cache to ensure fresh compilation
    uv cache clean
    # Force compilation from source for lxml and xmlsec with correct environment
    LDFLAGS="$(BREW_LDFLAGS)" CPPFLAGS="$(BREW_CPPFLAGS)" PKG_CONFIG_PATH="$(BREW_PKG_CONFIG_PATH)" uv sync --frozen --reinstall-package lxml --reinstall-package xmlsec --no-binary-package lxml --no-binary-package xmlsec
else
    uv sync --frozen
endif

migrate: ## Run the Authentik Django server's migrations
    uv run python -m lifecycle.migrate

@@ -184,7 +160,7 @@ gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescri
    docker run \
        --rm -v ${PWD}:/local \
        --user ${UID}:${GID} \
        docker.io/openapitools/openapi-generator-cli:v7.15.0 generate \
        docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
        -i /local/schema.yml \
        -g typescript-fetch \
        -o /local/${GEN_API_TS} \

@@ -193,7 +169,6 @@ gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescri
        --git-repo-id authentik \
        --git-user-id goauthentik

    cd ${PWD}/${GEN_API_TS} && npm i
    cd ${PWD}/${GEN_API_TS} && npm link
    cd ${PWD}/web && npm link @goauthentik/api

@@ -201,7 +176,7 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python
    docker run \
        --rm -v ${PWD}:/local \
        --user ${UID}:${GID} \
        docker.io/openapitools/openapi-generator-cli:v7.15.0 generate \
        docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
        -i /local/schema.yml \
        -g python \
        -o /local/${GEN_API_PY} \

@@ -239,30 +214,34 @@ node-install: ## Install the necessary libraries to build Node.js packages
#########################

web-build: node-install ## Build the Authentik UI
    npm run --prefix web build
    cd web && npm run build

web: web-lint-fix web-lint web-check-compile ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it

web-test: ## Run tests for the Authentik UI
    npm run --prefix web test
    cd web && npm run test

web-watch: ## Build and watch the Authentik UI for changes, updating automatically
    npm run --prefix web watch
    rm -rf web/dist/
    mkdir web/dist/
    touch web/dist/.gitkeep
    cd web && npm run watch

web-storybook-watch: ## Build and run the storybook documentation server
    npm run --prefix web storybook
    cd web && npm run storybook

web-lint-fix:
    npm run --prefix web prettier
    cd web && npm run prettier

web-lint:
    npm run --prefix web lint
    npm run --prefix web lit-analyse
    cd web && npm run lint
    cd web && npm run lit-analyse

web-check-compile:
    npm run --prefix web tsc
    cd web && npm run tsc

web-i18n-extract:
    npm run --prefix web extract-locales
    cd web && npm run extract-locales

#########################
## Docs

@@ -274,31 +253,31 @@ docs-install:
    npm ci --prefix website

docs-lint-fix: lint-codespell
    npm run --prefix website prettier
    npm run prettier --prefix website

docs-build:
    npm run --prefix website build
    npm run build --prefix website

docs-watch: ## Build and watch the topics documentation
    npm run --prefix website start
    npm run start --prefix website

integrations: docs-lint-fix integrations-build ## Fix formatting issues in the integrations source code, lint the code, and compile it

integrations-build:
    npm run --prefix website -w integrations build
    npm run build --prefix website -w integrations

integrations-watch: ## Build and watch the Integrations documentation
    npm run --prefix website -w integrations start
    npm run start --prefix website -w integrations

docs-api-build:
    npm run --prefix website -w api build
    npm run build --prefix website -w api

docs-api-watch: ## Build and watch the API documentation
    npm run --prefix website -w api build:api
    npm run --prefix website -w api start
    npm run build:api --prefix website -w api
    npm run start --prefix website -w api

docs-api-clean: ## Clean generated API documentation
    npm run --prefix website -w api build:api:clean
    npm run build:api:clean --prefix website -w api

#########################
## Docker

@@ -321,9 +300,6 @@ ci--meta-debug:
    python -V
    node --version

ci-mypy: ci--meta-debug
    uv run mypy --strict $(PY_SOURCES)

ci-black: ci--meta-debug
    uv run black --check $(PY_SOURCES)
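A note on the recurring pattern in this Makefile diff (a sketch, not repository content): `npm run <script> --prefix <dir>` and `cd <dir> && npm run <script>` are equivalent ways to run a package script; `--prefix` simply avoids changing the shell's working directory.

# Both invocations run the "build" script defined in web/package.json:
npm run build --prefix web
cd web && npm run build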
28 README.md

@@ -9,21 +9,21 @@
[![CI Build status](https://github.com/goauthentik/authentik/actions/workflows/ci-outpost.yml/badge.svg)](https://github.com/goauthentik/authentik/actions/workflows/ci-outpost.yml)
[![CI Build status](https://github.com/goauthentik/authentik/actions/workflows/ci-web.yml/badge.svg)](https://github.com/goauthentik/authentik/actions/workflows/ci-web.yml)
[![Code Coverage](https://img.shields.io/codecov/c/gh/goauthentik/authentik?style=flat-square)](https://codecov.io/gh/goauthentik/authentik)
![Docker pulls](https://img.shields.io/docker/pulls/beryju/authentik.svg?style=flat-square)
![Latest version](https://img.shields.io/docker/v/beryju/authentik?sort=semver&style=flat-square)
[![](https://img.shields.io/badge/Help%20translate-transifex-blue?style=flat-square)](https://www.transifex.com/authentik/authentik/)

## What is authentik?

authentik is an open-source Identity Provider (IdP) for modern SSO. It supports SAML, OAuth2/OIDC, LDAP, RADIUS, and more, designed for self-hosting from small labs to large production clusters.
authentik is an open-source Identity Provider that emphasizes flexibility and versatility, with support for a wide set of protocols.

Our [enterprise offering](https://goauthentik.io/pricing) is available for organizations to securely replace existing IdPs such as Okta, Auth0, Entra ID, and Ping Identity for robust, large-scale identity management.
Our [enterprise offer](https://goauthentik.io/pricing) can also be used as a self-hosted replacement for large-scale deployments of Okta/Auth0, Entra ID, Ping Identity, or other legacy IdPs for employees and B2B2C use.

## Installation

- Docker Compose: recommended for small/test setups. See the [documentation](https://docs.goauthentik.io/docs/install-config/install/docker-compose/).
- Kubernetes (Helm Chart): recommended for larger setups. See the [documentation](https://docs.goauthentik.io/docs/install-config/install/kubernetes/) and the Helm chart [repository](https://github.com/goauthentik/helm).
- AWS CloudFormation: deploy on AWS using our official templates. See the [documentation](https://docs.goauthentik.io/docs/install-config/install/aws/).
- DigitalOcean Marketplace: one-click deployment via the official Marketplace app. See the [app listing](https://marketplace.digitalocean.com/apps/authentik).
For small/test setups it is recommended to use Docker Compose; refer to the [documentation](https://goauthentik.io/docs/installation/docker-compose/?utm_source=github).

For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/?utm_source=github).

## Screenshots

@@ -32,20 +32,14 @@ Our [enterprise offering](https://goauthentik.io/pricing) is available for organ
| ![](https://docs.goauthentik.io/img/screen_apps_light.jpg) | ![](https://docs.goauthentik.io/img/screen_admin_light.jpg) |
| ![](https://docs.goauthentik.io/img/screen_apps_dark.jpg) | ![](https://docs.goauthentik.io/img/screen_admin_dark.jpg) |

## Development and contributions
## Development

See the [Developer Documentation](https://docs.goauthentik.io/docs/developer-docs/) for information about setting up local build environments, testing your contributions, and our contribution process.
See [Developer Documentation](https://docs.goauthentik.io/docs/developer-docs/?utm_source=github)

## Security

Please see [SECURITY.md](SECURITY.md).
See [SECURITY.md](SECURITY.md)

## Adoption
## Adoption and Contributions

Using authentik? We'd love to hear your story and feature your logo. Email us at [hello@goauthentik.io](mailto:hello@goauthentik.io) or open a GitHub Issue/PR!

## License

[![License badge](https://img.shields.io/github/license/goauthentik/authentik?style=flat-square)](LICENSE)
[![License badge](https://img.shields.io/badge/website-MIT-blue?style=flat-square)](website/LICENSE)
[![License badge](https://img.shields.io/badge/enterprise-source%20available-orange?style=flat-square)](authentik/enterprise/LICENSE)
Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [contribution guide](https://docs.goauthentik.io/docs/developer-docs?utm_source=github).
25 SECURITY.md

@@ -20,33 +20,12 @@ Even if the issue is not a CVE, we still greatly appreciate your help in hardeni

| Version  | Supported |
| -------- | --------- |
| 2025.4.x | ✅        |
| 2025.6.x | ✅        |
| 2025.8.x | ✅        |

## Reporting a Vulnerability

If you discover a potential vulnerability, please report it responsibly through one of the following channels:

- **Email**: [security@goauthentik.io](mailto:security@goauthentik.io)
- **GitHub**: Submit a private security advisory via our [repository’s advisory portal](https://github.com/goauthentik/authentik/security/advisories/new)

When submitting a report, please include as much detail as possible, such as:

- **Affected version(s)**: The version of authentik where the issue was identified.
- **Steps to reproduce**: A clear description or proof of concept to help us verify the issue.
- **Impact assessment**: How the vulnerability could be exploited and its potential effect.
- **Additional information**: Logs, configuration details (if relevant), or any suggested mitigations.

We kindly ask that you do not disclose the vulnerability publicly until we have confirmed and addressed the issue.

Our team will:

- Acknowledge receipt of your report as quickly as possible.
- Keep you updated on the investigation and resolution progress.

## Researcher Recognition

We value contributions from the security community. For each valid report, we will publish a dedicated entry on our Security Advisory page that optionally includes the reporter’s name (or preferred alias). Please note that while we do not currently offer monetary bounties, we are committed to giving researchers appropriate credit for their efforts in keeping authentik secure.
To report a vulnerability, send an email to [security@goauthentik.io](mailto:security@goauthentik.io). Be sure to include relevant information like which version you've found the issue in, instructions on how to reproduce the issue, and anything else that might make it easier for us to find the issue.

## Severity levels
@@ -1,9 +0,0 @@
from django.dispatch import receiver

from authentik.admin.tasks import _set_prom_info
from authentik.root.signals import post_startup


@receiver(post_startup)
def post_startup_admin_metrics(sender, **_):
    _set_prom_info()
@@ -2,6 +2,7 @@

from django.core.cache import cache
from django.utils.translation import gettext_lazy as _
from django_dramatiq_postgres.middleware import CurrentTask
from dramatiq import actor
from packaging.version import parse
from requests import RequestException

@@ -12,7 +13,7 @@ from authentik.admin.apps import PROM_INFO
from authentik.events.models import Event, EventAction
from authentik.lib.config import CONFIG
from authentik.lib.utils.http import get_http_session
from authentik.tasks.middleware import CurrentTask
from authentik.tasks.models import Task

LOGGER = get_logger()
VERSION_NULL = "0.0.0"

@@ -34,7 +35,7 @@ def _set_prom_info():

@actor(description=_("Update latest version info."))
def update_latest_version():
    self = CurrentTask.get_task()
    self: Task = CurrentTask.get_task()
    if CONFIG.get_bool("disable_update_check"):
        cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
        self.info("Version check disabled.")

@@ -71,3 +72,6 @@ def update_latest_version():
    except (RequestException, IndexError) as exc:
        cache.set(VERSION_CACHE_KEY, VERSION_NULL, VERSION_CACHE_TIMEOUT)
        raise exc


_set_prom_info()
@@ -1,8 +1,5 @@
|
||||
"""Error Response schema, from https://github.com/axnsan12/drf-yasg/issues/224"""
|
||||
|
||||
from collections.abc import Callable
|
||||
from typing import Any
|
||||
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from drf_spectacular.generators import SchemaGenerator
|
||||
from drf_spectacular.plumbing import (
|
||||
@@ -11,7 +8,6 @@ from drf_spectacular.plumbing import (
|
||||
build_basic_type,
|
||||
build_object_type,
|
||||
)
|
||||
from drf_spectacular.renderers import OpenApiJsonRenderer
|
||||
from drf_spectacular.settings import spectacular_settings
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from rest_framework.settings import api_settings
|
||||
@@ -19,28 +15,34 @@ from rest_framework.settings import api_settings
|
||||
from authentik.api.apps import AuthentikAPIConfig
|
||||
from authentik.api.pagination import PAGINATION_COMPONENT_NAME, PAGINATION_SCHEMA
|
||||
|
||||
|
||||
def build_standard_type(obj, **kwargs):
|
||||
"""Build a basic type with optional add owns."""
|
||||
schema = build_basic_type(obj)
|
||||
schema.update(kwargs)
|
||||
return schema
|
||||
|
||||
|
||||
GENERIC_ERROR = build_object_type(
|
||||
description=_("Generic API Error"),
|
||||
properties={
|
||||
"detail": build_basic_type(OpenApiTypes.STR),
|
||||
"code": build_basic_type(OpenApiTypes.STR),
|
||||
"detail": build_standard_type(OpenApiTypes.STR),
|
||||
"code": build_standard_type(OpenApiTypes.STR),
|
||||
},
|
||||
required=["detail"],
|
||||
)
|
||||
VALIDATION_ERROR = build_object_type(
|
||||
description=_("Validation Error"),
|
||||
properties={
|
||||
api_settings.NON_FIELD_ERRORS_KEY: build_array_type(build_basic_type(OpenApiTypes.STR)),
|
||||
"code": build_basic_type(OpenApiTypes.STR),
|
||||
api_settings.NON_FIELD_ERRORS_KEY: build_array_type(build_standard_type(OpenApiTypes.STR)),
|
||||
"code": build_standard_type(OpenApiTypes.STR),
|
||||
},
|
||||
required=[],
|
||||
additionalProperties={},
|
||||
)
|
||||
|
||||
|
||||
def create_component(
    generator: SchemaGenerator, name: str, schema: Any, type_=ResolvedComponent.SCHEMA
) -> ResolvedComponent:
def create_component(generator: SchemaGenerator, name, schema, type_=ResolvedComponent.SCHEMA):
    """Register a component and return a reference to it."""
    component = ResolvedComponent(
        name=name,
@@ -52,18 +54,7 @@ def create_component(
    return component


def preprocess_schema_exclude_non_api(endpoints: list[tuple[str, Any, Any, Callable]], **kwargs):
    """Filter out all API Views which are not mounted under /api"""
    return [
        (path, path_regex, method, callback)
        for path, path_regex, method, callback in endpoints
        if path.startswith("/" + AuthentikAPIConfig.mountpoint)
    ]


def postprocess_schema_responses(
    result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):
    """Workaround to set a default response for endpoints.
    Workaround suggested at
    <https://github.com/tfranzel/drf-spectacular/issues/119#issuecomment-656970357>
@@ -113,81 +104,10 @@ def postprocess_schema_responses(
    return result


def postprocess_schema_pagination(
    result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
    """Optimise pagination parameters, instead of redeclaring parameters for each endpoint
    declare them globally and refer to them"""
    to_replace = {
        "ordering": create_component(
            generator,
            "QueryPaginationOrdering",
            {
                "name": "ordering",
                "required": False,
                "in": "query",
                "description": "Which field to use when ordering the results.",
                "schema": {"type": "string"},
            },
            ResolvedComponent.PARAMETER,
        ),
        "page": create_component(
            generator,
            "QueryPaginationPage",
            {
                "name": "page",
                "required": False,
                "in": "query",
                "description": "A page number within the paginated result set.",
                "schema": {"type": "integer"},
            },
            ResolvedComponent.PARAMETER,
        ),
        "page_size": create_component(
            generator,
            "QueryPaginationPageSize",
            {
                "name": "page_size",
                "required": False,
                "in": "query",
                "description": "Number of results to return per page.",
                "schema": {"type": "integer"},
            },
            ResolvedComponent.PARAMETER,
        ),
        "search": create_component(
            generator,
            "QuerySearch",
            {
                "name": "search",
                "required": False,
                "in": "query",
                "description": "A search term.",
                "schema": {"type": "string"},
            },
            ResolvedComponent.PARAMETER,
        ),
    }
    for path in result["paths"].values():
        for method in path.values():
            for idx, param in enumerate(method.get("parameters", [])):
                for replace_name, replace_ref in to_replace.items():
                    if param["name"] == replace_name:
                        method["parameters"][idx] = replace_ref.ref
    return result


def postprocess_schema_remove_unused(
    result: dict[str, Any], generator: SchemaGenerator, **kwargs
) -> dict[str, Any]:
    """Remove unused components"""
    # To check if the schema is used, render it to JSON and then substring check that
    # less efficient than walking through the tree but a lot simpler and no
    # possibility that we miss something
    raw = OpenApiJsonRenderer().render(result, renderer_context={}).decode()
    for key in result["components"][ResolvedComponent.SCHEMA].keys():
        if raw.count(key) > 1:
            continue
        del generator.registry._components[(key, ResolvedComponent.SCHEMA)]
    result["components"] = generator.registry.build(spectacular_settings.APPEND_COMPONENTS)
    return result
def preprocess_schema_exclude_non_api(endpoints, **kwargs):
    """Filter out all API Views which are not mounted under /api"""
    return [
        (path, path_regex, method, callback)
        for path, path_regex, method, callback in endpoints
        if path.startswith("/" + AuthentikAPIConfig.mountpoint)
    ]
@@ -76,7 +76,6 @@ from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
from authentik.rbac.models import Role
from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType
from authentik.stages.consent.models import UserConsent
from authentik.tasks.models import Task
from authentik.tenants.models import Tenant

@@ -136,7 +135,6 @@ def excluded_models() -> list[type[Model]]:
        EndpointDeviceConnection,
        DeviceToken,
        StreamEvent,
        UserConsent,
    )
@@ -12,7 +12,7 @@ from django.db import DatabaseError, InternalError, ProgrammingError
from django.utils.text import slugify
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from django_dramatiq_postgres.middleware import CurrentTaskNotFound
from django_dramatiq_postgres.middleware import CurrentTask, CurrentTaskNotFound
from dramatiq.actor import actor
from dramatiq.middleware import Middleware
from structlog.stdlib import get_logger
@@ -38,8 +38,6 @@ from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.events.logs import capture_logs
from authentik.events.utils import sanitize_dict
from authentik.lib.config import CONFIG
from authentik.tasks.apps import PRIORITY_HIGH
from authentik.tasks.middleware import CurrentTask
from authentik.tasks.models import Task
from authentik.tasks.schedules.models import Schedule
from authentik.tenants.models import Tenant
@@ -113,7 +111,6 @@ class BlueprintEventHandler(FileSystemEventHandler):
@actor(
    description=_("Find blueprints as `blueprints_find` does, but return a safe dict."),
    throws=(DatabaseError, ProgrammingError, InternalError),
    priority=PRIORITY_HIGH,
)
def blueprints_find_dict():
    blueprints = []
@@ -156,7 +153,7 @@ def blueprints_find() -> list[BlueprintFile]:
    throws=(DatabaseError, ProgrammingError, InternalError),
)
def blueprints_discovery(path: str | None = None):
    self = CurrentTask.get_task()
    self: Task = CurrentTask.get_task()
    count = 0
    for blueprint in blueprints_find():
        if path and blueprint.path != path:
@@ -196,7 +193,7 @@ def check_blueprint_v1_file(blueprint: BlueprintFile):
@actor(description=_("Apply single blueprint."))
def apply_blueprint(instance_pk: UUID):
    try:
        self = CurrentTask.get_task()
        self: Task = CurrentTask.get_task()
    except CurrentTaskNotFound:
        self = Task()
    self.set_uid(str(instance_pk))
@@ -113,7 +113,7 @@ class Brand(SerializerModel):
        try:
            return self.attributes.get("settings", {}).get("locale", "")

        except Exception as exc:  # noqa
        except Exception as exc:
            LOGGER.warning("Failed to get default locale", exc=exc)
            return ""
@@ -29,8 +29,8 @@ from authentik.rbac.api.roles import RoleSerializer
from authentik.rbac.decorators import permission_required


class PartialUserSerializer(ModelSerializer):
    """Partial User Serializer, does not include child relations."""
class GroupMemberSerializer(ModelSerializer):
    """Stripped down user serializer to show relevant users for groups"""

    attributes = JSONDictField(required=False)
    uid = CharField(read_only=True)
@@ -94,11 +94,11 @@ class GroupSerializer(ModelSerializer):
            return True
        return str(request.query_params.get("include_children", "false")).lower() == "true"

    @extend_schema_field(PartialUserSerializer(many=True))
    def get_users_obj(self, instance: Group) -> list[PartialUserSerializer] | None:
    @extend_schema_field(GroupMemberSerializer(many=True))
    def get_users_obj(self, instance: Group) -> list[GroupMemberSerializer] | None:
        if not self._should_include_users:
            return None
        return PartialUserSerializer(instance.users, many=True).data
        return GroupMemberSerializer(instance.users, many=True).data

    @extend_schema_field(GroupChildSerializer(many=True))
    def get_children_obj(self, instance: Group) -> list[GroupChildSerializer] | None:
@@ -295,7 +295,7 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
    @extend_schema(
        request=UserAccountSerializer,
        responses={
            204: OpenApiResponse(description="User removed"),
            204: OpenApiResponse(description="User added"),
            404: OpenApiResponse(description="User not found"),
        },
    )
@@ -307,7 +307,7 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
        permission_classes=[],
    )
    def remove_user(self, request: Request, pk: str) -> Response:
        """Remove user from group"""
        """Add user to group"""
        group: Group = self.get_object()
        user: User = (
            get_objects_for_user(request.user, "authentik_core.view_user")

@@ -171,7 +171,7 @@ class PropertyMappingViewSet(
        except PropertyMappingExpressionException as exc:
            response_data["result"] = exception_to_string(exc.exc)
            response_data["successful"] = False
        except Exception as exc:  # noqa
        except Exception as exc:
            response_data["result"] = exception_to_string(exc)
            response_data["successful"] = False
        response = PropertyMappingTestResultSerializer(response_data)
@@ -97,8 +97,8 @@ class ParamUserSerializer(PassiveSerializer):
    user = PrimaryKeyRelatedField(queryset=User.objects.all().exclude_anonymous(), required=False)


class PartialGroupSerializer(ModelSerializer):
    """Partial Group Serializer, does not include child relations."""
class UserGroupSerializer(ModelSerializer):
    """Simplified Group Serializer for user's groups"""

    attributes = JSONDictField(required=False)
    parent_name = CharField(source="parent.name", read_only=True, allow_null=True)
@@ -143,11 +143,11 @@ class UserSerializer(ModelSerializer):
            return True
        return str(request.query_params.get("include_groups", "true")).lower() == "true"

    @extend_schema_field(PartialGroupSerializer(many=True))
    def get_groups_obj(self, instance: User) -> list[PartialGroupSerializer] | None:
    @extend_schema_field(UserGroupSerializer(many=True))
    def get_groups_obj(self, instance: User) -> list[UserGroupSerializer] | None:
        if not self._should_include_groups:
            return None
        return PartialGroupSerializer(instance.ak_groups, many=True).data
        return UserGroupSerializer(instance.ak_groups, many=True).data

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
@@ -328,27 +328,6 @@ class SessionUserSerializer(PassiveSerializer):
    original = UserSelfSerializer(required=False)


class UserPasswordSetSerializer(PassiveSerializer):
    """Payload to set a users' password directly"""

    password = CharField(required=True)


class UserServiceAccountSerializer(PassiveSerializer):
    """Payload to create a service account"""

    name = CharField(
        required=True,
        validators=[UniqueValidator(queryset=User.objects.all().order_by("username"))],
    )
    create_group = BooleanField(default=False)
    expiring = BooleanField(default=True)
    expires = DateTimeField(
        required=False,
        help_text="If not provided, valid for 360 days",
    )


class UsersFilter(FilterSet):
    """Filter for users"""
@@ -509,7 +488,18 @@ class UserViewSet(UsedByMixin, ModelViewSet):

    @permission_required(None, ["authentik_core.add_user", "authentik_core.add_token"])
    @extend_schema(
        request=UserServiceAccountSerializer,
        request=inline_serializer(
            "UserServiceAccountSerializer",
            {
                "name": CharField(required=True),
                "create_group": BooleanField(default=False),
                "expiring": BooleanField(default=True),
                "expires": DateTimeField(
                    required=False,
                    help_text="If not provided, valid for 360 days",
                ),
            },
        ),
        responses={
            200: inline_serializer(
                "UserServiceAccountResponse",
@@ -531,12 +521,11 @@ class UserViewSet(UsedByMixin, ModelViewSet):
    )
    def service_account(self, request: Request) -> Response:
        """Create a new user account that is marked as a service account"""
        data = UserServiceAccountSerializer(data=request.data)
        data.is_valid(raise_exception=True)
        expires = data.validated_data.get("expires", now() + timedelta(days=360))
        username = request.data.get("name")
        create_group = request.data.get("create_group", False)
        expiring = request.data.get("expiring", True)
        expires = request.data.get("expires", now() + timedelta(days=360))

        username = data.validated_data["name"]
        expiring = data.validated_data["expiring"]
        with atomic():
            try:
                user: User = User.objects.create(
@@ -554,10 +543,10 @@ class UserViewSet(UsedByMixin, ModelViewSet):
                    "user_uid": user.uid,
                    "user_pk": user.pk,
                }
                if data.validated_data["create_group"] and self.request.user.has_perm(
                    "authentik_core.add_group"
                ):
                    group = Group.objects.create(name=username)
                if create_group and self.request.user.has_perm("authentik_core.add_group"):
                    group = Group.objects.create(
                        name=username,
                    )
                    group.users.add(user)
                    response["group_pk"] = str(group.pk)
                token = Token.objects.create(
@@ -570,29 +559,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
                response["token"] = token.key
                return Response(response)
            except IntegrityError as exc:
                error_msg = str(exc).lower()

                if "unique" in error_msg:
                    return Response(
                        data={
                            "non_field_errors": [
                                _("A user/group with these details already exists")
                            ]
                        },
                        status=400,
                    )
                else:
                    LOGGER.warning("Service account creation failed", exc=exc)
                    return Response(
                        data={"non_field_errors": [_("Unable to create user")]},
                        status=400,
                    )
            except (ValueError, TypeError) as exc:
                LOGGER.error("Unexpected error during service account creation", exc=exc)
                return Response(
                    data={"non_field_errors": [_("Unknown error occurred")]},
                    status=500,
                )
                return Response(data={"non_field_errors": [str(exc)]}, status=400)

    @extend_schema(responses={200: SessionUserSerializer(many=False)})
    @action(
@@ -618,7 +585,12 @@ class UserViewSet(UsedByMixin, ModelViewSet):

    @permission_required("authentik_core.reset_user_password")
    @extend_schema(
        request=UserPasswordSetSerializer,
        request=inline_serializer(
            "UserPasswordSetSerializer",
            {
                "password": CharField(required=True),
            },
        ),
        responses={
            204: OpenApiResponse(description="Successfully changed password"),
            400: OpenApiResponse(description="Bad request"),
@@ -627,11 +599,9 @@ class UserViewSet(UsedByMixin, ModelViewSet):
    @action(detail=True, methods=["POST"], permission_classes=[])
    def set_password(self, request: Request, pk: int) -> Response:
        """Set password for user"""
        data = UserPasswordSetSerializer(data=request.data)
        data.is_valid(raise_exception=True)
        user: User = self.get_object()
        try:
            user.set_password(data.validated_data["password"], request=request)
            user.set_password(request.data.get("password"), request=request)
            user.save()
        except (ValidationError, IntegrityError) as exc:
            LOGGER.debug("Failed to set password", exc=exc)
@@ -708,7 +678,8 @@ class UserViewSet(UsedByMixin, ModelViewSet):
            },
        ),
        responses={
            204: OpenApiResponse(description="Successfully started impersonation"),
            "204": OpenApiResponse(description="Successfully started impersonation"),
            "401": OpenApiResponse(description="Access denied"),
        },
    )
    @action(detail=True, methods=["POST"], permission_classes=[])
@@ -727,7 +698,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
                "User attempted to impersonate without permissions",
                user=request.user,
            )
            return Response(status=403)
            return Response(status=401)
        if user_to_be.pk == self.request.user.pk:
            LOGGER.debug("User attempted to impersonate themselves", user=request.user)
            return Response(status=401)
@@ -736,19 +707,19 @@ class UserViewSet(UsedByMixin, ModelViewSet):
                "User attempted to impersonate without providing a reason",
                user=request.user,
            )
            raise ValidationError({"reason": _("This field is required.")})
            return Response(status=401)

        request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER] = request.user
        request.session[SESSION_KEY_IMPERSONATE_USER] = user_to_be

        Event.new(EventAction.IMPERSONATION_STARTED, reason=reason).from_http(request, user_to_be)

        return Response(status=204)
        return Response(status=201)

    @extend_schema(
        request=None,
        request=OpenApiTypes.NONE,
        responses={
            "204": OpenApiResponse(description="Successfully ended impersonation"),
            "204": OpenApiResponse(description="Successfully started impersonation"),
        },
    )
    @action(detail=False, methods=["GET"])
@@ -1,6 +1,6 @@
"""custom runserver command"""

from io import StringIO
from typing import TextIO

from daphne.management.commands.runserver import Command as RunServer
from daphne.server import Server
@@ -33,4 +33,4 @@ class Command(RunServer):
        super().__init__(*args, **kwargs)
        # Redirect standard stdout banner from Daphne into the void
        # as there are a couple more steps that happen before startup is fully done
        self.stdout = StringIO()
        self.stdout = TextIO()

@@ -99,7 +99,7 @@ class Command(BaseCommand):
        else:
            try:
                hook()
            except Exception:  # noqa
            except Exception:
                # Match the behavior of the cpython shell where an error in
                # sys.__interactivehook__ prints a warning and the exception
                # and continues.
@@ -13,6 +13,14 @@ import authentik.core.models
import authentik.lib.models


def migrate_sessions(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    from django.contrib.sessions.backends.cache import KEY_PREFIX
    from django.core.cache import cache

    session_keys = cache.keys(KEY_PREFIX + "*")
    cache.delete_many(session_keys)


def fix_duplicates(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    db_alias = schema_editor.connection.alias
    Token = apps.get_model("authentik_core", "token")
@@ -143,6 +151,9 @@ class Migration(migrations.Migration):
                "abstract": False,
            },
        ),
        migrations.RunPython(
            code=migrate_sessions,
        ),
        migrations.AlterField(
            model_name="application",
            name="meta_launch_url",
@@ -7,10 +7,15 @@ from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_K
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
from django.contrib.sessions.backends.cache import KEY_PREFIX
from django.utils.timezone import now, timedelta
from authentik.lib.migrations import progress_bar
from authentik.root.middleware import ClientIPMiddleware


SESSION_CACHE_ALIAS = "default"


class PickleSerializer:
    """
    Simple wrapper around pickle to be used in signing.dumps()/loads() and
@@ -78,6 +83,27 @@ def _migrate_session(
    )


def migrate_redis_sessions(apps, schema_editor):
    from django.core.cache import caches

    db_alias = schema_editor.connection.alias
    cache = caches[SESSION_CACHE_ALIAS]

    # Not a redis cache, skipping
    if not hasattr(cache, "keys"):
        return

    print("\nMigrating Redis sessions to database, this might take a couple of minutes...")
    for key, session_data in progress_bar(cache.get_many(cache.keys(f"{KEY_PREFIX}*")).items()):
        _migrate_session(
            apps=apps,
            db_alias=db_alias,
            session_key=key.removeprefix(KEY_PREFIX),
            session_data=session_data,
            expires=now() + timedelta(seconds=cache.ttl(key)),
        )


def migrate_database_sessions(apps, schema_editor):
    DjangoSession = apps.get_model("sessions", "Session")
    db_alias = schema_editor.connection.alias
@@ -205,6 +231,10 @@ class Migration(migrations.Migration):
                "verbose_name_plural": "Authenticated Sessions",
            },
        ),
        migrations.RunPython(
            code=migrate_redis_sessions,
            reverse_code=migrations.RunPython.noop,
        ),
        migrations.RunPython(
            code=migrate_database_sessions,
            reverse_code=migrations.RunPython.noop,
@@ -1,18 +0,0 @@
# Generated by Django 5.1.12 on 2025-09-25 13:39

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_core", "0050_user_last_updated_and_more"),
        ("authentik_rbac", "0006_alter_role_options"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="group",
            index=models.Index(fields=["is_superuser"], name="authentik_c_is_supe_1e5a97_idx"),
        ),
    ]
@@ -114,21 +114,15 @@ class AttributesMixin(models.Model):

    def update_attributes(self, properties: dict[str, Any]):
        """Update fields and attributes, but correctly by merging dicts"""
        needs_update = False
        for key, value in properties.items():
            if key == "attributes":
                continue
            if getattr(self, key, None) != value:
                setattr(self, key, value)
                needs_update = True
            setattr(self, key, value)
        final_attributes = {}
        MERGE_LIST_UNIQUE.merge(final_attributes, self.attributes)
        MERGE_LIST_UNIQUE.merge(final_attributes, properties.get("attributes", {}))
        if self.attributes != final_attributes:
            self.attributes = final_attributes
            needs_update = True
        if needs_update:
            self.save()
        self.attributes = final_attributes
        self.save()

    @classmethod
    def update_or_create_attributes(
@@ -206,10 +200,7 @@ class Group(SerializerModel, AttributesMixin):
                "parent",
            ),
        )
        indexes = (
            models.Index(fields=["name"]),
            models.Index(fields=["is_superuser"]),
        )
        indexes = [models.Index(fields=["name"])]
        verbose_name = _("Group")
        verbose_name_plural = _("Groups")
        permissions = [
@@ -406,12 +397,10 @@ class User(SerializerModel, GuardianUserMixin, AttributesMixin, AbstractUser):

    def locale(self, request: HttpRequest | None = None) -> str:
        """Get the locale the user has configured"""
        if request and hasattr(request, "LANGUAGE_CODE"):
            return request.LANGUAGE_CODE
        try:
            return self.attributes.get("settings", {}).get("locale", "")

        except Exception as exc:  # noqa
        except Exception as exc:
            LOGGER.warning("Failed to get default locale", exc=exc)
        if request:
            return request.brand.locale
@@ -592,7 +581,7 @@ class Application(SerializerModel, PolicyBindingModel):
        try:
            return url % user.__dict__

        except Exception as exc:  # noqa
        except Exception as exc:
            LOGGER.warning("Failed to format launch url", exc=exc)
            return url
        return url
@@ -788,7 +777,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
            "slug": self.slug,
        }

        except Exception as exc:  # noqa
        except Exception as exc:
            LOGGER.warning("Failed to template user path", exc=exc, source=self)
            return User.default_path()
@@ -2,9 +2,10 @@

from django.contrib.auth.signals import user_logged_in
from django.core.cache import cache
from django.core.signals import Signal
from django.db.models import Model
from django.db.models.signals import post_delete, post_save, pre_save
from django.dispatch import Signal, receiver
from django.dispatch import receiver
from django.http.request import HttpRequest
from structlog.stdlib import get_logger
@@ -4,7 +4,7 @@ from datetime import datetime, timedelta

from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from django_postgres_cache.tasks import clear_expired_cache
from django_dramatiq_postgres.middleware import CurrentTask
from dramatiq.actor import actor
from structlog.stdlib import get_logger

@@ -14,31 +14,29 @@ from authentik.core.models import (
    ExpiringModel,
    User,
)
from authentik.lib.utils.db import chunked_queryset
from authentik.tasks.middleware import CurrentTask
from authentik.tasks.models import Task

LOGGER = get_logger()


@actor(description=_("Remove expired objects."))
def clean_expired_models():
    self = CurrentTask.get_task()
    self: Task = CurrentTask.get_task()
    for cls in ExpiringModel.__subclasses__():
        cls: ExpiringModel
        objects = (
            cls.objects.all().exclude(expiring=False).exclude(expiring=True, expires__gt=now())
        )
        amount = objects.count()
        for obj in chunked_queryset(objects):
        for obj in objects:
            obj.expire_action()
        LOGGER.debug("Expired models", model=cls, amount=amount)
        self.info(f"Expired {amount} {cls._meta.verbose_name_plural}")
    clear_expired_cache()


@actor(description=_("Remove temporary users created by SAML Sources."))
def clean_temporary_users():
    self = CurrentTask.get_task()
    self: Task = CurrentTask.get_task()
    _now = datetime.now()
    deleted_users = 0
    for user in User.objects.filter(**{f"attributes__{USER_ATTRIBUTE_GENERATED}": True}):
@@ -8,7 +8,6 @@
{% endblock %}

{% block body %}
<ak-skip-to-content></ak-skip-to-content>
<ak-message-container alignment="bottom"></ak-message-container>
<ak-interface-admin>
    <ak-loading></ak-loading>

@@ -8,7 +8,6 @@
{% endblock %}

{% block body %}
<ak-skip-to-content></ak-skip-to-content>
<ak-message-container></ak-message-container>
<ak-interface-user>
    <ak-loading></ak-loading>

@@ -45,7 +45,6 @@
{% block body %}
<div class="pf-c-background-image">
</div>
<ak-skip-to-content></ak-skip-to-content>
<ak-message-container></ak-message-container>
<div class="pf-c-login stacked">
    <div class="ak-login-container">
@@ -59,7 +59,7 @@ class TestImpersonation(APITestCase):
            ),
            data={"reason": "some reason"},
        )
        self.assertEqual(response.status_code, 204)
        self.assertEqual(response.status_code, 201)

        response = self.client.get(reverse("authentik_api:user-me"))
        response_body = loads(response.content.decode())
@@ -80,7 +80,7 @@ class TestImpersonation(APITestCase):
            ),
            data={"reason": "some reason"},
        )
        self.assertEqual(response.status_code, 204)
        self.assertEqual(response.status_code, 201)

        response = self.client.get(reverse("authentik_api:user-me"))
        response_body = loads(response.content.decode())
@@ -137,10 +137,10 @@ class TestImpersonation(APITestCase):
        self.client.force_login(self.user)

        response = self.client.post(
            reverse("authentik_api:user-impersonate", kwargs={"pk": self.other_user.pk}),
            reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk}),
            data={"reason": ""},
        )
        self.assertEqual(response.status_code, 400)
        self.assertEqual(response.status_code, 401)

        response = self.client.get(reverse("authentik_api:user-me"))
        response_body = loads(response.content.decode())
@@ -102,16 +102,6 @@ class TestUsersAPI(APITestCase):
        self.admin.refresh_from_db()
        self.assertTrue(self.admin.check_password(new_pw))

    def test_set_password_blank(self):
        """Test Direct password set"""
        self.client.force_login(self.admin)
        response = self.client.post(
            reverse("authentik_api:user-set-password", kwargs={"pk": self.admin.pk}),
            data={"password": ""},
        )
        self.assertEqual(response.status_code, 400)
        self.assertJSONEqual(response.content, {"password": ["This field may not be blank."]})

    def test_recovery(self):
        """Test user recovery link"""
        flow = create_test_flow(
@@ -469,274 +459,3 @@ class TestUsersAPI(APITestCase):
        body = loads(response.content)
        self.assertEqual(len(body["results"]), 2)
        self.assertEqual(body["results"][0]["pk"], user.pk)
    def test_service_account_validation_empty_username(self):
        """Test service account creation with empty/blank username validation"""
        self.client.force_login(self.admin)

        # Test with empty string
        response = self.client.post(
            reverse("authentik_api:user-service-account"),
            data={
                "name": "",
                "create_group": True,
            },
        )
        self.assertEqual(response.status_code, 400)
        self.assertJSONEqual(
            response.content,
            {"name": ["This field may not be blank."]},
        )

        # Test with only whitespace
        response = self.client.post(
            reverse("authentik_api:user-service-account"),
            data={
                "name": " ",
                "create_group": True,
            },
        )
        self.assertEqual(response.status_code, 400)
        self.assertJSONEqual(
            response.content,
            {"name": ["This field may not be blank."]},
        )

        # Test with tab and newline characters
        response = self.client.post(
            reverse("authentik_api:user-service-account"),
            data={
                "name": "\t\n",
                "create_group": True,
            },
        )
        self.assertEqual(response.status_code, 400)
        self.assertJSONEqual(
            response.content,
            {"name": ["This field may not be blank."]},
        )

    def test_service_account_validation_valid_username(self):
        """Test service account creation with valid username"""
        self.client.force_login(self.admin)

        # Test with valid username
        response = self.client.post(
            reverse("authentik_api:user-service-account"),
            data={
                "name": "valid-service-account",
                "create_group": True,
            },
        )
        self.assertEqual(response.status_code, 200)

        # Verify response structure
        body = loads(response.content)
        self.assertIn("username", body)
        self.assertIn("user_uid", body)
        self.assertIn("user_pk", body)
        self.assertIn("group_pk", body)  # Should exist since create_group=True
        self.assertIn("token", body)

        # Verify field types
        self.assertEqual(body["username"], "valid-service-account")
        self.assertIsInstance(body["user_pk"], int)
        self.assertIsInstance(body["user_uid"], str)
        self.assertIsInstance(body["token"], str)
        self.assertIsInstance(body["group_pk"], str)

    def test_service_account_validation_without_group(self):
        """Test service account creation without creating a group"""
        self.client.force_login(self.admin)

        response = self.client.post(
            reverse("authentik_api:user-service-account"),
            data={
                "name": "no-group-service-account",
                "create_group": False,
            },
        )
        self.assertEqual(response.status_code, 200)

        body = loads(response.content)
        self.assertIn("username", body)
        self.assertIn("user_uid", body)
        self.assertIn("user_pk", body)
        self.assertIn("token", body)
        # Should NOT have group_pk when create_group=False
        self.assertNotIn("group_pk", body)

    def test_service_account_validation_duplicate_username(self):
        """Test service account creation with duplicate username"""
        self.client.force_login(self.admin)

        # Create first service account
        response = self.client.post(
            reverse("authentik_api:user-service-account"),
            data={
                "name": "duplicate-test",
                "create_group": True,
            },
        )
        self.assertEqual(response.status_code, 200)

        # Attempt to create second with same username
        response = self.client.post(
            reverse("authentik_api:user-service-account"),
            data={
                "name": "duplicate-test",
                "create_group": True,
            },
        )
        self.assertEqual(response.status_code, 400)
        self.assertJSONEqual(
            response.content,
            {"name": ["This field must be unique."]},
        )

    def test_service_account_validation_invalid_create_group(self):
        """Test service account creation with invalid create_group field"""
        self.client.force_login(self.admin)

        # Test with string instead of boolean
        response = self.client.post(
            reverse("authentik_api:user-service-account"),
            data={
                "name": "test-sa",
                "create_group": "invalid",
            },
        )
        self.assertEqual(response.status_code, 400)
        self.assertJSONEqual(
            response.content,
            {"create_group": ["Must be a valid boolean."]},
        )

        # Test with number instead of boolean
        response = self.client.post(
            reverse("authentik_api:user-service-account"),
            data={
                "name": "test-sa",
                "create_group": 123,
            },
        )
        self.assertEqual(response.status_code, 400)
        self.assertJSONEqual(
            response.content,
            {"create_group": ["Must be a valid boolean."]},
        )

    def test_service_account_validation_invalid_expiring(self):
        """Test service account creation with invalid expiring field"""
        self.client.force_login(self.admin)

        # Test with string instead of boolean
        response = self.client.post(
            reverse("authentik_api:user-service-account"),
            data={
                "name": "test-sa",
                "expiring": "invalid",
            },
        )
        self.assertEqual(response.status_code, 400)
        self.assertJSONEqual(
            response.content,
            {"expiring": ["Must be a valid boolean."]},
        )

    def test_service_account_validation_invalid_expires(self):
        """Test service account creation with invalid expires field"""
        self.client.force_login(self.admin)

        # Test with invalid datetime string
        response = self.client.post(
            reverse("authentik_api:user-service-account"),
            data={
                "name": "test-sa",
                "expires": "invalid-datetime",
            },
        )
        self.assertEqual(response.status_code, 400)
        self.assertJSONEqual(
            response.content,
            {
                "expires": [
                    "Datetime has wrong format. Use one of these formats instead: "
                    "YYYY-MM-DDThh:mm[:ss[.uuuuuu]][+HH:MM|-HH:MM|Z]."
                ]
            },
        )

        # Test with invalid format
        response = self.client.post(
            reverse("authentik_api:user-service-account"),
            data={
                "name": "test-sa",
                "expires": "2024-13-45",  # Invalid month/day
            },
        )
        self.assertEqual(response.status_code, 400)
        self.assertJSONEqual(
            response.content,
            {
                "expires": [
                    "Datetime has wrong format. Use one of these formats instead: "
                    "YYYY-MM-DDThh:mm[:ss[.uuuuuu]][+HH:MM|-HH:MM|Z]."
                ]
            },
        )

    def test_service_account_validation_multiple_errors(self):
        """Test service account creation with multiple validation errors"""
        self.client.force_login(self.admin)

        response = self.client.post(
            reverse("authentik_api:user-service-account"),
            data={
                "name": "",  # Empty username
                "create_group": "invalid",  # Invalid boolean
                "expiring": 123,  # Invalid boolean
                "expires": "not-a-date",  # Invalid datetime
            },
        )
        self.assertEqual(response.status_code, 400)
        self.assertJSONEqual(
            response.content,
            {
                "name": ["This field may not be blank."],
                "create_group": ["Must be a valid boolean."],
                "expiring": ["Must be a valid boolean."],
                "expires": [
                    "Datetime has wrong format. Use one of these formats instead: "
                    "YYYY-MM-DDThh:mm[:ss[.uuuuuu]][+HH:MM|-HH:MM|Z]."
                ],
            },
        )

    def test_service_account_validation_user_friendly_duplicate_error(self):
        """Test that duplicate username returns user-friendly error, not database error"""
        self.client.force_login(self.admin)

        # Create first service account
        response = self.client.post(
            reverse("authentik_api:user-service-account"),
            data={
                "name": "duplicate-username-test",
                "create_group": True,
            },
        )
        self.assertEqual(response.status_code, 200)

        # Attempt to create second with same username
        response = self.client.post(
            reverse("authentik_api:user-service-account"),
            data={
                "name": "duplicate-username-test",
                "create_group": True,
            },
        )
        self.assertEqual(response.status_code, 400)
        self.assertJSONEqual(
            response.content,
            {"name": ["This field must be unique."]},
        )
@@ -30,7 +30,6 @@ from authentik.flows.views.interface import FlowInterfaceView
from authentik.root.asgi_middleware import AuthMiddlewareStack
from authentik.root.messages.consumer import MessageConsumer
from authentik.root.middleware import ChannelsLoggingMiddleware
from authentik.tenants.channels import TenantsAwareMiddleware

urlpatterns = [
    path(
@@ -98,9 +97,7 @@ api_urlpatterns = [
websocket_urlpatterns = [
    path(
        "ws/client/",
        ChannelsLoggingMiddleware(
            TenantsAwareMiddleware(AuthMiddlewareStack(MessageConsumer.as_asgi()))
        ),
        ChannelsLoggingMiddleware(AuthMiddlewareStack(MessageConsumer.as_asgi())),
    ),
]
@@ -20,11 +20,6 @@ from authentik.lib.models import CreatedUpdatedModel, SerializerModel
LOGGER = get_logger()


def fingerprint_sha256(cert: Certificate) -> str:
    """Get SHA256 Fingerprint of certificate"""
    return hexlify(cert.fingerprint(hashes.SHA256()), ":").decode("utf-8")


class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel):
    """CertificateKeyPair that can be used for signing or encrypting if `key_data`
    is set, otherwise it can be used to verify remote data."""
@@ -87,7 +82,7 @@ class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel):
    @property
    def fingerprint_sha256(self) -> str:
        """Get SHA256 Fingerprint of certificate_data"""
        return fingerprint_sha256(self.certificate)
        return hexlify(self.certificate.fingerprint(hashes.SHA256()), ":").decode("utf-8")

    @property
    def fingerprint_sha1(self) -> str:
@@ -7,12 +7,13 @@ from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import load_pem_private_key
from cryptography.x509.base import load_pem_x509_certificate
from django.utils.translation import gettext_lazy as _
from django_dramatiq_postgres.middleware import CurrentTask
from dramatiq.actor import actor
from structlog.stdlib import get_logger

from authentik.crypto.models import CertificateKeyPair
from authentik.lib.config import CONFIG
from authentik.tasks.middleware import CurrentTask
from authentik.tasks.models import Task

LOGGER = get_logger()

@@ -37,7 +38,7 @@ def ensure_certificate_valid(body: str):

@actor(description=_("Discover, import and update certificates from the filesystem."))
def certificate_discovery():
    self = CurrentTask.get_task()
    self: Task = CurrentTask.get_task()
    certs = {}
    private_keys = {}
    discovered = 0
@@ -27,7 +27,7 @@ class TestCrypto(APITestCase):
    def test_model_private(self):
        """Test model private key"""
        cert = CertificateKeyPair.objects.create(
            name=generate_id(),
            name="test",
            certificate_data="foo",
            key_data="foo",
        )
@@ -271,7 +271,7 @@ class TestCrypto(APITestCase):
        keypair = create_test_cert()
        provider = OAuth2Provider.objects.create(
            name=generate_id(),
            client_id=generate_id(),
            client_id="test",
            client_secret=generate_key(),
            authorization_flow=create_test_flow(),
            redirect_uris=[RedirectURI(RedirectURIMatchingMode.STRICT, "http://localhost")],
@@ -303,7 +303,7 @@ class TestCrypto(APITestCase):
        keypair = create_test_cert()
        OAuth2Provider.objects.create(
            name=generate_id(),
            client_id=generate_id(),
            client_id="test",
            client_secret=generate_key(),
            authorization_flow=create_test_flow(),
            redirect_uris=[RedirectURI(RedirectURIMatchingMode.STRICT, "http://localhost")],
@@ -1,5 +1,6 @@
from django.db.models.aggregates import Count
from django.utils.translation import gettext_lazy as _
from django_dramatiq_postgres.middleware import CurrentTask
from dramatiq.actor import actor
from structlog import get_logger

@@ -7,7 +8,7 @@ from authentik.enterprise.policies.unique_password.models import (
    UniquePasswordPolicy,
    UserPasswordHistory,
)
from authentik.tasks.middleware import CurrentTask
from authentik.tasks.models import Task

LOGGER = get_logger()

@@ -18,7 +19,7 @@ LOGGER = get_logger()
    )
)
def check_and_purge_password_history():
    self = CurrentTask.get_task()
    self: Task = CurrentTask.get_task()

    if not UniquePasswordPolicy.objects.exists():
        UserPasswordHistory.objects.all().delete()
@@ -38,7 +39,7 @@ def trim_password_histories():
    UniquePasswordPolicy policies.
    """

    self = CurrentTask.get_task()
    self: Task = CurrentTask.get_task()

    # No policy, we'll let the cleanup above do its thing
    if not UniquePasswordPolicy.objects.exists():
@@ -4,7 +4,7 @@ from rest_framework import mixins
from rest_framework.viewsets import GenericViewSet

from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.users import PartialGroupSerializer
from authentik.core.api.users import UserGroupSerializer
from authentik.core.api.utils import ModelSerializer
from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProviderGroup
from authentik.lib.sync.outgoing.api import OutgoingSyncConnectionCreateMixin
@@ -13,7 +13,7 @@ from authentik.lib.sync.outgoing.api import OutgoingSyncConnectionCreateMixin
class GoogleWorkspaceProviderGroupSerializer(ModelSerializer):
    """GoogleWorkspaceProviderGroup Serializer"""

    group_obj = PartialGroupSerializer(source="group", read_only=True)
    group_obj = UserGroupSerializer(source="group", read_only=True)

    class Meta:


@@ -3,7 +3,7 @@
from rest_framework import mixins
from rest_framework.viewsets import GenericViewSet

from authentik.core.api.groups import PartialUserSerializer
from authentik.core.api.groups import GroupMemberSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.enterprise.providers.google_workspace.models import GoogleWorkspaceProviderUser
@@ -13,7 +13,7 @@ from authentik.lib.sync.outgoing.api import OutgoingSyncConnectionCreateMixin
class GoogleWorkspaceProviderUserSerializer(ModelSerializer):
    """GoogleWorkspaceProviderUser Serializer"""

    user_obj = PartialUserSerializer(source="user", read_only=True)
    user_obj = GroupMemberSerializer(source="user", read_only=True)

    class Meta:


@@ -4,7 +4,7 @@ from rest_framework import mixins
from rest_framework.viewsets import GenericViewSet

from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.users import PartialGroupSerializer
from authentik.core.api.users import UserGroupSerializer
from authentik.core.api.utils import ModelSerializer
from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProviderGroup
from authentik.lib.sync.outgoing.api import OutgoingSyncConnectionCreateMixin
@@ -13,7 +13,7 @@ from authentik.lib.sync.outgoing.api import OutgoingSyncConnectionCreateMixin
class MicrosoftEntraProviderGroupSerializer(ModelSerializer):
    """MicrosoftEntraProviderGroup Serializer"""

    group_obj = PartialGroupSerializer(source="group", read_only=True)
    group_obj = UserGroupSerializer(source="group", read_only=True)

    class Meta:


@@ -3,7 +3,7 @@
from rest_framework import mixins
from rest_framework.viewsets import GenericViewSet

from authentik.core.api.groups import PartialUserSerializer
from authentik.core.api.groups import GroupMemberSerializer
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer
from authentik.enterprise.providers.microsoft_entra.models import MicrosoftEntraProviderUser
@@ -13,7 +13,7 @@ from authentik.lib.sync.outgoing.api import OutgoingSyncConnectionCreateMixin
class MicrosoftEntraProviderUserSerializer(ModelSerializer):
    """MicrosoftEntraProviderUser Serializer"""

    user_obj = PartialUserSerializer(source="user", read_only=True)
    user_obj = GroupMemberSerializer(source="user", read_only=True)

    class Meta:
@@ -1,14 +0,0 @@
from django.utils.translation import gettext as _
from rest_framework.exceptions import ValidationError

from authentik.crypto.models import CertificateKeyPair
from authentik.enterprise.license import LicenseKey


class RadiusProviderSerializerMixin:

    def validate_certificate(self, cert: CertificateKeyPair) -> CertificateKeyPair:
        if cert:
            if not LicenseKey.cached_summary().status.is_valid:
                raise ValidationError(_("Enterprise is required to use EAP-TLS."))
        return cert
@@ -1,9 +0,0 @@
from authentik.enterprise.apps import EnterpriseConfig


class AuthentikEnterpriseProviderRadiusConfig(EnterpriseConfig):

    name = "authentik.enterprise.providers.radius"
    label = "authentik_enterprise_providers_radius"
    verbose_name = "authentik Enterprise.Providers.Radius"
    default = True
@@ -1,14 +0,0 @@
from django.utils.translation import gettext as _
from rest_framework.exceptions import ValidationError

from authentik.enterprise.license import LicenseKey
from authentik.providers.scim.models import SCIMAuthenticationMode


class SCIMProviderSerializerMixin:

    def validate_auth_mode(self, auth_mode: SCIMAuthenticationMode) -> SCIMAuthenticationMode:
        if auth_mode == SCIMAuthenticationMode.OAUTH:
            if not LicenseKey.cached_summary().status.is_valid:
                raise ValidationError(_("Enterprise is required to use the OAuth mode."))
        return auth_mode
@@ -1,9 +0,0 @@
from authentik.enterprise.apps import EnterpriseConfig


class AuthentikEnterpriseProviderSCIMConfig(EnterpriseConfig):

    name = "authentik.enterprise.providers.scim"
    label = "authentik_enterprise_providers_scim"
    verbose_name = "authentik Enterprise.Providers.SCIM"
    default = True
@@ -1,80 +0,0 @@
from datetime import timedelta
from typing import TYPE_CHECKING

from django.utils.timezone import now
from requests import Request, RequestException
from structlog.stdlib import get_logger

from authentik.providers.scim.clients.exceptions import SCIMRequestException
from authentik.sources.oauth.clients.oauth2 import OAuth2Client
from authentik.sources.oauth.models import OAuthSource, UserOAuthSourceConnection

if TYPE_CHECKING:
    from authentik.providers.scim.models import SCIMProvider


class SCIMOAuthException(SCIMRequestException):
    """Exceptions related to OAuth operations for SCIM requests"""


class SCIMOAuthAuth:

    def __init__(self, provider: "SCIMProvider"):
        self.provider = provider
        self.user = provider.auth_oauth_user
        self.connection = self.get_connection()
        self.logger = get_logger().bind()

    def retrieve_token(self):
        if not self.provider.auth_oauth:
            return None
        source: OAuthSource = self.provider.auth_oauth
        client = OAuth2Client(source, None)
        access_token_url = source.source_type.access_token_url or ""
        if source.source_type.urls_customizable and source.access_token_url:
            access_token_url = source.access_token_url
        data = client.get_access_token_args(None, None)
        data["grant_type"] = "password"
        data.update(self.provider.auth_oauth_params)
        try:
            response = client.do_request(
                "POST",
                access_token_url,
                auth=client.get_access_token_auth(),
                data=data,
                headers=client._default_headers,
            )
            response.raise_for_status()
            body = response.json()
            if "error" in body:
                self.logger.info("Failed to get new OAuth token", error=body["error"])
                raise SCIMOAuthException(response, body["error"])
            return body
        except RequestException as exc:
            raise SCIMOAuthException(exc.response, message="Failed to get OAuth token") from exc

    def get_connection(self):
        token = UserOAuthSourceConnection.objects.filter(
            source=self.provider.auth_oauth, user=self.user, expires__gt=now()
        ).first()
        if token and token.access_token:
            return token
        token = self.retrieve_token()
        access_token = token["access_token"]
        expires_in = int(token.get("expires_in", 0))
        token, _ = UserOAuthSourceConnection.objects.update_or_create(
            source=self.provider.auth_oauth,
            user=self.user,
            defaults={
                "access_token": access_token,
                "expires": now() + timedelta(seconds=expires_in),
            },
        )
        return token

    def __call__(self, request: Request) -> Request:
        if not self.connection.is_valid:
            self.logger.info("OAuth token expired, renewing token")
            self.connection = self.get_connection()
        request.headers["Authorization"] = f"Bearer {self.connection.access_token}"
        return request
@@ -1,30 +0,0 @@
from django.db.models import Model
from django.db.models.signals import post_save
from django.dispatch import receiver

from authentik.core.models import USER_PATH_SYSTEM_PREFIX, User, UserTypes
from authentik.events.middleware import audit_ignore
from authentik.providers.scim.models import SCIMAuthenticationMode, SCIMProvider

USER_PATH_PROVIDERS_SCIM = USER_PATH_SYSTEM_PREFIX + "/providers/scim"


@receiver(post_save, sender=SCIMProvider)
def scim_provider_post_save(sender: type[Model], instance: SCIMProvider, created: bool, **__):
    """Create service account before provider is saved"""
    identifier = f"ak-providers-scim-{instance.pk}"
    with audit_ignore():
        if instance.auth_mode == SCIMAuthenticationMode.OAUTH:
            user, user_created = User.objects.update_or_create(
                username=identifier,
                defaults={
                    "name": f"SCIM Provider {instance.name} Service-Account",
                    "type": UserTypes.INTERNAL_SERVICE_ACCOUNT,
                    "path": USER_PATH_PROVIDERS_SCIM,
                },
            )
            if created or user_created:
                instance.auth_oauth_user = user
                instance.save()
        elif instance.auth_mode == SCIMAuthenticationMode.TOKEN:
            User.objects.filter(username=identifier).delete()
@@ -1,193 +0,0 @@
"""SCIM OAuth tests"""

from base64 import b64encode
from datetime import timedelta
from unittest.mock import MagicMock, PropertyMock, patch

from django.urls import reverse
from django.utils.timezone import now
from requests_mock import Mocker
from rest_framework.test import APITestCase

from authentik.blueprints.tests import apply_blueprint
from authentik.core.models import Application, Group, User
from authentik.core.tests.utils import create_test_admin_user
from authentik.enterprise.license import LicenseKey
from authentik.enterprise.models import License
from authentik.enterprise.tests.test_license import expiry_valid
from authentik.lib.generators import generate_id
from authentik.providers.scim.models import SCIMAuthenticationMode, SCIMMapping, SCIMProvider
from authentik.sources.oauth.models import OAuthSource, UserOAuthSourceConnection
from authentik.tenants.models import Tenant


class SCIMOAuthTests(APITestCase):
    """SCIM User tests"""

    @apply_blueprint("system/providers-scim.yaml")
    def setUp(self) -> None:
        # Delete all users and groups as the mocked HTTP responses only return one ID
        # which will cause errors with multiple users
        Tenant.objects.update(avatars="none")
        User.objects.all().exclude_anonymous().delete()
        Group.objects.all().delete()
        self.source = OAuthSource.objects.create(
            name=generate_id(),
            slug=generate_id(),
            access_token_url="http://localhost/token",  # nosec
            consumer_key=generate_id(),
            consumer_secret=generate_id(),
            provider_type="openidconnect",
        )
        self.provider = SCIMProvider.objects.create(
            name=generate_id(),
            url="https://localhost",
            auth_mode=SCIMAuthenticationMode.OAUTH,
            auth_oauth=self.source,
            auth_oauth_params={
                "foo": "bar",
            },
            exclude_users_service_account=True,
        )
        self.app: Application = Application.objects.create(
            name=generate_id(),
            slug=generate_id(),
        )
        self.app.backchannel_providers.add(self.provider)
        self.provider.property_mappings.add(
            SCIMMapping.objects.get(managed="goauthentik.io/providers/scim/user")
        )
        self.provider.property_mappings_group.add(
            SCIMMapping.objects.get(managed="goauthentik.io/providers/scim/group")
        )

    def test_retrieve_token(self):
        """Test token retrieval"""
        with Mocker() as mocker:
            token = generate_id()
            mocker.post("http://localhost/token", json={"access_token": token, "expires_in": 3600})
            self.provider.scim_auth()
        conn = UserOAuthSourceConnection.objects.filter(
            source=self.source,
            user=self.provider.auth_oauth_user,
        ).first()
        self.assertIsNotNone(conn)
        self.assertTrue(conn.is_valid)
        auth = (
            b64encode(
                b":".join((self.source.consumer_key.encode(), self.source.consumer_secret.encode()))
            )
            .strip()
            .decode()
        )
        self.assertEqual(
            mocker.request_history[0].headers["Authorization"],
            f"Basic {auth}",
        )
        self.assertEqual(mocker.request_history[0].body, "grant_type=password&foo=bar")

    def test_existing_token(self):
        """Test existing token"""
        UserOAuthSourceConnection.objects.create(
            source=self.source,
            user=self.provider.auth_oauth_user,
            access_token=generate_id(),
            expires=now() + timedelta(hours=3),
        )
        with Mocker() as mocker:
            self.provider.scim_auth()
        self.assertEqual(len(mocker.request_history), 0)

    @Mocker()
    def test_user_create(self, mock: Mocker):
        """Test user creation"""
        scim_id = generate_id()
        token = generate_id()
        mock.post("http://localhost/token", json={"access_token": token, "expires_in": 3600})
        mock.get(
            "https://localhost/ServiceProviderConfig",
            json={},
        )
        mock.post(
            "https://localhost/Users",
            json={
                "id": scim_id,
            },
        )
        uid = generate_id()
        user = User.objects.create(
            username=uid,
            name=f"{uid} {uid}",
            email=f"{uid}@goauthentik.io",
        )
        self.assertEqual(mock.call_count, 3)
        self.assertEqual(mock.request_history[1].method, "GET")
        self.assertEqual(mock.request_history[2].method, "POST")
        self.assertJSONEqual(
            mock.request_history[2].body,
            {
                "schemas": ["urn:ietf:params:scim:schemas:core:2.0:User"],
                "active": True,
                "emails": [
                    {
                        "primary": True,
                        "type": "other",
                        "value": f"{uid}@goauthentik.io",
                    }
                ],
                "externalId": user.uid,
                "name": {
                    "familyName": uid,
                    "formatted": f"{uid} {uid}",
                    "givenName": uid,
                },
                "displayName": f"{uid} {uid}",
                "userName": uid,
            },
        )

    @patch(
        "authentik.enterprise.license.LicenseKey.validate",
        MagicMock(
            return_value=LicenseKey(
                aud="",
                exp=expiry_valid,
                name=generate_id(),
                internal_users=100,
                external_users=100,
            )
        ),
    )
    def test_api_create(self):
        License.objects.create(key=generate_id())
        self.client.force_login(create_test_admin_user())
        res = self.client.post(
            reverse("authentik_api:scimprovider-list"),
            {
                "name": generate_id(),
                "url": "http://localhost",
                "auth_mode": "oauth",
                "auth_oauth": str(self.source.pk),
            },
        )
        self.assertEqual(res.status_code, 201)

    @patch(
        "authentik.enterprise.models.LicenseUsageStatus.is_valid",
        PropertyMock(return_value=False),
    )
    def test_api_create_no_license(self):
        self.client.force_login(create_test_admin_user())
        res = self.client.post(
|
||||
reverse("authentik_api:scimprovider-list"),
|
||||
{
|
||||
"name": generate_id(),
|
||||
"url": "http://localhost",
|
||||
"auth_mode": "oauth",
|
||||
"auth_oauth": str(self.source.pk),
|
||||
},
|
||||
)
|
||||
self.assertEqual(res.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
res.content, {"auth_mode": ["Enterprise is required to use the OAuth mode."]}
|
||||
)
@@ -4,6 +4,7 @@ from uuid import UUID
from django.http import HttpRequest
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from django_dramatiq_postgres.middleware import CurrentTask
from dramatiq.actor import actor
from requests.exceptions import RequestException
from structlog.stdlib import get_logger
@@ -19,7 +20,7 @@ from authentik.enterprise.providers.ssf.models import (
from authentik.lib.utils.http import get_http_session
from authentik.lib.utils.time import timedelta_from_string
from authentik.policies.engine import PolicyEngine
from authentik.tasks.middleware import CurrentTask
from authentik.tasks.models import Task

session = get_http_session()
LOGGER = get_logger()
@@ -73,7 +74,7 @@ def _check_app_access(stream: Stream, event_data: dict) -> bool:

@actor(description=_("Send an SSF event."))
def send_ssf_event(stream_uuid: UUID, event_data: dict[str, Any]):
    self = CurrentTask.get_task()
    self: Task = CurrentTask.get_task()

    stream = Stream.objects.filter(pk=stream_uuid).first()
    if not stream:

@@ -1,8 +1,6 @@
SPECTACULAR_SETTINGS = {
    "POSTPROCESSING_HOOKS": [
        "authentik.api.schema.postprocess_schema_responses",
        "authentik.api.schema.postprocess_schema_pagination",
        "authentik.api.schema.postprocess_schema_remove_unused",
        "authentik.enterprise.search.schema.postprocess_schema_search_autocomplete",
        "drf_spectacular.hooks.postprocess_schema_enums",
    ],

@@ -5,8 +5,6 @@ TENANT_APPS = [
    "authentik.enterprise.policies.unique_password",
    "authentik.enterprise.providers.google_workspace",
    "authentik.enterprise.providers.microsoft_entra",
    "authentik.enterprise.providers.radius",
    "authentik.enterprise.providers.scim",
    "authentik.enterprise.providers.ssf",
    "authentik.enterprise.search",
    "authentik.enterprise.stages.authenticator_endpoint_gdtc",

@@ -1,19 +0,0 @@
# Generated by Django 5.1.12 on 2025-09-08 19:43

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_stages_authenticator_endpoint_gdtc", "0001_initial"),
    ]

    operations = [
        migrations.AlterField(
            model_name="authenticatorendpointgdtcstage",
            name="friendly_name",
            field=models.TextField(blank=True, default=""),
            preserve_default=False,
        ),
    ]
@@ -7,8 +7,6 @@ from cryptography.x509 import (
    Certificate,
    NameOID,
    ObjectIdentifier,
    RFC822Name,
    SubjectAlternativeName,
    UnsupportedGeneralNameType,
    load_pem_x509_certificate,
)
@@ -17,7 +15,7 @@ from django.utils.translation import gettext_lazy as _

from authentik.brands.models import Brand
from authentik.core.models import User
from authentik.crypto.models import CertificateKeyPair, fingerprint_sha256
from authentik.crypto.models import CertificateKeyPair
from authentik.enterprise.stages.mtls.models import (
    CertAttributes,
    MutualTLSStage,
@@ -139,7 +137,7 @@ class MTLSStageView(ChallengeStageView):
            case CertAttributes.COMMON_NAME:
                cert_attr = self.get_cert_attribute(cert, NameOID.COMMON_NAME)
            case CertAttributes.EMAIL:
                cert_attr = self.get_cert_email(cert)
                cert_attr = self.get_cert_attribute(cert, NameOID.EMAIL_ADDRESS)
        match stage.user_attribute:
            case UserAttributes.USERNAME:
                user_attr = "username"
@@ -173,7 +171,7 @@ class MTLSStageView(ChallengeStageView):
        self.executor.plan.context.setdefault(PLAN_CONTEXT_PROMPT, {})
        self.executor.plan.context[PLAN_CONTEXT_PROMPT].update(
            {
                "email": self.get_cert_email(cert),
                "email": self.get_cert_attribute(cert, NameOID.EMAIL_ADDRESS),
                "name": self.get_cert_attribute(cert, NameOID.COMMON_NAME),
            }
        )
@@ -185,13 +183,6 @@ class MTLSStageView(ChallengeStageView):
            return None
        return str(attr[0].value)

    def get_cert_email(self, cert: Certificate) -> str | None:
        ext = cert.extensions.get_extension_for_class(SubjectAlternativeName)
        _cert_attr = ext.value.get_values_for_type(RFC822Name)
        if len(_cert_attr) < 1:
            return None
        return str(_cert_attr[0])

    def dispatch(self, request, *args, **kwargs):
        stage: MutualTLSStage = self.executor.current_stage
        certs = [
@@ -219,7 +210,6 @@ class MTLSStageView(ChallengeStageView):
        if not cert and stage.mode == TLSMode.OPTIONAL:
            self.logger.info("No certificate given, continuing")
            return self.executor.stage_ok()
        self.logger.debug("Received certificate", cert=fingerprint_sha256(cert))
        existing_user = self.check_if_user(cert)
        if self.executor.flow.designation == FlowDesignation.ENROLLMENT:
            self.enroll_prepare_user(cert)

@@ -19,7 +19,7 @@ if TYPE_CHECKING:
class ASNDict(TypedDict):
    """ASN Details"""

    asn: int | None
    asn: int
    as_org: str | None
    network: str | None

@@ -60,7 +60,7 @@ class ASNContextProcessor(MMDBContextProcessor):
        except (GeoIP2Error, ValueError):
            return None

    def asn_to_dict(self, asn: ASN | None) -> ASNDict | dict:
    def asn_to_dict(self, asn: ASN | None) -> ASNDict:
        """Convert ASN to dict"""
        if not asn:
            return {}

@@ -19,10 +19,10 @@ if TYPE_CHECKING:
class GeoIPDict(TypedDict):
    """GeoIP Details"""

    continent: str | None
    country: str | None
    lat: float | None
    long: float | None
    continent: str
    country: str
    lat: float
    long: float
    city: str


@@ -61,7 +61,7 @@ class GeoIPContextProcessor(MMDBContextProcessor):
        except (GeoIP2Error, ValueError):
            return None

    def city_to_dict(self, city: City | None) -> GeoIPDict | dict:
    def city_to_dict(self, city: City | None) -> GeoIPDict:
        """Convert City to dict"""
        if not city:
            return {}
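
For orientation, the two TypedDicts above describe the shape of the enrichment data these processors return; an illustrative value, with every number and name invented for the example (64496 and 192.0.2.0/24 are documentation-reserved identifiers):

# Illustrative values only, not part of the diff above.
geoip_example: GeoIPDict = {
    "continent": "EU",
    "country": "DE",
    "lat": 52.52,
    "long": 13.41,
    "city": "Berlin",
}
asn_example: ASNDict = {
    "asn": 64496,
    "as_org": "Example Net",
    "network": "192.0.2.0/24",
}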

@@ -197,8 +197,7 @@ class AuditMiddleware:
            return
        if _CTX_IGNORE.get():
            return
        current_request = _CTX_REQUEST.get()
        if current_request is None or request.request_id != current_request.request_id:
        if request.request_id != _CTX_REQUEST.get().request_id:
            return
        user = self.get_user(request)

@@ -213,8 +212,7 @@ class AuditMiddleware:
            return
        if _CTX_IGNORE.get():
            return
        current_request = _CTX_REQUEST.get()
        if current_request is None or request.request_id != current_request.request_id:
        if request.request_id != _CTX_REQUEST.get().request_id:
            return
        user = self.get_user(request)

@@ -241,8 +239,7 @@ class AuditMiddleware:
            return
        if _CTX_IGNORE.get():
            return
        current_request = _CTX_REQUEST.get()
        if current_request is None or request.request_id != current_request.request_id:
        if request.request_id != _CTX_REQUEST.get().request_id:
            return
        user = self.get_user(request)


@@ -1,16 +0,0 @@
# Generated by Django 5.1.11 on 2025-07-28 15:05

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_events", "0012_notificationtransport_email_subject_prefix_and_more"),
    ]

    operations = [
        migrations.DeleteModel(
            name="SystemTask",
        ),
    ]
@@ -632,3 +632,45 @@ class NotificationWebhookMapping(PropertyMapping):
    class Meta:
        verbose_name = _("Webhook Mapping")
        verbose_name_plural = _("Webhook Mappings")


class TaskStatus(models.TextChoices):
    """DEPRECATED do not use"""

    UNKNOWN = "unknown"
    SUCCESSFUL = "successful"
    WARNING = "warning"
    ERROR = "error"


class SystemTask(ExpiringModel):
    """DEPRECATED do not use"""

    uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
    name = models.TextField()
    uid = models.TextField(null=True)

    start_timestamp = models.DateTimeField(default=now)
    finish_timestamp = models.DateTimeField(default=now)
    duration = models.FloatField(default=0)

    status = models.TextField(choices=TaskStatus.choices)

    description = models.TextField(null=True)
    messages = models.JSONField()

    task_call_module = models.TextField()
    task_call_func = models.TextField()
    task_call_args = models.JSONField(default=list)
    task_call_kwargs = models.JSONField(default=dict)

    def __str__(self) -> str:
        return f"System Task {self.name}"

    class Meta:
        unique_together = (("name", "uid"),)
        default_permissions = ()
        permissions = ()
        verbose_name = _("System Task")
        verbose_name_plural = _("System Tasks")
        indexes = ExpiringModel.Meta.indexes

@@ -4,6 +4,7 @@ from uuid import UUID

from django.db.models.query_utils import Q
from django.utils.translation import gettext_lazy as _
from django_dramatiq_postgres.middleware import CurrentTask
from dramatiq.actor import actor
from guardian.shortcuts import get_anonymous_user
from structlog.stdlib import get_logger
@@ -15,10 +16,9 @@ from authentik.events.models import (
    NotificationRule,
    NotificationTransport,
)
from authentik.lib.utils.db import chunked_queryset
from authentik.policies.engine import PolicyEngine
from authentik.policies.models import PolicyBinding, PolicyEngineMode
from authentik.tasks.middleware import CurrentTask
from authentik.tasks.models import Task

LOGGER = get_logger()

@@ -37,7 +37,7 @@ def event_trigger_dispatch(event_uuid: UUID):
)
def event_trigger_handler(event_uuid: UUID, trigger_name: str):
    """Check if policies attached to NotificationRule match event"""
    self = CurrentTask.get_task()
    self: Task = CurrentTask.get_task()

    event: Event = Event.objects.filter(event_uuid=event_uuid).first()
    if not event:
@@ -123,14 +123,13 @@ def gdpr_cleanup(user_pk: int):
    """cleanup events from gdpr_compliance"""
    events = Event.objects.filter(user__pk=user_pk)
    LOGGER.debug("GDPR cleanup, removing events from user", events=events.count())
    for event in chunked_queryset(events):
        event.delete()
    events.delete()


@actor(description=_("Cleanup seen notifications and notifications whose event expired."))
def notification_cleanup():
    """Cleanup seen notifications and notifications whose event expired."""
    self = CurrentTask.get_task()
    self: Task = CurrentTask.get_task()
    notifications = Notification.objects.filter(Q(event=None) | Q(seen=True))
    amount = notifications.count()
    notifications.delete()

@@ -46,5 +46,5 @@ class FlowStageBindingViewSet(UsedByMixin, ModelViewSet):
    serializer_class = FlowStageBindingSerializer
    filterset_fields = "__all__"
    search_fields = ["stage__name"]
    ordering = ["order", "pk"]
    ordering_fields = ["order", "stage__name", "target__uuid", "pk"]
    ordering = ["order"]
    ordering_fields = ["order", "stage__name"]

@@ -54,7 +54,6 @@ class Challenge(PassiveSerializer):

    flow_info = ContextualFlowInfo(required=False)
    component = CharField(default="")
    xid = CharField(required=False)

    response_errors = DictField(
        child=ErrorDetailSerializer(many=True), allow_empty=True, required=False

@@ -291,7 +291,7 @@ class ConfigurableStage(models.Model):
class FriendlyNamedStage(models.Model):
    """Abstract base class for a Stage that can have a user friendly name configured."""

    friendly_name = models.TextField(blank=True)
    friendly_name = models.TextField(null=True)

    class Meta:
        abstract = True

@@ -143,12 +143,10 @@ class FlowPlan:
        request: HttpRequest,
        flow: Flow,
        allowed_silent_types: list["StageView"] | None = None,
        **get_params,
    ) -> HttpResponse:
        """Redirect to the flow executor for this flow plan"""
        from authentik.flows.views.executor import (
            SESSION_KEY_PLAN,
            FlowContainer,
            FlowExecutorView,
        )

@@ -159,7 +157,6 @@ class FlowPlan:
        # No unskippable stages found, so we can directly return the response of the last stage
        final_stage: type[StageView] = self.bindings[-1].stage.view
        temp_exec = FlowExecutorView(flow=flow, request=request, plan=self)
        temp_exec.container = FlowContainer(request)
        temp_exec.current_stage = self.bindings[-1].stage
        temp_exec.current_stage_view = final_stage
        temp_exec.setup(request, flow.slug)
@@ -177,9 +174,6 @@ class FlowPlan:
        ):
            get_qs["inspector"] = "available"

        for key, value in get_params:
            get_qs[key] = value

        return redirect_with_qs(
            "authentik_core:if-flow",
            get_qs,

@@ -160,7 +160,7 @@ class ChallengeStageView(StageView):
                "user": self.get_pending_user(for_display=True),
            }

        except Exception as exc:  # noqa
        except Exception as exc:
            self.logger.warning("failed to template title", exc=exc)
            return self.executor.flow.title

@@ -192,7 +192,6 @@ class ChallengeStageView(StageView):
        )
        flow_info.is_valid()
        challenge.initial_data["flow_info"] = flow_info.data
        challenge.initial_data["xid"] = self.executor.container.exec_id
        if isinstance(challenge, WithUserInfoChallenge):
            # If there's a pending user, update the `username` field
            # this field is only used by password managers.
@@ -287,12 +286,6 @@ class SessionEndStage(ChallengeStageView):
    that the user is likely to take after signing out of a provider."""

    def get_challenge(self, *args, **kwargs) -> Challenge:
        if not self.request.user.is_authenticated:
            return RedirectChallenge(
                data={
                    "to": reverse("authentik_core:root-redirect"),
                },
            )
        application: Application | None = self.executor.plan.context.get(PLAN_CONTEXT_APPLICATION)
        data = {
            "component": "ak-stage-session-end",

@@ -27,9 +27,8 @@ window.authentik.flow = {
{% endblock %}

{% block body %}
<ak-skip-to-content></ak-skip-to-content>
<ak-message-container></ak-message-container>
<ak-flow-executor flowSlug="{{ flow.slug }}" xid="{{ xid }}">
<ak-flow-executor flowSlug="{{ flow.slug }}">
    <ak-loading></ak-loading>
</ak-flow-executor>
{% endblock %}

@@ -1,7 +1,6 @@
"""authentik multi-stage authentication engine"""

from copy import deepcopy
from uuid import uuid4

from django.conf import settings
from django.contrib.auth.mixins import LoginRequiredMixin
@@ -64,7 +63,6 @@ from authentik.policies.engine import PolicyEngine
LOGGER = get_logger()
# Argument used to redirect user after login
NEXT_ARG_NAME = "next"
SESSION_KEY_PLAN_CONTAINER = "authentik/flows/plan_container/%s"
SESSION_KEY_PLAN = "authentik/flows/plan"
SESSION_KEY_APPLICATION_PRE = "authentik/flows/application_pre"
SESSION_KEY_GET = "authentik/flows/get"
@@ -72,7 +70,6 @@ SESSION_KEY_POST = "authentik/flows/post"
SESSION_KEY_HISTORY = "authentik/flows/history"
QS_KEY_TOKEN = "flow_token"  # nosec
QS_QUERY = "query"
QS_EXEC_ID = "xid"


def challenge_types():
@@ -99,88 +96,6 @@ class InvalidStageError(SentryIgnoredException):
    """Error raised when a challenge from a stage is not valid"""


class FlowContainer:
    """Allow for multiple concurrent flow executions in the same session"""

    def __init__(self, request: HttpRequest, exec_id: str | None = None) -> None:
        self.request = request
        self.exec_id = exec_id

    @staticmethod
    def new(request: HttpRequest):
        exec_id = str(uuid4())
        request.session[SESSION_KEY_PLAN_CONTAINER % exec_id] = {}
        return FlowContainer(request, exec_id)

    def exists(self) -> bool:
        """Check if flow exists in container/session"""
        return SESSION_KEY_PLAN in self.session

    def save(self):
        self.request.session.modified = True

    @property
    def session(self):
        # Backwards compatibility: store session plan/etc directly in session
        if not self.exec_id:
            return self.request.session
        self.request.session.setdefault(SESSION_KEY_PLAN_CONTAINER % self.exec_id, {})
        return self.request.session.get(SESSION_KEY_PLAN_CONTAINER % self.exec_id, {})

    @property
    def plan(self) -> FlowPlan:
        return self.session.get(SESSION_KEY_PLAN)

    def to_redirect(
        self,
        request: HttpRequest,
        flow: Flow,
        allowed_silent_types: list[StageView] | None = None,
        **get_params,
    ) -> HttpResponse:
        get_params[QS_EXEC_ID] = self.exec_id
        return self.plan.to_redirect(
            request, flow, allowed_silent_types=allowed_silent_types, **get_params
        )

    @plan.setter
    def plan(self, value: FlowPlan):
        self.session[SESSION_KEY_PLAN] = value
        self.request.session.modified = True
        self.save()

    @property
    def application_pre(self):
        return self.session.get(SESSION_KEY_APPLICATION_PRE)

    @property
    def get(self) -> QueryDict:
        return self.session.get(SESSION_KEY_GET)

    @get.setter
    def get(self, value: QueryDict):
        self.session[SESSION_KEY_GET] = value
        self.save()

    @property
    def post(self) -> QueryDict:
        return self.session.get(SESSION_KEY_POST)

    @post.setter
    def post(self, value: QueryDict):
        self.session[SESSION_KEY_POST] = value
        self.save()

    @property
    def history(self) -> list[FlowPlan]:
        return self.session.get(SESSION_KEY_HISTORY)

    @history.setter
    def history(self, value: list[FlowPlan]):
        self.session[SESSION_KEY_HISTORY] = value
        self.save()
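
The FlowContainer class above namespaces each execution's plan, query string, and history under a per-execution session key, falling back to the flat legacy keys when no exec_id is given. A rough, illustrative sketch of the resulting session layout and call pattern (the exec IDs stand in for uuid4() values and are made up):

# Illustrative sketch only, not part of the diff above.
session_layout = {
    "authentik/flows/plan": "<legacy FlowPlan, used when no xid is present>",
    "authentik/flows/plan_container/exec-a": {"authentik/flows/plan": "<FlowPlan A>"},
    "authentik/flows/plan_container/exec-b": {"authentik/flows/plan": "<FlowPlan B>"},
}

# Typical use, as suggested by the methods above:
# container = FlowContainer.new(request)       # allocates a fresh exec_id
# container.plan = plan                        # stored under that execution's key
# return container.to_redirect(request, flow)  # appends ?xid=<exec_id> to the URL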


@method_decorator(xframe_options_sameorigin, name="dispatch")
class FlowExecutorView(APIView):
    """Flow executor, passing requests to Stage Views"""
@@ -188,9 +103,8 @@ class FlowExecutorView(APIView):
    permission_classes = [AllowAny]

    flow: Flow = None
    plan: FlowPlan | None = None
    container: FlowContainer

    plan: FlowPlan | None = None
    current_binding: FlowStageBinding | None = None
    current_stage: Stage
    current_stage_view: View
@@ -246,12 +160,10 @@ class FlowExecutorView(APIView):
        if QS_KEY_TOKEN in get_params:
            plan = self._check_flow_token(get_params[QS_KEY_TOKEN])
            if plan:
                container = FlowContainer.new(request)
                container.plan = plan
                self.request.session[SESSION_KEY_PLAN] = plan
        # Early check if there's an active Plan for the current session
        self.container = FlowContainer(request, request.GET.get(QS_EXEC_ID))
        if self.container.exists():
            self.plan: FlowPlan = self.container.plan
        if SESSION_KEY_PLAN in self.request.session:
            self.plan: FlowPlan = self.request.session[SESSION_KEY_PLAN]
            if self.plan.flow_pk != self.flow.pk.hex:
                self._logger.warning(
                    "f(exec): Found existing plan for other flow, deleting plan",
@@ -264,14 +176,13 @@ class FlowExecutorView(APIView):
                self._logger.debug("f(exec): Continuing existing plan")

        # Initial flow request, check if we have an upstream query string passed in
        self.container.get = get_params
        request.session[SESSION_KEY_GET] = get_params
        # Don't check session again as we've either already loaded the plan or we need to plan
        if not self.plan:
            self.container.history = []
            request.session[SESSION_KEY_HISTORY] = []
            self._logger.debug("f(exec): No active Plan found, initiating planner")
            try:
                self.plan = self._initiate_plan()
                self.container.plan = self.plan
            except FlowNonApplicableException as exc:
                self._logger.warning("f(exec): Flow not applicable to current user", exc=exc)
                return self.handle_invalid_flow(exc)
@@ -287,7 +198,7 @@ class FlowExecutorView(APIView):
            # if the cached plan is from an older version, it might have different attributes
            # in which case we just delete the plan and invalidate everything
            next_binding = self.plan.next(self.request)
        except Exception as exc:  # noqa
        except Exception as exc:
            self._logger.warning(
                "f(exec): found incompatible flow plan, invalidating run", exc=exc
            )
@@ -344,19 +255,12 @@ class FlowExecutorView(APIView):
        request=OpenApiTypes.NONE,
        parameters=[
            OpenApiParameter(
                name=QS_QUERY,
                name="query",
                location=OpenApiParameter.QUERY,
                required=True,
                description="Querystring as received",
                type=OpenApiTypes.STR,
            ),
            OpenApiParameter(
                name=QS_EXEC_ID,
                location=OpenApiParameter.QUERY,
                required=False,
                description="Flow execution ID",
                type=OpenApiTypes.STR,
            ),
            )
        ],
        operation_id="flows_executor_get",
    )
@@ -383,7 +287,7 @@ class FlowExecutorView(APIView):
                span.set_data("authentik Stage", self.current_stage_view)
                span.set_data("authentik Flow", self.flow.slug)
                stage_response = self.current_stage_view.dispatch(request)
                return to_stage_response(request, stage_response, self.container.exec_id)
                return to_stage_response(request, stage_response)
        except Exception as exc:
            return self.handle_exception(exc)

@@ -402,19 +306,12 @@ class FlowExecutorView(APIView):
        ),
        parameters=[
            OpenApiParameter(
                name=QS_QUERY,
                name="query",
                location=OpenApiParameter.QUERY,
                required=True,
                description="Querystring as received",
                type=OpenApiTypes.STR,
            ),
            OpenApiParameter(
                name=QS_EXEC_ID,
                location=OpenApiParameter.QUERY,
                required=True,
                description="Flow execution ID",
                type=OpenApiTypes.STR,
            ),
            )
        ],
        operation_id="flows_executor_solve",
    )
@@ -441,21 +338,20 @@ class FlowExecutorView(APIView):
                span.set_data("authentik Stage", self.current_stage_view)
                span.set_data("authentik Flow", self.flow.slug)
                stage_response = self.current_stage_view.dispatch(request)
                return to_stage_response(request, stage_response, self.container.exec_id)
        except Exception as exc:  # noqa
                return to_stage_response(request, stage_response)
        except Exception as exc:
            return self.handle_exception(exc)

    def _initiate_plan(self) -> FlowPlan:
        planner = FlowPlanner(self.flow)
        plan = planner.plan(self.request)
        container = FlowContainer.new(self.request)
        container.plan = plan
        self.request.session[SESSION_KEY_PLAN] = plan
        try:
            # Call the has_stages getter to check that
            # there are no issues with the class we might've gotten
            # from the cache. If there are errors, just delete all cached flows
            _ = plan.has_stages
        except Exception:  # noqa
        except Exception:
            keys = cache.keys(f"{CACHE_PREFIX}*")
            cache.delete_many(keys)
            return self._initiate_plan()
@@ -473,7 +369,7 @@ class FlowExecutorView(APIView):
        except FlowNonApplicableException as exc:
            self._logger.warning("f(exec): Flow restart not applicable to current user", exc=exc)
            return self.handle_invalid_flow(exc)
        self.container.plan = plan
        self.request.session[SESSION_KEY_PLAN] = plan
        kwargs = self.kwargs
        kwargs.update({"flow_slug": self.flow.slug})
        return redirect_with_qs("authentik_api:flow-executor", self.request.GET, **kwargs)
@@ -495,13 +391,9 @@ class FlowExecutorView(APIView):
            )
            self.cancel()
        if next_param and not is_url_absolute(next_param):
            return to_stage_response(
                self.request, redirect_with_qs(next_param), self.container.exec_id
            )
            return to_stage_response(self.request, redirect_with_qs(next_param))
        return to_stage_response(
            self.request,
            self.stage_invalid(error_message=_("Invalid next URL")),
            self.container.exec_id,
            self.request, self.stage_invalid(error_message=_("Invalid next URL"))
        )

    def stage_ok(self) -> HttpResponse:
@@ -515,7 +407,7 @@ class FlowExecutorView(APIView):
            self.current_stage_view.cleanup()
        self.request.session.get(SESSION_KEY_HISTORY, []).append(deepcopy(self.plan))
        self.plan.pop()
        self.container.plan = self.plan
        self.request.session[SESSION_KEY_PLAN] = self.plan
        if self.plan.bindings:
            self._logger.debug(
                "f(exec): Continuing with next stage",
@@ -558,7 +450,6 @@ class FlowExecutorView(APIView):

    def cancel(self):
        """Cancel current flow execution"""
        # TODO: Clean up container
        keys_to_delete = [
            SESSION_KEY_APPLICATION_PRE,
            SESSION_KEY_PLAN,
@@ -581,8 +472,8 @@ class CancelView(View):

    def get(self, request: HttpRequest) -> HttpResponse:
        """View which canels the currently active plan"""
        if FlowContainer(request, request.GET.get(QS_EXEC_ID)).exists():
            del request.session[SESSION_KEY_PLAN_CONTAINER % request.GET.get(QS_EXEC_ID)]
        if SESSION_KEY_PLAN in request.session:
            del request.session[SESSION_KEY_PLAN]
            LOGGER.debug("Canceled current plan")
        return redirect("authentik_flows:default-invalidation")

@@ -630,12 +521,19 @@ class ToDefaultFlow(View):

    def dispatch(self, request: HttpRequest) -> HttpResponse:
        flow = self.get_flow()
        get_qs = request.GET.copy()
        get_qs[QS_EXEC_ID] = str(uuid4())
        return redirect_with_qs("authentik_core:if-flow", get_qs, flow_slug=flow.slug)
        # If user already has a pending plan, clear it so we don't have to later.
        if SESSION_KEY_PLAN in self.request.session:
            plan: FlowPlan = self.request.session[SESSION_KEY_PLAN]
            if plan.flow_pk != flow.pk.hex:
                LOGGER.warning(
                    "f(def): Found existing plan for other flow, deleting plan",
                    flow_slug=flow.slug,
                )
                del self.request.session[SESSION_KEY_PLAN]
        return redirect_with_qs("authentik_core:if-flow", request.GET, flow_slug=flow.slug)


def to_stage_response(request: HttpRequest, source: HttpResponse, xid: str) -> HttpResponse:
def to_stage_response(request: HttpRequest, source: HttpResponse) -> HttpResponse:
    """Convert normal HttpResponse into JSON Response"""
    if (
        isinstance(source, HttpResponseRedirect)
@@ -654,7 +552,6 @@ def to_stage_response(request: HttpRequest, source: HttpResponse, xid: str) -> H
            RedirectChallenge(
                {
                    "to": str(redirect_url),
                    "xid": xid,
                }
            )
        )
@@ -663,7 +560,6 @@ def to_stage_response(request: HttpRequest, source: HttpResponse, xid: str) -> H
            ShellChallenge(
                {
                    "body": source.render().content.decode("utf-8"),
                    "xid": xid,
                }
            )
        )
@@ -673,7 +569,6 @@ def to_stage_response(request: HttpRequest, source: HttpResponse, xid: str) -> H
            ShellChallenge(
                {
                    "body": source.content.decode("utf-8"),
                    "xid": xid,
                }
            )
        )
@@ -705,6 +600,4 @@ class ConfigureFlowInitView(LoginRequiredMixin, View):
        except FlowNonApplicableException:
            LOGGER.warning("Flow not applicable to user")
            raise Http404 from None
        container = FlowContainer.new(request)
        container.plan = plan
        return container.to_redirect(request, stage.configure_flow)
        return plan.to_redirect(request, stage.configure_flow)

@@ -7,7 +7,6 @@ from ua_parser.user_agent_parser import Parse

from authentik.core.views.interface import InterfaceView
from authentik.flows.models import Flow
from authentik.flows.views.executor import QS_EXEC_ID


class FlowInterfaceView(InterfaceView):
@@ -18,7 +17,6 @@ class FlowInterfaceView(InterfaceView):
        kwargs["flow"] = flow
        kwargs["flow_background_url"] = flow.background_url(self.request)
        kwargs["inspector"] = "inspector" in self.request.GET
        kwargs["xid"] = self.request.GET.get(QS_EXEC_ID)
        return super().get_context_data(**kwargs)

    def compat_needs_sfe(self) -> bool:

@@ -19,7 +19,7 @@ def start_debug_server(**kwargs) -> bool:
        )
        return False

    listen: str = CONFIG.get("listen.debug_py", "127.0.0.1:9901")
    listen: str = CONFIG.get("listen.listen_debug_py", "127.0.0.1:9901")
    host, _, port = listen.rpartition(":")
    try:
        debugpy.listen((host, int(port)), **kwargs)  # nosec

@@ -31,14 +31,14 @@ postgresql:
  # host: replica1.example.com

listen:
  http: 0.0.0.0:9000
  https: 0.0.0.0:9443
  ldap: 0.0.0.0:3389
  ldaps: 0.0.0.0:6636
  radius: 0.0.0.0:1812
  metrics: 0.0.0.0:9300
  debug: 0.0.0.0:9900
  debug_py: 0.0.0.0:9901
  listen_http: 0.0.0.0:9000
  listen_https: 0.0.0.0:9443
  listen_ldap: 0.0.0.0:3389
  listen_ldaps: 0.0.0.0:6636
  listen_radius: 0.0.0.0:1812
  listen_metrics: 0.0.0.0:9300
  listen_debug: 0.0.0.0:9900
  listen_debug_py: 0.0.0.0:9901
  trusted_proxy_cidrs:
    - 127.0.0.0/8
    - 10.0.0.0/8

@@ -152,9 +152,8 @@ worker:
  processes: 1
  threads: 2
  consumer_listen_timeout: "seconds=30"
  task_max_retries: 5
  task_max_retries: 20
  task_default_time_limit: "minutes=10"
  lock_purge_interval: "minutes=1"
  task_purge_interval: "days=1"
  task_expiration: "days=30"
  scheduler_interval: "seconds=60"
@@ -3,10 +3,9 @@
import re
import socket
from ipaddress import ip_address, ip_network
from smtplib import SMTPException
from textwrap import indent
from types import CodeType
from typing import TYPE_CHECKING, Any
from typing import Any

from cachetools import TLRUCache, cached
from django.core.exceptions import FieldError
@@ -30,10 +29,6 @@ from authentik.policies.types import PolicyRequest, PolicyResult
from authentik.providers.oauth2.id_token import IDToken
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider
from authentik.stages.authenticator import devices_for_user
from authentik.stages.email.utils import TemplateEmailMessage

if TYPE_CHECKING:
    from authentik.stages.email.models import EmailStage

LOGGER = get_logger()

@@ -62,12 +57,11 @@ class BaseEvaluator:
        self._globals = {
            "ak_call_policy": self.expr_func_call_policy,
            "ak_create_event": self.expr_event_create,
            "ak_create_jwt": self.expr_create_jwt,
            "ak_is_group_member": BaseEvaluator.expr_is_group_member,
            "ak_logger": get_logger(self._filename).bind(),
            "ak_send_email": self.expr_send_email,
            "ak_user_by": BaseEvaluator.expr_user_by,
            "ak_user_has_authenticator": BaseEvaluator.expr_func_user_has_authenticator,
            "ak_create_jwt": self.expr_create_jwt,
            "ip_address": ip_address,
            "ip_network": ip_network,
            "list_flatten": BaseEvaluator.expr_flatten,
@@ -222,81 +216,6 @@ class BaseEvaluator:
        access_token.save()
        return access_token.token

    def expr_send_email(
        self,
        address: str | list[str],
        subject: str,
        body: str | None = None,
        stage: "EmailStage | None" = None,
        template: str | None = None,
        context: dict | None = None,
    ) -> bool:
        """Send an email using authentik's email system

        Args:
            address: Email address(es) to send to. Can be:
                - Single email: "user@example.com"
                - List of emails: ["user1@example.com", "user2@example.com"]
            subject: Email subject
            body: Email body (plain text/HTML). Mutually exclusive with template.
            stage: EmailStage instance to use for settings. If None, uses global settings.
            template: Template name to render. Mutually exclusive with body.
            context: Additional context variables for template rendering.

        Returns:
            bool: True if email was queued successfully, False otherwise
        """
        # Deferred imports to avoid circular import issues
        from authentik.stages.email.tasks import send_mails

        if body and template:
            raise ValueError("body and template parameters are mutually exclusive")

        if not body and not template:
            raise ValueError("Either body or template parameter must be provided")

        # Normalize address parameter to list of (name, email) tuples
        if isinstance(address, str):
            # Single email address
            to_addresses = [("", address)]
        elif isinstance(address, list):
            if not address:
                raise ValueError("Address list cannot be empty")
            # List of email strings
            to_addresses = [("", email) for email in address]
        else:
            raise ValueError("Address must be a string or list of strings")

        try:
            if template is not None:
                # Use all available context from the evaluator for template rendering
                template_context = self._context.copy()
                # Add any custom context passed to the function
                if context:
                    template_context.update(context)

                # Use template rendering
                message = TemplateEmailMessage(
                    subject=subject,
                    to=to_addresses,
                    template_name=template,
                    template_context=template_context,
                )
            else:
                # Use plain body
                message = TemplateEmailMessage(
                    subject=subject,
                    to=to_addresses,
                    body=body,
                )

            send_mails(stage, message)
            return True

        except (SMTPException, ConnectionError, ValidationError, ValueError) as exc:
            LOGGER.warning("Failed to send email", exc=exc, addresses=to_addresses, subject=subject)
            return False
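
Inside an expression (for example an expression policy), the helper registered above as ak_send_email can then be called directly; a short illustrative snippet with placeholder addresses, reusing the email/password_reset.html template referenced elsewhere in this diff:

# Hypothetical expression snippet; all addresses and URLs are placeholders.
# Plain-body form:
ak_send_email("user@example.com", "Welcome", body="Your account is ready.")

# Template form; extra context is merged over the evaluator's own context.
return ak_send_email(
    ["admin@example.com", "audit@example.com"],
    "New enrollment",
    template="email/password_reset.html",
    context={"url": "https://authentik.example.invalid"},
)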

    def wrap_expression(self, expression: str) -> str:
        """Wrap expression in a function, call it, and save the result as `result`"""
        handler_signature = ",".join(sanitize_arg(x) for x in self._context.keys())

@@ -43,9 +43,7 @@ def structlog_configure():
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="iso", utc=False),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.ExceptionRenderer(
                structlog.processors.ExceptionDictTransformer(show_locals=CONFIG.get_bool("debug"))
            ),
            structlog.processors.dict_tracebacks,
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        logger_factory=structlog.stdlib.LoggerFactory(),
@@ -67,14 +65,7 @@ def get_logger_config():
        "json": {
            "()": structlog.stdlib.ProcessorFormatter,
            "processor": structlog.processors.JSONRenderer(sort_keys=True),
            "foreign_pre_chain": LOG_PRE_CHAIN
            + [
                structlog.processors.ExceptionRenderer(
                    structlog.processors.ExceptionDictTransformer(
                        show_locals=CONFIG.get_bool("debug")
                    )
                ),
            ],
            "foreign_pre_chain": LOG_PRE_CHAIN + [structlog.processors.dict_tracebacks],
        },
        "console": {
            "()": structlog.stdlib.ProcessorFormatter,
@@ -112,7 +103,6 @@ def get_logger_config():
        "hpack": "WARNING",
        "httpx": "WARNING",
        "azure": "WARNING",
        "channels_postgres": "WARNING",
    }
    for handler_name, level in handler_level_map.items():
        base_config["loggers"][handler_name] = {

@@ -3,15 +3,19 @@
from asyncio.exceptions import CancelledError
from typing import Any

from channels_redis.core import ChannelFull
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation, ValidationError
from django.db import DatabaseError, InternalError, OperationalError, ProgrammingError
from django.http.response import Http404
from django_redis.exceptions import ConnectionInterrupted
from docker.errors import DockerException
from dramatiq.errors import Retry
from h11 import LocalProtocolError
from ldap3.core.exceptions import LDAPException
from psycopg.errors import Error
from redis.exceptions import ConnectionError as RedisConnectionError
from redis.exceptions import RedisError, ResponseError
from rest_framework.exceptions import APIException
from sentry_sdk import HttpTransport, get_current_scope
from sentry_sdk import init as sentry_sdk_init
@@ -19,6 +23,7 @@ from sentry_sdk.api import set_tag
from sentry_sdk.integrations.argv import ArgvIntegration
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.dramatiq import DramatiqIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations.socket import SocketIntegration
from sentry_sdk.integrations.stdlib import StdlibIntegration
from sentry_sdk.integrations.threading import ThreadingIntegration
@@ -54,7 +59,13 @@ ignored_classes = (
    ProgrammingError,
    SuspiciousOperation,
    ValidationError,
    # Redis errors
    RedisConnectionError,
    ConnectionInterrupted,
    RedisError,
    ResponseError,
    # websocket errors
    ChannelFull,
    WebSocketException,
    LocalProtocolError,
    # rest_framework error
@@ -101,6 +112,7 @@ def sentry_init(**sentry_init_kwargs):
            ArgvIntegration(),
            DjangoIntegration(transaction_style="function_name", cache_spans=True),
            DramatiqIntegration(),
            RedisIntegration(),
            SocketIntegration(),
            StdlibIntegration(),
            ThreadingIntegration(propagate_hub=True),
@@ -147,7 +159,9 @@ def before_send(event: dict, hint: dict) -> dict | None:
    if event["logger"] in [
        "asyncio",
        "multiprocessing",
        "django_redis",
        "django.security.DisallowedHost",
        "django_redis.cache",
        "paramiko.transport",
    ]:
        return None

@@ -1,5 +1,4 @@
from dramatiq.actor import Actor
from dramatiq.results.errors import ResultFailure
from drf_spectacular.utils import extend_schema
from rest_framework.decorators import action
from rest_framework.fields import BooleanField, CharField, ChoiceField
@@ -111,13 +110,9 @@ class OutgoingSyncProviderStatusMixin:
                "override_dry_run": params.validated_data["override_dry_run"],
                "pk": params.validated_data["sync_object_id"],
            },
            retries=0,
            rel_obj=provider,
        )
        try:
            msg.get_result(block=True)
        except ResultFailure:
            pass
        msg.get_result(block=True)
        task: Task = msg.options["task"]
        task.refresh_from_db()
        return Response(SyncObjectResultSerializer(instance={"messages": task._messages}).data)

@@ -2,6 +2,7 @@ from django.core.paginator import Paginator
from django.db.models import Model, QuerySet
from django.db.models.query import Q
from django.utils.text import slugify
from django_dramatiq_postgres.middleware import CurrentTask
from dramatiq.actor import Actor
from dramatiq.composition import group
from dramatiq.errors import Retry
@@ -19,9 +20,7 @@ from authentik.lib.sync.outgoing.exceptions import (
    TransientSyncException,
)
from authentik.lib.sync.outgoing.models import OutgoingSyncProvider
from authentik.lib.utils.errors import exception_to_dict
from authentik.lib.utils.reflection import class_to_path, path_to_class
from authentik.tasks.middleware import CurrentTask
from authentik.tasks.models import Task


@@ -61,7 +60,7 @@ class SyncTasks:
        provider_pk: int,
        sync_objects: Actor[[str, int, int, bool], None],
    ):
        task = CurrentTask.get_task()
        task: Task = CurrentTask.get_task()
        self.logger = get_logger().bind(
            provider_type=class_to_path(self._provider_model),
            provider_pk=provider_pk,
@@ -118,7 +117,7 @@ class SyncTasks:
        override_dry_run=False,
        **filter,
    ):
        task = CurrentTask.get_task()
        task: Task = CurrentTask.get_task()
        _object_type: type[Model] = path_to_class(object_type)
        self.logger = get_logger().bind(
            provider_type=class_to_path(self._provider_model),
@@ -165,17 +164,16 @@ class SyncTasks:
            except BadRequestSyncException as exc:
                self.logger.warning("failed to sync object", exc=exc, obj=obj)
                task.warning(
                    f"Failed to sync {str(obj)} due to error: {str(exc)}",
                    f"Failed to sync {obj._meta.verbose_name} {str(obj)} due to error: {str(exc)}",
                    arguments=exc.args[1:],
                    obj=sanitize_item(obj),
                    exception=exception_to_dict(exc),
                )
            except TransientSyncException as exc:
                self.logger.warning("failed to sync object", exc=exc, user=obj)
                task.warning(
                    f"Failed to sync {str(obj)} due to transient error: {str(exc)}",
                    f"Failed to sync {obj._meta.verbose_name} {str(obj)} due to "
                    "transient error: {str(exc)}",
                    obj=sanitize_item(obj),
                    exception=exception_to_dict(exc),
                )
            except StopSync as exc:
                self.logger.warning("Stopping sync", exc=exc)
@@ -207,7 +205,7 @@ class SyncTasks:
        provider_pk: int,
        raw_op: str,
    ):
        task = CurrentTask.get_task()
        task: Task = CurrentTask.get_task()
        self.logger = get_logger().bind(
            provider_type=class_to_path(self._provider_model),
        )
@@ -281,7 +279,7 @@ class SyncTasks:
        action: str,
        pk_set: list[int],
    ):
        task = CurrentTask.get_task()
        task: Task = CurrentTask.get_task()
        self.logger = get_logger().bind(
            provider_type=class_to_path(self._provider_model),
        )

@@ -1,7 +1,5 @@
"""Test Evaluator base functions"""

from unittest.mock import patch

from django.test import RequestFactory, TestCase
from django.urls import reverse
from jwt import decode
@@ -79,163 +77,3 @@ class TestEvaluator(TestCase):
            jwt, provider.client_secret, algorithms=["HS256"], audience=provider.client_id
        )
        self.assertEqual(decoded["preferred_username"], user.username)

    @patch("authentik.stages.email.tasks.send_mails")
    def test_expr_send_email_with_body(self, mock_send_mails):
        """Test ak_send_email with body parameter"""
        user = create_test_user()
        evaluator = BaseEvaluator(generate_id())
        evaluator._context = {"user": user}

        # Test sending email with body
        result = evaluator.evaluate(
            "return ak_send_email('test@example.com', 'Test Subject', body='Test Body')"
        )

        self.assertTrue(result)
        mock_send_mails.assert_called_once()

        # Verify the call arguments - send_mails is called with (stage, message)
        args, kwargs = mock_send_mails.call_args
        stage, message = args

        # Check that global settings are used (stage is None)
        self.assertIsNone(stage)

        # Check message properties
        self.assertEqual(message.subject, "Test Subject")
        self.assertEqual(message.to, ["test@example.com"])
        self.assertEqual(message.body, "Test Body")

    @patch("authentik.stages.email.tasks.send_mails")
    def test_expr_send_email_with_template(self, mock_send_mails):
        """Test ak_send_email with template parameter"""
        user = create_test_user()
        evaluator = BaseEvaluator(generate_id())
        evaluator._context = {"user": user}

        # Test sending email with template
        result = evaluator.evaluate(
            "return ak_send_email('test@example.com', 'Test Subject', "
            "template='email/password_reset.html')"
        )

        self.assertTrue(result)
        mock_send_mails.assert_called_once()

    def test_expr_send_email_validation_errors(self):
        """Test ak_send_email validation errors"""
        evaluator = BaseEvaluator(generate_id())

        # Test error when both body and template are provided
        with self.assertRaises(ValueError) as cm:
            evaluator.evaluate(
                "return ak_send_email('test@example.com', 'Test', "
                "body='Body', template='template.html')"
            )
        self.assertIn("mutually exclusive", str(cm.exception))

        # Test error when neither body nor template are provided
        with self.assertRaises(ValueError) as cm:
            evaluator.evaluate("return ak_send_email('test@example.com', 'Test')")
        self.assertIn("Either body or template parameter must be provided", str(cm.exception))

    @patch("authentik.stages.email.tasks.send_mails")
    def test_expr_send_email_with_custom_stage(self, mock_send_mails):
        """Test ak_send_email with custom EmailStage"""
        from authentik.stages.email.models import EmailStage

        user = create_test_user()
        custom_stage = EmailStage(
            name="custom-stage", use_global_settings=False, from_address="custom@example.com"
        )

        evaluator = BaseEvaluator(generate_id())
        evaluator._context = {"user": user, "custom_stage": custom_stage}

        # Test sending email with custom stage
        result = evaluator.evaluate(
            "return ak_send_email('test@example.com', 'Test Subject', "
            "body='Test Body', stage=custom_stage)"
        )

        self.assertTrue(result)
        mock_send_mails.assert_called_once()

        # Verify the custom stage was used
        args, kwargs = mock_send_mails.call_args
        stage, message = args

        self.assertEqual(stage, custom_stage)
        self.assertFalse(stage.use_global_settings)

    @patch("authentik.stages.email.tasks.send_mails")
    def test_expr_send_email_with_context(self, mock_send_mails):
        """Test ak_send_email with custom context parameter"""
        user = create_test_user()
        evaluator = BaseEvaluator(generate_id())
        evaluator._context = {"user": user, "request_id": "123"}

        # Test sending email with template and custom context
        result = evaluator.evaluate(
            "return ak_send_email('test@example.com', 'Test Subject', "
            "template='email/password_reset.html', "
            "context={'url': 'http://localhost', 'expires': '2026-01-01'})"
        )

        self.assertTrue(result)
        mock_send_mails.assert_called_once()

        # Verify the call arguments - send_mails is called with (stage, message)
        args, kwargs = mock_send_mails.call_args
        stage, message = args

        # Check that global settings are used (stage is None)
        self.assertIsNone(stage)

        self.assertEqual(message.subject, "Test Subject")
        self.assertEqual(message.to, ["test@example.com"])
        self.assertIn("2026-01-01", message.body)
        self.assertIn("http://localhost", message.body)

    @patch("authentik.stages.email.tasks.send_mails")
    def test_expr_send_email_multiple_addresses(self, mock_send_mails):
        """Test ak_send_email with multiple email addresses"""
        user = create_test_user()
        evaluator = BaseEvaluator(generate_id())
        evaluator._context = {"user": user}

        # Test sending email to multiple addresses
        result = evaluator.evaluate(
            "return ak_send_email(['user1@example.com', 'user2@example.com'], "
            "'Test Subject', body='Test Body')"
        )

        self.assertTrue(result)
        mock_send_mails.assert_called_once()

        # Verify the call arguments - send_mails is called with (stage, message)
        args, kwargs = mock_send_mails.call_args
        stage, message = args

        # Check that global settings are used (stage is None)
        self.assertIsNone(stage)

        # Check message properties - should have multiple recipients
        self.assertEqual(message.subject, "Test Subject")
        self.assertEqual(message.to, ["user1@example.com", "user2@example.com"])
        self.assertEqual(message.body, "Test Body")

    def test_expr_send_email_multiple_addresses_validation(self):
        """Test ak_send_email validation with multiple addresses"""
        evaluator = BaseEvaluator(generate_id())

        # Test error when empty list is provided
        with self.assertRaises(ValueError) as cm:
            evaluator.evaluate("return ak_send_email([], 'Test', body='Body')")
        self.assertIn("Address list cannot be empty", str(cm.exception))

        # Test error when invalid type is provided
        with self.assertRaises(ValueError) as cm:
            evaluator.evaluate("return ak_send_email(123, 'Test', body='Body')")
        self.assertIn("Address must be a string or list of strings", str(cm.exception))

@@ -1,29 +0,0 @@
"""authentik database utilities"""

import gc

from django.db import reset_queries
from django.db.models import QuerySet


def chunked_queryset(queryset: QuerySet, chunk_size: int = 1_000):
    if not queryset.exists():
        return []

    def get_chunks(qs: QuerySet):
        qs = qs.order_by("pk")
        pks = qs.values_list("pk", flat=True)
        start_pk = pks[0]
        while True:
            try:
                end_pk = pks.filter(pk__gte=start_pk)[chunk_size]
            except IndexError:
                break
            yield qs.filter(pk__gte=start_pk, pk__lt=end_pk)
            start_pk = end_pk
        yield qs.filter(pk__gte=start_pk)

    for chunk in get_chunks(queryset):
        reset_queries()
        gc.collect()
        yield from chunk.iterator()
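
For reference, the helper above (which this comparison drops in favour of plain queryset deletes) walks a queryset in primary-key order and yields rows chunk by chunk, resetting Django's query log and forcing garbage collection between chunks so memory stays bounded. A minimal usage sketch mirroring the gdpr_cleanup call earlier in this diff; the filter is an example:

# Illustrative usage only.
from authentik.events.models import Event

for event in chunked_queryset(Event.objects.filter(action="login_failed"), chunk_size=500):
    event.delete()  # rows are fetched 500 at a time, keeping memory use flat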
@@ -4,11 +4,9 @@ from traceback import extract_tb

from structlog.tracebacks import ExceptionDictTransformer

from authentik.lib.config import CONFIG
from authentik.lib.utils.reflection import class_to_path

TRACEBACK_HEADER = "Traceback (most recent call last):"
_exception_transformer = ExceptionDictTransformer(show_locals=CONFIG.get_bool("debug"))


def exception_to_string(exc: Exception) -> str:
@@ -25,4 +23,4 @@ def exception_to_string(exc: Exception) -> str:

def exception_to_dict(exc: Exception) -> dict:
    """Format exception as a dictionary"""
    return _exception_transformer((type(exc), exc, exc.__traceback__))
    return ExceptionDictTransformer()((type(exc), exc, exc.__traceback__))

@@ -6,7 +6,6 @@ from pathlib import Path
from tempfile import gettempdir

from django.conf import settings
from django.utils.module_loading import import_string

from authentik.lib.config import CONFIG

@@ -63,13 +62,3 @@ def get_env() -> str:
    if "AK_APPLIANCE" in os.environ:
        return os.environ["AK_APPLIANCE"]
    return "custom"


def ConditionalInheritance(path: str):
    """Conditionally inherit from a class, intended for things like authentik.enterprise,
    without which authentik should still be able to run"""
    try:
        cls = import_string(path)
        return cls
    except ModuleNotFoundError:
        return object

Some files were not shown because too many files have changed in this diff.