mirror of https://github.com/goauthentik/authentik
synced 2026-05-07 15:42:48 +02:00

Compare commits (4 commits): ci/test-po ... developer-
| Author | SHA1 | Date |
|---|---|---|
|  | c11f407470 |  |
|  | b7c6b961a1 |  |
|  | e6adb72695 |  |
|  | 9cbdcd2cad |  |
.github/actions/cherry-pick/action.yml (vendored, 267 lines deleted)
@@ -1,267 +0,0 @@
name: "Cherry-picker"
description: "Cherry-pick PRs based on their labels"

inputs:
  token:
    description: "GitHub Token"
    required: true
  git_user:
    description: "Git user for pushing the cherry-pick PR"
    required: true
  git_user_email:
    description: "Git user email for pushing the cherry-pick PR"
    required: true

runs:
  using: "composite"
  steps:
    - name: Check if workflow should run
      id: should_run
      shell: bash
      env:
        GITHUB_TOKEN: ${{ inputs.token }}
      run: |
        set -e -o pipefail
        # For issues events, check if it's actually a PR
        if [ "${{ github.event_name }}" = "issues" ]; then
          # Check if this issue is actually a PR
          PR_DATA=$(gh api repos/${{ github.repository }}/pulls/${{ github.event.issue.number }} 2>/dev/null || echo "null")
          if [ "$PR_DATA" = "null" ]; then
            echo "should_run=false" >> $GITHUB_OUTPUT
            echo "reason=not_a_pr" >> $GITHUB_OUTPUT
            echo "This is an issue, not a PR. Skipping."
            exit 0
          fi

          # Get PR data
          PR_MERGED=$(echo "$PR_DATA" | jq -r '.merged')
          PR_NUMBER="${{ github.event.issue.number }}"
          MERGE_COMMIT_SHA=$(echo "$PR_DATA" | jq -r '.merge_commit_sha')

          # Check if it's a backport label
          LABEL_NAME="${{ github.event.label.name }}"
          if [[ "$LABEL_NAME" =~ ^backport/(.+)$ ]]; then
            if [ "$PR_MERGED" = "true" ]; then
              echo "should_run=true" >> $GITHUB_OUTPUT
              echo "reason=label_added_to_merged_pr" >> $GITHUB_OUTPUT
              echo "pr_number=$PR_NUMBER" >> $GITHUB_OUTPUT
              echo "merge_commit_sha=$MERGE_COMMIT_SHA" >> $GITHUB_OUTPUT
              exit 0
            else
              echo "should_run=false" >> $GITHUB_OUTPUT
              echo "reason=label_added_to_open_pr" >> $GITHUB_OUTPUT
              echo "Backport label added to open PR. Will run after PR is merged."
              exit 0
            fi
          else
            echo "should_run=false" >> $GITHUB_OUTPUT
            echo "reason=non_backport_label" >> $GITHUB_OUTPUT
            exit 0
          fi
        fi

        # For pull_request and pull_request_target events
        PR_NUMBER="${{ github.event.pull_request.number }}"
        MERGE_COMMIT_SHA="${{ github.event.pull_request.merge_commit_sha }}"

        # Case 1: PR was just merged (closed + merged = true)
        if [ "${{ github.event.action }}" = "closed" ] && [ "${{ github.event.pull_request.merged }}" = "true" ]; then
          echo "should_run=true" >> $GITHUB_OUTPUT
          echo "reason=pr_merged" >> $GITHUB_OUTPUT
          echo "pr_number=$PR_NUMBER" >> $GITHUB_OUTPUT
          echo "merge_commit_sha=$MERGE_COMMIT_SHA" >> $GITHUB_OUTPUT
          exit 0
        fi

        # Case 2: Label was added
        if [ "${{ github.event.action }}" = "labeled" ]; then
          LABEL_NAME="${{ github.event.label.name }}"
          # Check if it's a backport label
          if [[ "$LABEL_NAME" =~ ^backport/(.+)$ ]]; then
            # Check if PR is already merged
            if [ "${{ github.event.pull_request.merged }}" = "true" ]; then
              echo "should_run=true" >> $GITHUB_OUTPUT
              echo "reason=label_added_to_merged_pr" >> $GITHUB_OUTPUT
              echo "pr_number=$PR_NUMBER" >> $GITHUB_OUTPUT
              echo "merge_commit_sha=$MERGE_COMMIT_SHA" >> $GITHUB_OUTPUT
              exit 0
            else
              echo "should_run=false" >> $GITHUB_OUTPUT
              echo "reason=label_added_to_open_pr" >> $GITHUB_OUTPUT
              echo "Backport label added to open PR. Will run after PR is merged."
              exit 0
            fi
          else
            echo "should_run=false" >> $GITHUB_OUTPUT
            echo "reason=non_backport_label" >> $GITHUB_OUTPUT
            exit 0
          fi
        fi

        echo "should_run=false" >> $GITHUB_OUTPUT
        echo "reason=unknown" >> $GITHUB_OUTPUT
    - name: Configure Git
      if: steps.should_run.outputs.should_run == 'true'
      shell: bash
      env:
        user: ${{ inputs.git_user }}
        email: ${{ inputs.git_user_email }}
      run: |
        git config --global user.name "${user}"
        git config --global user.email "${email}"
    - name: Get PR details and extract backport labels
      if: steps.should_run.outputs.should_run == 'true'
      id: pr_details
      shell: bash
      env:
        GITHUB_TOKEN: ${{ inputs.token }}
      run: |
        set -e -o pipefail
        PR_NUMBER="${{ steps.should_run.outputs.pr_number }}"

        # Get PR details
        PR_DATA=$(gh api repos/${{ github.repository }}/pulls/$PR_NUMBER)
        PR_TITLE=$(echo "$PR_DATA" | jq -r '.title')
        PR_AUTHOR=$(echo "$PR_DATA" | jq -r '.user.login')

        echo "pr_title=$PR_TITLE" >> $GITHUB_OUTPUT
        echo "pr_author=$PR_AUTHOR" >> $GITHUB_OUTPUT

        # Determine which labels to process
        if [ "${{ steps.should_run.outputs.reason }}" = "label_added_to_merged_pr" ]; then
          # Only process the specific label that was just added
          if [ "${{ github.event_name }}" = "issues" ]; then
            LABEL_NAME="${{ github.event.label.name }}"
          else
            LABEL_NAME="${{ github.event.label.name }}"
          fi

          if [[ "$LABEL_NAME" =~ ^backport/(.+)$ ]]; then
            echo "labels=$LABEL_NAME" >> $GITHUB_OUTPUT
          else
            echo "Label $LABEL_NAME does not match backport pattern"
            echo "labels=" >> $GITHUB_OUTPUT
          fi
        else
          # PR was just merged, process all backport labels
          LABELS=$(gh pr view $PR_NUMBER --json labels --jq '.labels[].name' | grep '^backport/' | tr '\n' ' ' || true)
          echo "labels=$LABELS" >> $GITHUB_OUTPUT
        fi
    - name: Cherry-pick to target branches
      if: steps.should_run.outputs.should_run == 'true' && steps.pr_details.outputs.labels != ''
      shell: bash
      env:
        GITHUB_TOKEN: ${{ inputs.token }}
      run: |
        set -e -o pipefail
        PR_NUMBER='${{ steps.should_run.outputs.pr_number }}'
        COMMIT_SHA='${{ steps.should_run.outputs.merge_commit_sha }}'
        PR_TITLE='${{ steps.pr_details.outputs.pr_title }}'
        PR_AUTHOR='${{ steps.pr_details.outputs.pr_author }}'
        LABELS='${{ steps.pr_details.outputs.labels }}'

        echo "Processing PR #$PR_NUMBER (reason: ${{ steps.should_run.outputs.reason }})"
        echo "Found backport labels: $LABELS"

        # Process each backport label
        for label in $LABELS; do
          if [[ "$label" =~ ^backport/(.+)$ ]]; then
            TARGET_BRANCH="${BASH_REMATCH[1]}"
            echo "Processing backport to branch: $TARGET_BRANCH"

            # Check if target branch exists
            if ! git ls-remote --heads origin "$TARGET_BRANCH" | grep -q "$TARGET_BRANCH"; then
              echo "❌ Target branch $TARGET_BRANCH does not exist, skipping"

              # Comment on the original PR about the missing branch
              gh pr comment $PR_NUMBER --body "⚠️ Cannot backport to \`$TARGET_BRANCH\`: branch does not exist."
              continue
            fi

            # Create a unique branch name for the cherry-pick
            CHERRY_PICK_BRANCH="cherry-pick-${PR_NUMBER}-to-${TARGET_BRANCH}"

            # Check if a cherry-pick PR already exists
            EXISTING_PR=$(gh pr list --head "$CHERRY_PICK_BRANCH" --json number --jq '.[0].number' 2>/dev/null || echo "")
            if [ -n "$EXISTING_PR" ]; then
              echo "⚠️ Cherry-pick PR already exists: #$EXISTING_PR"
              gh pr comment $PR_NUMBER --body "Cherry-pick to \`$TARGET_BRANCH\` already exists: #$EXISTING_PR"
              continue
            fi

            # Fetch and checkout target branch
            git fetch origin "$TARGET_BRANCH"
            git checkout -b "$CHERRY_PICK_BRANCH" "origin/$TARGET_BRANCH"

            # Attempt cherry-pick
            if git cherry-pick "$COMMIT_SHA"; then
              echo "✅ Cherry-pick successful for $TARGET_BRANCH"

              # Push the cherry-pick branch
              git push origin "$CHERRY_PICK_BRANCH"

              # Create PR for the cherry-pick
              CHERRY_PICK_TITLE="$PR_TITLE (cherry-pick #$PR_NUMBER)"
              CHERRY_PICK_BODY="Cherry-pick of #$PR_NUMBER to \`$TARGET_BRANCH\` branch.

        **Original PR:** #$PR_NUMBER
        **Original Author:** @$PR_AUTHOR
        **Cherry-picked commit:** $COMMIT_SHA"

              NEW_PR=$(gh pr create \
                --title "$CHERRY_PICK_TITLE" \
                --body "$CHERRY_PICK_BODY" \
                --base "$TARGET_BRANCH" \
                --head "$CHERRY_PICK_BRANCH" \
                --label "cherry-pick")

              echo "✅ Created cherry-pick PR $NEW_PR for $TARGET_BRANCH"

              # Comment on original PR
              gh pr comment $PR_NUMBER --body "🍒 Cherry-pick to \`$TARGET_BRANCH\` created: $NEW_PR"

            else
              echo "⚠️ Cherry-pick failed for $TARGET_BRANCH, creating conflict resolution PR"

              # Add conflicted files and commit
              git add .
              git commit -m "Cherry-pick #$PR_NUMBER to $TARGET_BRANCH (with conflicts)

        This cherry-pick has conflicts that need manual resolution.

        Original PR: #$PR_NUMBER
        Original commit: $COMMIT_SHA"

              # Push the branch with conflicts
              git push origin "$CHERRY_PICK_BRANCH"

              # Create PR with conflict notice
              CONFLICT_TITLE="$PR_TITLE (backport of #$PR_NUMBER)"
              CONFLICT_BODY="⚠️ **This cherry-pick has conflicts that require manual resolution.**

        Cherry-pick of #$PR_NUMBER to \`$TARGET_BRANCH\` branch.

        **Original PR:** #$PR_NUMBER
        **Original Author:** @$PR_AUTHOR
        **Cherry-picked commit:** $COMMIT_SHA

        **Please resolve the conflicts in this PR before merging.**"

              NEW_PR=$(gh pr create \
                --title "$CONFLICT_TITLE" \
                --body "$CONFLICT_BODY" \
                --base "$TARGET_BRANCH" \
                --head "$CHERRY_PICK_BRANCH" \
                --label "cherry-pick")

              echo "⚠️ Created conflict resolution PR $NEW_PR for $TARGET_BRANCH"

              # Comment on original PR
              gh pr comment $PR_NUMBER --body "⚠️ Cherry-pick to \`$TARGET_BRANCH\` has conflicts: $NEW_PR"
            fi

            # Clean up - go back to main branch
            git checkout main
            git branch -D "$CHERRY_PICK_BRANCH" 2>/dev/null || true
          fi
        done
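The core of the deleted action is the `backport/<branch>` label convention: everything after the slash names the cherry-pick target branch. A minimal Python sketch of that mapping (the label value below is an assumed example, not taken from the diff):

```python
import re

def backport_target(label: str) -> str | None:
    """Return the target branch encoded in a backport label, mirroring the
    bash regex ^backport/(.+)$ used by the deleted action."""
    match = re.fullmatch(r"backport/(.+)", label)
    return match.group(1) if match else None

assert backport_target("backport/version-2025.8") == "version-2025.8"  # assumed label
assert backport_target("enhancement") is None
```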
@@ -2,28 +2,16 @@

import os
from json import dumps
from sys import exit as sysexit
from time import time

from authentik import authentik_version


def must_or_fail(input: str | None, error: str) -> str:
    if not input:
        print(f"::error::{error}")
        sysexit(1)
    return input


# Decide if we should push the image or not
should_push = True
if len(os.environ.get("DOCKER_USERNAME", "")) < 1:
    # Don't push if we don't have DOCKER_USERNAME, i.e. no secrets are available
    should_push = False
if (
    must_or_fail(os.environ.get("GITHUB_REPOSITORY"), "Repo required").lower()
    == "goauthentik/authentik-internal"
):
if os.environ.get("GITHUB_REPOSITORY").lower() == "goauthentik/authentik-internal":
    # Don't push on the internal repo
    should_push = False

@@ -32,16 +20,13 @@ if os.environ.get("GITHUB_HEAD_REF", "") != "":
    branch_name = os.environ["GITHUB_HEAD_REF"]
safe_branch_name = branch_name.replace("refs/heads/", "").replace("/", "-").replace("'", "-")

image_names = must_or_fail(os.getenv("IMAGE_NAME"), "Image name required").split(",")
image_names = os.getenv("IMAGE_NAME").split(",")
image_arch = os.getenv("IMAGE_ARCH") or None

is_pull_request = bool(os.getenv("PR_HEAD_SHA"))
is_release = "dev" not in image_names[0]

sha = must_or_fail(
    os.environ["GITHUB_SHA"] if not is_pull_request else os.getenv("PR_HEAD_SHA"),
    "could not determine SHA",
)
sha = os.environ["GITHUB_SHA"] if not is_pull_request else os.getenv("PR_HEAD_SHA")

# 2042.1.0 or 2042.1.0-rc1
version = authentik_version()

@@ -73,7 +58,7 @@ else:
    image_main_tag = image_tags[0].split(":")[-1]


def get_attest_image_names(image_with_tags: list[str]) -> str:
def get_attest_image_names(image_with_tags: list[str]):
    """Attestation only for GHCR"""
    image_tags = []
    for image_name in set(name.split(":")[0] for name in image_with_tags):

@@ -97,6 +82,7 @@ if os.getenv("RELEASE", "false").lower() == "true":
    image_build_args = [f"VERSION={os.getenv('REF')}"]
else:
    image_build_args = [f"GIT_BUILD_HASH={sha}"]
image_build_args = "\n".join(image_build_args)

with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
    print(f"shouldPush={str(should_push).lower()}", file=_output)

@@ -109,4 +95,4 @@ with open(os.environ["GITHUB_OUTPUT"], "a+", encoding="utf-8") as _output:
    print(f"imageMainTag={image_main_tag}", file=_output)
    print(f"imageMainName={image_tags[0]}", file=_output)
    print(f"cacheTo={cache_to}", file=_output)
    print(f"imageBuildArgs={"\n".join(image_build_args)}", file=_output)
    print(f"imageBuildArgs={image_build_args}", file=_output)
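One side of this diff guards every required environment variable with `must_or_fail`, which turns a missing value into a GitHub Actions `::error::` annotation plus a non-zero exit. A small self-contained sketch of that pattern (the usage line is illustrative; `GITHUB_REPOSITORY` is set automatically on GitHub runners):

```python
import os
import sys

def must_or_fail(value: str | None, error: str) -> str:
    """Fail the CI step with an ::error:: annotation when a required value is unset."""
    if not value:
        print(f"::error::{error}")
        sys.exit(1)
    return value

# Illustrative usage, matching the diff's "Repo required" guard.
repo = must_or_fail(os.environ.get("GITHUB_REPOSITORY"), "Repo required")
print(repo.lower())
```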
.github/actions/setup/action.yml (vendored, 21 changed lines)
@@ -8,9 +8,6 @@ inputs:
  postgresql_version:
    description: "Optional postgresql image tag"
    default: "16"
  profiles:
    description: "Extra profiles of supporting services to start"
    default: ""

runs:
  using: "composite"

@@ -58,13 +55,21 @@ runs:
      shell: bash
      run: |
        export PSQL_TAG=${{ inputs.postgresql_version }}
        export COMPOSE_PROFILES=${{ inputs.profiles }}
        docker compose -f .github/actions/setup/docker-compose.yml up -d
        cd web && npm ci
    - name: Generate config
      if: ${{ contains(inputs.dependencies, 'python') }}
      shell: bash
      env:
        PROFILES: ${{ inputs.profiles }}
      shell: uv run python {0}
      run: |
        uv run python3 ${{ github.action_path }}/ci_config.py
        from authentik.lib.generators import generate_id
        from yaml import safe_dump

        with open("local.env.yml", "w") as _config:
            safe_dump(
                {
                    "log_level": "debug",
                    "secret_key": generate_id(),
                },
                _config,
                default_flow_style=False,
            )
.github/actions/setup/ci_config.py (vendored, 18 lines deleted)
@@ -1,18 +0,0 @@
from os import getenv
from typing import Any

from yaml import safe_dump

from authentik.lib.generators import generate_id

config: dict[str, Any] = {
    "log_level": "debug",
    "secret_key": generate_id(),
}

profiles = getenv("PROFILES")
if profiles and "postgres_replica" in profiles:
    config["postgresql"] = {"read_replicas": {"0": {"host": "localhost", "port": 5433}}}

with open("local.env.yml", "w") as _config:
    safe_dump(config, _config, default_flow_style=False)
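The deleted script's only branching is on the `PROFILES` environment variable: when the `postgres_replica` profile is active, it adds a read replica pointing at the compose-mapped port 5433. A dependency-free sketch of that logic (a plain string stands in for authentik's `generate_id()`):

```python
from typing import Any

def build_ci_config(profiles: str | None, secret_key: str) -> dict[str, Any]:
    """Rebuild the dict ci_config.py dumps to local.env.yml."""
    config: dict[str, Any] = {"log_level": "debug", "secret_key": secret_key}
    if profiles and "postgres_replica" in profiles:
        config["postgresql"] = {"read_replicas": {"0": {"host": "localhost", "port": 5433}}}
    return config

assert "postgresql" in build_ci_config("postgres_replica", "dummy")
assert "postgresql" not in build_ci_config("", "dummy")
```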
.github/actions/setup/docker-compose.yml (vendored, 41 changed lines)
@@ -1,17 +1,8 @@
services:
  redis:
    image: docker.io/library/redis:7
    ports:
      - 6379:6379
    restart: always

  postgres:
  postgresql:
    image: docker.io/library/postgres:${PSQL_TAG:-16}
    volumes:
      - db-data:/var/lib/postgresql/data
      - ./primary/00-replication.sql:/docker-entrypoint-initdb.d/00-replication.sql
      - ./primary/01-replication-hba.sh:/docker-entrypoint-initdb.d/01-replication-hba.sh
    command: postgres -c 'wal_level=replica' -c 'max_wal_senders=10' -c 'max_replication_slots=10' -c 'listen_addresses=*'
    environment:
      POSTGRES_USER: authentik
      POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"

@@ -19,34 +10,12 @@ services:
    ports:
      - 5432:5432
    restart: always
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER}"]
      interval: 5s
      timeout: 5s
      retries: 5

  postgres_replica:
    profiles:
      - postgres_replica
    image: docker.io/library/postgres:${PSQL_TAG:-16}
    environment:
      POSTGRES_USER: authentik
      POSTGRES_PASSWORD: "EK-5jnKfjrGRm<77"
      POSTGRES_DB: authentik
  redis:
    image: docker.io/library/redis:7
    ports:
      - "5433:5432"
    volumes:
      - db-data-replica:/var/lib/postgresql/data
      - ./replica:/replica
    command: /replica/start.sh
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U $${POSTGRES_USER}"]
      interval: 5s
      timeout: 5s
      retries: 5
      - 6379:6379
    restart: always

volumes:
  db-data:
    driver: local
  db-data-replica:
    driver: local

@@ -1,9 +0,0 @@
-- Create replication role if it doesn't exist
DO $$ BEGIN
    IF NOT EXISTS (SELECT FROM pg_catalog.pg_roles WHERE rolname = 'replica') THEN
        CREATE ROLE replica WITH REPLICATION LOGIN PASSWORD 'EK-5jnKfjrGRm<77';
    END IF;
END $$;

-- Create replication slot if it doesn't exist
SELECT pg_create_physical_replication_slot('replica_slot', true);

@@ -1,3 +0,0 @@
#!/bin/bash
set -euxo pipefail
echo "host replication all all scram-sha-256" >> /var/lib/postgresql/data/pg_hba.conf

.github/actions/setup/replica/start.sh (vendored, 9 lines deleted)
@@ -1,9 +0,0 @@
#!/bin/bash
set -euxo pipefail
echo 'Waiting for primary to be ready...'
while ! pg_isready -h postgres -p 5432 -U replica; do sleep 1; done;
echo 'Primary is ready, starting replica...'
rm -rf /var/lib/postgresql/data/* 2>/dev/null || true
PGPASSWORD=${POSTGRES_PASSWORD} pg_basebackup -h postgres -U replica -D /var/lib/postgresql/data -Fp -Xs -R -P
echo 'Replication setup complete, starting PostgreSQL...'
docker-entrypoint.sh postgres
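start.sh blocks until the primary answers pg_isready before running pg_basebackup. A rough Python equivalent of that wait loop, reduced to a TCP reachability check (host and port come from the compose file above; this is a simplification, not a substitute for pg_isready's protocol-level check):

```python
import socket
import time

def wait_for_primary(host: str = "postgres", port: int = 5432, timeout: float = 60.0) -> None:
    """Poll until a TCP connection to the primary succeeds, like the
    pg_isready loop in start.sh (no protocol handshake is verified here)."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        try:
            with socket.create_connection((host, port), timeout=2):
                return
        except OSError:
            time.sleep(1)
    raise TimeoutError(f"primary at {host}:{port} not reachable")
```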
.github/cherry-pick-bot.yml (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
enabled: true
preservePullRequestTitle: true
.github/dependabot.yml (vendored, 6 changed lines)
@@ -77,12 +77,6 @@ updates:
        goauthentik:
          patterns:
            - "@goauthentik/*"
        react:
          patterns:
            - "react"
            - "react-dom"
            - "@types/react"
            - "@types/react-dom"
  - package-ecosystem: npm
    directory: "/website"
    schedule:
@@ -90,7 +90,7 @@ jobs:
          platforms: linux/${{ inputs.image_arch }}
          cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
          cache-to: ${{ steps.ev.outputs.cacheTo }}
      - uses: actions/attest-build-provenance@v3
      - uses: actions/attest-build-provenance@v2
        id: attest
        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
        with:

@@ -21,7 +21,7 @@ on:

jobs:
  build-server-amd64:
    uses: ./.github/workflows/_reusable-docker-build-single.yml
    uses: ./.github/workflows/_reusable-docker-build-single.yaml
    secrets: inherit
    with:
      image_name: ${{ inputs.image_name }}

@@ -31,7 +31,7 @@ jobs:
      registry_ghcr: ${{ inputs.registry_ghcr }}
      release: ${{ inputs.release }}
  build-server-arm64:
    uses: ./.github/workflows/_reusable-docker-build-single.yml
    uses: ./.github/workflows/_reusable-docker-build-single.yaml
    secrets: inherit
    with:
      image_name: ${{ inputs.image_name }}

@@ -97,7 +97,7 @@ jobs:
          sources: |
            ${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-amd64.outputs.image-digest }}
            ${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-arm64.outputs.image-digest }}
      - uses: actions/attest-build-provenance@v3
      - uses: actions/attest-build-provenance@v2
        id: attest
        with:
          subject-name: ${{ steps.ev.outputs.attestImageNames }}
.github/workflows/api-py-publish.yml (vendored, new file, 68 lines)
@@ -0,0 +1,68 @@
---
name: API - Publish Python client

on:
  push:
    branches: [main]
    paths:
      - "schema.yml"
  workflow_dispatch:

jobs:
  build:
    if: ${{ github.repository != 'goauthentik/authentik-internal' }}
    runs-on: ubuntu-latest
    permissions:
      id-token: write
    steps:
      - id: generate_token
        uses: tibdex/github-app-token@v2
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
      - uses: actions/checkout@v5
        with:
          token: ${{ steps.generate_token.outputs.token }}
      - name: Install poetry & deps
        shell: bash
        run: |
          pipx install poetry || true
          sudo apt-get update
          sudo apt-get install --no-install-recommends -y libpq-dev openssl libxmlsec1-dev pkg-config gettext
      - name: Setup python and restore poetry
        uses: actions/setup-python@v5
        with:
          python-version-file: "pyproject.toml"
      - name: Generate API Client
        run: make gen-client-py
      - name: Publish package
        working-directory: gen-py-api/
        run: |
          poetry build
      - name: Publish package to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          packages-dir: gen-py-api/dist/
      # We can't easily upgrade the API client being used due to poetry being poetry
      # so we'll have to rely on dependabot
      # - name: Upgrade /
      #   run: |
      #     export VERSION=$(cd gen-py-api && poetry version -s)
      #     poetry add "authentik_client=$VERSION" --allow-prereleases --lock
      # - uses: peter-evans/create-pull-request@v6
      #   id: cpr
      #   with:
      #     token: ${{ steps.generate_token.outputs.token }}
      #     branch: update-root-api-client
      #     commit-message: "root: bump API Client version"
      #     title: "root: bump API Client version"
      #     body: "root: bump API Client version"
      #     delete-branch: true
      #     signoff: true
      #     # ID from https://api.github.com/users/authentik-automation[bot]
      #     author: authentik-automation[bot] <135050075+authentik-automation[bot]@users.noreply.github.com>
      # - uses: peter-evans/enable-pull-request-automerge@v3
      #   with:
      #     token: ${{ steps.generate_token.outputs.token }}
      #     pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
      #     merge-method: squash
.github/workflows/api-ts-publish.yml (vendored, 2 changed lines)
@@ -21,7 +21,7 @@ jobs:
      - uses: actions/checkout@v5
        with:
          token: ${{ steps.generate_token.outputs.token }}
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: web/package.json
          registry-url: "https://registry.npmjs.org"
.github/workflows/ci-api-docs.yml (vendored, 4 changed lines)
@@ -33,7 +33,7 @@ jobs:

    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: website/package.json
          cache: "npm"

@@ -71,7 +71,7 @@ jobs:
        with:
          name: api-docs
          path: website/api/build
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: website/package.json
          cache: "npm"
.github/workflows/ci-aws-cfn.yml (vendored, 2 changed lines)
@@ -24,7 +24,7 @@ jobs:
      - uses: actions/checkout@v5
      - name: Setup authentik env
        uses: ./.github/actions/setup
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: lifecycle/aws/package.json
          cache: "npm"
.github/workflows/ci-docs.yml (vendored, 6 changed lines)
@@ -33,7 +33,7 @@ jobs:

    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: website/package.json
          cache: "npm"

@@ -49,7 +49,7 @@ jobs:

    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: website/package.json
          cache: "npm"

@@ -102,7 +102,7 @@ jobs:
          context: .
          cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache
          cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }}
      - uses: actions/attest-build-provenance@v3
      - uses: actions/attest-build-provenance@v2
        id: attest
        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
        with:
.github/workflows/ci-main.yml (vendored, 11 changed lines)
@@ -34,7 +34,6 @@ jobs:
      - codespell
      - pending-migrations
      - ruff
      - mypy
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5

@@ -67,6 +66,7 @@ jobs:
      fail-fast: false
      matrix:
        psql:
          - 15-alpine
          - 16-alpine
          - 17-alpine
        run_id: [1, 2, 3, 4, 5]

@@ -113,7 +113,7 @@ jobs:
        run: |
          uv run make ci-test
  test-unittest:
    name: test-unittest - PostgreSQL ${{ matrix.psql }} (${{ matrix.profiles }}) - Run ${{ matrix.run_id }}/5
    name: test-unittest - PostgreSQL ${{ matrix.psql }} - Run ${{ matrix.run_id }}/5
    runs-on: ubuntu-latest
    timeout-minutes: 20
    needs: test-make-seed

@@ -121,11 +121,9 @@ jobs:
      fail-fast: false
      matrix:
        psql:
          - 15-alpine
          - 16-alpine
          - 17-alpine
        profiles:
          - ""
          - postgres_replica
        run_id: [1, 2, 3, 4, 5]
    steps:
      - uses: actions/checkout@v5

@@ -133,7 +131,6 @@ jobs:
        uses: ./.github/actions/setup
        with:
          postgresql_version: ${{ matrix.psql }}
          profiles: ${{ matrix.profiles }}
      - name: run unittest
        env:
          CI_TEST_SEED: ${{ needs.test-make-seed.outputs.seed }}

@@ -259,7 +256,7 @@ jobs:
      # Needed for checkout
      contents: read
    needs: ci-core-mark
    uses: ./.github/workflows/_reusable-docker-build.yml
    uses: ./.github/workflows/_reusable-docker-build.yaml
    secrets: inherit
    with:
      image_name: ${{ github.repository == 'goauthentik/authentik-internal' && 'ghcr.io/goauthentik/internal-server' || 'ghcr.io/goauthentik/dev-server' }}
.github/workflows/ci-outpost.yml (vendored, 10 changed lines)
@@ -17,7 +17,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-go@v6
      - uses: actions/setup-go@v5
        with:
          go-version-file: "go.mod"
      - name: Prepare and generate API

@@ -38,7 +38,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-go@v6
      - uses: actions/setup-go@v5
        with:
          go-version-file: "go.mod"
      - name: Setup authentik env

@@ -115,7 +115,7 @@ jobs:
          context: .
          cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache
          cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }}
      - uses: actions/attest-build-provenance@v3
      - uses: actions/attest-build-provenance@v2
        id: attest
        if: ${{ steps.ev.outputs.shouldPush == 'true' }}
        with:

@@ -141,10 +141,10 @@ jobs:
      - uses: actions/checkout@v5
        with:
          ref: ${{ github.event.pull_request.head.sha }}
      - uses: actions/setup-go@v6
      - uses: actions/setup-go@v5
        with:
          go-version-file: "go.mod"
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: web/package.json
          cache: "npm"
.github/workflows/ci-web.yml (vendored, 6 changed lines)
@@ -32,7 +32,7 @@ jobs:
        project: web
    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: ${{ matrix.project }}/package.json
          cache: "npm"

@@ -49,7 +49,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: web/package.json
          cache: "npm"

@@ -77,7 +77,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: web/package.json
          cache: "npm"
.github/workflows/gh-cherry-pick.yml (vendored, 36 lines deleted)
@@ -1,36 +0,0 @@
name: GH - Cherry-pick

on:
  pull_request_target:
    types: [closed, labeled]

jobs:
  cherry-pick:
    runs-on: ubuntu-latest
    steps:
      - id: app-token
        name: Generate app token
        uses: actions/create-github-app-token@v2
        if: ${{ env.GH_APP_ID != '' }}
        with:
          app-id: ${{ secrets.GH_APP_ID }}
          private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
        env:
          GH_APP_ID: ${{ secrets.GH_APP_ID }}
      - uses: actions/checkout@v5
        if: ${{ steps.app-token.outcome != 'skipped' }}
        with:
          fetch-depth: 0
          token: "${{ steps.app-token.outputs.token }}"
      - id: get-user-id
        if: ${{ steps.app-token.outcome != 'skipped' }}
        name: Get GitHub app user ID
        run: echo "user-id=$(gh api "/users/${{ steps.app-token.outputs.app-slug }}[bot]" --jq .id)" >> "$GITHUB_OUTPUT"
        env:
          GH_TOKEN: "${{ steps.app-token.outputs.token }}"
      - uses: ./.github/actions/cherry-pick
        if: ${{ steps.app-token.outcome != 'skipped' }}
        with:
          token: ${{ steps.app-token.outputs.token }}
          git_user: ${{ steps.app-token.outputs.app-slug }}[bot]
          git_user_email: '${{ steps.get-user-id.outputs.user-id }}+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com'
.github/workflows/packages-npm-publish.yml (vendored, 4 changed lines)
@@ -29,13 +29,13 @@ jobs:
      - uses: actions/checkout@v5
        with:
          fetch-depth: 2
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: ${{ matrix.package }}/package.json
          registry-url: "https://registry.npmjs.org"
      - name: Get changed files
        id: changed-files
        uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62
        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c
        with:
          files: |
            ${{ matrix.package }}/package.json
.github/workflows/release-branch-off.yml (vendored, 3 changed lines)
@@ -43,13 +43,10 @@ jobs:
        with:
          dependencies: python
      - name: Create version branch
        env:
          GH_TOKEN: "${{ steps.app-token.outputs.token }}"
        run: |
          current_major_version="$(uv version --short | grep -oE "^[0-9]{4}\.[0-9]{1,2}")"
          git checkout -b "version-${current_major_version}"
          git push origin "version-${current_major_version}"
          gh label create "backport/version-${current_major_version}" --description "Add this label to PRs to backport changes to version-${current_major_version}" --color "fbca04"
  bump-version-pr:
    name: Open version bump PR
    needs:
.github/workflows/release-publish.yml (vendored, 14 changed lines)
@@ -7,7 +7,7 @@ on:

jobs:
  build-server:
    uses: ./.github/workflows/_reusable-docker-build.yml
    uses: ./.github/workflows/_reusable-docker-build.yaml
    secrets: inherit
    permissions:
      contents: read

@@ -58,7 +58,7 @@ jobs:
          push: true
          platforms: linux/amd64,linux/arm64
          context: .
      - uses: actions/attest-build-provenance@v3
      - uses: actions/attest-build-provenance@v2
        id: attest
        if: true
        with:

@@ -84,7 +84,7 @@ jobs:
        - rac
    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-go@v6
      - uses: actions/setup-go@v5
        with:
          go-version-file: "go.mod"
      - name: Set up QEMU

@@ -124,7 +124,7 @@ jobs:
          file: ${{ matrix.type }}.Dockerfile
          platforms: linux/amd64,linux/arm64
          context: .
      - uses: actions/attest-build-provenance@v3
      - uses: actions/attest-build-provenance@v2
        id: attest
        with:
          subject-name: ${{ steps.ev.outputs.attestImageNames }}

@@ -147,10 +147,10 @@ jobs:
        goarch: [amd64, arm64]
    steps:
      - uses: actions/checkout@v5
      - uses: actions/setup-go@v6
      - uses: actions/setup-go@v5
        with:
          go-version-file: "go.mod"
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          node-version-file: web/package.json
          cache: "npm"

@@ -187,7 +187,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: aws-actions/configure-aws-credentials@v5
      - uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: "arn:aws:iam::016170277896:role/github_goauthentik_authentik"
          aws-region: ${{ env.AWS_REGION }}
.github/workflows/repo-stale.yml (vendored, 2 changed lines)
@@ -20,7 +20,7 @@ jobs:
        with:
          app_id: ${{ secrets.GH_APP_ID }}
          private_key: ${{ secrets.GH_APP_PRIVATE_KEY }}
      - uses: actions/stale@v10
      - uses: actions/stale@v9
        with:
          repo-token: ${{ steps.generate_token.outputs.token }}
          days-before-stale: 60
.vscode/settings.json (vendored, 12 changed lines)
@@ -1,16 +1,4 @@
{
    "[css]": {
        "editor.minimap.markSectionHeaderRegex": "#\\bregion\\s*(?<separator>-?)\\s*(?<label>.*)\\*/$"
    },
    "[makefile]": {
        "editor.minimap.markSectionHeaderRegex": "^#{25}\n##\\s\\s*(?<separator>-?)\\s*(?<label>[^\n]*)\n#{25}$"
    },
    "[dockerfile]": {
        "editor.minimap.markSectionHeaderRegex": "\\bStage\\s*\\d:(?<separator>-?)\\s*(?<label>.*)$"
    },
    "[jsonc]": {
        "editor.minimap.markSectionHeaderRegex": "#\\bregion\\s*(?<separator>-?)\\s*(?<label>.*)$"
    },
    "todo-tree.tree.showCountsInTree": true,
    "todo-tree.tree.showBadges": true,
    "yaml.customTags": [
@@ -1,4 +0,0 @@
# Contributing to authentik

Thanks for your interest in contributing! Please see our [contributing guide](https://docs.goauthentik.io/docs/developer-docs/?utm_source=github) for more information.

CONTRIBUTING.md (symbolic link, 1 line)
@@ -0,0 +1 @@
website/docs/developer-docs/index.md
@@ -76,9 +76,9 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
    /bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

# Stage 4: Download uv
FROM ghcr.io/astral-sh/uv:0.8.22 AS uv
FROM ghcr.io/astral-sh/uv:0.8.13 AS uv
# Stage 5: Base python image
FROM ghcr.io/goauthentik/fips-python:3.13.7-slim-trixie-fips AS python-base
FROM ghcr.io/goauthentik/fips-python:3.13.7-slim-bookworm-fips AS python-base

ENV VENV_PATH="/ak-root/.venv" \
    PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \
Makefile (74 changed lines)
@@ -18,24 +18,7 @@ pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/
pg_name := $(shell uv run python -m authentik.lib.config postgresql.name 2>/dev/null)
redis_db := $(shell uv run python -m authentik.lib.config redis.db 2>/dev/null)

UNAME := $(shell uname)

# For macOS users, add the libxml2 installed from brew libxmlsec1 to the build path
# to prevent SAML-related tests from failing and ensure correct pip dependency compilation
ifeq ($(UNAME), Darwin)
	# Only add for brew users who installed libxmlsec1
	BREW_EXISTS := $(shell command -v brew 2> /dev/null)
	ifdef BREW_EXISTS
		LIBXML2_EXISTS := $(shell brew list libxml2 2> /dev/null)
		ifdef LIBXML2_EXISTS
			BREW_LDFLAGS := -L$(shell brew --prefix libxml2)/lib $(LDFLAGS)
			BREW_CPPFLAGS := -I$(shell brew --prefix libxml2)/include $(CPPFLAGS)
			BREW_PKG_CONFIG_PATH := $(shell brew --prefix libxml2)/lib/pkgconfig:$(PKG_CONFIG_PATH)
		endif
	endif
endif

all: lint-fix lint gen web test ## Lint, build, and test everything
all: lint-fix lint test gen web ## Lint, build, and test everything

HELP_WIDTH := $(shell grep -h '^[a-z][^ ]*:.*\#\#' $(MAKEFILE_LIST) 2>/dev/null | \
	cut -d':' -f1 | awk '{printf "%d\n", length}' | sort -rn | head -1)

@@ -67,14 +50,7 @@ lint: ## Lint the python and golang sources
	golangci-lint run -v

core-install:
ifdef LIBXML2_EXISTS
	# Clear cache to ensure fresh compilation
	uv cache clean
	# Force compilation from source for lxml and xmlsec with correct environment
	LDFLAGS="$(BREW_LDFLAGS)" CPPFLAGS="$(BREW_CPPFLAGS)" PKG_CONFIG_PATH="$(BREW_PKG_CONFIG_PATH)" uv sync --frozen --reinstall-package lxml --reinstall-package xmlsec --no-binary-package lxml --no-binary-package xmlsec
else
	uv sync --frozen
endif

migrate: ## Run the Authentik Django server's migrations
	uv run python -m lifecycle.migrate

@@ -184,7 +160,7 @@ gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescri
	docker run \
		--rm -v ${PWD}:/local \
		--user ${UID}:${GID} \
		docker.io/openapitools/openapi-generator-cli:v7.15.0 generate \
		docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
		-i /local/schema.yml \
		-g typescript-fetch \
		-o /local/${GEN_API_TS} \

@@ -193,7 +169,6 @@ gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescri
		--git-repo-id authentik \
		--git-user-id goauthentik

	cd ${PWD}/${GEN_API_TS} && npm i
	cd ${PWD}/${GEN_API_TS} && npm link
	cd ${PWD}/web && npm link @goauthentik/api

@@ -201,7 +176,7 @@ gen-client-py: gen-clean-py ## Build and install the authentik API for Python
	docker run \
		--rm -v ${PWD}:/local \
		--user ${UID}:${GID} \
		docker.io/openapitools/openapi-generator-cli:v7.15.0 generate \
		docker.io/openapitools/openapi-generator-cli:v7.11.0 generate \
		-i /local/schema.yml \
		-g python \
		-o /local/${GEN_API_PY} \

@@ -239,30 +214,34 @@ node-install: ## Install the necessary libraries to build Node.js packages
#########################

web-build: node-install ## Build the Authentik UI
	npm run --prefix web build
	cd web && npm run build

web: web-lint-fix web-lint web-check-compile ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it

web-test: ## Run tests for the Authentik UI
	npm run --prefix web test
	cd web && npm run test

web-watch: ## Build and watch the Authentik UI for changes, updating automatically
	npm run --prefix web watch
	rm -rf web/dist/
	mkdir web/dist/
	touch web/dist/.gitkeep
	cd web && npm run watch

web-storybook-watch: ## Build and run the storybook documentation server
	npm run --prefix web storybook
	cd web && npm run storybook

web-lint-fix:
	npm run --prefix web prettier
	cd web && npm run prettier

web-lint:
	npm run --prefix web lint
	npm run --prefix web lit-analyse
	cd web && npm run lint
	cd web && npm run lit-analyse

web-check-compile:
	npm run --prefix web tsc
	cd web && npm run tsc

web-i18n-extract:
	npm run --prefix web extract-locales
	cd web && npm run extract-locales

#########################
## Docs

@@ -274,31 +253,31 @@ docs-install:
	npm ci --prefix website

docs-lint-fix: lint-codespell
	npm run --prefix website prettier
	npm run prettier --prefix website

docs-build:
	npm run --prefix website build
	npm run build --prefix website

docs-watch: ## Build and watch the topics documentation
	npm run --prefix website start
	npm run start --prefix website

integrations: docs-lint-fix integrations-build ## Fix formatting issues in the integrations source code, lint the code, and compile it

integrations-build:
	npm run --prefix website -w integrations build
	npm run build --prefix website -w integrations

integrations-watch: ## Build and watch the Integrations documentation
	npm run --prefix website -w integrations start
	npm run start --prefix website -w integrations

docs-api-build:
	npm run --prefix website -w api build
	npm run build --prefix website -w api

docs-api-watch: ## Build and watch the API documentation
	npm run --prefix website -w api build:api
	npm run --prefix website -w api start
	npm run build:api --prefix website -w api
	npm run start --prefix website -w api

docs-api-clean: ## Clean generated API documentation
	npm run --prefix website -w api build:api:clean
	npm run build:api:clean --prefix website -w api

#########################
## Docker

@@ -321,9 +300,6 @@ ci--meta-debug:
	python -V
	node --version

ci-mypy: ci--meta-debug
	uv run mypy --strict $(PY_SOURCES)

ci-black: ci--meta-debug
	uv run black --check $(PY_SOURCES)
README.md (28 changed lines)
@@ -9,21 +9,21 @@
[](https://github.com/goauthentik/authentik/actions/workflows/ci-outpost.yml)
[](https://github.com/goauthentik/authentik/actions/workflows/ci-web.yml)
[](https://codecov.io/gh/goauthentik/authentik)
[](https://www.transifex.com/authentik/authentik/)

## What is authentik?

authentik is an open-source Identity Provider (IdP) for modern SSO. It supports SAML, OAuth2/OIDC, LDAP, RADIUS, and more, designed for self-hosting from small labs to large production clusters.
authentik is an open-source Identity Provider that emphasizes flexibility and versatility, with support for a wide set of protocols.

Our [enterprise offering](https://goauthentik.io/pricing) is available for organizations to securely replace existing IdPs such as Okta, Auth0, Entra ID, and Ping Identity for robust, large-scale identity management.
Our [enterprise offer](https://goauthentik.io/pricing) can also be used as a self-hosted replacement for large-scale deployments of Okta/Auth0, Entra ID, Ping Identity, or other legacy IdPs for employees and B2B2C use.

## Installation

- Docker Compose: recommended for small/test setups. See the [documentation](https://docs.goauthentik.io/docs/install-config/install/docker-compose/).
- Kubernetes (Helm Chart): recommended for larger setups. See the [documentation](https://docs.goauthentik.io/docs/install-config/install/kubernetes/) and the Helm chart [repository](https://github.com/goauthentik/helm).
- AWS CloudFormation: deploy on AWS using our official templates. See the [documentation](https://docs.goauthentik.io/docs/install-config/install/aws/).
- DigitalOcean Marketplace: one-click deployment via the official Marketplace app. See the [app listing](https://marketplace.digitalocean.com/apps/authentik).
For small/test setups it is recommended to use Docker Compose; refer to the [documentation](https://goauthentik.io/docs/installation/docker-compose/?utm_source=github).

For bigger setups, there is a Helm Chart [here](https://github.com/goauthentik/helm). This is documented [here](https://goauthentik.io/docs/installation/kubernetes/?utm_source=github).

## Screenshots

@@ -32,20 +32,14 @@ Our [enterprise offering](https://goauthentik.io/pricing) is available for organ
|  |  |
|  |  |

## Development and contributions
## Development

See the [Developer Documentation](https://docs.goauthentik.io/docs/developer-docs/) for information about setting up local build environments, testing your contributions, and our contribution process.
See [Developer Documentation](https://docs.goauthentik.io/docs/developer-docs/?utm_source=github)

## Security

Please see [SECURITY.md](SECURITY.md).
See [SECURITY.md](SECURITY.md)

## Adoption
## Adoption and Contributions

Using authentik? We'd love to hear your story and feature your logo. Email us at [hello@goauthentik.io](mailto:hello@goauthentik.io) or open a GitHub Issue/PR!

## License

[](LICENSE)
[](website/LICENSE)
[](authentik/enterprise/LICENSE)
Your organization uses authentik? We'd love to add your logo to the readme and our website! Email us @ hello@goauthentik.io or open a GitHub Issue/PR! For more information on how to contribute to authentik, please refer to our [contribution guide](https://docs.goauthentik.io/docs/developer-docs?utm_source=github).
@@ -104,68 +104,6 @@ def postprocess_schema_responses(result, generator: SchemaGenerator, **kwargs):
    return result


def postprocess_schema_pagination(result, generator: SchemaGenerator, **kwargs):
    to_replace = {
        "ordering": create_component(
            generator,
            "QueryPaginationOrdering",
            {
                "name": "ordering",
                "required": False,
                "in": "query",
                "description": "Which field to use when ordering the results.",
                "schema": {"type": "string"},
            },
            ResolvedComponent.PARAMETER,
        ),
        "page": create_component(
            generator,
            "QueryPaginationPage",
            {
                "name": "page",
                "required": False,
                "in": "query",
                "description": "A page number within the paginated result set.",
                "schema": {"type": "integer"},
            },
            ResolvedComponent.PARAMETER,
        ),
        "page_size": create_component(
            generator,
            "QueryPaginationPageSize",
            {
                "name": "page_size",
                "required": False,
                "in": "query",
                "description": "Number of results to return per page.",
                "schema": {"type": "integer"},
            },
            ResolvedComponent.PARAMETER,
        ),
        "search": create_component(
            generator,
            "QuerySearch",
            {
                "name": "search",
                "required": False,
                "in": "query",
                "description": "A search term.",
                "schema": {"type": "string"},
            },
            ResolvedComponent.PARAMETER,
        ),
    }
    for path in result["paths"].values():
        for method in path.values():
            # print(method["parameters"])
            for idx, param in enumerate(method.get("parameters", [])):
                for replace_name, replace_ref in to_replace.items():
                    if param["name"] == replace_name:
                        method["parameters"][idx] = replace_ref.ref
            # print(method["parameters"])
    return result


def preprocess_schema_exclude_non_api(endpoints, **kwargs):
    """Filter out all API Views which are not mounted under /api"""
    return [
@@ -76,7 +76,6 @@ from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
from authentik.rbac.models import Role
from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType
from authentik.stages.consent.models import UserConsent
from authentik.tasks.models import Task
from authentik.tenants.models import Tenant

@@ -136,7 +135,6 @@ def excluded_models() -> list[type[Model]]:
        EndpointDeviceConnection,
        DeviceToken,
        StreamEvent,
        UserConsent,
    )
@@ -38,7 +38,6 @@ from authentik.blueprints.v1.oci import OCI_PREFIX
from authentik.events.logs import capture_logs
from authentik.events.utils import sanitize_dict
from authentik.lib.config import CONFIG
from authentik.tasks.apps import PRIORITY_HIGH
from authentik.tasks.models import Task
from authentik.tasks.schedules.models import Schedule
from authentik.tenants.models import Tenant

@@ -112,7 +111,6 @@ class BlueprintEventHandler(FileSystemEventHandler):
@actor(
    description=_("Find blueprints as `blueprints_find` does, but return a safe dict."),
    throws=(DatabaseError, ProgrammingError, InternalError),
    priority=PRIORITY_HIGH,
)
def blueprints_find_dict():
    blueprints = []
@@ -113,7 +113,7 @@ class Brand(SerializerModel):
        try:
            return self.attributes.get("settings", {}).get("locale", "")

        except Exception as exc:  # noqa
        except Exception as exc:
            LOGGER.warning("Failed to get default locale", exc=exc)
            return ""
@@ -295,7 +295,7 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
    @extend_schema(
        request=UserAccountSerializer,
        responses={
            204: OpenApiResponse(description="User removed"),
            204: OpenApiResponse(description="User added"),
            404: OpenApiResponse(description="User not found"),
        },
    )

@@ -307,7 +307,7 @@ class GroupViewSet(UsedByMixin, ModelViewSet):
        permission_classes=[],
    )
    def remove_user(self, request: Request, pk: str) -> Response:
        """Remove user from group"""
        """Add user to group"""
        group: Group = self.get_object()
        user: User = (
            get_objects_for_user(request.user, "authentik_core.view_user")
@@ -171,7 +171,7 @@ class PropertyMappingViewSet(
        except PropertyMappingExpressionException as exc:
            response_data["result"] = exception_to_string(exc.exc)
            response_data["successful"] = False
        except Exception as exc:  # noqa
        except Exception as exc:
            response_data["result"] = exception_to_string(exc)
            response_data["successful"] = False
        response = PropertyMappingTestResultSerializer(response_data)
@@ -328,12 +328,6 @@ class SessionUserSerializer(PassiveSerializer):
    original = UserSelfSerializer(required=False)


class UserPasswordSetSerializer(PassiveSerializer):
    """Payload to set a users' password directly"""

    password = CharField(required=True)


class UsersFilter(FilterSet):
    """Filter for users"""

@@ -591,7 +585,12 @@ class UserViewSet(UsedByMixin, ModelViewSet):

    @permission_required("authentik_core.reset_user_password")
    @extend_schema(
        request=UserPasswordSetSerializer,
        request=inline_serializer(
            "UserPasswordSetSerializer",
            {
                "password": CharField(required=True),
            },
        ),
        responses={
            204: OpenApiResponse(description="Successfully changed password"),
            400: OpenApiResponse(description="Bad request"),

@@ -600,11 +599,9 @@ class UserViewSet(UsedByMixin, ModelViewSet):
    @action(detail=True, methods=["POST"], permission_classes=[])
    def set_password(self, request: Request, pk: int) -> Response:
        """Set password for user"""
        data = UserPasswordSetSerializer(data=request.data)
        data.is_valid(raise_exception=True)
        user: User = self.get_object()
        try:
            user.set_password(data.validated_data["password"], request=request)
            user.set_password(request.data.get("password"), request=request)
            user.save()
        except (ValidationError, IntegrityError) as exc:
            LOGGER.debug("Failed to set password", exc=exc)

@@ -681,7 +678,8 @@ class UserViewSet(UsedByMixin, ModelViewSet):
            },
        ),
        responses={
            204: OpenApiResponse(description="Successfully started impersonation"),
            "204": OpenApiResponse(description="Successfully started impersonation"),
            "401": OpenApiResponse(description="Access denied"),
        },
    )
    @action(detail=True, methods=["POST"], permission_classes=[])

@@ -700,7 +698,7 @@ class UserViewSet(UsedByMixin, ModelViewSet):
                "User attempted to impersonate without permissions",
                user=request.user,
            )
            return Response(status=403)
            return Response(status=401)
        if user_to_be.pk == self.request.user.pk:
            LOGGER.debug("User attempted to impersonate themselves", user=request.user)
            return Response(status=401)

@@ -709,19 +707,19 @@ class UserViewSet(UsedByMixin, ModelViewSet):
                "User attempted to impersonate without providing a reason",
                user=request.user,
            )
            raise ValidationError({"reason": _("This field is required.")})
            return Response(status=401)

        request.session[SESSION_KEY_IMPERSONATE_ORIGINAL_USER] = request.user
        request.session[SESSION_KEY_IMPERSONATE_USER] = user_to_be

        Event.new(EventAction.IMPERSONATION_STARTED, reason=reason).from_http(request, user_to_be)

        return Response(status=204)
        return Response(status=201)

    @extend_schema(
        request=OpenApiTypes.NONE,
        responses={
            "204": OpenApiResponse(description="Successfully ended impersonation"),
            "204": OpenApiResponse(description="Successfully started impersonation"),
        },
    )
    @action(detail=False, methods=["GET"])
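One side of the set_password hunk validates the request body through a serializer before touching the user. A standalone sketch of that validate-then-read pattern, using the field from the diff (authentik's PassiveSerializer base class is replaced here by a plain DRF Serializer, and the request body is an assumed example):

```python
from rest_framework.serializers import CharField, Serializer

class UserPasswordSetSerializer(Serializer):
    """Payload to set a users' password directly (field per the diff above)."""
    password = CharField(required=True)

data = UserPasswordSetSerializer(data={"password": "example-password"})  # assumed body
data.is_valid(raise_exception=True)  # raises a ValidationError (HTTP 400) when missing
print(data.validated_data["password"])
```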
@@ -1,6 +1,6 @@
"""custom runserver command"""

from io import StringIO
from typing import TextIO

from daphne.management.commands.runserver import Command as RunServer
from daphne.server import Server

@@ -33,4 +33,4 @@ class Command(RunServer):
        super().__init__(*args, **kwargs)
        # Redirect standard stdout banner from Daphne into the void
        # as there are a couple more steps that happen before startup is fully done
        self.stdout = StringIO()
        self.stdout = TextIO()
@@ -99,7 +99,7 @@ class Command(BaseCommand):
        else:
            try:
                hook()
            except Exception:  # noqa
            except Exception:
                # Match the behavior of the cpython shell where an error in
                # sys.__interactivehook__ prints a warning and the exception
                # and continues.
@@ -114,21 +114,15 @@ class AttributesMixin(models.Model):

     def update_attributes(self, properties: dict[str, Any]):
         """Update fields and attributes, but correctly by merging dicts"""
-        needs_update = False
         for key, value in properties.items():
             if key == "attributes":
                 continue
-            if getattr(self, key, None) != value:
-                setattr(self, key, value)
-                needs_update = True
+            setattr(self, key, value)
         final_attributes = {}
         MERGE_LIST_UNIQUE.merge(final_attributes, self.attributes)
         MERGE_LIST_UNIQUE.merge(final_attributes, properties.get("attributes", {}))
-        if self.attributes != final_attributes:
-            self.attributes = final_attributes
-            needs_update = True
-        if needs_update:
-            self.save()
+        self.attributes = final_attributes
+        self.save()

     @classmethod
     def update_or_create_attributes(
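For readers unfamiliar with the merge helper in the hunk above: a minimal sketch of its observable behavior, assuming deepmerge-style semantics (sample values are invented; the import path mirrors how authentik's core models use it):

    # Hedged illustration of the attribute merge above. MERGE_LIST_UNIQUE is
    # assumed to merge dicts recursively and append unique list items, with
    # later merges winning on scalar conflicts.
    from authentik.lib.merge import MERGE_LIST_UNIQUE

    final_attributes = {}
    MERGE_LIST_UNIQUE.merge(final_attributes, {"groups": ["a"], "settings": {"locale": "en"}})
    MERGE_LIST_UNIQUE.merge(final_attributes, {"groups": ["b"], "settings": {"locale": "de"}})
    # final_attributes == {"groups": ["a", "b"], "settings": {"locale": "de"}}
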
@@ -406,7 +400,7 @@ class User(SerializerModel, GuardianUserMixin, AttributesMixin, AbstractUser):
         try:
             return self.attributes.get("settings", {}).get("locale", "")

-        except Exception as exc:  # noqa
+        except Exception as exc:
             LOGGER.warning("Failed to get default locale", exc=exc)
         if request:
             return request.brand.locale
@@ -587,7 +581,7 @@ class Application(SerializerModel, PolicyBindingModel):
             try:
                 return url % user.__dict__

-            except Exception as exc:  # noqa
+            except Exception as exc:
                 LOGGER.warning("Failed to format launch url", exc=exc)
                 return url
         return url
@@ -783,7 +777,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
                     "slug": self.slug,
                 }

-        except Exception as exc:  # noqa
+        except Exception as exc:
             LOGGER.warning("Failed to template user path", exc=exc, source=self)
             return User.default_path()

@@ -2,9 +2,10 @@

 from django.contrib.auth.signals import user_logged_in
 from django.core.cache import cache
+from django.core.signals import Signal
 from django.db.models import Model
 from django.db.models.signals import post_delete, post_save, pre_save
-from django.dispatch import Signal, receiver
+from django.dispatch import receiver
 from django.http.request import HttpRequest
 from structlog.stdlib import get_logger

@@ -14,7 +14,6 @@ from authentik.core.models import (
     ExpiringModel,
     User,
 )
-from authentik.lib.utils.db import chunked_queryset
 from authentik.tasks.models import Task

 LOGGER = get_logger()
@@ -29,7 +28,7 @@ def clean_expired_models():
         cls.objects.all().exclude(expiring=False).exclude(expiring=True, expires__gt=now())
     )
     amount = objects.count()
-    for obj in chunked_queryset(objects):
+    for obj in objects:
         obj.expire_action()
     LOGGER.debug("Expired models", model=cls, amount=amount)
     self.info(f"Expired {amount} {cls._meta.verbose_name_plural}")

@@ -59,7 +59,7 @@ class TestImpersonation(APITestCase):
             ),
             data={"reason": "some reason"},
         )
-        self.assertEqual(response.status_code, 204)
+        self.assertEqual(response.status_code, 201)

         response = self.client.get(reverse("authentik_api:user-me"))
         response_body = loads(response.content.decode())
@@ -80,7 +80,7 @@ class TestImpersonation(APITestCase):
             ),
             data={"reason": "some reason"},
         )
-        self.assertEqual(response.status_code, 204)
+        self.assertEqual(response.status_code, 201)

         response = self.client.get(reverse("authentik_api:user-me"))
         response_body = loads(response.content.decode())
@@ -137,10 +137,10 @@ class TestImpersonation(APITestCase):
         self.client.force_login(self.user)

         response = self.client.post(
-            reverse("authentik_api:user-impersonate", kwargs={"pk": self.other_user.pk}),
+            reverse("authentik_api:user-impersonate", kwargs={"pk": self.user.pk}),
             data={"reason": ""},
         )
-        self.assertEqual(response.status_code, 400)
+        self.assertEqual(response.status_code, 401)

         response = self.client.get(reverse("authentik_api:user-me"))
         response_body = loads(response.content.decode())

@@ -102,16 +102,6 @@ class TestUsersAPI(APITestCase):
         self.admin.refresh_from_db()
         self.assertTrue(self.admin.check_password(new_pw))

-    def test_set_password_blank(self):
-        """Test Direct password set"""
-        self.client.force_login(self.admin)
-        response = self.client.post(
-            reverse("authentik_api:user-set-password", kwargs={"pk": self.admin.pk}),
-            data={"password": ""},
-        )
-        self.assertEqual(response.status_code, 400)
-        self.assertJSONEqual(response.content, {"password": ["This field may not be blank."]})
-
     def test_recovery(self):
         """Test user recovery link"""
         flow = create_test_flow(

@@ -1,14 +0,0 @@
-from django.utils.translation import gettext as _
-from rest_framework.exceptions import ValidationError
-
-from authentik.crypto.models import CertificateKeyPair
-from authentik.enterprise.license import LicenseKey
-
-
-class RadiusProviderSerializerMixin:
-
-    def validate_certificate(self, cert: CertificateKeyPair) -> CertificateKeyPair:
-        if cert:
-            if not LicenseKey.cached_summary().status.is_valid:
-                raise ValidationError(_("Enterprise is required to use EAP-TLS."))
-        return cert
@@ -1,9 +0,0 @@
-from authentik.enterprise.apps import EnterpriseConfig
-
-
-class AuthentikEnterpriseProviderRadiusConfig(EnterpriseConfig):
-
-    name = "authentik.enterprise.providers.radius"
-    label = "authentik_enterprise_providers_radius"
-    verbose_name = "authentik Enterprise.Providers.Radius"
-    default = True
@@ -1,14 +0,0 @@
-from django.utils.translation import gettext as _
-from rest_framework.exceptions import ValidationError
-
-from authentik.enterprise.license import LicenseKey
-from authentik.providers.scim.models import SCIMAuthenticationMode
-
-
-class SCIMProviderSerializerMixin:
-
-    def validate_auth_mode(self, auth_mode: SCIMAuthenticationMode) -> SCIMAuthenticationMode:
-        if auth_mode == SCIMAuthenticationMode.OAUTH:
-            if not LicenseKey.cached_summary().status.is_valid:
-                raise ValidationError(_("Enterprise is required to use the OAuth mode."))
-        return auth_mode
@@ -1,9 +0,0 @@
-from authentik.enterprise.apps import EnterpriseConfig
-
-
-class AuthentikEnterpriseProviderSCIMConfig(EnterpriseConfig):
-
-    name = "authentik.enterprise.providers.scim"
-    label = "authentik_enterprise_providers_scim"
-    verbose_name = "authentik Enterprise.Providers.SCIM"
-    default = True
@@ -1,80 +0,0 @@
-from datetime import timedelta
-from typing import TYPE_CHECKING
-
-from django.utils.timezone import now
-from requests import Request, RequestException
-from structlog.stdlib import get_logger
-
-from authentik.providers.scim.clients.exceptions import SCIMRequestException
-from authentik.sources.oauth.clients.oauth2 import OAuth2Client
-from authentik.sources.oauth.models import OAuthSource, UserOAuthSourceConnection
-
-if TYPE_CHECKING:
-    from authentik.providers.scim.models import SCIMProvider
-
-
-class SCIMOAuthException(SCIMRequestException):
-    """Exceptions related to OAuth operations for SCIM requests"""
-
-
-class SCIMOAuthAuth:
-
-    def __init__(self, provider: "SCIMProvider"):
-        self.provider = provider
-        self.user = provider.auth_oauth_user
-        self.connection = self.get_connection()
-        self.logger = get_logger().bind()
-
-    def retrieve_token(self):
-        if not self.provider.auth_oauth:
-            return None
-        source: OAuthSource = self.provider.auth_oauth
-        client = OAuth2Client(source, None)
-        access_token_url = source.source_type.access_token_url or ""
-        if source.source_type.urls_customizable and source.access_token_url:
-            access_token_url = source.access_token_url
-        data = client.get_access_token_args(None, None)
-        data["grant_type"] = "password"
-        data.update(self.provider.auth_oauth_params)
-        try:
-            response = client.do_request(
-                "POST",
-                access_token_url,
-                auth=client.get_access_token_auth(),
-                data=data,
-                headers=client._default_headers,
-            )
-            response.raise_for_status()
-            body = response.json()
-            if "error" in body:
-                self.logger.info("Failed to get new OAuth token", error=body["error"])
-                raise SCIMOAuthException(response, body["error"])
-            return body
-        except RequestException as exc:
-            raise SCIMOAuthException(exc.response, message="Failed to get OAuth token") from exc
-
-    def get_connection(self):
-        token = UserOAuthSourceConnection.objects.filter(
-            source=self.provider.auth_oauth, user=self.user, expires__gt=now()
-        ).first()
-        if token and token.access_token:
-            return token
-        token = self.retrieve_token()
-        access_token = token["access_token"]
-        expires_in = int(token.get("expires_in", 0))
-        token, _ = UserOAuthSourceConnection.objects.update_or_create(
-            source=self.provider.auth_oauth,
-            user=self.user,
-            defaults={
-                "access_token": access_token,
-                "expires": now() + timedelta(seconds=expires_in),
-            },
-        )
-        return token
-
-    def __call__(self, request: Request) -> Request:
-        if not self.connection.is_valid:
-            self.logger.info("OAuth token expired, renewing token")
-            self.connection = self.get_connection()
-        request.headers["Authorization"] = f"Bearer {self.connection.access_token}"
-        return request
@@ -1,30 +0,0 @@
-from django.db.models import Model
-from django.db.models.signals import post_save
-from django.dispatch import receiver
-
-from authentik.core.models import USER_PATH_SYSTEM_PREFIX, User, UserTypes
-from authentik.events.middleware import audit_ignore
-from authentik.providers.scim.models import SCIMAuthenticationMode, SCIMProvider
-
-USER_PATH_PROVIDERS_SCIM = USER_PATH_SYSTEM_PREFIX + "/providers/scim"
-
-
-@receiver(post_save, sender=SCIMProvider)
-def scim_provider_post_save(sender: type[Model], instance: SCIMProvider, created: bool, **__):
-    """Create service account before provider is saved"""
-    identifier = f"ak-providers-scim-{instance.pk}"
-    with audit_ignore():
-        if instance.auth_mode == SCIMAuthenticationMode.OAUTH:
-            user, user_created = User.objects.update_or_create(
-                username=identifier,
-                defaults={
-                    "name": f"SCIM Provider {instance.name} Service-Account",
-                    "type": UserTypes.INTERNAL_SERVICE_ACCOUNT,
-                    "path": USER_PATH_PROVIDERS_SCIM,
-                },
-            )
-            if created or user_created:
-                instance.auth_oauth_user = user
-                instance.save()
-        elif instance.auth_mode == SCIMAuthenticationMode.TOKEN:
-            User.objects.filter(username=identifier).delete()
@@ -1,193 +0,0 @@
-"""SCIM OAuth tests"""
-
-from base64 import b64encode
-from datetime import timedelta
-from unittest.mock import MagicMock, PropertyMock, patch
-
-from django.urls import reverse
-from django.utils.timezone import now
-from requests_mock import Mocker
-from rest_framework.test import APITestCase
-
-from authentik.blueprints.tests import apply_blueprint
-from authentik.core.models import Application, Group, User
-from authentik.core.tests.utils import create_test_admin_user
-from authentik.enterprise.license import LicenseKey
-from authentik.enterprise.models import License
-from authentik.enterprise.tests.test_license import expiry_valid
-from authentik.lib.generators import generate_id
-from authentik.providers.scim.models import SCIMAuthenticationMode, SCIMMapping, SCIMProvider
-from authentik.sources.oauth.models import OAuthSource, UserOAuthSourceConnection
-from authentik.tenants.models import Tenant
-
-
-class SCIMOAuthTests(APITestCase):
-    """SCIM User tests"""
-
-    @apply_blueprint("system/providers-scim.yaml")
-    def setUp(self) -> None:
-        # Delete all users and groups as the mocked HTTP responses only return one ID
-        # which will cause errors with multiple users
-        Tenant.objects.update(avatars="none")
-        User.objects.all().exclude_anonymous().delete()
-        Group.objects.all().delete()
-        self.source = OAuthSource.objects.create(
-            name=generate_id(),
-            slug=generate_id(),
-            access_token_url="http://localhost/token",  # nosec
-            consumer_key=generate_id(),
-            consumer_secret=generate_id(),
-            provider_type="openidconnect",
-        )
-        self.provider = SCIMProvider.objects.create(
-            name=generate_id(),
-            url="https://localhost",
-            auth_mode=SCIMAuthenticationMode.OAUTH,
-            auth_oauth=self.source,
-            auth_oauth_params={
-                "foo": "bar",
-            },
-            exclude_users_service_account=True,
-        )
-        self.app: Application = Application.objects.create(
-            name=generate_id(),
-            slug=generate_id(),
-        )
-        self.app.backchannel_providers.add(self.provider)
-        self.provider.property_mappings.add(
-            SCIMMapping.objects.get(managed="goauthentik.io/providers/scim/user")
-        )
-        self.provider.property_mappings_group.add(
-            SCIMMapping.objects.get(managed="goauthentik.io/providers/scim/group")
-        )
-
-    def test_retrieve_token(self):
-        """Test token retrieval"""
-        with Mocker() as mocker:
-            token = generate_id()
-            mocker.post("http://localhost/token", json={"access_token": token, "expires_in": 3600})
-            self.provider.scim_auth()
-            conn = UserOAuthSourceConnection.objects.filter(
-                source=self.source,
-                user=self.provider.auth_oauth_user,
-            ).first()
-            self.assertIsNotNone(conn)
-            self.assertTrue(conn.is_valid)
-            auth = (
-                b64encode(
-                    b":".join((self.source.consumer_key.encode(), self.source.consumer_secret.encode()))
-                )
-                .strip()
-                .decode()
-            )
-            self.assertEqual(
-                mocker.request_history[0].headers["Authorization"],
-                f"Basic {auth}",
-            )
-            self.assertEqual(mocker.request_history[0].body, "grant_type=password&foo=bar")
-
-    def test_existing_token(self):
-        """Test existing token"""
-        UserOAuthSourceConnection.objects.create(
-            source=self.source,
-            user=self.provider.auth_oauth_user,
-            access_token=generate_id(),
-            expires=now() + timedelta(hours=3),
-        )
-        with Mocker() as mocker:
-            self.provider.scim_auth()
-            self.assertEqual(len(mocker.request_history), 0)
-
-    @Mocker()
-    def test_user_create(self, mock: Mocker):
-        """Test user creation"""
-        scim_id = generate_id()
-        token = generate_id()
-        mock.post("http://localhost/token", json={"access_token": token, "expires_in": 3600})
-        mock.get(
-            "https://localhost/ServiceProviderConfig",
-            json={},
-        )
-        mock.post(
-            "https://localhost/Users",
-            json={
-                "id": scim_id,
-            },
-        )
-        uid = generate_id()
-        user = User.objects.create(
-            username=uid,
-            name=f"{uid} {uid}",
-            email=f"{uid}@goauthentik.io",
-        )
-        self.assertEqual(mock.call_count, 3)
-        self.assertEqual(mock.request_history[1].method, "GET")
-        self.assertEqual(mock.request_history[2].method, "POST")
-        self.assertJSONEqual(
-            mock.request_history[2].body,
-            {
-                "schemas": ["urn:ietf:params:scim:schemas:core:2.0:User"],
-                "active": True,
-                "emails": [
-                    {
-                        "primary": True,
-                        "type": "other",
-                        "value": f"{uid}@goauthentik.io",
-                    }
-                ],
-                "externalId": user.uid,
-                "name": {
-                    "familyName": uid,
-                    "formatted": f"{uid} {uid}",
-                    "givenName": uid,
-                },
-                "displayName": f"{uid} {uid}",
-                "userName": uid,
-            },
-        )
-
-    @patch(
-        "authentik.enterprise.license.LicenseKey.validate",
-        MagicMock(
-            return_value=LicenseKey(
-                aud="",
-                exp=expiry_valid,
-                name=generate_id(),
-                internal_users=100,
-                external_users=100,
-            )
-        ),
-    )
-    def test_api_create(self):
-        License.objects.create(key=generate_id())
-        self.client.force_login(create_test_admin_user())
-        res = self.client.post(
-            reverse("authentik_api:scimprovider-list"),
-            {
-                "name": generate_id(),
-                "url": "http://localhost",
-                "auth_mode": "oauth",
-                "auth_oauth": str(self.source.pk),
-            },
-        )
-        self.assertEqual(res.status_code, 201)
-
-    @patch(
-        "authentik.enterprise.models.LicenseUsageStatus.is_valid",
-        PropertyMock(return_value=False),
-    )
-    def test_api_create_no_license(self):
-        self.client.force_login(create_test_admin_user())
-        res = self.client.post(
-            reverse("authentik_api:scimprovider-list"),
-            {
-                "name": generate_id(),
-                "url": "http://localhost",
-                "auth_mode": "oauth",
-                "auth_oauth": str(self.source.pk),
-            },
-        )
-        self.assertEqual(res.status_code, 400)
-        self.assertJSONEqual(
-            res.content, {"auth_mode": ["Enterprise is required to use the OAuth mode."]}
-        )
@@ -1,7 +1,6 @@
 SPECTACULAR_SETTINGS = {
     "POSTPROCESSING_HOOKS": [
         "authentik.api.schema.postprocess_schema_responses",
         "authentik.api.schema.postprocess_schema_pagination",
-        "authentik.enterprise.search.schema.postprocess_schema_search_autocomplete",
         "drf_spectacular.hooks.postprocess_schema_enums",
     ],

@@ -5,8 +5,6 @@ TENANT_APPS = [
     "authentik.enterprise.policies.unique_password",
     "authentik.enterprise.providers.google_workspace",
     "authentik.enterprise.providers.microsoft_entra",
-    "authentik.enterprise.providers.radius",
-    "authentik.enterprise.providers.scim",
     "authentik.enterprise.providers.ssf",
     "authentik.enterprise.search",
     "authentik.enterprise.stages.authenticator_endpoint_gdtc",

@@ -1,19 +0,0 @@
-# Generated by Django 5.1.12 on 2025-09-08 19:43
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("authentik_stages_authenticator_endpoint_gdtc", "0001_initial"),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name="authenticatorendpointgdtcstage",
-            name="friendly_name",
-            field=models.TextField(blank=True, default=""),
-            preserve_default=False,
-        ),
-    ]
@@ -19,7 +19,7 @@ if TYPE_CHECKING:
 class ASNDict(TypedDict):
     """ASN Details"""

-    asn: int | None
+    asn: int
     as_org: str | None
     network: str | None

@@ -60,7 +60,7 @@ class ASNContextProcessor(MMDBContextProcessor):
         except (GeoIP2Error, ValueError):
             return None

-    def asn_to_dict(self, asn: ASN | None) -> ASNDict | dict:
+    def asn_to_dict(self, asn: ASN | None) -> ASNDict:
         """Convert ASN to dict"""
         if not asn:
             return {}

@@ -19,10 +19,10 @@ if TYPE_CHECKING:
 class GeoIPDict(TypedDict):
     """GeoIP Details"""

-    continent: str | None
-    country: str | None
-    lat: float | None
-    long: float | None
+    continent: str
+    country: str
+    lat: float
+    long: float
     city: str


@@ -61,7 +61,7 @@ class GeoIPContextProcessor(MMDBContextProcessor):
         except (GeoIP2Error, ValueError):
             return None

-    def city_to_dict(self, city: City | None) -> GeoIPDict | dict:
+    def city_to_dict(self, city: City | None) -> GeoIPDict:
         """Convert City to dict"""
         if not city:
             return {}

@@ -197,8 +197,7 @@ class AuditMiddleware:
             return
         if _CTX_IGNORE.get():
             return
-        current_request = _CTX_REQUEST.get()
-        if current_request is None or request.request_id != current_request.request_id:
+        if request.request_id != _CTX_REQUEST.get().request_id:
             return
         user = self.get_user(request)

@@ -213,8 +212,7 @@ class AuditMiddleware:
             return
         if _CTX_IGNORE.get():
             return
-        current_request = _CTX_REQUEST.get()
-        if current_request is None or request.request_id != current_request.request_id:
+        if request.request_id != _CTX_REQUEST.get().request_id:
             return
         user = self.get_user(request)

@@ -241,8 +239,7 @@ class AuditMiddleware:
             return
         if _CTX_IGNORE.get():
             return
-        current_request = _CTX_REQUEST.get()
-        if current_request is None or request.request_id != current_request.request_id:
+        if request.request_id != _CTX_REQUEST.get().request_id:
             return
         user = self.get_user(request)

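The left-hand variant guards against the request ContextVar being unset; a minimal sketch of why that matters (variable name and default are assumptions for illustration):

    # A ContextVar created with default=None yields None outside the
    # middleware's scope, so the explicit None check prevents an
    # AttributeError on None.request_id. Names here are illustrative.
    from contextvars import ContextVar

    _CTX_REQUEST: ContextVar = ContextVar("authentik_request", default=None)

    def same_request(request) -> bool:
        current_request = _CTX_REQUEST.get()
        return current_request is not None and request.request_id == current_request.request_id
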
@@ -1,16 +0,0 @@
-# Generated by Django 5.1.11 on 2025-07-28 15:05
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("authentik_events", "0012_notificationtransport_email_subject_prefix_and_more"),
-    ]
-
-    operations = [
-        migrations.DeleteModel(
-            name="SystemTask",
-        ),
-    ]
@@ -632,3 +632,45 @@ class NotificationWebhookMapping(PropertyMapping):
     class Meta:
         verbose_name = _("Webhook Mapping")
         verbose_name_plural = _("Webhook Mappings")
+
+
+class TaskStatus(models.TextChoices):
+    """DEPRECATED do not use"""
+
+    UNKNOWN = "unknown"
+    SUCCESSFUL = "successful"
+    WARNING = "warning"
+    ERROR = "error"
+
+
+class SystemTask(ExpiringModel):
+    """DEPRECATED do not use"""
+
+    uuid = models.UUIDField(primary_key=True, editable=False, default=uuid4)
+    name = models.TextField()
+    uid = models.TextField(null=True)
+
+    start_timestamp = models.DateTimeField(default=now)
+    finish_timestamp = models.DateTimeField(default=now)
+    duration = models.FloatField(default=0)
+
+    status = models.TextField(choices=TaskStatus.choices)
+
+    description = models.TextField(null=True)
+    messages = models.JSONField()
+
+    task_call_module = models.TextField()
+    task_call_func = models.TextField()
+    task_call_args = models.JSONField(default=list)
+    task_call_kwargs = models.JSONField(default=dict)
+
+    def __str__(self) -> str:
+        return f"System Task {self.name}"
+
+    class Meta:
+        unique_together = (("name", "uid"),)
+        default_permissions = ()
+        permissions = ()
+        verbose_name = _("System Task")
+        verbose_name_plural = _("System Tasks")
+        indexes = ExpiringModel.Meta.indexes

@@ -16,7 +16,6 @@ from authentik.events.models import (
     NotificationRule,
     NotificationTransport,
 )
-from authentik.lib.utils.db import chunked_queryset
 from authentik.policies.engine import PolicyEngine
 from authentik.policies.models import PolicyBinding, PolicyEngineMode
 from authentik.tasks.models import Task
@@ -124,8 +123,7 @@ def gdpr_cleanup(user_pk: int):
     """cleanup events from gdpr_compliance"""
     events = Event.objects.filter(user__pk=user_pk)
     LOGGER.debug("GDPR cleanup, removing events from user", events=events.count())
-    for event in chunked_queryset(events):
-        event.delete()
+    events.delete()


 @actor(description=_("Cleanup seen notifications and notifications whose event expired."))

@@ -291,7 +291,7 @@ class ConfigurableStage(models.Model):
 class FriendlyNamedStage(models.Model):
     """Abstract base class for a Stage that can have a user friendly name configured."""

-    friendly_name = models.TextField(blank=True)
+    friendly_name = models.TextField(null=True)

     class Meta:
         abstract = True

@@ -160,7 +160,7 @@ class ChallengeStageView(StageView):
                 "user": self.get_pending_user(for_display=True),
             }

-        except Exception as exc:  # noqa
+        except Exception as exc:
             self.logger.warning("failed to template title", exc=exc)
             return self.executor.flow.title

@@ -198,7 +198,7 @@ class FlowExecutorView(APIView):
             # if the cached plan is from an older version, it might have different attributes
             # in which case we just delete the plan and invalidate everything
             next_binding = self.plan.next(self.request)
-        except Exception as exc:  # noqa
+        except Exception as exc:
             self._logger.warning(
                 "f(exec): found incompatible flow plan, invalidating run", exc=exc
             )
@@ -288,7 +288,7 @@ class FlowExecutorView(APIView):
                 span.set_data("authentik Flow", self.flow.slug)
                 stage_response = self.current_stage_view.dispatch(request)
                 return to_stage_response(request, stage_response)
-            except Exception as exc:  # noqa
+            except Exception as exc:
                 return self.handle_exception(exc)

     @extend_schema(
@@ -339,7 +339,7 @@ class FlowExecutorView(APIView):
                 span.set_data("authentik Flow", self.flow.slug)
                 stage_response = self.current_stage_view.dispatch(request)
                 return to_stage_response(request, stage_response)
-            except Exception as exc:  # noqa
+            except Exception as exc:
                 return self.handle_exception(exc)

     def _initiate_plan(self) -> FlowPlan:
@@ -351,7 +351,7 @@ class FlowExecutorView(APIView):
             # there are no issues with the class we might've gotten
             # from the cache. If there are errors, just delete all cached flows
             _ = plan.has_stages
-        except Exception:  # noqa
+        except Exception:
             keys = cache.keys(f"{CACHE_PREFIX}*")
             cache.delete_many(keys)
             return self._initiate_plan()

@@ -444,10 +444,6 @@ def django_db_config(config: ConfigLoader | None = None) -> dict:
             f"postgresql.read_replicas.{replica}.conn_options", default={}
         )
         _database["OPTIONS"].update(replica_conn_options)
-        _database["TEST"] = {
-            "MIRROR": "default",
-            "NAME": config.get("postgresql.test.name"),
-        }

         db[f"replica_{replica}"] = _database
     return db

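The removed TEST block uses Django's documented MIRROR option, which points a read replica's test connection at the default test database so writes are visible without replication. A rough settings sketch (engine and names are example values):

    # Sketch of Django's TEST["MIRROR"] option for read replicas.
    DATABASES = {
        "default": {"ENGINE": "django.db.backends.postgresql", "NAME": "authentik"},
        "replica_0": {
            "ENGINE": "django.db.backends.postgresql",
            "NAME": "authentik",
            "TEST": {"MIRROR": "default"},  # reuse the default test database
        },
    }
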
@@ -43,9 +43,7 @@ def structlog_configure():
             structlog.stdlib.PositionalArgumentsFormatter(),
             structlog.processors.TimeStamper(fmt="iso", utc=False),
             structlog.processors.StackInfoRenderer(),
-            structlog.processors.ExceptionRenderer(
-                structlog.processors.ExceptionDictTransformer(show_locals=CONFIG.get_bool("debug"))
-            ),
+            structlog.processors.dict_tracebacks,
             structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
         ],
         logger_factory=structlog.stdlib.LoggerFactory(),
@@ -67,14 +65,7 @@ def get_logger_config():
         "json": {
             "()": structlog.stdlib.ProcessorFormatter,
             "processor": structlog.processors.JSONRenderer(sort_keys=True),
-            "foreign_pre_chain": LOG_PRE_CHAIN
-            + [
-                structlog.processors.ExceptionRenderer(
-                    structlog.processors.ExceptionDictTransformer(
-                        show_locals=CONFIG.get_bool("debug")
-                    )
-                ),
-            ],
+            "foreign_pre_chain": LOG_PRE_CHAIN + [structlog.processors.dict_tracebacks],
         },
         "console": {
             "()": structlog.stdlib.ProcessorFormatter,

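For context on the pairing above: structlog documents dict_tracebacks as an ExceptionRenderer wrapping a default ExceptionDictTransformer, so the expanded form mainly exists to toggle show_locals. A small sketch of the equivalence:

    # Assuming structlog's documented equivalence
    # dict_tracebacks == ExceptionRenderer(ExceptionDictTransformer()).
    import structlog

    verbose_tracebacks = structlog.processors.ExceptionRenderer(
        structlog.processors.ExceptionDictTransformer(show_locals=True)
    )
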
@@ -1,5 +1,4 @@
 from dramatiq.actor import Actor
-from dramatiq.results.errors import ResultFailure
 from drf_spectacular.utils import extend_schema
 from rest_framework.decorators import action
 from rest_framework.fields import BooleanField, CharField, ChoiceField
@@ -111,13 +110,9 @@ class OutgoingSyncProviderStatusMixin:
                 "override_dry_run": params.validated_data["override_dry_run"],
                 "pk": params.validated_data["sync_object_id"],
             },
-            retries=0,
             rel_obj=provider,
         )
-        try:
-            msg.get_result(block=True)
-        except ResultFailure:
-            pass
+        msg.get_result(block=True)
         task: Task = msg.options["task"]
         task.refresh_from_db()
         return Response(SyncObjectResultSerializer(instance={"messages": task._messages}).data)

@@ -20,7 +20,6 @@ from authentik.lib.sync.outgoing.exceptions import (
     TransientSyncException,
 )
 from authentik.lib.sync.outgoing.models import OutgoingSyncProvider
 from authentik.lib.utils.errors import exception_to_dict
 from authentik.lib.utils.reflection import class_to_path, path_to_class
 from authentik.tasks.models import Task
-
@@ -165,17 +164,16 @@ class SyncTasks:
             except BadRequestSyncException as exc:
                 self.logger.warning("failed to sync object", exc=exc, obj=obj)
                 task.warning(
-                    f"Failed to sync {str(obj)} due to error: {str(exc)}",
-                    arguments=exc.args[1:],
+                    f"Failed to sync {obj._meta.verbose_name} {str(obj)} due to error: {str(exc)}",
                     obj=sanitize_item(obj),
                     exception=exception_to_dict(exc),
                 )
             except TransientSyncException as exc:
                 self.logger.warning("failed to sync object", exc=exc, user=obj)
                 task.warning(
-                    f"Failed to sync {str(obj)} due to " f"transient error: {str(exc)}",
+                    f"Failed to sync {obj._meta.verbose_name} {str(obj)} due to "
+                    "transient error: {str(exc)}",
                     obj=sanitize_item(obj),
                     exception=exception_to_dict(exc),
                 )
             except StopSync as exc:
                 self.logger.warning("Stopping sync", exc=exc)

@@ -1,29 +0,0 @@
-"""authentik database utilities"""
-
-import gc
-
-from django.db import reset_queries
-from django.db.models import QuerySet
-
-
-def chunked_queryset(queryset: QuerySet, chunk_size: int = 1_000):
-    if not queryset.exists():
-        return []
-
-    def get_chunks(qs: QuerySet):
-        qs = qs.order_by("pk")
-        pks = qs.values_list("pk", flat=True)
-        start_pk = pks[0]
-        while True:
-            try:
-                end_pk = pks.filter(pk__gte=start_pk)[chunk_size]
-            except IndexError:
-                break
-            yield qs.filter(pk__gte=start_pk, pk__lt=end_pk)
-            start_pk = end_pk
-        yield qs.filter(pk__gte=start_pk)
-
-    for chunk in get_chunks(queryset):
-        reset_queries()
-        gc.collect()
-        yield from chunk.iterator()
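A usage sketch of the helper above, mirroring how gdpr_cleanup called it elsewhere in this diff; the function wrapper is illustrative:

    # Iterate a large queryset in bounded-memory, primary-key-ordered chunks;
    # mirrors the `for event in chunked_queryset(events): event.delete()`
    # call site removed in the events tasks above.
    from authentik.events.models import Event
    from authentik.lib.utils.db import chunked_queryset

    def gdpr_cleanup_sketch(user_pk: int):
        events = Event.objects.filter(user__pk=user_pk)
        for event in chunked_queryset(events, chunk_size=1_000):
            event.delete()
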
@@ -6,7 +6,6 @@ from pathlib import Path
 from tempfile import gettempdir

 from django.conf import settings
-from django.utils.module_loading import import_string

 from authentik.lib.config import CONFIG

@@ -63,13 +62,3 @@ def get_env() -> str:
     if "AK_APPLIANCE" in os.environ:
         return os.environ["AK_APPLIANCE"]
     return "custom"
-
-
-def ConditionalInheritance(path: str):
-    """Conditionally inherit from a class, intended for things like authentik.enterprise,
-    without which authentik should still be able to run"""
-    try:
-        cls = import_string(path)
-        return cls
-    except ModuleNotFoundError:
-        return object
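The removed helper is consumed later in this diff by the Radius and SCIM serializers; the pattern, in short (taken from the serializer changes below):

    # When the enterprise module is absent, ConditionalInheritance returns
    # `object`, so the class definition still succeeds without it.
    class SCIMProviderSerializer(
        ConditionalInheritance("authentik.enterprise.providers.scim.api.SCIMProviderSerializerMixin"),
        ProviderSerializer,
    ):
        """SCIMProvider Serializer"""
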
@@ -26,6 +26,7 @@ HIST_POLICIES_EXECUTION_TIME = Histogram(
         "binding_order",
         "binding_target_type",
         "binding_target_name",
+        "object_pk",
         "object_type",
         "mode",
     ],

@@ -86,6 +86,7 @@ class PolicyEngine:
                 binding_order=binding.order,
                 binding_target_type=binding.target_type,
                 binding_target_name=binding.target_name,
+                object_pk=str(self.request.obj.pk),
                 object_type=class_to_path(self.request.obj.__class__),
                 mode="cache_retrieve",
             ).time():

@@ -71,7 +71,7 @@ class PolicyEvaluator(BaseEvaluator):
             # PolicyExceptions should be propagated back to the process,
             # which handles recording and returning a correct result
             raise exc
-        except Exception as exc:  # noqa
+        except Exception as exc:
             LOGGER.warning("Expression error", exc=exc)
             return PolicyResult(False, str(exc))
         else:

@@ -131,6 +131,7 @@ class PolicyProcess(PROCESS_CLASS):
                 binding_order=self.binding.order,
                 binding_target_type=self.binding.target_type,
                 binding_target_name=self.binding.target_name,
+                object_pk=str(self.request.obj.pk) if self.request.obj else "",
                 object_type=class_to_path(self.request.obj.__class__) if self.request.obj else "",
                 mode="execute_process",
             ).time(),
@@ -144,6 +145,6 @@ class PolicyProcess(PROCESS_CLASS):
         """Task wrapper to run policy checking"""
         try:
             self.connection.send(self.profiling_wrapper())
-        except Exception as exc:  # noqa
+        except Exception as exc:
             LOGGER.warning("Policy failed to run", exc=exc)
             self.connection.send(PolicyResult(False, str(exc)))

@@ -147,12 +147,11 @@ class IDToken:
         id_dict.update(self.claims)
         return id_dict

-    def to_access_token(self, provider: "OAuth2Provider", token: "BaseGrantModel") -> str:
+    def to_access_token(self, provider: "OAuth2Provider") -> str:
         """Encode id_token for use as access token, adding fields"""
         final = self.to_dict()
         final["azp"] = provider.client_id
         final["uid"] = generate_id()
-        final["scope"] = " ".join(token.scope)
         return provider.encode(final)

     def to_jwt(self, provider: "OAuth2Provider") -> str:

@@ -497,7 +497,7 @@ class AccessToken(SerializerModel, ExpiringModel, BaseGrantModel):

     @id_token.setter
     def id_token(self, value: "IDToken"):
-        self.token = value.to_access_token(self.provider, self)
+        self.token = value.to_access_token(self.provider)
         self._id_token = json.dumps(asdict(value))

     @property

@@ -4,18 +4,17 @@ import re
 import uuid
 from base64 import b64decode
 from binascii import Error
+from time import time
 from typing import Any
 from urllib.parse import urlparse

 from django.http import HttpRequest, HttpResponse, JsonResponse
 from django.http.response import HttpResponseRedirect
 from django.utils.cache import patch_vary_headers
-from django.utils.timezone import now
 from structlog.stdlib import get_logger

 from authentik.core.middleware import CTX_AUTH_VIA, KEY_USER
 from authentik.events.models import Event, EventAction
-from authentik.lib.utils.time import timedelta_from_string
 from authentik.providers.oauth2.errors import BearerTokenError
 from authentik.providers.oauth2.id_token import hash_session_key
 from authentik.providers.oauth2.models import AccessToken, OAuth2Provider
@@ -230,13 +229,11 @@ def create_logout_token(

     LOGGER.debug("Creating logout token", provider=provider, sub=sub)

-    _now = now()
     # Create the logout token payload
     payload = {
         "iss": str(iss),
         "aud": provider.client_id,
-        "iat": int(_now.timestamp()),
-        "exp": int((_now + timedelta_from_string(provider.access_token_validity)).timestamp()),
+        "iat": int(time()),
         "jti": str(uuid.uuid4()),
         "events": {
             "http://schemas.openid.net/event/backchannel-logout": {},

@@ -60,7 +60,7 @@ class UserInfoView(View):
         for scope in scopes:
             if scope in special_scope_map:
                 scope_descriptions.append(
-                    PermissionDict(id=str(scope), name=str(special_scope_map[scope]))
+                    PermissionDict(id=scope, name=str(special_scope_map[scope]))
                 )
         return scope_descriptions

@@ -13,7 +13,7 @@ def migrate_sessions(apps, schema_editor):
     for token in ConnectionToken.objects.using(db_alias).all():
         token.session = (
             AuthenticatedSession.objects.using(db_alias)
-            .filter(session__session_key=token.old_session.session_key)
+            .filter(session_key=token.old_session.session_key)
             .first()
         )
         if token.session:

@@ -23,19 +23,13 @@ from authentik.core.models import Application
 from authentik.events.models import Event, EventAction
 from authentik.lib.expression.exceptions import ControlFlowException
 from authentik.lib.sync.mapper import PropertyMappingManager
-from authentik.lib.utils.reflection import ConditionalInheritance
 from authentik.policies.api.exec import PolicyTestResultSerializer
 from authentik.policies.engine import PolicyEngine
 from authentik.policies.types import PolicyResult
 from authentik.providers.radius.models import RadiusProvider, RadiusProviderPropertyMapping


-class RadiusProviderSerializer(
-    ConditionalInheritance(
-        "authentik.enterprise.providers.radius.api.RadiusProviderSerializerMixin"
-    ),
-    ProviderSerializer,
-):
+class RadiusProviderSerializer(ProviderSerializer):
     """RadiusProvider Serializer"""

     outpost_set = ListField(child=CharField(), read_only=True, source="outpost_set.all")
@@ -49,7 +43,6 @@ class RadiusProviderSerializer(
             "shared_secret",
             "outpost_set",
             "mfa_support",
-            "certificate",
         ]
         extra_kwargs = ProviderSerializer.Meta.extra_kwargs

@@ -85,7 +78,6 @@ class RadiusOutpostConfigSerializer(ModelSerializer):
             "client_networks",
             "shared_secret",
             "mfa_support",
-            "certificate",
         ]


@@ -1,25 +0,0 @@
-# Generated by Django 5.1.11 on 2025-07-20 17:20
-
-import django.db.models.deletion
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("authentik_crypto", "0004_alter_certificatekeypair_name"),
-        ("authentik_providers_radius", "0004_alter_radiusproviderpropertymapping_options"),
-    ]
-
-    operations = [
-        migrations.AddField(
-            model_name="radiusprovider",
-            name="certificate",
-            field=models.ForeignKey(
-                default=None,
-                null=True,
-                on_delete=django.db.models.deletion.CASCADE,
-                to="authentik_crypto.certificatekeypair",
-            ),
-        ),
-    ]
@@ -1,14 +1,11 @@
 """Radius Provider"""

-from collections.abc import Iterable
-
 from django.db import models
 from django.templatetags.static import static
 from django.utils.translation import gettext_lazy as _
 from rest_framework.serializers import Serializer

 from authentik.core.models import PropertyMapping, Provider
-from authentik.crypto.models import CertificateKeyPair
 from authentik.lib.generators import generate_id
 from authentik.outposts.models import OutpostModel

@@ -41,10 +38,6 @@ class RadiusProvider(OutpostModel, Provider):
         ),
     )

-    certificate = models.ForeignKey(
-        CertificateKeyPair, on_delete=models.CASCADE, default=None, null=True
-    )
-
     @property
     def launch_url(self) -> str | None:
         """Radius never has a launch URL"""
@@ -64,12 +57,6 @@ class RadiusProvider(OutpostModel, Provider):

         return RadiusProviderSerializer

-    def get_required_objects(self) -> Iterable[models.Model | str]:
-        required = [self, "authentik_stages_mtls.pass_outpost_certificate"]
-        if self.certificate is not None:
-            required.append(self.certificate)
-        return required
-
     def __str__(self):
         return f"Radius Provider {self.name}"

@@ -239,33 +239,32 @@ class AssertionProcessor:
                 ).from_http(self.http_request)
                 LOGGER.warning("Failed to evaluate property mapping", exc=exc)
                 return name_id
-        if self.auth_n_request.name_id_policy == SAML_NAME_ID_FORMAT_EMAIL:
+        if name_id.attrib["Format"] == SAML_NAME_ID_FORMAT_EMAIL:
             name_id.text = self.http_request.user.email
             return name_id
-        if self.auth_n_request.name_id_policy in [
+        if name_id.attrib["Format"] in [
             SAML_NAME_ID_FORMAT_PERSISTENT,
             SAML_NAME_ID_FORMAT_UNSPECIFIED,
         ]:
             name_id.text = persistent
             return name_id
-        if self.auth_n_request.name_id_policy == SAML_NAME_ID_FORMAT_X509:
+        if name_id.attrib["Format"] == SAML_NAME_ID_FORMAT_X509:
             # This attribute is statically set by the LDAP source
             name_id.text = self.http_request.user.attributes.get(
                 LDAP_DISTINGUISHED_NAME, persistent
             )
             return name_id
-        if self.auth_n_request.name_id_policy == SAML_NAME_ID_FORMAT_WINDOWS:
+        if name_id.attrib["Format"] == SAML_NAME_ID_FORMAT_WINDOWS:
             # This attribute is statically set by the LDAP source
             name_id.text = self.http_request.user.attributes.get("upn", persistent)
             return name_id
-        if self.auth_n_request.name_id_policy == SAML_NAME_ID_FORMAT_TRANSIENT:
+        if name_id.attrib["Format"] == SAML_NAME_ID_FORMAT_TRANSIENT:
             # Use the hash of the user's session, which changes every session
             session_key: str = self.http_request.session.session_key
             name_id.text = sha256(session_key.encode()).hexdigest()
             return name_id
         raise UnsupportedNameIDFormat(
-            "Assertion contains NameID with unsupported "
-            f"format {self.auth_n_request.name_id_policy}."
+            f"Assertion contains NameID with unsupported format {name_id.attrib['Format']}."
         )

     def get_assertion_subject(self) -> Element:

@@ -5,15 +5,11 @@ from rest_framework.viewsets import ModelViewSet
 from authentik.core.api.providers import ProviderSerializer
 from authentik.core.api.used_by import UsedByMixin
 from authentik.lib.sync.outgoing.api import OutgoingSyncProviderStatusMixin
-from authentik.lib.utils.reflection import ConditionalInheritance
 from authentik.providers.scim.models import SCIMProvider
 from authentik.providers.scim.tasks import scim_sync, scim_sync_objects


-class SCIMProviderSerializer(
-    ConditionalInheritance("authentik.enterprise.providers.scim.api.SCIMProviderSerializerMixin"),
-    ProviderSerializer,
-):
+class SCIMProviderSerializer(ProviderSerializer):
     """SCIMProvider Serializer"""

     class Meta:
@@ -32,9 +28,6 @@ class SCIMProviderSerializer(
             "url",
             "verify_certificates",
             "token",
-            "auth_mode",
-            "auth_oauth",
-            "auth_oauth_params",
             "compatibility_mode",
             "exclude_users_service_account",
             "filter_group",

@@ -1,16 +0,0 @@
-from typing import TYPE_CHECKING
-
-from requests import Request
-
-if TYPE_CHECKING:
-    from authentik.providers.scim.models import SCIMProvider
-
-
-class SCIMTokenAuth:
-
-    def __init__(self, provider: "SCIMProvider"):
-        self.provider = provider
-
-    def __call__(self, request: Request) -> Request:
-        request.headers["Authorization"] = f"Bearer {self.provider.token}"
-        return request
@@ -35,6 +35,7 @@ class SCIMClient[TModel: "Model", TConnection: "Model", TSchema: "BaseModel"](
     """SCIM Client"""

     base_url: str
+    token: str

     _session: Session
     _config: ServiceProviderConfiguration
@@ -44,12 +45,12 @@ class SCIMClient[TModel: "Model", TConnection: "Model", TSchema: "BaseModel"](
         self._session = get_http_session()
         self._session.verify = provider.verify_certificates
         self.provider = provider
-        self.auth = provider.scim_auth()
         # Remove trailing slashes as we assume the URL doesn't have any
         base_url = provider.url
         if base_url.endswith("/"):
             base_url = base_url[:-1]
         self.base_url = base_url
+        self.token = provider.token
         self._config = self.get_service_provider_config()

     def _request(self, method: str, path: str, **kwargs) -> dict:
@@ -61,8 +62,8 @@ class SCIMClient[TModel: "Model", TConnection: "Model", TSchema: "BaseModel"](
             method,
             f"{self.base_url}{path}",
             **kwargs,
-            auth=self.auth,
             headers={
+                "Authorization": f"Bearer {self.token}",
                 "Accept": "application/scim+json",
                 "Content-Type": "application/scim+json",
             },

@@ -27,8 +27,3 @@ class SCIMRequestException(TransientSyncException):
         except ValidationError:
             pass
         return self._message
-
-    def __str__(self):
-        if self._response:
-            return self._response.text
-        return super().__str__()

@@ -72,8 +72,7 @@ class SCIMUserClient(SCIMClient[User, SCIMProviderUser, SCIMUserSchema]):
             if not self._config.filter.supported:
                 raise exc
             users = self._request(
-                "GET",
-                f"/Users?{urlencode({'filter': f'userName eq \"{scim_user.userName}\"'})}",
+                "GET", f"/Users?{urlencode({'filter': f'userName eq {scim_user.userName}'})}"
             )
             users_res = users.get("Resources", [])
            if len(users_res) < 1:
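Both SCIMTokenAuth and the OAuth variant plug into requests' auth hook: any callable that stamps headers onto the outgoing request and returns it can be passed as auth=. A standalone sketch (endpoint URL and token are placeholders):

    # Standalone sketch of the requests auth-callable convention used by the
    # SCIM clients above; endpoint and token are placeholders.
    import requests

    class BearerAuth:
        def __init__(self, token: str):
            self.token = token

        def __call__(self, request: requests.PreparedRequest) -> requests.PreparedRequest:
            request.headers["Authorization"] = f"Bearer {self.token}"
            return request

    requests.get("https://scim.example.com/v2/Users", auth=BearerAuth("placeholder-token"))
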
@@ -1,59 +0,0 @@
-# Generated by Django 5.1.12 on 2025-09-23 12:31
-
-import django.db.models.deletion
-from django.conf import settings
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("authentik_providers_scim", "0013_scimprovidergroup_attributes_and_more"),
-        ("authentik_sources_oauth", "0011_useroauthsourceconnection_expires"),
-        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
-    ]
-
-    operations = [
-        migrations.AddField(
-            model_name="scimprovider",
-            name="auth_mode",
-            field=models.TextField(
-                choices=[("token", "Token"), ("oauth", "OAuth")], default="token"
-            ),
-        ),
-        migrations.AddField(
-            model_name="scimprovider",
-            name="auth_oauth",
-            field=models.ForeignKey(
-                default=None,
-                help_text="OAuth Source used for authentication",
-                null=True,
-                on_delete=django.db.models.deletion.SET_DEFAULT,
-                to="authentik_sources_oauth.oauthsource",
-            ),
-        ),
-        migrations.AddField(
-            model_name="scimprovider",
-            name="auth_oauth_params",
-            field=models.JSONField(
-                blank=True,
-                default=dict,
-                help_text="Additional OAuth parameters, such as grant_type",
-            ),
-        ),
-        migrations.AddField(
-            model_name="scimprovider",
-            name="auth_oauth_user",
-            field=models.ForeignKey(
-                default=None,
-                null=True,
-                on_delete=django.db.models.deletion.CASCADE,
-                to=settings.AUTH_USER_MODEL,
-            ),
-        ),
-        migrations.AlterField(
-            model_name="scimprovider",
-            name="token",
-            field=models.TextField(blank=True, help_text="Authentication token"),
-        ),
-    ]
@@ -8,17 +8,12 @@ from django.db.models import QuerySet
 from django.templatetags.static import static
 from django.utils.translation import gettext_lazy as _
 from dramatiq.actor import Actor
-from requests.auth import AuthBase
 from rest_framework.serializers import Serializer
-from structlog.stdlib import get_logger

 from authentik.core.models import BackchannelProvider, Group, PropertyMapping, User, UserTypes
 from authentik.lib.models import SerializerModel
 from authentik.lib.sync.outgoing.base import BaseOutgoingSyncClient
 from authentik.lib.sync.outgoing.models import OutgoingSyncProvider
-from authentik.providers.scim.clients.auth import SCIMTokenAuth
-
-LOGGER = get_logger()


 class SCIMProviderUser(SerializerModel):
@@ -65,13 +60,6 @@ class SCIMProviderGroup(SerializerModel):
         return f"SCIM Provider Group {self.group_id} to {self.provider_id}"


-class SCIMAuthenticationMode(models.TextChoices):
-    """SCIM authentication modes"""
-
-    TOKEN = "token", _("Token")
-    OAUTH = "oauth", _("OAuth")
-
-
 class SCIMCompatibilityMode(models.TextChoices):
     """SCIM compatibility mode"""

@@ -90,26 +78,7 @@ class SCIMProvider(OutgoingSyncProvider, BackchannelProvider):
     )

     url = models.TextField(help_text=_("Base URL to SCIM requests, usually ends in /v2"))

-    auth_mode = models.TextField(
-        choices=SCIMAuthenticationMode.choices, default=SCIMAuthenticationMode.TOKEN
-    )
-
-    token = models.TextField(help_text=_("Authentication token"), blank=True)
-    auth_oauth = models.ForeignKey(
-        "authentik_sources_oauth.OAuthSource",
-        on_delete=models.SET_DEFAULT,
-        default=None,
-        null=True,
-        help_text=_("OAuth Source used for authentication"),
-    )
-    auth_oauth_params = models.JSONField(
-        blank=True, default=dict, help_text=_("Additional OAuth parameters, such as grant_type")
-    )
-    auth_oauth_user = models.ForeignKey(
-        "authentik_core.User", on_delete=models.CASCADE, default=None, null=True
-    )
-
+    token = models.TextField(help_text=_("Authentication token"))
     verify_certificates = models.BooleanField(default=True)
-
     property_mappings_group = models.ManyToManyField(
@@ -127,16 +96,6 @@ class SCIMProvider(OutgoingSyncProvider, BackchannelProvider):
         help_text=_("Alter authentik behavior for vendor-specific SCIM implementations."),
     )

-    def scim_auth(self) -> AuthBase:
-        if self.auth_mode == SCIMAuthenticationMode.OAUTH:
-            try:
-                from authentik.enterprise.providers.scim.auth_oauth2 import SCIMOAuthAuth
-
-                return SCIMOAuthAuth(self)
-            except ImportError:
-                LOGGER.warning("Failed to import SCIM OAuth Client")
-        return SCIMTokenAuth(self)
-
     @property
     def icon_url(self) -> str | None:
         return static("authentik/sources/scim.png")

@@ -61,8 +61,7 @@ class InitialPermissionsMiddleware:
         ):
             if not created:
                 return
-            current_request = _CTX_REQUEST.get()
-            if current_request is None or request.request_id != current_request.request_id:
+            if request.request_id != _CTX_REQUEST.get().request_id:
                 return
             user: User = request.user
             if not user or user.is_anonymous:

@@ -1,24 +0,0 @@
-# Generated by Django 5.1.11 on 2025-08-29 14:42
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("authentik_rbac", "0005_initialpermissions"),
-    ]
-
-    operations = [
-        migrations.AlterModelOptions(
-            name="role",
-            options={
-                "permissions": [
-                    ("assign_role_permissions", "Can assign permissions to roles"),
-                    ("unassign_role_permissions", "Can unassign permissions from roles"),
-                ],
-                "verbose_name": "Role",
-                "verbose_name_plural": "Roles",
-            },
-        ),
-    ]
@@ -71,8 +71,8 @@ class Role(SerializerModel):
         verbose_name = _("Role")
         verbose_name_plural = _("Roles")
         permissions = [
-            ("assign_role_permissions", _("Can assign permissions to roles")),
-            ("unassign_role_permissions", _("Can unassign permissions from roles")),
+            ("assign_role_permissions", _("Can assign permissions to users")),
+            ("unassign_role_permissions", _("Can unassign permissions from users")),
         ]


@@ -3,7 +3,6 @@
 from datetime import timedelta
 from getpass import getuser

-from django.utils.timesince import timesince
 from django.utils.timezone import now
 from django.utils.translation import gettext as _

@@ -17,38 +16,25 @@ class Command(TenantCommand):

     help = _("Create a Key which can be used to restore access to authentik.")

-    def format_duration_message(self, duration: int) -> str:
-        """Format duration in minutes to a human-readable message"""
-        current_time = now()
-        future_time = current_time + timedelta(minutes=duration)
-
-        # fyi a non-breaking space is returned by timesince
-        return timesince(current_time, future_time)
-
     def add_arguments(self, parser):
         parser.add_argument(
             "duration",
-            nargs="?",
-            default=60,
-            type=int,
-            help="How long the token is valid for (in minutes). Default: 60 minutes (1 hour).",
+            default=1,
+            action="store",
+            help="How long the token is valid for (in years).",
         )
         parser.add_argument("user", action="store", help="Which user the Token gives access to.")

     def handle_per_tenant(self, *args, **options):
         """Create Token used to recover access"""
-        duration = int(options.get("duration", 60))
-        expiry = now() + timedelta(minutes=duration)
+        duration = int(options.get("duration", 1))
+        expiry = now() + timedelta(days=duration * 365.2425)
         user = User.objects.filter(username=options.get("user")).first()
         if not user:
             self.stderr.write(f"User '{options.get('user')}' not found.")
             return
         _, url = create_recovery_token(user, expiry, getuser())
-
-        duration_msg = self.format_duration_message(duration)
-
         self.stdout.write(
             f"Store this link safely, as it will allow anyone to access authentik as {user}."
         )
-        self.stdout.write(f"This recovery token is valid for {duration_msg}.")
         self.stdout.write(url)
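Mirroring the deleted tests below, the minutes-based variant of this command can be driven through call_command (the username and the 120-minute duration are examples; multi-tenant setups also pass a schema= keyword as the tests do):

    # Example invocation of the minutes-based create_recovery_key, modeled on
    # the deleted tests; "akadmin" and 120 minutes are illustrative.
    from io import StringIO
    from django.core.management import call_command

    out = StringIO()
    call_command("create_recovery_key", "120", "akadmin", stdout=out)
    print(out.getvalue())  # includes the one-time recovery URL
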
@@ -1,12 +1,10 @@
|
||||
"""recovery tests"""
|
||||
|
||||
from datetime import timedelta
|
||||
from io import StringIO
|
||||
|
||||
from django.core.management import call_command
|
||||
from django.test import TestCase
|
||||
from django.urls import reverse
|
||||
from django.utils.timezone import now
|
||||
from django_tenants.utils import get_public_schema_name
|
||||
|
||||
from authentik.core.models import Token, TokenIntents, User
|
||||
@@ -24,21 +22,20 @@ class TestRecovery(TestCase):
|
||||
self.assertEqual(len(Token.objects.filter(intent=TokenIntents.INTENT_RECOVERY)), 0)
|
||||
call_command(
|
||||
"create_recovery_key",
|
||||
"5",
|
||||
"1",
|
||||
self.user.username,
|
||||
schema=get_public_schema_name(),
|
||||
stdout=out,
|
||||
)
|
||||
token = Token.objects.get(intent=TokenIntents.INTENT_RECOVERY, user=self.user)
|
||||
self.assertIn(token.key, out.getvalue())
|
||||
self.assertIn("valid for 5\xa0minutes", out.getvalue())
|
||||
self.assertEqual(len(Token.objects.filter(intent=TokenIntents.INTENT_RECOVERY)), 1)
|
||||
|
||||
def test_create_key_invalid(self):
|
||||
"""Test creation of a new key (invalid)"""
|
||||
out = StringIO()
|
||||
self.assertEqual(len(Token.objects.filter(intent=TokenIntents.INTENT_RECOVERY)), 0)
|
||||
call_command("create_recovery_key", "5", "foo", schema=get_public_schema_name(), stderr=out)
|
||||
call_command("create_recovery_key", "1", "foo", schema=get_public_schema_name(), stderr=out)
|
||||
self.assertIn("not found", out.getvalue())
|
||||
|
||||
def test_recovery_view(self):
|
||||
@@ -46,7 +43,7 @@ class TestRecovery(TestCase):
|
||||
out = StringIO()
|
||||
call_command(
|
||||
"create_recovery_key",
|
||||
"10",
|
||||
"1",
|
||||
self.user.username,
|
||||
schema=get_public_schema_name(),
|
||||
stdout=out,
|
||||
@@ -74,116 +71,3 @@ class TestRecovery(TestCase):
|
||||
         )
         self.assertIn("successfully added to", out.getvalue())
         self.assertTrue(self.user.is_superuser)
-
-    def test_create_key_default_duration(self):
-        """Test creation of a new key with default duration (60 minutes)"""
-        out = StringIO()
-        before_creation = now()
-        call_command(
-            "create_recovery_key",
-            self.user.username,
-            schema=get_public_schema_name(),
-            stdout=out,
-        )
-        after_creation = now()
-
-        token = Token.objects.get(intent=TokenIntents.INTENT_RECOVERY, user=self.user)
-        self.assertIn(token.key, out.getvalue())
-        self.assertIn("valid for 1\xa0hour", out.getvalue())
-
-        # Verify the token expires in approximately 60 minutes (default)
-        expected_expiry_min = before_creation + timedelta(minutes=60)
-        expected_expiry_max = after_creation + timedelta(minutes=60)
-        self.assertGreaterEqual(token.expires, expected_expiry_min)
-        self.assertLessEqual(token.expires, expected_expiry_max)
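These removed tests bracket the expected expiry between timestamps taken before and after the command runs, so the assertion stays valid however long the command itself takes. The pattern in isolation (an illustrative sketch, not repository code):

from datetime import timedelta

from django.utils.timezone import now

before = now()
expires = now() + timedelta(minutes=60)  # stand-in for the command's expiry computation
after = now()

# Any expiry computed between `before` and `after` lands in this window:
assert before + timedelta(minutes=60) <= expires <= after + timedelta(minutes=60)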
-    def test_create_key_custom_duration(self):
-        """Test creation of a new key with custom duration"""
-        out = StringIO()
-        custom_duration = 120  # 2 hours
-        before_creation = now()
-
-        call_command(
-            "create_recovery_key",
-            str(custom_duration),
-            self.user.username,
-            schema=get_public_schema_name(),
-            stdout=out,
-        )
-        after_creation = now()
-
-        token = Token.objects.get(intent=TokenIntents.INTENT_RECOVERY, user=self.user)
-        self.assertIn(token.key, out.getvalue())
-        self.assertIn("valid for 2\xa0hours", out.getvalue())
-
-        # Verify the token expires in approximately the custom duration
-        expected_expiry_min = before_creation + timedelta(minutes=custom_duration)
-        expected_expiry_max = after_creation + timedelta(minutes=custom_duration)
-        self.assertGreaterEqual(token.expires, expected_expiry_min)
-        self.assertLessEqual(token.expires, expected_expiry_max)
-
-    def test_create_key_short_duration(self):
-        """Test creation of a new key with very short duration (1 minute)"""
-        out = StringIO()
-        short_duration = 1
-        before_creation = now()
-
-        call_command(
-            "create_recovery_key",
-            str(short_duration),
-            self.user.username,
-            schema=get_public_schema_name(),
-            stdout=out,
-        )
-        after_creation = now()
-
-        token = Token.objects.get(intent=TokenIntents.INTENT_RECOVERY, user=self.user)
-        self.assertIn(token.key, out.getvalue())
-        self.assertIn("valid for 1\xa0minute", out.getvalue())
-
-        # Verify the token expires in approximately 1 minute
-        expected_expiry_min = before_creation + timedelta(minutes=short_duration)
-        expected_expiry_max = after_creation + timedelta(minutes=short_duration)
-        self.assertGreaterEqual(token.expires, expected_expiry_min)
-        self.assertLessEqual(token.expires, expected_expiry_max)
-
-    def test_create_key_duration_validation(self):
-        """Test that the duration is correctly converted to minutes"""
-        # Test various durations to ensure they're calculated correctly
-        test_cases = [1, 5, 30, 60, 120, 1440]  # 1min, 5min, 30min, 1hr, 2hr, 24hr
-
-        for duration in test_cases:
-            with self.subTest(duration=duration):
-                out = StringIO()
-                before_creation = now()
-
-                call_command(
-                    "create_recovery_key",
-                    str(duration),
-                    self.user.username,
-                    schema=get_public_schema_name(),
-                    stdout=out,
-                )
-                after_creation = now()
-
-                token = Token.objects.get(intent=TokenIntents.INTENT_RECOVERY, user=self.user)
-
-                # Verify the token expires in approximately the specified duration
-                expected_expiry_min = before_creation + timedelta(minutes=duration)
-                expected_expiry_max = after_creation + timedelta(minutes=duration)
-                self.assertGreaterEqual(token.expires, expected_expiry_min)
-                self.assertLessEqual(token.expires, expected_expiry_max)
-
-                # Clean up for next iteration
-                token.delete()
-
-    def test_create_key_help_text(self):
-        """Test that the help text correctly indicates minutes"""
-        from authentik.recovery.management.commands.create_recovery_key import Command
-
-        command = Command()
-        # Check that the help text mentions minutes
-        parser = command.create_parser("test", "create_recovery_key")
-        help_text = parser.format_help()
-        self.assertIn("minutes", help_text.lower())
-        self.assertNotIn("years", help_text.lower())
@@ -175,7 +175,6 @@ SPECTACULAR_SETTINGS = {
         "SAMLNameIDPolicyEnum": "authentik.sources.saml.models.SAMLNameIDPolicy",
         "UserTypeEnum": "authentik.core.models.UserTypes",
         "UserVerificationEnum": "authentik.stages.authenticator_webauthn.models.UserVerification",
-        "SCIMAuthenticationModeEnum": "authentik.providers.scim.models.SCIMAuthenticationMode",
     },
     "ENUM_ADD_EXPLICIT_BLANK_NULL_CHOICE": False,
     "ENUM_GENERATE_CHOICE_DESCRIPTION": False,
@@ -184,7 +183,6 @@ SPECTACULAR_SETTINGS = {
     ],
     "POSTPROCESSING_HOOKS": [
         "authentik.api.schema.postprocess_schema_responses",
-        "authentik.api.schema.postprocess_schema_pagination",
         "drf_spectacular.hooks.postprocess_schema_enums",
     ],
 }
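The POSTPROCESSING_HOOKS entries are dotted paths to callables that drf-spectacular runs over the finished schema; postprocess_schema_pagination, removed here, is one of authentik's own hooks. The general shape of such a hook, per drf-spectacular's documented interface (the hook body below is illustrative, not authentik's):

def postprocess_schema_example(result, generator, request, public):
    """Receive the generated OpenAPI schema dict; return it, possibly modified."""
    for path in result.get("paths", {}).values():
        # illustrative tweak: tag every path item with a custom extension
        path.setdefault("x-example-extension", True)
    return result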
@@ -257,7 +255,6 @@ MIDDLEWARE = [
     "authentik.root.middleware.LoggingMiddleware",
     "authentik.root.middleware.ClientIPMiddleware",
     "authentik.stages.user_login.middleware.BoundSessionMiddleware",
-    "django.middleware.locale.LocaleMiddleware",
     "authentik.core.middleware.AuthenticationMiddleware",
     "authentik.core.middleware.RequestIDMiddleware",
     "authentik.brands.middleware.BrandMiddleware",
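Each MIDDLEWARE entry names a factory following Django's middleware protocol, which authentik's custom classes above implement as well. The protocol in skeleton form (illustrative, not authentik code):

class ExampleMiddleware:
    """Instantiated once with get_response; called once per request."""

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        # request pre-processing goes here
        response = self.get_response(request)
        # response post-processing goes here
        return response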
@@ -1,6 +1,7 @@
 from datetime import timedelta

-from django.dispatch import Signal, receiver
+from django.core.signals import Signal
+from django.dispatch import receiver
 from django.utils.timezone import now
 from structlog.stdlib import get_logger

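Both import styles above resolve at runtime: Signal is defined in django.dispatch, and django.core.signals only exposes the name because it imports Signal to define Django's built-in signals, so the added import works but is unidiomatic. Canonical usage looks like:

from django.dispatch import Signal, receiver

example_signal = Signal()  # the signal name is illustrative


@receiver(example_signal)
def handle_example(sender, **kwargs):
    # receivers take the sender plus arbitrary keyword arguments
    pass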
@@ -8,7 +8,6 @@ from unittest.mock import patch
 import pytest
 from django.conf import settings
 from django.contrib.contenttypes.models import ContentType
-from django.test import TestCase as DjangoTestCase
 from django.test.runner import DiscoverRunner
 from structlog.stdlib import get_logger

@@ -21,8 +20,6 @@ from authentik.tasks.test import use_test_broker
 # globally set maxDiff to none to show full assert error
 TestCase.maxDiff = None
-# allow testing with read-replicas
-DjangoTestCase.databases = "__all__"


 def get_docker_tag() -> str:
@@ -66,15 +63,6 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover
         settings.TEST = True
         settings.DRAMATIQ["test"] = True

-        # Set any other test databases's name to their test name early
-        # django does this itself, however only _after_ migrating the default alias
-        # which triggers some reads that might go to the read replica, which
-        # would be routed to the wrong database
-        for alias, db in settings.DATABASES.items():
-            if alias == "default":
-                continue
-            db["NAME"] = db["TEST"]["NAME"]
-
         # Test-specific configuration
         test_config = {
             "events.context_processors.geoip": "tests/GeoLite2-City-Test.mmdb",
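The removed loop pre-assigns every non-default alias its test database name by reading Django's per-connection TEST settings. The names it reads come from configuration shaped roughly like this (a sketch; alias names and values are assumptions, and authentik builds its replica entries dynamically):

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.postgresql",
        "NAME": "authentik",
        "TEST": {"NAME": "test_authentik"},
    },
    # a read replica with its own test database name
    "replica_0": {
        "ENGINE": "django.db.backends.postgresql",
        "NAME": "authentik",
        "TEST": {"NAME": "test_authentik_replica_0"},
    },
}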
@@ -189,6 +177,6 @@ class PytestTestRunner(DiscoverRunner):  # pragma: no cover
         with patch("guardian.shortcuts._get_ct_cached", patched__get_ct_cached):
             try:
                 return pytest.main(self.args)
-            except Exception as e:  # noqa
+            except Exception as e:
                 self.logger.error("Error running tests", error=str(e), test_files=self.args)
                 return 1
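PytestTestRunner subclasses Django's DiscoverRunner but hands the actual run to pytest, as the pytest.main(self.args) call shows; Django picks such a runner up via the TEST_RUNNER setting. A minimal sketch of that delegation (module path, argument handling, and logging simplified; not authentik's full implementation):

import pytest
from django.test.runner import DiscoverRunner


class MinimalPytestRunner(DiscoverRunner):
    """Let `manage.py test` collect and execute tests through pytest.

    Enable with e.g. TEST_RUNNER = "myproject.test_runner.MinimalPytestRunner"
    (the module path is illustrative).
    """

    def run_tests(self, test_labels, **kwargs):
        # pytest accepts file/directory/nodeid arguments much like test labels
        args = list(test_labels)
        # pytest.main returns an exit code; non-zero means failures occurred
        return pytest.main(args)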