mirror of
https://github.com/goauthentik/authentik
synced 2026-05-12 18:06:21 +02:00
Compare commits
117 Commits
version/20
...
web/flow/t
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c8d5832018 | ||
|
|
52eef39e7e | ||
|
|
a2f5ad00a9 | ||
|
|
86a689ae05 | ||
|
|
50092fef98 | ||
|
|
ea188eeac3 | ||
|
|
50bb59b84c | ||
|
|
e6ca150a21 | ||
|
|
3cd4afb06a | ||
|
|
42ad526636 | ||
|
|
f921eefdde | ||
|
|
dd9eb4274d | ||
|
|
ac0c1c9df3 | ||
|
|
634f1f754d | ||
|
|
1620155f32 | ||
|
|
8d29ae4dd8 | ||
|
|
1521ade889 | ||
|
|
9a4c56e6b2 | ||
|
|
4f1126ea99 | ||
|
|
d15e360a74 | ||
|
|
2cd358d5e1 | ||
|
|
9fbc76098a | ||
|
|
be3f35c58f | ||
|
|
ac31137c97 | ||
|
|
8509056e21 | ||
|
|
63e188773f | ||
|
|
8585c646a2 | ||
|
|
5c66e50205 | ||
|
|
2c658e2ee6 | ||
|
|
2c52a19a44 | ||
|
|
bc2a07156b | ||
|
|
0845cc400c | ||
|
|
91bbf0e449 | ||
|
|
11f500c670 | ||
|
|
eaf9185e73 | ||
|
|
47ed22b57d | ||
|
|
542a605f9a | ||
|
|
5b4a921b1a | ||
|
|
08218dcd1a | ||
|
|
2c1fd70808 | ||
|
|
5603fe193e | ||
|
|
6a882f22fa | ||
|
|
442d42f850 | ||
|
|
d0daec4aa1 | ||
|
|
951da48f81 | ||
|
|
7f353ac1b8 | ||
|
|
7080053510 | ||
|
|
f55207b6ef | ||
|
|
7347577436 | ||
|
|
4963dde699 | ||
|
|
b60bfadaaf | ||
|
|
0fc0783e7e | ||
|
|
aa73014f6a | ||
|
|
a95d3abe83 | ||
|
|
6948146faa | ||
|
|
0ea5f10e5a | ||
|
|
d78e459dcb | ||
|
|
f4393b7b3c | ||
|
|
0457b59792 | ||
|
|
79d5bc02e9 | ||
|
|
f0e742c3ae | ||
|
|
8a389b4b46 | ||
|
|
e3402682e6 | ||
|
|
38c4701e63 | ||
|
|
cddc13fcf7 | ||
|
|
136e59cfba | ||
|
|
5da349342f | ||
|
|
518e9e1554 | ||
|
|
baa64bc1b0 | ||
|
|
8b958408c2 | ||
|
|
9a6ca4fba2 | ||
|
|
9cf551b1d6 | ||
|
|
f63b3c2bbb | ||
|
|
61dbc932da | ||
|
|
fa994ae318 | ||
|
|
cd78fc01f2 | ||
|
|
9251e608fa | ||
|
|
0055882212 | ||
|
|
68387362c2 | ||
|
|
5fe344fb12 | ||
|
|
aa20df5cca | ||
|
|
75c61afd18 | ||
|
|
811e7946f7 | ||
|
|
19f25d5aa7 | ||
|
|
ffbe8fb598 | ||
|
|
2279768e6f | ||
|
|
675e60b816 | ||
|
|
e338bef104 | ||
|
|
457b61c5b4 | ||
|
|
25eefb7d55 | ||
|
|
50d2f69332 | ||
|
|
7d972ec711 | ||
|
|
854427e463 | ||
|
|
be349e2e14 | ||
|
|
bd0e81b8ad | ||
|
|
f6afb59515 | ||
|
|
dddde09be5 | ||
|
|
6d7fc94698 | ||
|
|
1dcf9108ad | ||
|
|
7bb6a3dfe6 | ||
|
|
9cc440eee1 | ||
|
|
fe9e4526ac | ||
|
|
20b66f850c | ||
|
|
67b327414b | ||
|
|
5b8d86b5a9 | ||
|
|
67aed3e318 | ||
|
|
9809b94030 | ||
|
|
e7527c551b | ||
|
|
36b10b434a | ||
|
|
831797b871 | ||
|
|
5cc2c0f45f | ||
|
|
32442766f4 | ||
|
|
75790909a8 | ||
|
|
e0d5df89ca | ||
|
|
f25a9c624e | ||
|
|
914993a788 | ||
|
|
89dad07a66 |
4
.github/actions/setup/action.yml
vendored
4
.github/actions/setup/action.yml
vendored
@@ -22,7 +22,7 @@ runs:
|
||||
sudo rm -rf /usr/local/lib/android
|
||||
- name: Install uv
|
||||
if: ${{ contains(inputs.dependencies, 'python') }}
|
||||
uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v5
|
||||
uses: astral-sh/setup-uv@803947b9bd8e9f986429fa0c5a41c367cd732b41 # v5
|
||||
with:
|
||||
enable-cache: true
|
||||
- name: Setup python
|
||||
@@ -58,7 +58,7 @@ runs:
|
||||
run: |
|
||||
export PSQL_TAG=${{ inputs.postgresql_version }}
|
||||
docker compose -f .github/actions/setup/compose.yml up -d
|
||||
cd web && npm ci
|
||||
cd web && npm i
|
||||
- name: Generate config
|
||||
if: ${{ contains(inputs.dependencies, 'python') }}
|
||||
shell: uv run python {0}
|
||||
|
||||
2
.github/workflows/_reusable-docker-build.yml
vendored
2
.github/workflows/_reusable-docker-build.yml
vendored
@@ -90,7 +90,7 @@ jobs:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- uses: int128/docker-manifest-create-action@1a059c021f1d5e9f2bd39de745d5dd3a0ef6df90 # v2
|
||||
- uses: int128/docker-manifest-create-action@a39573caa37b6a8a03302d43b57c3f48635096e2 # v2
|
||||
id: build
|
||||
with:
|
||||
tags: ${{ matrix.tag }}
|
||||
|
||||
2
.github/workflows/gen-image-compress.yml
vendored
2
.github/workflows/gen-image-compress.yml
vendored
@@ -38,7 +38,7 @@ jobs:
|
||||
token: ${{ steps.generate_token.outputs.token }}
|
||||
- name: Compress images
|
||||
id: compress
|
||||
uses: calibreapp/image-actions@d9c8ee5c3dc52ae4622c82ead88d658f4b16b65f # main
|
||||
uses: calibreapp/image-actions@420075c115b26f8785e293c5bd5bef0911c506e5 # main
|
||||
with:
|
||||
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
|
||||
compressOnly: ${{ github.event_name != 'pull_request' }}
|
||||
|
||||
2
.github/workflows/packages-npm-publish.yml
vendored
2
.github/workflows/packages-npm-publish.yml
vendored
@@ -40,7 +40,7 @@ jobs:
|
||||
registry-url: "https://registry.npmjs.org"
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@8cba46e29c11878d930bca7870bb54394d3e8b21 # 24d32ffd492484c1d75e0c0b894501ddb9d30d62
|
||||
uses: tj-actions/changed-files@e0021407031f5be11a464abee9a0776171c79891 # 24d32ffd492484c1d75e0c0b894501ddb9d30d62
|
||||
with:
|
||||
files: |
|
||||
${{ matrix.package }}/package.json
|
||||
|
||||
23
.github/workflows/release-publish.yml
vendored
23
.github/workflows/release-publish.yml
vendored
@@ -160,17 +160,10 @@ jobs:
|
||||
node-version-file: web/package.json
|
||||
cache: "npm"
|
||||
cache-dependency-path: web/package-lock.json
|
||||
- name: Install web dependencies
|
||||
working-directory: web/
|
||||
run: |
|
||||
npm ci
|
||||
- name: Generate API Clients
|
||||
run: |
|
||||
make gen-client-ts
|
||||
make gen-client-go
|
||||
- name: Build web
|
||||
working-directory: web/
|
||||
run: |
|
||||
npm ci
|
||||
npm run build-proxy
|
||||
- name: Build outpost
|
||||
run: |
|
||||
@@ -199,7 +192,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
|
||||
- uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6.0.0
|
||||
- uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708 # v5
|
||||
with:
|
||||
role-to-assume: "arn:aws:iam::016170277896:role/github_goauthentik_authentik"
|
||||
aws-region: ${{ env.AWS_REGION }}
|
||||
@@ -217,12 +210,12 @@ jobs:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
|
||||
- name: Run test suite in final docker images
|
||||
run: |
|
||||
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> lifecycle/container/.env
|
||||
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> lifecycle/container/.env
|
||||
docker compose -f lifecycle/container/compose.yml pull -q
|
||||
docker compose -f lifecycle/container/compose.yml up --no-start
|
||||
docker compose -f lifecycle/container/compose.yml start postgresql
|
||||
docker compose -f lifecycle/container/compose.yml run -u root server test-all
|
||||
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
|
||||
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
|
||||
docker compose pull -q
|
||||
docker compose up --no-start
|
||||
docker compose start postgresql
|
||||
docker compose run -u root server test-all
|
||||
sentry-release:
|
||||
needs:
|
||||
- build-server
|
||||
|
||||
1
.github/workflows/release-tag.yml
vendored
1
.github/workflows/release-tag.yml
vendored
@@ -91,7 +91,6 @@ jobs:
|
||||
# ID from https://api.github.com/users/authentik-automation[bot]
|
||||
git config --global user.name '${{ steps.app-token.outputs.app-slug }}[bot]'
|
||||
git config --global user.email '${{ steps.get-user-id.outputs.user-id }}+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com'
|
||||
git pull
|
||||
git commit -a -m "release: ${{ inputs.version }}" --allow-empty
|
||||
git tag "version/${{ inputs.version }}" HEAD -m "version/${{ inputs.version }}"
|
||||
git push --follow-tags
|
||||
|
||||
8
Makefile
8
Makefile
@@ -148,11 +148,11 @@ bump: ## Bump authentik version. Usage: make bump version=20xx.xx.xx
|
||||
ifndef version
|
||||
$(error Usage: make bump version=20xx.xx.xx )
|
||||
endif
|
||||
$(eval current_version := $(shell cat ${PWD}/internal/constants/VERSION))
|
||||
$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' ${PWD}/pyproject.toml
|
||||
$(SED_INPLACE) 's/^VERSION = ".*"/VERSION = "$(version)"/' ${PWD}/authentik/__init__.py
|
||||
$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' pyproject.toml
|
||||
$(SED_INPLACE) 's/^VERSION = ".*"/VERSION = "$(version)"/' authentik/__init__.py
|
||||
$(MAKE) gen-build gen-compose aws-cfn
|
||||
$(SED_INPLACE) "s/\"${current_version}\"/\"$(version)\"/" ${PWD}/package.json ${PWD}/package-lock.json ${PWD}/web/package.json ${PWD}/web/package-lock.json
|
||||
npm version --no-git-tag-version --allow-same-version $(version)
|
||||
cd ${PWD}/web && npm version --no-git-tag-version --allow-same-version $(version)
|
||||
echo -n $(version) > ${PWD}/internal/constants/VERSION
|
||||
|
||||
#########################
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from functools import lru_cache
|
||||
from os import environ
|
||||
|
||||
VERSION = "2026.2.0"
|
||||
VERSION = "2026.2.0-rc1"
|
||||
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
|
||||
|
||||
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
"""Schema generation tests"""
|
||||
|
||||
from pathlib import Path
|
||||
from tempfile import gettempdir
|
||||
from uuid import uuid4
|
||||
|
||||
from django.core.management import call_command
|
||||
from django.urls import reverse
|
||||
@@ -31,14 +29,15 @@ class TestSchemaGeneration(APITestCase):
|
||||
|
||||
def test_build_schema(self):
|
||||
"""Test schema build command"""
|
||||
tmp = Path(gettempdir())
|
||||
blueprint_file = tmp / f"{str(uuid4())}.json"
|
||||
api_file = tmp / f"{str(uuid4())}.yml"
|
||||
blueprint_file = Path("blueprints/schema.json")
|
||||
api_file = Path("schema.yml")
|
||||
blueprint_file.unlink()
|
||||
api_file.unlink()
|
||||
with (
|
||||
CONFIG.patch("debug", True),
|
||||
CONFIG.patch("tenants.enabled", True),
|
||||
CONFIG.patch("outposts.disable_embedded_outpost", True),
|
||||
):
|
||||
call_command("build_schema", blueprint_file=blueprint_file, api_file=api_file)
|
||||
call_command("build_schema")
|
||||
self.assertTrue(blueprint_file.exists())
|
||||
self.assertTrue(api_file.exists())
|
||||
|
||||
@@ -43,6 +43,8 @@ def get_attrs(obj: SerializerModel) -> dict[str, Any]:
|
||||
continue
|
||||
if _field.read_only:
|
||||
data.pop(field_name, None)
|
||||
if _field.get_initial() == data.get(field_name, None):
|
||||
data.pop(field_name, None)
|
||||
if field_name.endswith("_set"):
|
||||
data.pop(field_name, None)
|
||||
return data
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from typing import Any
|
||||
|
||||
from django.db.models import Case, F, IntegerField, Q, Value, When
|
||||
from django.db.models.functions import Concat, Length
|
||||
from django.db.models.functions import Length
|
||||
from django.http.request import HttpRequest
|
||||
from django.utils.html import _json_script_escapes
|
||||
from django.utils.safestring import mark_safe
|
||||
@@ -26,8 +26,7 @@ def get_brand_for_request(request: HttpRequest) -> Brand:
|
||||
domain_length=Length("domain"),
|
||||
match_priority=Case(
|
||||
When(
|
||||
condition=Q(host_domain__iexact=F("domain"))
|
||||
| Q(host_domain__iendswith=Concat(Value("."), F("domain"))),
|
||||
condition=Q(host_domain__iendswith=F("domain")),
|
||||
then=F("domain_length"),
|
||||
),
|
||||
default=Value(-1),
|
||||
|
||||
@@ -28,8 +28,6 @@ SAML_ATTRIBUTES_GROUP = "http://schemas.xmlsoap.org/claims/Group"
|
||||
SAML_BINDING_POST = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
|
||||
SAML_BINDING_REDIRECT = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
|
||||
|
||||
SAML_STATUS_SUCCESS = "urn:oasis:names:tc:SAML:2.0:status:Success"
|
||||
|
||||
DSA_SHA1 = "http://www.w3.org/2000/09/xmldsig#dsa-sha1"
|
||||
RSA_SHA1 = "http://www.w3.org/2000/09/xmldsig#rsa-sha1"
|
||||
# https://datatracker.ietf.org/doc/html/rfc4051#section-2.3.2
|
||||
|
||||
@@ -72,7 +72,6 @@ from authentik.core.middleware import (
|
||||
from authentik.core.models import (
|
||||
USER_ATTRIBUTE_TOKEN_EXPIRING,
|
||||
USER_PATH_SERVICE_ACCOUNT,
|
||||
USERNAME_MAX_LENGTH,
|
||||
Group,
|
||||
Session,
|
||||
Token,
|
||||
@@ -145,7 +144,7 @@ class UserSerializer(ModelSerializer):
|
||||
roles_obj = SerializerMethodField(allow_null=True)
|
||||
uid = CharField(read_only=True)
|
||||
username = CharField(
|
||||
max_length=USERNAME_MAX_LENGTH,
|
||||
max_length=150,
|
||||
validators=[UniqueValidator(queryset=User.objects.all().order_by("username"))],
|
||||
)
|
||||
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
"""authentik core models"""
|
||||
|
||||
import re
|
||||
import traceback
|
||||
from datetime import datetime, timedelta
|
||||
from enum import StrEnum
|
||||
from hashlib import sha256
|
||||
@@ -52,7 +50,6 @@ from authentik.tenants.models import DEFAULT_TOKEN_DURATION, DEFAULT_TOKEN_LENGT
|
||||
from authentik.tenants.utils import get_current_tenant, get_unique_identifier
|
||||
|
||||
LOGGER = get_logger()
|
||||
USERNAME_MAX_LENGTH = 150
|
||||
USER_PATH_SYSTEM_PREFIX = "goauthentik.io"
|
||||
_USER_ATTR_PREFIX = f"{USER_PATH_SYSTEM_PREFIX}/user"
|
||||
USER_ATTRIBUTE_DEBUG = f"{_USER_ATTR_PREFIX}/debug"
|
||||
@@ -530,35 +527,23 @@ class User(SerializerModel, AttributesMixin, AbstractUser):
|
||||
"default: in 30 days). See authentik logs for every will invocation of this "
|
||||
"deprecation."
|
||||
)
|
||||
stacktrace = traceback.format_stack()
|
||||
# The last line is this function, the next-to-last line is its caller
|
||||
cause = stacktrace[-2] if len(stacktrace) > 1 else "Unknown, see stacktrace in logs"
|
||||
if search := re.search(r'"(.*?)"', cause):
|
||||
cause = f"Property mapping or Expression policy named {search.group(1)}"
|
||||
|
||||
LOGGER.warning(
|
||||
"deprecation used",
|
||||
message=message_logger,
|
||||
deprecation=deprecation,
|
||||
replacement=replacement,
|
||||
cause=cause,
|
||||
stacktrace=stacktrace,
|
||||
)
|
||||
if not Event.filter_not_expired(
|
||||
action=EventAction.CONFIGURATION_WARNING,
|
||||
context__deprecation=deprecation,
|
||||
context__cause=cause,
|
||||
action=EventAction.CONFIGURATION_WARNING, context__deprecation=deprecation
|
||||
).exists():
|
||||
event = Event.new(
|
||||
EventAction.CONFIGURATION_WARNING,
|
||||
deprecation=deprecation,
|
||||
replacement=replacement,
|
||||
message=message_event,
|
||||
cause=cause,
|
||||
)
|
||||
event.expires = datetime.now() + timedelta(days=30)
|
||||
event.save()
|
||||
|
||||
return self.groups
|
||||
|
||||
def set_password(self, raw_password, signal=True, sender=None, request=None):
|
||||
|
||||
@@ -44,24 +44,19 @@
|
||||
{% endblock %}
|
||||
</div>
|
||||
</main>
|
||||
<footer
|
||||
name="site-footer"
|
||||
aria-label="{% trans 'Site footer' %}"
|
||||
class="pf-c-login__footer pf-m-dark">
|
||||
<div name="flow-links" aria-label="{% trans 'Flow links' %}">
|
||||
<ul class="pf-c-list pf-m-inline" part="list">
|
||||
{% for link in footer_links %}
|
||||
<li part="list-item">
|
||||
<a part="list-item-link" href="{{ link.href }}">{{ link.name }}</a>
|
||||
</li>
|
||||
{% endfor %}
|
||||
<li part="list-item">
|
||||
<span>
|
||||
{% trans 'Powered by authentik' %}
|
||||
</span>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
<footer aria-label="Site footer" class="pf-c-login__footer pf-m-dark">
|
||||
<ul class="pf-c-list pf-m-inline">
|
||||
{% for link in footer_links %}
|
||||
<li>
|
||||
<a href="{{ link.href }}">{{ link.name }}</a>
|
||||
</li>
|
||||
{% endfor %}
|
||||
<li>
|
||||
<span>
|
||||
{% trans 'Powered by authentik' %}
|
||||
</span>
|
||||
</li>
|
||||
</ul>
|
||||
</footer>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
from hashlib import sha256
|
||||
from json import loads
|
||||
from unittest.mock import PropertyMock, patch
|
||||
|
||||
from django.urls import reverse
|
||||
from jwt import encode
|
||||
@@ -233,43 +232,3 @@ class TestEndpointStage(FlowTestCase):
|
||||
plan = plan()
|
||||
self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
|
||||
self.assertEqual(plan.context[PLAN_CONTEXT_DEVICE], self.device)
|
||||
|
||||
def test_endpoint_stage_connector_no_stage_optional(self):
|
||||
flow = create_test_flow()
|
||||
stage = EndpointStage.objects.create(connector=self.connector, mode=StageMode.OPTIONAL)
|
||||
FlowStageBinding.objects.create(stage=stage, target=flow, order=0)
|
||||
|
||||
with patch(
|
||||
"authentik.endpoints.connectors.agent.models.AgentConnector.stage",
|
||||
PropertyMock(return_value=None),
|
||||
):
|
||||
with self.assertFlowFinishes() as plan:
|
||||
res = self.client.get(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
|
||||
)
|
||||
self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
|
||||
plan = plan()
|
||||
self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
|
||||
self.assertNotIn(PLAN_CONTEXT_DEVICE, plan.context)
|
||||
|
||||
def test_endpoint_stage_connector_no_stage_required(self):
|
||||
flow = create_test_flow()
|
||||
stage = EndpointStage.objects.create(connector=self.connector, mode=StageMode.REQUIRED)
|
||||
FlowStageBinding.objects.create(stage=stage, target=flow, order=0)
|
||||
|
||||
with patch(
|
||||
"authentik.endpoints.connectors.agent.models.AgentConnector.stage",
|
||||
PropertyMock(return_value=None),
|
||||
):
|
||||
with self.assertFlowFinishes() as plan:
|
||||
res = self.client.get(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
|
||||
)
|
||||
self.assertStageResponse(
|
||||
res,
|
||||
component="ak-stage-access-denied",
|
||||
error_message="Invalid stage configuration",
|
||||
)
|
||||
plan = plan()
|
||||
self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
|
||||
self.assertNotIn(PLAN_CONTEXT_DEVICE, plan.context)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from authentik.endpoints.models import EndpointStage, StageMode
|
||||
from authentik.endpoints.models import EndpointStage
|
||||
from authentik.flows.stage import StageView
|
||||
|
||||
PLAN_CONTEXT_ENDPOINT_CONNECTOR = "endpoint_connector"
|
||||
@@ -6,24 +6,15 @@ PLAN_CONTEXT_ENDPOINT_CONNECTOR = "endpoint_connector"
|
||||
|
||||
class EndpointStageView(StageView):
|
||||
|
||||
def _get_inner(self) -> StageView | None:
|
||||
def _get_inner(self):
|
||||
stage: EndpointStage = self.executor.current_stage
|
||||
inner_stage: type[StageView] | None = stage.connector.stage
|
||||
if not inner_stage:
|
||||
return None
|
||||
return self.executor.stage_ok()
|
||||
return inner_stage(self.executor, request=self.request)
|
||||
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
inner = self._get_inner()
|
||||
if inner is None:
|
||||
stage: EndpointStage = self.executor.current_stage
|
||||
if stage.mode == StageMode.OPTIONAL:
|
||||
return self.executor.stage_ok()
|
||||
else:
|
||||
return self.executor.stage_invalid("Invalid stage configuration")
|
||||
return inner.dispatch(request, *args, **kwargs)
|
||||
return self._get_inner().dispatch(request, *args, **kwargs)
|
||||
|
||||
def cleanup(self):
|
||||
inner = self._get_inner()
|
||||
if inner is not None:
|
||||
return inner.cleanup()
|
||||
return self._get_inner().cleanup()
|
||||
|
||||
@@ -15,7 +15,6 @@ from django.core.cache import cache
|
||||
from django.db.models.query import QuerySet
|
||||
from django.utils.timezone import now
|
||||
from jwt import PyJWTError, decode, get_unverified_header
|
||||
from jwt.algorithms import ECAlgorithm
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.fields import (
|
||||
ChoiceField,
|
||||
@@ -110,20 +109,13 @@ class LicenseKey:
|
||||
intermediate.verify_directly_issued_by(get_licensing_key())
|
||||
except InvalidSignature, TypeError, ValueError, Error:
|
||||
raise ValidationError("Unable to verify license") from None
|
||||
_validate_curve_original = ECAlgorithm._validate_curve
|
||||
try:
|
||||
# authentik's license are generated with `algorithm="ES512"` and signed with
|
||||
# a key of curve `secp384r1`. Starting with version 2.11.0, pyjwt enforces the spec, see
|
||||
# https://github.com/jpadilla/pyjwt/commit/5b8622773358e56d3d3c0a9acf404809ff34433a
|
||||
# authentik will change its license generation to `algorithm="ES384"` in 2026.
|
||||
# TODO: remove this when the last incompatible license runs out.
|
||||
ECAlgorithm._validate_curve = lambda *_: True
|
||||
body = from_dict(
|
||||
LicenseKey,
|
||||
decode(
|
||||
jwt,
|
||||
our_cert.public_key(),
|
||||
algorithms=["ES384", "ES512"],
|
||||
algorithms=["ES512"],
|
||||
audience=get_license_aud(),
|
||||
options={"verify_exp": check_expiry, "verify_signature": check_expiry},
|
||||
),
|
||||
@@ -133,8 +125,6 @@ class LicenseKey:
|
||||
if unverified["aud"] != get_license_aud():
|
||||
raise ValidationError("Invalid Install ID in license") from None
|
||||
raise ValidationError("Unable to verify license") from None
|
||||
finally:
|
||||
ECAlgorithm._validate_curve = _validate_curve_original
|
||||
return body
|
||||
|
||||
@staticmethod
|
||||
|
||||
@@ -1,150 +0,0 @@
|
||||
from datetime import datetime
|
||||
|
||||
from django.db.models import BooleanField as ModelBooleanField
|
||||
from django.db.models import Case, Q, Value, When
|
||||
from django_filters.rest_framework import BooleanFilter, FilterSet
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.fields import IntegerField, SerializerMethodField
|
||||
from rest_framework.mixins import CreateModelMixin
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.viewsets import GenericViewSet
|
||||
|
||||
from authentik.core.api.utils import ModelSerializer
|
||||
from authentik.enterprise.api import EnterpriseRequiredMixin
|
||||
from authentik.enterprise.lifecycle.api.reviews import ReviewSerializer
|
||||
from authentik.enterprise.lifecycle.models import LifecycleIteration, ReviewState
|
||||
from authentik.enterprise.lifecycle.utils import (
|
||||
ContentTypeField,
|
||||
ReviewerGroupSerializer,
|
||||
ReviewerUserSerializer,
|
||||
admin_link_for_model,
|
||||
parse_content_type,
|
||||
start_of_day,
|
||||
)
|
||||
from authentik.lib.utils.time import timedelta_from_string
|
||||
|
||||
|
||||
class LifecycleIterationSerializer(EnterpriseRequiredMixin, ModelSerializer):
|
||||
content_type = ContentTypeField()
|
||||
object_verbose = SerializerMethodField()
|
||||
object_admin_url = SerializerMethodField(read_only=True)
|
||||
grace_period_end = SerializerMethodField(read_only=True)
|
||||
reviews = ReviewSerializer(many=True, read_only=True, source="review_set.all")
|
||||
user_can_review = SerializerMethodField(read_only=True)
|
||||
|
||||
reviewer_groups = ReviewerGroupSerializer(
|
||||
many=True, read_only=True, source="rule.reviewer_groups"
|
||||
)
|
||||
min_reviewers = IntegerField(read_only=True, source="rule.min_reviewers")
|
||||
reviewers = ReviewerUserSerializer(many=True, read_only=True, source="rule.reviewers")
|
||||
|
||||
next_review_date = SerializerMethodField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = LifecycleIteration
|
||||
fields = [
|
||||
"id",
|
||||
"content_type",
|
||||
"object_id",
|
||||
"object_verbose",
|
||||
"object_admin_url",
|
||||
"state",
|
||||
"opened_on",
|
||||
"grace_period_end",
|
||||
"next_review_date",
|
||||
"reviews",
|
||||
"user_can_review",
|
||||
"reviewer_groups",
|
||||
"min_reviewers",
|
||||
"reviewers",
|
||||
]
|
||||
read_only_fields = fields
|
||||
|
||||
def get_object_verbose(self, iteration: LifecycleIteration) -> str:
|
||||
return str(iteration.object)
|
||||
|
||||
def get_object_admin_url(self, iteration: LifecycleIteration) -> str:
|
||||
return admin_link_for_model(iteration.object)
|
||||
|
||||
def get_grace_period_end(self, iteration: LifecycleIteration) -> datetime:
|
||||
return start_of_day(
|
||||
iteration.opened_on + timedelta_from_string(iteration.rule.grace_period)
|
||||
)
|
||||
|
||||
def get_next_review_date(self, iteration: LifecycleIteration) -> datetime:
|
||||
return start_of_day(iteration.opened_on + timedelta_from_string(iteration.rule.interval))
|
||||
|
||||
def get_user_can_review(self, iteration: LifecycleIteration) -> bool:
|
||||
return iteration.user_can_review(self.context["request"].user)
|
||||
|
||||
|
||||
class LifecycleIterationFilterSet(FilterSet):
|
||||
user_is_reviewer = BooleanFilter(field_name="user_is_reviewer", lookup_expr="exact")
|
||||
|
||||
|
||||
class IterationViewSet(EnterpriseRequiredMixin, CreateModelMixin, GenericViewSet):
|
||||
queryset = LifecycleIteration.objects.all()
|
||||
serializer_class = LifecycleIterationSerializer
|
||||
ordering = ["-opened_on"]
|
||||
ordering_fields = ["state", "content_type__model", "opened_on", "grace_period_end"]
|
||||
filterset_class = LifecycleIterationFilterSet
|
||||
|
||||
def get_queryset(self):
|
||||
user = self.request.user
|
||||
return self.queryset.annotate(
|
||||
user_is_reviewer=Case(
|
||||
When(
|
||||
Q(rule__reviewers=user)
|
||||
| Q(rule__reviewer_groups__in=user.groups.all().with_ancestors()),
|
||||
then=Value(True),
|
||||
),
|
||||
default=Value(False),
|
||||
output_field=ModelBooleanField(),
|
||||
)
|
||||
).distinct()
|
||||
|
||||
@action(
|
||||
detail=False,
|
||||
methods=["get"],
|
||||
url_path=r"latest/(?P<content_type>[^/]+)/(?P<object_id>[^/]+)",
|
||||
)
|
||||
def latest_iteration(self, request: Request, content_type: str, object_id: str) -> Response:
|
||||
ct = parse_content_type(content_type)
|
||||
try:
|
||||
obj = (
|
||||
self.get_queryset()
|
||||
.filter(
|
||||
content_type__app_label=ct["app_label"],
|
||||
content_type__model=ct["model"],
|
||||
object_id=object_id,
|
||||
)
|
||||
.latest("opened_on")
|
||||
)
|
||||
except LifecycleIteration.DoesNotExist:
|
||||
return Response(status=404)
|
||||
serializer = self.get_serializer(obj)
|
||||
return Response(serializer.data)
|
||||
|
||||
@extend_schema(
|
||||
operation_id="lifecycle_iterations_list_open",
|
||||
responses={200: LifecycleIterationSerializer(many=True)},
|
||||
)
|
||||
@action(
|
||||
detail=False,
|
||||
methods=["get"],
|
||||
url_path=r"open",
|
||||
)
|
||||
def open_iterations(self, request: Request):
|
||||
iterations = self.get_queryset().filter(
|
||||
Q(state=ReviewState.PENDING) | Q(state=ReviewState.OVERDUE)
|
||||
)
|
||||
iterations = self.filter_queryset(iterations)
|
||||
page = self.paginate_queryset(iterations)
|
||||
if page is not None:
|
||||
serializer = self.get_serializer(page, many=True)
|
||||
return self.get_paginated_response(serializer.data)
|
||||
|
||||
serializer = self.get_serializer(iterations, many=True)
|
||||
return Response(serializer.data)
|
||||
@@ -1,33 +0,0 @@
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.mixins import CreateModelMixin
|
||||
from rest_framework.viewsets import GenericViewSet
|
||||
|
||||
from authentik.core.api.utils import ModelSerializer
|
||||
from authentik.enterprise.api import EnterpriseRequiredMixin
|
||||
from authentik.enterprise.lifecycle.models import LifecycleIteration, Review
|
||||
from authentik.enterprise.lifecycle.utils import ReviewerUserSerializer
|
||||
|
||||
|
||||
class ReviewSerializer(EnterpriseRequiredMixin, ModelSerializer):
|
||||
reviewer = ReviewerUserSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Review
|
||||
fields = ["id", "iteration", "reviewer", "timestamp", "note"]
|
||||
read_only_fields = ["id", "timestamp", "reviewer"]
|
||||
|
||||
def validate_iteration(self, iteration: LifecycleIteration) -> LifecycleIteration:
|
||||
user = self.context["request"].user
|
||||
if not iteration.user_can_review(user):
|
||||
raise ValidationError(_("You are not allowed to submit a review for this object."))
|
||||
return iteration
|
||||
|
||||
|
||||
class ReviewViewSet(EnterpriseRequiredMixin, CreateModelMixin, GenericViewSet):
|
||||
queryset = Review.objects.all()
|
||||
serializer_class = ReviewSerializer
|
||||
|
||||
def perform_create(self, serializer: ReviewSerializer) -> None:
|
||||
review = serializer.save(reviewer=self.request.user)
|
||||
review.iteration.on_review(self.request)
|
||||
@@ -1,113 +0,0 @@
|
||||
from django.utils.translation import gettext as _
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.fields import SerializerMethodField
|
||||
from rest_framework.relations import SlugRelatedField
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.core.api.utils import ModelSerializer
|
||||
from authentik.core.models import User
|
||||
from authentik.enterprise.api import EnterpriseRequiredMixin
|
||||
from authentik.enterprise.lifecycle.models import LifecycleRule
|
||||
from authentik.enterprise.lifecycle.utils import (
|
||||
ContentTypeField,
|
||||
ReviewerGroupSerializer,
|
||||
ReviewerUserSerializer,
|
||||
)
|
||||
from authentik.lib.utils.time import timedelta_from_string
|
||||
|
||||
|
||||
class LifecycleRuleSerializer(EnterpriseRequiredMixin, ModelSerializer):
|
||||
content_type = ContentTypeField()
|
||||
target_verbose = SerializerMethodField()
|
||||
reviewer_groups_obj = ReviewerGroupSerializer(
|
||||
many=True, read_only=True, source="reviewer_groups"
|
||||
)
|
||||
reviewers = SlugRelatedField(slug_field="uuid", many=True, queryset=User.objects.all())
|
||||
reviewers_obj = ReviewerUserSerializer(many=True, read_only=True, source="reviewers")
|
||||
|
||||
class Meta:
|
||||
model = LifecycleRule
|
||||
fields = [
|
||||
"id",
|
||||
"name",
|
||||
"content_type",
|
||||
"object_id",
|
||||
"interval",
|
||||
"grace_period",
|
||||
"reviewer_groups",
|
||||
"reviewer_groups_obj",
|
||||
"min_reviewers",
|
||||
"min_reviewers_is_per_group",
|
||||
"reviewers",
|
||||
"reviewers_obj",
|
||||
"notification_transports",
|
||||
"target_verbose",
|
||||
]
|
||||
read_only_fields = ["id", "reviewers_obj", "reviewer_groups_obj", "target_verbose"]
|
||||
|
||||
def get_target_verbose(self, rule: LifecycleRule) -> str:
|
||||
if rule.object_id is None:
|
||||
return rule.content_type.model_class()._meta.verbose_name_plural
|
||||
else:
|
||||
return f"{rule.content_type.model_class()._meta.verbose_name}: {rule.object}"
|
||||
|
||||
def validate_object_id(self, value: str) -> str | None:
|
||||
if value == "":
|
||||
return None
|
||||
return value
|
||||
|
||||
def validate(self, attrs: dict) -> dict:
|
||||
if (
|
||||
attrs.get("object_id") is not None
|
||||
and not attrs["content_type"]
|
||||
.get_all_objects_for_this_type(pk=attrs["object_id"])
|
||||
.exists()
|
||||
):
|
||||
raise ValidationError({"object_id": _("Object does not exist")})
|
||||
if "reviewer_groups" in attrs or "reviewers" in attrs:
|
||||
reviewer_groups = attrs.get(
|
||||
"reviewer_groups", self.instance.reviewer_groups.all() if self.instance else []
|
||||
)
|
||||
reviewers = attrs.get(
|
||||
"reviewers", self.instance.reviewers.all() if self.instance else []
|
||||
)
|
||||
if len(reviewer_groups) == 0 and len(reviewers) == 0:
|
||||
raise ValidationError(_("Either a reviewer group or a reviewer must be set."))
|
||||
if "grace_period" in attrs or "interval" in attrs:
|
||||
grace_period = attrs.get("grace_period", getattr(self.instance, "grace_period", None))
|
||||
interval = attrs.get("interval", getattr(self.instance, "interval", None))
|
||||
if (
|
||||
grace_period is not None
|
||||
and interval is not None
|
||||
and (timedelta_from_string(grace_period) > timedelta_from_string(interval))
|
||||
):
|
||||
raise ValidationError(
|
||||
{"grace_period": _("Grace period must be shorter than the interval.")}
|
||||
)
|
||||
if "content_type" in attrs or "object_id" in attrs:
|
||||
content_type = attrs.get("content_type", getattr(self.instance, "content_type", None))
|
||||
object_id = attrs.get("object_id", getattr(self.instance, "object_id", None))
|
||||
if content_type is not None and object_id is None:
|
||||
existing = LifecycleRule.objects.filter(
|
||||
content_type=content_type, object_id__isnull=True
|
||||
)
|
||||
if self.instance:
|
||||
existing = existing.exclude(pk=self.instance.pk)
|
||||
if existing.exists():
|
||||
raise ValidationError(
|
||||
{
|
||||
"content_type": _(
|
||||
"Only one type-wide rule for each object type is allowed."
|
||||
)
|
||||
}
|
||||
)
|
||||
return attrs
|
||||
|
||||
|
||||
class LifecycleRuleViewSet(ModelViewSet):
    """CRUD API endpoint for lifecycle (access review) rules."""

    queryset = LifecycleRule.objects.all()
    serializer_class = LifecycleRuleSerializer
    # Free-text search across the target model name and reviewer identities
    search_fields = ["content_type__model", "reviewer_groups__name", "reviewers__username"]
    ordering = ["name"]
    ordering_fields = ["name", "content_type__model"]
    filterset_fields = ["content_type__model"]
|
||||
@@ -1,22 +0,0 @@
|
||||
from authentik.enterprise.apps import EnterpriseConfig
|
||||
from authentik.lib.utils.time import fqdn_rand
|
||||
from authentik.tasks.schedules.common import ScheduleSpec
|
||||
|
||||
|
||||
class ReportsConfig(EnterpriseConfig):
    """Django app config for the enterprise lifecycle (access review) app.

    NOTE(review): the class is named ``ReportsConfig`` but configures the
    lifecycle app — presumably a copy/paste leftover from the reports app.
    Kept as-is since renaming could affect external references; confirm and
    rename in a follow-up if safe.
    """

    name = "authentik.enterprise.lifecycle"
    label = "authentik_lifecycle"
    verbose_name = "authentik Enterprise.Lifecycle"
    default = True

    @property
    def tenant_schedule_specs(self) -> list[ScheduleSpec]:
        """Schedule ``apply_lifecycle_rules`` once per day, at a minute/hour
        derived from the host FQDN so separate installs don't all fire at
        the same wall-clock time."""
        # Imported lazily to avoid importing task machinery at app-load time
        from authentik.enterprise.lifecycle.tasks import apply_lifecycle_rules

        return [
            ScheduleSpec(
                actor=apply_lifecycle_rules,
                crontab=f"{fqdn_rand('lifecycle_apply_lifecycle_rules')} "
                f"{fqdn_rand('lifecycle_apply_lifecycle_rules', 24)} * * *",
            )
        ]
|
||||
@@ -1,154 +0,0 @@
|
||||
# Generated by Django 5.2.11 on 2026-02-09 15:57
|
||||
|
||||
import authentik.lib.utils.time
|
||||
import django.db.models.deletion
|
||||
import uuid
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Initial schema for the lifecycle app: LifecycleRule, LifecycleIteration
    and Review models plus their indexes and uniqueness constraints.

    Generated migration — do not hand-edit field definitions.
    """

    initial = True

    dependencies = [
        ("authentik_core", "0057_remove_user_groups_remove_user_user_permissions_and_more"),
        ("authentik_events", "0016_alter_event_action"),
        ("contenttypes", "0002_remove_content_type_name"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name="LifecycleRule",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ("name", models.TextField(unique=True)),
                # null object_id => rule applies to the whole content type
                ("object_id", models.TextField(default=None, null=True)),
                (
                    "interval",
                    models.TextField(
                        default="days=60",
                        validators=[authentik.lib.utils.time.timedelta_string_validator],
                    ),
                ),
                (
                    "grace_period",
                    models.TextField(
                        default="days=30",
                        validators=[authentik.lib.utils.time.timedelta_string_validator],
                    ),
                ),
                ("min_reviewers", models.PositiveSmallIntegerField(default=1)),
                ("min_reviewers_is_per_group", models.BooleanField(default=False)),
                (
                    "content_type",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="contenttypes.contenttype"
                    ),
                ),
                (
                    "notification_transports",
                    models.ManyToManyField(
                        blank=True,
                        help_text="Select which transports should be used to notify the reviewers. If none are selected, the notification will only be shown in the authentik UI.",
                        to="authentik_events.notificationtransport",
                    ),
                ),
                ("reviewer_groups", models.ManyToManyField(blank=True, to="authentik_core.group")),
                ("reviewers", models.ManyToManyField(blank=True, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name="LifecycleIteration",
            fields=[
                (
                    "managed",
                    models.TextField(
                        default=None,
                        help_text="Objects that are managed by authentik. These objects are created and updated automatically. This flag only indicates that an object can be overwritten by migrations. You can still modify the objects via the API, but expect changes to be overwritten in a later update.",
                        null=True,
                        unique=True,
                        verbose_name="Managed by authentik",
                    ),
                ),
                ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ("object_id", models.TextField()),
                (
                    "state",
                    models.CharField(
                        choices=[
                            ("REVIEWED", "Reviewed"),
                            ("PENDING", "Pending"),
                            ("OVERDUE", "Overdue"),
                            ("CANCELED", "Canceled"),
                        ],
                        default="PENDING",
                        max_length=10,
                    ),
                ),
                # Changed to DateTimeField in migration 0002
                ("opened_on", models.DateField(auto_now_add=True)),
                (
                    "content_type",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="contenttypes.contenttype"
                    ),
                ),
                (
                    "rule",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        to="authentik_lifecycle.lifecyclerule",
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="Review",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ("timestamp", models.DateTimeField(auto_now_add=True)),
                ("note", models.TextField(null=True)),
                (
                    "iteration",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="authentik_lifecycle.lifecycleiteration",
                    ),
                ),
                (
                    "reviewer",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
            ],
        ),
        migrations.AddIndex(
            model_name="lifecyclerule",
            index=models.Index(fields=["content_type"], name="authentik_l_content_4e3a6a_idx"),
        ),
        # At most one type-wide rule (null object_id) per content type
        migrations.AddConstraint(
            model_name="lifecyclerule",
            constraint=models.UniqueConstraint(
                condition=models.Q(("object_id__isnull", True)),
                fields=("content_type",),
                name="uniq_lifecycle_rule_ct_null_object",
            ),
        ),
        migrations.AlterUniqueTogether(
            name="lifecyclerule",
            unique_together={("content_type", "object_id")},
        ),
        migrations.AddIndex(
            model_name="lifecycleiteration",
            index=models.Index(
                fields=["content_type", "opened_on"], name="authentik_l_content_09c32a_idx"
            ),
        ),
        migrations.AlterUniqueTogether(
            name="review",
            unique_together={("iteration", "reviewer")},
        ),
    ]
|
||||
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 5.2.11 on 2026-02-13 09:33
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Promote LifecycleIteration.opened_on from a date to a timestamp.

    Generated migration — do not hand-edit.
    """

    dependencies = [
        ("authentik_lifecycle", "0001_initial"),
    ]

    operations = [
        migrations.AlterField(
            model_name="lifecycleiteration",
            name="opened_on",
            field=models.DateTimeField(auto_now_add=True),
        ),
    ]
|
||||
@@ -1,292 +0,0 @@
|
||||
from __future__ import annotations

from datetime import timedelta
from uuid import uuid4

from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.db.models import Q, QuerySet
from django.db.models.fields import Field
from django.db.models.functions import Cast
from django.http import HttpRequest
from django.utils import timezone
from django.utils.translation import gettext as _
from rest_framework.serializers import BaseSerializer

from authentik.blueprints.models import ManagedModel
from authentik.core.models import Group, User
from authentik.enterprise.lifecycle.utils import link_for_model, start_of_day
from authentik.events.models import Event, EventAction, NotificationSeverity, NotificationTransport
from authentik.lib.models import SerializerModel
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
|
||||
|
||||
|
||||
class LifecycleRule(SerializerModel):
    """Defines a recurring access review for a single object or a whole
    object type (when ``object_id`` is null)."""

    id = models.UUIDField(primary_key=True, default=uuid4)
    name = models.TextField(unique=True)
    # Target: a content type, optionally narrowed to one object
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.TextField(null=True, default=None)
    object = GenericForeignKey("content_type", "object_id")

    # How often a review is due (timedelta string, e.g. "days=60")
    interval = models.TextField(
        default="days=60",
        validators=[timedelta_string_validator],
    )
    # Grace period starts after a review is due
    grace_period = models.TextField(
        default="days=30",
        validators=[timedelta_string_validator],
    )

    # The review has to be conducted by `min_reviewers` members of `reviewer_groups`
    # (total or per group depending on `min_reviewers_is_per_group` flag) as well
    # as all of `reviewers`
    reviewer_groups = models.ManyToManyField("authentik_core.Group", blank=True)
    min_reviewers = models.PositiveSmallIntegerField(default=1)
    min_reviewers_is_per_group = models.BooleanField(default=False)
    reviewers = models.ManyToManyField("authentik_core.User", blank=True)

    notification_transports = models.ManyToManyField(
        NotificationTransport,
        help_text=_(
            "Select which transports should be used to notify the reviewers. If none are "
            "selected, the notification will only be shown in the authentik UI."
        ),
        blank=True,
    )

    class Meta:
        indexes = [models.Index(fields=["content_type"])]
        unique_together = [["content_type", "object_id"]]
        constraints = [
            # At most one type-wide rule (null object_id) per content type
            models.UniqueConstraint(
                fields=["content_type"],
                condition=Q(object_id__isnull=True),
                name="uniq_lifecycle_rule_ct_null_object",
            )
        ]

    @property
    def serializer(self) -> type[BaseSerializer]:
        """Serializer used by the generic object API (lazy import avoids a
        circular models<->api dependency)."""
        from authentik.enterprise.lifecycle.api.rules import LifecycleRuleSerializer

        return LifecycleRuleSerializer

    def _get_pk_field(self) -> Field:
        """Return a fresh instance of the target model's ultimate PK field
        class (following FK chains), used to Cast text object_ids back to the
        model's native PK type."""
        model = self.content_type.model_class()
        pk = model._meta.pk
        while hasattr(pk, "target_field"):
            pk = pk.target_field
        return pk.__class__()

    def get_objects(self) -> QuerySet:
        """All objects covered by this rule.

        For an object-level rule: just that object. For a type-wide rule: all
        objects of the type except those covered by a more specific
        object-level rule.
        """
        qs = self.content_type.get_all_objects_for_this_type()
        if self.object_id:
            qs = qs.filter(pk=self.object_id)
        else:
            qs = qs.exclude(
                pk__in=LifecycleRule.objects.filter(
                    content_type=self.content_type, object_id__isnull=False
                ).values_list(Cast("object_id", output_field=self._get_pk_field()), flat=True)
            )
        return qs

    def _get_stale_iterations(self) -> QuerySet[LifecycleIteration]:
        """Open iterations of this rule that no longer match the rule's target
        (content type or object changed); they get canceled by apply()."""
        filter = ~Q(content_type=self.content_type)
        if self.object_id:
            filter = filter | ~Q(object_id=self.object_id)
        filter = Q(state__in=(ReviewState.PENDING, ReviewState.OVERDUE)) & filter
        return self.lifecycleiteration_set.filter(filter)

    def _get_newly_overdue_iterations(self) -> QuerySet[LifecycleIteration]:
        """Pending iterations whose grace period has elapsed (day-granular:
        compared against the start of tomorrow minus the grace period)."""
        return self.lifecycleiteration_set.filter(
            opened_on__lt=start_of_day(
                timezone.now() + timedelta(days=1) - timedelta_from_string(self.grace_period)
            ),
            state=ReviewState.PENDING,
        )

    def _get_newly_due_objects(self) -> QuerySet:
        """Covered objects that have no iteration opened within the last
        `interval`, i.e. objects for which a new review must start."""
        recent_iteration_ids = LifecycleIteration.objects.filter(
            content_type=self.content_type,
            object_id__isnull=False,
            opened_on__gte=start_of_day(
                timezone.now() + timedelta(days=1) - timedelta_from_string(self.interval)
            ),
        ).values_list(Cast("object_id", output_field=self._get_pk_field()), flat=True)

        return self.get_objects().exclude(pk__in=recent_iteration_ids)

    def apply(self):
        """Reconcile this rule: cancel stale iterations, flag overdue ones,
        and open new iterations for objects whose review is due."""
        self._get_stale_iterations().update(state=ReviewState.CANCELED)

        for iteration in self._get_newly_overdue_iterations():
            iteration.make_overdue()

        for obj in self._get_newly_due_objects():
            LifecycleIteration.start(content_type=self.content_type, object_id=obj.pk, rule=self)

    def is_satisfied_for_iteration(self, iteration: LifecycleIteration) -> bool:
        """Whether the iteration has collected enough reviews.

        All explicit `reviewers` must have reviewed; additionally, if any
        reviewer groups are set, `min_reviewers` group members must have
        reviewed — per group or in total depending on
        `min_reviewers_is_per_group`.
        """
        reviewers = self.reviewers.all()
        if (
            iteration.review_set.filter(reviewer__in=reviewers).distinct("reviewer").count()
            < reviewers.count()
        ):
            return False
        if self.reviewer_groups.count() == 0:
            return True
        if self.min_reviewers_is_per_group:
            for g in self.reviewer_groups.all():
                # Group membership includes descendant groups
                if (
                    iteration.review_set.filter(
                        reviewer__groups__in=Group.objects.filter(pk=g.pk).with_descendants()
                    )
                    .distinct()
                    .count()
                    < self.min_reviewers
                ):
                    return False
            return True
        else:
            return (
                iteration.review_set.filter(
                    reviewer__groups__in=self.reviewer_groups.all().with_descendants()
                )
                .distinct()
                .count()
                >= self.min_reviewers
            )

    def get_reviewers(self) -> QuerySet[User]:
        """All users eligible to review: explicit reviewers plus members of
        the reviewer groups (including descendant groups)."""
        return User.objects.filter(
            Q(id__in=self.reviewers.all().values_list("pk", flat=True))
            | Q(groups__in=self.reviewer_groups.all().with_descendants())
        ).distinct()

    def notify_reviewers(self, event: Event, severity: str):
        """Queue one notification task per (transport, reviewer) pair for the
        given event."""
        from authentik.enterprise.lifecycle.tasks import send_notification

        for transport in self.notification_transports.all():
            for user in self.get_reviewers():
                send_notification.send_with_options(
                    args=(transport.pk, event.pk, user.pk, severity),
                    rel_obj=transport,
                )
                # send_once transports deliver a single notification, not one
                # per reviewer — NOTE(review): indentation reconstructed from a
                # whitespace-mangled source; confirm the break's loop level.
                if transport.send_once:
                    break
|
||||
|
||||
|
||||
class ReviewState(models.TextChoices):
    """Lifecycle states of an access-review iteration."""

    REVIEWED = "REVIEWED", _("Reviewed")
    PENDING = "PENDING", _("Pending")
    OVERDUE = "OVERDUE", _("Overdue")
    CANCELED = "CANCELED", _("Canceled")
|
||||
|
||||
|
||||
class LifecycleIteration(SerializerModel, ManagedModel):
    """One concrete review cycle of an object under a LifecycleRule."""

    id = models.UUIDField(primary_key=True, default=uuid4)
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.TextField(null=False)
    object = GenericForeignKey("content_type", "object_id")

    # Iterations survive rule deletion (SET_NULL); open ones are canceled by a
    # pre_delete signal handler
    rule = models.ForeignKey(LifecycleRule, null=True, on_delete=models.SET_NULL)

    state = models.CharField(max_length=10, choices=ReviewState, default=ReviewState.PENDING)
    opened_on = models.DateTimeField(auto_now_add=True)

    class Meta:
        indexes = [models.Index(fields=["content_type", "opened_on"])]

    @property
    def serializer(self) -> type[BaseSerializer]:
        """Serializer used by the generic object API (lazy import avoids a
        circular models<->api dependency)."""
        from authentik.enterprise.lifecycle.api.iterations import LifecycleIterationSerializer

        return LifecycleIterationSerializer

    def _get_model_name(self) -> str:
        # Human-readable, lower-cased name of the reviewed object's type
        return self.content_type.name.lower()

    def _get_event_args(self) -> dict:
        """Common kwargs for the review events emitted by this iteration."""
        return {
            "target": self.object,
            # NOTE(review): f-string inside gettext `_()` is evaluated before
            # translation lookup, so these strings cannot be translated from a
            # catalog — confirm whether that is intentional.
            "hyperlink": link_for_model(self.object),
            "hyperlink_label": _(f"Go to {self._get_model_name()}"),
            "lifecycle_iteration": self.id,
        }

    def initialize(self):
        """Emit the review-initiated event and notify all reviewers."""
        event = Event.new(
            EventAction.REVIEW_INITIATED,
            message=_(f"Access review is due for {self.content_type.name} {str(self.object)}"),
            **self._get_event_args(),
        )
        event.save()
        self.rule.notify_reviewers(event, NotificationSeverity.NOTICE)

    def make_overdue(self):
        """Transition to OVERDUE, emit the overdue event and alert reviewers."""
        self.state = ReviewState.OVERDUE

        event = Event.new(
            EventAction.REVIEW_OVERDUE,
            message=_(f"Access review is overdue for {self.content_type.name} {str(self.object)}"),
            **self._get_event_args(),
        )
        event.save()
        self.rule.notify_reviewers(event, NotificationSeverity.ALERT)
        self.save()

    @staticmethod
    def start(content_type: ContentType, object_id: str, rule: LifecycleRule) -> LifecycleIteration:
        """Create a new PENDING iteration and fire its initialization event."""
        iteration = LifecycleIteration.objects.create(
            content_type=content_type, object_id=object_id, rule=rule
        )
        iteration.initialize()
        return iteration

    def make_reviewed(self, request: HttpRequest):
        """Transition to REVIEWED, emit the completion event (attributed to
        the requesting user via from_http) and notify reviewers."""
        self.state = ReviewState.REVIEWED
        event = Event.new(
            EventAction.REVIEW_COMPLETED,
            message=_(f"Access review completed for {self.content_type.name} {str(self.object)}"),
            **self._get_event_args(),
        ).from_http(request)
        event.save()
        self.rule.notify_reviewers(event, NotificationSeverity.NOTICE)
        self.save()

    def on_review(self, request: HttpRequest):
        """Called after a Review is recorded; completes the iteration once the
        rule's reviewer requirements are satisfied.

        Raises:
            AssertionError: if the iteration is not open (PENDING/OVERDUE).
        """
        if self.state not in (ReviewState.PENDING, ReviewState.OVERDUE):
            raise AssertionError("Review is not pending or overdue")
        if self.rule.is_satisfied_for_iteration(self):
            self.make_reviewed(request)

    def user_can_review(self, user: User) -> bool:
        """Whether `user` may still submit a review for this iteration:
        it must be open, the user must not have reviewed it already, and the
        user must be an eligible reviewer under the rule."""
        if self.state not in (ReviewState.PENDING, ReviewState.OVERDUE):
            return False
        if self.review_set.filter(reviewer=user).exists():
            return False
        groups = self.rule.reviewer_groups.all()
        if groups:
            for group in groups:
                if group.is_member(user):
                    return True
            return False
        else:
            return user in self.rule.get_reviewers()
|
||||
|
||||
|
||||
class Review(SerializerModel):
    """A single reviewer's sign-off on a lifecycle iteration."""

    id = models.UUIDField(primary_key=True, default=uuid4)
    iteration = models.ForeignKey(LifecycleIteration, on_delete=models.CASCADE)

    reviewer = models.ForeignKey("authentik_core.User", on_delete=models.CASCADE)
    timestamp = models.DateTimeField(auto_now_add=True)
    note = models.TextField(null=True)

    class Meta:
        # Each user may review a given iteration at most once
        unique_together = [["iteration", "reviewer"]]

    @property
    def serializer(self) -> type[BaseSerializer]:
        """Serializer used by the generic object API (lazy import avoids a
        circular models<->api dependency)."""
        from authentik.enterprise.lifecycle.api.reviews import ReviewSerializer

        return ReviewSerializer
|
||||
@@ -1,22 +0,0 @@
|
||||
from django.db.models import Q
|
||||
from django.db.models.signals import post_save, pre_delete
|
||||
from django.dispatch import receiver
|
||||
|
||||
from authentik.enterprise.lifecycle.models import LifecycleRule, ReviewState
|
||||
|
||||
|
||||
@receiver(post_save, sender=LifecycleRule)
def post_rule_save(sender, instance: LifecycleRule, created: bool, **_):
    """Re-apply a lifecycle rule asynchronously whenever it is saved."""
    # Lazy import: tasks module depends on models
    from authentik.enterprise.lifecycle.tasks import apply_lifecycle_rule

    apply_lifecycle_rule.send_with_options(args=(instance.id,), rel_obj=instance)
|
||||
|
||||
|
||||
@receiver(pre_delete, sender=LifecycleRule)
def pre_rule_delete(sender, instance: LifecycleRule, **_):
    """Cancel every still-open iteration before its rule is deleted."""
    open_states = Q(state=ReviewState.PENDING) | Q(state=ReviewState.OVERDUE)
    instance.lifecycleiteration_set.filter(open_states).update(state=ReviewState.CANCELED)
|
||||
@@ -1,45 +0,0 @@
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from dramatiq import actor
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.enterprise.lifecycle.models import LifecycleRule
|
||||
from authentik.events.models import Event, Notification, NotificationTransport
|
||||
|
||||
|
||||
@actor(description=_("Dispatch tasks to validate lifecycle rules."))
def apply_lifecycle_rules():
    """Fan-out task: queue one apply_lifecycle_rule task per existing rule."""
    for rule in LifecycleRule.objects.all():
        apply_lifecycle_rule.send_with_options(args=(rule.id,), rel_obj=rule)
|
||||
|
||||
|
||||
@actor(description=_("Apply lifecycle rule."))
def apply_lifecycle_rule(rule_id: str):
    """Apply a single rule by primary key; no-op if it was deleted meanwhile."""
    rule = LifecycleRule.objects.filter(pk=rule_id).first()
    if rule is None:
        return
    rule.apply()
|
||||
|
||||
|
||||
@actor(description=_("Send lifecycle rule notification."))
def send_notification(transport_pk: int, event_pk: str, user_pk: int, severity: str):
    """Deliver one lifecycle notification for `event_pk` to `user_pk` via
    `transport_pk`.

    All referenced objects are re-fetched by primary key; the task silently
    no-ops if any of them was deleted between enqueue and execution.
    """
    event = Event.objects.filter(pk=event_pk).first()
    if not event:
        return
    user = User.objects.filter(pk=user_pk).first()
    if not user:
        return
    # Resolve the transport before building the notification, so nothing is
    # constructed when delivery is impossible (previously the Notification was
    # built and then discarded when the transport lookup failed).
    transport = NotificationTransport.objects.filter(pk=transport_pk).first()
    if not transport:
        return

    notification = Notification(
        severity=severity,
        body=event.summary,
        event=event,
        user=user,
        hyperlink=event.hyperlink,
        hyperlink_label=event.hyperlink_label,
    )
    transport.send(notification)
|
||||
@@ -1,425 +0,0 @@
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.urls import reverse
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import Application, Group
|
||||
from authentik.core.tests.utils import create_test_admin_user, create_test_user
|
||||
from authentik.enterprise.lifecycle.models import LifecycleIteration, LifecycleRule, ReviewState
|
||||
from authentik.enterprise.reports.tests.utils import patch_license
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
|
||||
@patch_license
class TestLifecycleRuleAPI(APITestCase):
    """API tests for the LifecycleRule CRUD endpoint, covering creation
    variants, serializer validation errors and the standard CRUD verbs."""

    def setUp(self):
        # Admin user so all endpoints are permitted
        self.user = create_test_admin_user()
        self.client.force_login(self.user)
        self.app = Application.objects.create(name=generate_id(), slug=generate_id())
        self.content_type = ContentType.objects.get_for_model(Application)
        self.reviewer_group = Group.objects.create(name=generate_id())

    def test_list_rules(self):
        """Listing returns at least the rule created here."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
        )
        rule.reviewer_groups.add(self.reviewer_group)

        response = self.client.get(reverse("authentik_api:lifecyclerule-list"))
        self.assertEqual(response.status_code, 200)
        self.assertGreaterEqual(len(response.data["results"]), 1)

    def test_create_rule_with_reviewer_group(self):
        """An object-level rule with a reviewer group is accepted."""
        response = self.client.post(
            reverse("authentik_api:lifecyclerule-list"),
            {
                "name": generate_id(),
                "content_type": f"{self.content_type.app_label}.{self.content_type.model}",
                "object_id": str(self.app.pk),
                "interval": "days=30",
                "grace_period": "days=10",
                "reviewer_groups": [str(self.reviewer_group.pk)],
                "reviewers": [],
                "min_reviewers": 1,
            },
        )
        self.assertEqual(response.status_code, 201)
        self.assertEqual(response.data["object_id"], str(self.app.pk))
        self.assertEqual(response.data["interval"], "days=30")

    def test_create_rule_with_explicit_reviewer(self):
        """A rule with an explicit reviewer (no groups) is accepted."""
        reviewer = create_test_user()
        response = self.client.post(
            reverse("authentik_api:lifecyclerule-list"),
            {
                "name": generate_id(),
                "content_type": f"{self.content_type.app_label}.{self.content_type.model}",
                "object_id": str(self.app.pk),
                "interval": "days=60",
                "grace_period": "days=15",
                "reviewer_groups": [],
                "reviewers": [str(reviewer.uuid)],
                "min_reviewers": 1,
            },
        )
        self.assertEqual(response.status_code, 201)
        self.assertIn(reviewer.uuid, response.data["reviewers"])

    def test_create_rule_type_level(self):
        """A type-wide rule (null object_id) is accepted."""
        response = self.client.post(
            reverse("authentik_api:lifecyclerule-list"),
            {
                "name": generate_id(),
                "content_type": f"{self.content_type.app_label}.{self.content_type.model}",
                "object_id": None,
                "interval": "days=90",
                "grace_period": "days=30",
                "reviewer_groups": [str(self.reviewer_group.pk)],
                "reviewers": [],
                "min_reviewers": 1,
            },
        )
        self.assertEqual(response.status_code, 201)
        self.assertIsNone(response.data["object_id"])

    def test_create_rule_fails_without_reviewers(self):
        """Validation rejects a rule with neither groups nor reviewers."""
        response = self.client.post(
            reverse("authentik_api:lifecyclerule-list"),
            {
                "name": generate_id(),
                "content_type": f"{self.content_type.app_label}.{self.content_type.model}",
                "object_id": str(self.app.pk),
                "interval": "days=30",
                "grace_period": "days=10",
                "reviewer_groups": [],
                "reviewers": [],
                "min_reviewers": 1,
            },
        )
        self.assertEqual(response.status_code, 400)

    def test_create_rule_fails_grace_period_longer_than_interval(self):
        """Validation rejects grace_period > interval."""
        response = self.client.post(
            reverse("authentik_api:lifecyclerule-list"),
            {
                "name": generate_id(),
                "content_type": f"{self.content_type.app_label}.{self.content_type.model}",
                "object_id": str(self.app.pk),
                "interval": "days=10",
                "grace_period": "days=30",
                "reviewer_groups": [str(self.reviewer_group.pk)],
                "reviewers": [],
                "min_reviewers": 1,
            },
        )
        self.assertEqual(response.status_code, 400)
        self.assertIn("grace_period", response.data)

    def test_create_rule_fails_invalid_object_id(self):
        """Validation rejects an object_id that matches no object."""
        response = self.client.post(
            reverse("authentik_api:lifecyclerule-list"),
            {
                "name": generate_id(),
                "content_type": f"{self.content_type.app_label}.{self.content_type.model}",
                "object_id": "00000000-0000-0000-0000-000000000000",
                "interval": "days=30",
                "grace_period": "days=10",
                "reviewer_groups": [str(self.reviewer_group.pk)],
                "reviewers": [],
                "min_reviewers": 1,
            },
        )
        self.assertEqual(response.status_code, 400)
        self.assertIn("object_id", response.data)

    def test_retrieve_rule(self):
        """Detail endpoint returns the requested rule."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
        )
        rule.reviewer_groups.add(self.reviewer_group)

        response = self.client.get(
            reverse("authentik_api:lifecyclerule-detail", kwargs={"pk": rule.pk})
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data["id"], str(rule.pk))

    def test_update_rule(self):
        """PATCH updates a single field."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
            interval="days=30",
        )
        rule.reviewer_groups.add(self.reviewer_group)

        response = self.client.patch(
            reverse("authentik_api:lifecyclerule-detail", kwargs={"pk": rule.pk}),
            {"interval": "days=60"},
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data["interval"], "days=60")

    def test_delete_rule(self):
        """DELETE removes the rule."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
        )
        rule.reviewer_groups.add(self.reviewer_group)

        response = self.client.delete(
            reverse("authentik_api:lifecyclerule-detail", kwargs={"pk": rule.pk})
        )
        self.assertEqual(response.status_code, 204)
        self.assertFalse(LifecycleRule.objects.filter(pk=rule.pk).exists())
|
||||
|
||||
|
||||
@patch_license
class TestIterationAPI(APITestCase):
    """API tests for LifecycleIteration read endpoints (open iterations,
    latest iteration per object, computed user_can_review flag)."""

    def setUp(self):
        self.user = create_test_admin_user()
        self.client.force_login(self.user)
        self.app = Application.objects.create(name=generate_id(), slug=generate_id())
        self.content_type = ContentType.objects.get_for_model(Application)
        self.reviewer_group = Group.objects.create(name=generate_id())
        # The admin is a reviewer-group member throughout these tests
        self.reviewer_group.users.add(self.user)

    def test_open_iterations(self):
        """Open-iterations endpoint lists only PENDING iterations."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
        )
        rule.reviewer_groups.add(self.reviewer_group)

        response = self.client.get(reverse("authentik_api:lifecycleiteration-open-iterations"))
        self.assertEqual(response.status_code, 200)
        self.assertGreaterEqual(len(response.data["results"]), 1)

        for iteration in response.data["results"]:
            self.assertEqual(iteration["state"], ReviewState.PENDING)

    def test_open_iterations_filter_user_is_reviewer(self):
        """user_is_reviewer filter keeps iterations the user may review."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
        )
        rule.reviewer_groups.add(self.reviewer_group)

        response = self.client.get(
            reverse("authentik_api:lifecycleiteration-open-iterations"),
            {"user_is_reviewer": "true"},
        )
        self.assertEqual(response.status_code, 200)
        # User is in reviewer_group, so should see the iteration
        self.assertGreaterEqual(len(response.data["results"]), 1)

    def test_latest_iteration(self):
        """Latest-iteration endpoint resolves by content type and object id."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
        )
        rule.reviewer_groups.add(self.reviewer_group)

        response = self.client.get(
            reverse(
                "authentik_api:lifecycleiteration-latest-iteration",
                kwargs={
                    "content_type": f"{self.content_type.app_label}.{self.content_type.model}",
                    "object_id": str(self.app.pk),
                },
            )
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data["object_id"], str(self.app.pk))

    def test_latest_iteration_not_found(self):
        """Unknown object id yields a 404."""
        response = self.client.get(
            reverse(
                "authentik_api:lifecycleiteration-latest-iteration",
                kwargs={
                    "content_type": f"{self.content_type.app_label}.{self.content_type.model}",
                    "object_id": "00000000-0000-0000-0000-000000000000",
                },
            )
        )
        self.assertEqual(response.status_code, 404)

    def test_iteration_includes_user_can_review(self):
        """Serialized iterations expose the user_can_review field."""
        rule = LifecycleRule.objects.create(
            name=generate_id(),
            content_type=self.content_type,
            object_id=str(self.app.pk),
        )
        rule.reviewer_groups.add(self.reviewer_group)

        response = self.client.get(reverse("authentik_api:lifecycleiteration-open-iterations"))
        self.assertEqual(response.status_code, 200)
        self.assertGreaterEqual(len(response.data["results"]), 1)
        # user_can_review should be present
        self.assertIn("user_can_review", response.data["results"][0])
|
||||
|
||||
|
||||
@patch_license
|
||||
class TestReviewAPI(APITestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.user = create_test_admin_user()
|
||||
self.client.force_login(self.user)
|
||||
self.app = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
self.content_type = ContentType.objects.get_for_model(Application)
|
||||
self.reviewer_group = Group.objects.create(name=generate_id())
|
||||
self.reviewer_group.users.add(self.user)
|
||||
|
||||
def test_create_review(self):
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
min_reviewers=1,
|
||||
)
|
||||
rule.reviewer_groups.add(self.reviewer_group)
|
||||
|
||||
# Get the auto-created iteration
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=self.content_type, object_id=str(self.app.pk), rule=rule
|
||||
)
|
||||
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:review-list"),
|
||||
{
|
||||
"iteration": str(iteration.pk),
|
||||
"note": "Reviewed and approved",
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
self.assertEqual(response.data["iteration"], iteration.pk)
|
||||
self.assertEqual(response.data["note"], "Reviewed and approved")
|
||||
self.assertEqual(response.data["reviewer"]["pk"], self.user.pk)
|
||||
|
||||
def test_create_review_completes_iteration(self):
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
min_reviewers=1,
|
||||
)
|
||||
rule.reviewer_groups.add(self.reviewer_group)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=self.content_type, object_id=str(self.app.pk), rule=rule
|
||||
)
|
||||
self.assertEqual(iteration.state, ReviewState.PENDING)
|
||||
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:review-list"),
|
||||
{
|
||||
"iteration": str(iteration.pk),
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.REVIEWED)
|
||||
|
||||
def test_create_review_sets_reviewer_from_request(self):
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
min_reviewers=1,
|
||||
)
|
||||
rule.reviewer_groups.add(self.reviewer_group)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=self.content_type, object_id=str(self.app.pk), rule=rule
|
||||
)
|
||||
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:review-list"),
|
||||
{
|
||||
"iteration": str(iteration.pk),
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
# Reviewer should be the logged-in user
|
||||
self.assertEqual(response.data["reviewer"]["pk"], self.user.pk)
|
||||
|
||||
def test_non_reviewer_cannot_review(self):
|
||||
other_group = Group.objects.create(name=generate_id())
|
||||
other_user = create_test_user()
|
||||
other_group.users.add(other_user)
|
||||
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
min_reviewers=1,
|
||||
)
|
||||
rule.reviewer_groups.add(other_group)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=self.content_type, object_id=str(self.app.pk), rule=rule
|
||||
)
|
||||
|
||||
# Current user is not in the reviewer group
|
||||
self.assertFalse(iteration.user_can_review(self.user))
|
||||
|
||||
def test_non_reviewer_review_via_api_rejected(self):
|
||||
other_group = Group.objects.create(name=generate_id())
|
||||
other_user = create_test_user()
|
||||
other_group.users.add(other_user)
|
||||
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
min_reviewers=1,
|
||||
)
|
||||
rule.reviewer_groups.add(other_group)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=self.content_type, object_id=str(self.app.pk), rule=rule
|
||||
)
|
||||
|
||||
# Current user (self.user) is NOT in the reviewer group
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:review-list"),
|
||||
{"iteration": str(iteration.pk)},
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
|
||||
def test_duplicate_review_via_api_rejected(self):
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=self.content_type,
|
||||
object_id=str(self.app.pk),
|
||||
min_reviewers=2,
|
||||
)
|
||||
rule.reviewer_groups.add(self.reviewer_group)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=self.content_type, object_id=str(self.app.pk), rule=rule
|
||||
)
|
||||
|
||||
# First review should succeed
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:review-list"),
|
||||
{"iteration": str(iteration.pk)},
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
|
||||
# Second review by same user should be rejected
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:review-list"),
|
||||
{"iteration": str(iteration.pk)},
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
@@ -1,845 +0,0 @@
|
||||
import datetime as dt
|
||||
from datetime import timedelta
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.test import RequestFactory, TestCase
|
||||
from django.utils import timezone
|
||||
|
||||
from authentik.core.models import Application, Group
|
||||
from authentik.core.tests.utils import create_test_user
|
||||
from authentik.enterprise.lifecycle.models import (
|
||||
LifecycleIteration,
|
||||
LifecycleRule,
|
||||
Review,
|
||||
ReviewState,
|
||||
)
|
||||
from authentik.events.models import (
|
||||
Event,
|
||||
EventAction,
|
||||
NotificationSeverity,
|
||||
NotificationTransport,
|
||||
)
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.rbac.models import Role
|
||||
|
||||
|
||||
class TestLifecycleModels(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.factory = RequestFactory()
|
||||
|
||||
def _get_request(self):
|
||||
return self.factory.get("/")
|
||||
|
||||
def _create_object(self, model):
|
||||
if model is Application:
|
||||
return Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
if model is Role:
|
||||
return Role.objects.create(name=generate_id())
|
||||
if model is Group:
|
||||
return Group.objects.create(name=generate_id())
|
||||
raise AssertionError(f"Unsupported model {model}")
|
||||
|
||||
def _create_rule_for_object(self, obj, **kwargs) -> LifecycleRule:
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
return LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=str(obj.pk),
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
def _create_rule_for_type(self, model, **kwargs) -> LifecycleRule:
|
||||
content_type = ContentType.objects.get_for_model(model)
|
||||
return LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=None,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
def test_iteration_start_supported_objects(self):
|
||||
"""Ensure iterations are automatically started for applications, roles, and groups."""
|
||||
for model in (Application, Role, Group):
|
||||
with self.subTest(model=model.__name__):
|
||||
obj = self._create_object(model)
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
before_events = Event.objects.filter(action=EventAction.REVIEW_INITIATED).count()
|
||||
|
||||
rule = self._create_rule_for_object(obj)
|
||||
|
||||
# Verify iteration was created automatically
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
self.assertEqual(iteration.state, ReviewState.PENDING)
|
||||
self.assertEqual(iteration.object, obj)
|
||||
self.assertEqual(iteration.rule, rule)
|
||||
self.assertEqual(
|
||||
Event.objects.filter(action=EventAction.REVIEW_INITIATED).count(),
|
||||
before_events + 1,
|
||||
)
|
||||
|
||||
def test_review_requires_all_explicit_reviewers(self):
|
||||
obj = Group.objects.create(name=generate_id())
|
||||
rule = self._create_rule_for_object(obj)
|
||||
reviewer_one = create_test_user()
|
||||
reviewer_two = create_test_user()
|
||||
rule.reviewers.add(reviewer_one, reviewer_two)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
request = self._get_request()
|
||||
|
||||
Review.objects.create(iteration=iteration, reviewer=reviewer_one)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.PENDING)
|
||||
|
||||
Review.objects.create(iteration=iteration, reviewer=reviewer_two)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.REVIEWED)
|
||||
self.assertTrue(Event.objects.filter(action=EventAction.REVIEW_COMPLETED).exists())
|
||||
|
||||
def test_review_min_reviewers_from_groups(self):
|
||||
"""Group-based reviews complete once the minimum number of reviewers review."""
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj, min_reviewers=2)
|
||||
|
||||
reviewer_group = Group.objects.create(name=generate_id())
|
||||
reviewer_one = create_test_user()
|
||||
reviewer_two = create_test_user()
|
||||
reviewer_group.users.add(reviewer_one, reviewer_two)
|
||||
rule.reviewer_groups.add(reviewer_group)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
request = self._get_request()
|
||||
|
||||
Review.objects.create(iteration=iteration, reviewer=reviewer_one)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.PENDING)
|
||||
|
||||
Review.objects.create(iteration=iteration, reviewer=reviewer_two)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.REVIEWED)
|
||||
|
||||
def test_review_explicit_and_group_reviewers(self):
|
||||
"""Reviews require both explicit reviewers AND min_reviewers from groups."""
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj, min_reviewers=1)
|
||||
|
||||
reviewer_group = Group.objects.create(name=generate_id())
|
||||
group_member = create_test_user()
|
||||
reviewer_group.users.add(group_member)
|
||||
rule.reviewer_groups.add(reviewer_group)
|
||||
|
||||
explicit_reviewer = create_test_user()
|
||||
rule.reviewers.add(explicit_reviewer)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
request = self._get_request()
|
||||
|
||||
# Only group member reviews - not satisfied (explicit reviewer missing)
|
||||
Review.objects.create(iteration=iteration, reviewer=group_member)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.PENDING)
|
||||
|
||||
# Explicit reviewer reviews - now satisfied
|
||||
Review.objects.create(iteration=iteration, reviewer=explicit_reviewer)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.REVIEWED)
|
||||
|
||||
def test_review_min_reviewers_per_group(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj, min_reviewers=1, min_reviewers_is_per_group=True)
|
||||
|
||||
group_one = Group.objects.create(name=generate_id())
|
||||
group_two = Group.objects.create(name=generate_id())
|
||||
member_group_one = create_test_user()
|
||||
member_group_two = create_test_user()
|
||||
group_one.users.add(member_group_one)
|
||||
group_two.users.add(member_group_two)
|
||||
rule.reviewer_groups.add(group_one, group_two)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
request = self._get_request()
|
||||
|
||||
# Only member from group_one reviews - not satisfied (need member from each group)
|
||||
Review.objects.create(iteration=iteration, reviewer=member_group_one)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.PENDING)
|
||||
|
||||
# Member from group_two reviews - now satisfied
|
||||
Review.objects.create(iteration=iteration, reviewer=member_group_two)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.REVIEWED)
|
||||
|
||||
def test_review_reviewers_from_child_groups(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj, min_reviewers=1)
|
||||
|
||||
parent_group = Group.objects.create(name=generate_id())
|
||||
child_group = Group.objects.create(name=generate_id())
|
||||
child_group.parents.add(parent_group)
|
||||
|
||||
child_member = create_test_user()
|
||||
child_group.users.add(child_member)
|
||||
|
||||
rule.reviewer_groups.add(parent_group)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
request = self._get_request()
|
||||
|
||||
# Child group member should be able to review
|
||||
self.assertTrue(iteration.user_can_review(child_member))
|
||||
|
||||
Review.objects.create(iteration=iteration, reviewer=child_member)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.REVIEWED)
|
||||
|
||||
def test_review_reviewers_from_nested_child_groups(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj, min_reviewers=2)
|
||||
|
||||
grandparent = Group.objects.create(name=generate_id())
|
||||
parent = Group.objects.create(name=generate_id())
|
||||
child = Group.objects.create(name=generate_id())
|
||||
parent.parents.add(grandparent)
|
||||
child.parents.add(parent)
|
||||
|
||||
parent_member = create_test_user()
|
||||
child_member = create_test_user()
|
||||
parent.users.add(parent_member)
|
||||
child.users.add(child_member)
|
||||
|
||||
rule.reviewer_groups.add(grandparent)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
request = self._get_request()
|
||||
|
||||
# Both nested members should be able to review
|
||||
self.assertTrue(iteration.user_can_review(parent_member))
|
||||
self.assertTrue(iteration.user_can_review(child_member))
|
||||
|
||||
Review.objects.create(iteration=iteration, reviewer=parent_member)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.PENDING)
|
||||
|
||||
Review.objects.create(iteration=iteration, reviewer=child_member)
|
||||
iteration.on_review(request)
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.REVIEWED)
|
||||
|
||||
def test_notify_reviewers_send_once(self):
|
||||
obj = Group.objects.create(name=generate_id())
|
||||
rule = self._create_rule_for_object(obj)
|
||||
|
||||
reviewer_one = create_test_user()
|
||||
reviewer_two = create_test_user()
|
||||
rule.reviewers.add(reviewer_one, reviewer_two)
|
||||
|
||||
transport_once = NotificationTransport.objects.create(
|
||||
name=generate_id(),
|
||||
send_once=True,
|
||||
)
|
||||
transport_all = NotificationTransport.objects.create(
|
||||
name=generate_id(),
|
||||
send_once=False,
|
||||
)
|
||||
rule.notification_transports.add(transport_once, transport_all)
|
||||
|
||||
event = Event.new(EventAction.REVIEW_INITIATED, target=obj)
|
||||
event.save()
|
||||
|
||||
with patch(
|
||||
"authentik.enterprise.lifecycle.tasks.send_notification.send_with_options"
|
||||
) as send_with_options:
|
||||
rule.notify_reviewers(event, NotificationSeverity.NOTICE)
|
||||
|
||||
reviewer_pks = {reviewer_one.pk, reviewer_two.pk}
|
||||
self.assertEqual(send_with_options.call_count, len(reviewer_pks) + 1)
|
||||
|
||||
calls = [call.kwargs["args"] for call in send_with_options.call_args_list]
|
||||
once_calls = [args for args in calls if args[0] == transport_once.pk]
|
||||
all_calls = [args for args in calls if args[0] == transport_all.pk]
|
||||
|
||||
self.assertEqual(len(once_calls), 1)
|
||||
self.assertEqual(len(all_calls), len(reviewer_pks))
|
||||
self.assertIn(once_calls[0][2], reviewer_pks)
|
||||
self.assertEqual({args[2] for args in all_calls}, reviewer_pks)
|
||||
|
||||
def test_apply_marks_overdue_and_opens_due_reviews(self):
|
||||
app_one = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
app_two = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
content_type = ContentType.objects.get_for_model(Application)
|
||||
|
||||
rule_overdue = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=str(app_one.pk),
|
||||
interval="days=365",
|
||||
grace_period="days=10",
|
||||
)
|
||||
|
||||
# Get the automatically created iteration and backdate it past the grace period
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(app_one.pk), rule=rule_overdue
|
||||
)
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=(timezone.now() - timedelta(days=20))
|
||||
)
|
||||
|
||||
# Apply again to trigger overdue logic
|
||||
rule_overdue.apply()
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.OVERDUE)
|
||||
self.assertEqual(
|
||||
LifecycleIteration.objects.filter(
|
||||
content_type=content_type, object_id=str(app_one.pk)
|
||||
).count(),
|
||||
1,
|
||||
)
|
||||
|
||||
LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=str(app_two.pk),
|
||||
interval="days=30",
|
||||
grace_period="days=10",
|
||||
)
|
||||
self.assertEqual(
|
||||
LifecycleIteration.objects.filter(
|
||||
content_type=content_type, object_id=str(app_two.pk)
|
||||
).count(),
|
||||
1,
|
||||
)
|
||||
new_iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(app_two.pk)
|
||||
)
|
||||
self.assertEqual(new_iteration.state, ReviewState.PENDING)
|
||||
|
||||
def test_apply_idempotent(self):
|
||||
app_due = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
app_overdue = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
content_type = ContentType.objects.get_for_model(Application)
|
||||
|
||||
initiated_before = Event.objects.filter(action=EventAction.REVIEW_INITIATED).count()
|
||||
overdue_before = Event.objects.filter(action=EventAction.REVIEW_OVERDUE).count()
|
||||
|
||||
rule_due = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=str(app_due.pk),
|
||||
interval="days=30",
|
||||
grace_period="days=30",
|
||||
)
|
||||
reviewer = create_test_user()
|
||||
rule_due.reviewers.add(reviewer)
|
||||
transport = NotificationTransport.objects.create(name=generate_id())
|
||||
rule_due.notification_transports.add(transport)
|
||||
|
||||
rule_overdue = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=str(app_overdue.pk),
|
||||
interval="days=365",
|
||||
grace_period="days=10",
|
||||
)
|
||||
|
||||
overdue_iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(app_overdue.pk), rule=rule_overdue
|
||||
)
|
||||
LifecycleIteration.objects.filter(pk=overdue_iteration.pk).update(
|
||||
opened_on=(timezone.now() - timedelta(days=20))
|
||||
)
|
||||
|
||||
# Apply overdue rule to mark iteration as overdue
|
||||
rule_overdue.apply()
|
||||
|
||||
due_iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(app_due.pk)
|
||||
)
|
||||
overdue_iteration.refresh_from_db()
|
||||
self.assertEqual(due_iteration.state, ReviewState.PENDING)
|
||||
self.assertEqual(overdue_iteration.state, ReviewState.OVERDUE)
|
||||
|
||||
initiated_after_first = Event.objects.filter(action=EventAction.REVIEW_INITIATED).count()
|
||||
overdue_after_first = Event.objects.filter(action=EventAction.REVIEW_OVERDUE).count()
|
||||
# Both rules created iterations on save
|
||||
self.assertEqual(initiated_after_first, initiated_before + 2)
|
||||
self.assertEqual(overdue_after_first, overdue_before + 1)
|
||||
|
||||
# Apply again - should be idempotent
|
||||
rule_due.apply()
|
||||
rule_overdue.apply()
|
||||
|
||||
due_iteration.refresh_from_db()
|
||||
overdue_iteration.refresh_from_db()
|
||||
self.assertEqual(due_iteration.state, ReviewState.PENDING)
|
||||
self.assertEqual(overdue_iteration.state, ReviewState.OVERDUE)
|
||||
self.assertEqual(
|
||||
Event.objects.filter(action=EventAction.REVIEW_INITIATED).count(),
|
||||
initiated_after_first,
|
||||
)
|
||||
self.assertEqual(
|
||||
Event.objects.filter(action=EventAction.REVIEW_OVERDUE).count(),
|
||||
overdue_after_first,
|
||||
)
|
||||
|
||||
def test_rule_matches_entire_type(self):
|
||||
"""A rule with object_id=None matches all objects of that type."""
|
||||
app_one = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
app_two = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
content_type = ContentType.objects.get_for_model(Application)
|
||||
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=None,
|
||||
interval="days=30",
|
||||
grace_period="days=10",
|
||||
)
|
||||
|
||||
objects = list(rule.get_objects())
|
||||
self.assertIn(app_one, objects)
|
||||
self.assertIn(app_two, objects)
|
||||
|
||||
def test_rule_type_excludes_objects_with_specific_rules(self):
|
||||
app_with_rule = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
app_without_rule = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
content_type = ContentType.objects.get_for_model(Application)
|
||||
|
||||
# Create a specific rule for app_with_rule
|
||||
LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=str(app_with_rule.pk),
|
||||
interval="days=30",
|
||||
)
|
||||
|
||||
# Create a type-level rule
|
||||
type_rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=None,
|
||||
interval="days=60",
|
||||
)
|
||||
|
||||
objects = list(type_rule.get_objects())
|
||||
self.assertNotIn(app_with_rule, objects)
|
||||
self.assertIn(app_without_rule, objects)
|
||||
|
||||
def test_rule_type_apply_creates_iterations_for_all_objects(self):
|
||||
app_one = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
app_two = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
content_type = ContentType.objects.get_for_model(Application)
|
||||
|
||||
LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=None,
|
||||
interval="days=30",
|
||||
grace_period="days=10",
|
||||
)
|
||||
|
||||
self.assertTrue(
|
||||
LifecycleIteration.objects.filter(
|
||||
content_type=content_type, object_id=str(app_one.pk)
|
||||
).exists()
|
||||
)
|
||||
self.assertTrue(
|
||||
LifecycleIteration.objects.filter(
|
||||
content_type=content_type, object_id=str(app_two.pk)
|
||||
).exists()
|
||||
)
|
||||
|
||||
def test_delete_rule_cancels_open_iterations(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
|
||||
rule = self._create_rule_for_object(obj)
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
pending_iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
self.assertEqual(pending_iteration.state, ReviewState.PENDING)
|
||||
|
||||
overdue_iteration = LifecycleIteration.objects.create(
|
||||
content_type=content_type,
|
||||
object_id=str(obj.pk),
|
||||
rule=rule,
|
||||
state=ReviewState.OVERDUE,
|
||||
)
|
||||
reviewed_iteration = LifecycleIteration.objects.create(
|
||||
content_type=content_type,
|
||||
object_id=str(obj.pk),
|
||||
rule=rule,
|
||||
state=ReviewState.REVIEWED,
|
||||
)
|
||||
|
||||
rule.delete()
|
||||
|
||||
pending_iteration.refresh_from_db()
|
||||
overdue_iteration.refresh_from_db()
|
||||
reviewed_iteration.refresh_from_db()
|
||||
|
||||
self.assertEqual(pending_iteration.state, ReviewState.CANCELED)
|
||||
self.assertEqual(overdue_iteration.state, ReviewState.CANCELED)
|
||||
self.assertEqual(reviewed_iteration.state, ReviewState.REVIEWED) # Not affected
|
||||
|
||||
def test_update_rule_target_cancels_stale_iterations(self):
|
||||
app_one = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
app_two = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
content_type = ContentType.objects.get_for_model(Application)
|
||||
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=str(app_one.pk),
|
||||
interval="days=30",
|
||||
)
|
||||
|
||||
iteration_for_app_one = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(app_one.pk), rule=rule
|
||||
)
|
||||
self.assertEqual(iteration_for_app_one.state, ReviewState.PENDING)
|
||||
|
||||
# Change rule target to app_two - save() triggers apply() which cancels stale iterations
|
||||
rule.object_id = str(app_two.pk)
|
||||
rule.save()
|
||||
|
||||
iteration_for_app_one.refresh_from_db()
|
||||
self.assertEqual(iteration_for_app_one.state, ReviewState.CANCELED)
|
||||
|
||||
def test_update_rule_content_type_cancels_stale_iterations(self):
|
||||
app = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
group = Group.objects.create(name=generate_id())
|
||||
app_content_type = ContentType.objects.get_for_model(Application)
|
||||
group_content_type = ContentType.objects.get_for_model(Group)
|
||||
|
||||
# Creating rule triggers automatic apply() which creates a iteration for app
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=app_content_type,
|
||||
object_id=str(app.pk),
|
||||
interval="days=30",
|
||||
)
|
||||
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=app_content_type, object_id=str(app.pk), rule=rule
|
||||
)
|
||||
self.assertEqual(iteration.state, ReviewState.PENDING)
|
||||
|
||||
# Change content type to Group - save() triggers apply() which cancels stale iterations
|
||||
rule.content_type = group_content_type
|
||||
rule.object_id = str(group.pk)
|
||||
rule.save()
|
||||
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.CANCELED)
|
||||
|
||||
def test_user_can_review_checks_group_hierarchy(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj)
|
||||
|
||||
parent_group = Group.objects.create(name=generate_id())
|
||||
child_group = Group.objects.create(name=generate_id())
|
||||
child_group.parents.add(parent_group)
|
||||
|
||||
parent_member = create_test_user()
|
||||
child_member = create_test_user()
|
||||
non_member = create_test_user()
|
||||
parent_group.users.add(parent_member)
|
||||
child_group.users.add(child_member)
|
||||
|
||||
rule.reviewer_groups.add(parent_group)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
# iteration is created automatically when rule is saved
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
|
||||
self.assertTrue(iteration.user_can_review(parent_member))
|
||||
self.assertTrue(iteration.user_can_review(child_member))
|
||||
self.assertFalse(iteration.user_can_review(non_member))
|
||||
|
||||
def test_user_cannot_review_twice(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj)
|
||||
reviewer = create_test_user()
|
||||
rule.reviewers.add(reviewer)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
# iteration is created automatically when rule is saved
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
|
||||
self.assertTrue(iteration.user_can_review(reviewer))
|
||||
|
||||
Review.objects.create(iteration=iteration, reviewer=reviewer)
|
||||
|
||||
self.assertFalse(iteration.user_can_review(reviewer))
|
||||
|
||||
def test_user_cannot_review_completed_iteration(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj)
|
||||
reviewer = create_test_user()
|
||||
rule.reviewers.add(reviewer)
|
||||
|
||||
content_type = ContentType.objects.get_for_model(obj)
|
||||
|
||||
# Get the automatically created pending iteration and test with different states
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(obj.pk), rule=rule
|
||||
)
|
||||
|
||||
for state in (ReviewState.REVIEWED, ReviewState.CANCELED):
|
||||
iteration.state = state
|
||||
iteration.save()
|
||||
self.assertFalse(iteration.user_can_review(reviewer))
|
||||
|
||||
def test_get_reviewers_includes_child_group_members(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj)
|
||||
|
||||
parent_group = Group.objects.create(name=generate_id())
|
||||
child_group = Group.objects.create(name=generate_id())
|
||||
child_group.parents.add(parent_group)
|
||||
|
||||
parent_member = create_test_user()
|
||||
child_member = create_test_user()
|
||||
parent_group.users.add(parent_member)
|
||||
child_group.users.add(child_member)
|
||||
|
||||
rule.reviewer_groups.add(parent_group)
|
||||
|
||||
reviewers = list(rule.get_reviewers())
|
||||
self.assertIn(parent_member, reviewers)
|
||||
self.assertIn(child_member, reviewers)
|
||||
|
||||
def test_get_reviewers_includes_explicit_reviewers(self):
|
||||
obj = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
rule = self._create_rule_for_object(obj)
|
||||
|
||||
explicit_reviewer = create_test_user()
|
||||
rule.reviewers.add(explicit_reviewer)
|
||||
|
||||
group = Group.objects.create(name=generate_id())
|
||||
group_member = create_test_user()
|
||||
group.users.add(group_member)
|
||||
rule.reviewer_groups.add(group)
|
||||
|
||||
reviewers = list(rule.get_reviewers())
|
||||
self.assertIn(explicit_reviewer, reviewers)
|
||||
self.assertIn(group_member, reviewers)
|
||||
|
||||
|
||||
class TestLifecycleDateBoundaries(TestCase):
|
||||
"""Verify that start_of_day normalization ensures correct overdue/due
|
||||
detection regardless of exact task execution time within a day.
|
||||
|
||||
The daily task may run at any point during the day. The start_of_day
|
||||
normalization in _get_newly_overdue_iterations and _get_newly_due_objects
|
||||
ensures that the boundary is always at midnight, so millisecond variations
|
||||
in task execution time do not affect results."""
|
||||
|
||||
def _create_rule_and_iteration(self, grace_period="days=1", interval="days=365"):
|
||||
app = Application.objects.create(name=generate_id(), slug=generate_id())
|
||||
content_type = ContentType.objects.get_for_model(Application)
|
||||
rule = LifecycleRule.objects.create(
|
||||
name=generate_id(),
|
||||
content_type=content_type,
|
||||
object_id=str(app.pk),
|
||||
interval=interval,
|
||||
grace_period=grace_period,
|
||||
)
|
||||
iteration = LifecycleIteration.objects.get(
|
||||
content_type=content_type, object_id=str(app.pk), rule=rule
|
||||
)
|
||||
return app, rule, iteration
|
||||
|
||||
def test_overdue_iteration_opened_yesterday(self):
|
||||
"""grace_period=1 day: iteration opened yesterday at any time is overdue today."""
|
||||
_, rule, iteration = self._create_rule_and_iteration(grace_period="days=1")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
for opened_on in [
|
||||
dt.datetime(2025, 6, 14, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 14, 12, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 14, 23, 59, 59, 999999, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(opened_on=opened_on):
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=opened_on, state=ReviewState.PENDING
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertIn(iteration, list(rule._get_newly_overdue_iterations()))
|
||||
|
||||
def test_not_overdue_iteration_opened_today(self):
|
||||
"""grace_period=1 day: iteration opened today at any time is NOT overdue."""
|
||||
_, rule, iteration = self._create_rule_and_iteration(grace_period="days=1")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
for opened_on in [
|
||||
dt.datetime(2025, 6, 15, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 23, 59, 59, 999999, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(opened_on=opened_on):
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=opened_on, state=ReviewState.PENDING
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertNotIn(iteration, list(rule._get_newly_overdue_iterations()))
|
||||
|
||||
def test_overdue_independent_of_task_execution_time(self):
|
||||
"""Overdue detection gives the same result whether the task runs at 00:00:01 or 23:59:59."""
|
||||
_, rule, iteration = self._create_rule_and_iteration(grace_period="days=1")
|
||||
opened_on = dt.datetime(2025, 6, 14, 18, 0, 0, tzinfo=dt.UTC)
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=opened_on, state=ReviewState.PENDING
|
||||
)
|
||||
for task_time in [
|
||||
dt.datetime(2025, 6, 15, 0, 0, 1, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 12, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 23, 59, 59, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(task_time=task_time):
|
||||
with patch("django.utils.timezone.now", return_value=task_time):
|
||||
self.assertIn(iteration, list(rule._get_newly_overdue_iterations()))
|
||||
|
||||
def test_overdue_boundary_multi_day_grace_period(self):
|
||||
"""grace_period=30 days: overdue after 30 full days, not after 29."""
|
||||
_, rule, iteration = self._create_rule_and_iteration(grace_period="days=30")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
|
||||
# Opened 30 days ago (May 16), should go overdue
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=dt.datetime(2025, 5, 16, 12, 0, 0, tzinfo=dt.UTC),
|
||||
state=ReviewState.PENDING,
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertIn(iteration, list(rule._get_newly_overdue_iterations()))
|
||||
|
||||
# Opened 29 days ago (May 17), should NOT go overdue
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=dt.datetime(2025, 5, 17, 12, 0, 0, tzinfo=dt.UTC),
|
||||
state=ReviewState.PENDING,
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertNotIn(iteration, list(rule._get_newly_overdue_iterations()))
|
||||
|
||||
def test_due_object_iteration_opened_yesterday(self):
|
||||
"""interval=1 day: object with iteration opened yesterday is due for a new review."""
|
||||
app, rule, iteration = self._create_rule_and_iteration(interval="days=1")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
for opened_on in [
|
||||
dt.datetime(2025, 6, 14, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 14, 12, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 14, 23, 59, 59, 999999, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(opened_on=opened_on):
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(opened_on=opened_on)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertIn(app, list(rule._get_newly_due_objects()))
|
||||
|
||||
def test_not_due_object_iteration_opened_today(self):
|
||||
"""interval=1 day: object with iteration opened today is NOT due."""
|
||||
app, rule, iteration = self._create_rule_and_iteration(interval="days=1")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
for opened_on in [
|
||||
dt.datetime(2025, 6, 15, 0, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 23, 59, 59, 999999, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(opened_on=opened_on):
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(opened_on=opened_on)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertNotIn(app, list(rule._get_newly_due_objects()))
|
||||
|
||||
def test_due_independent_of_task_execution_time(self):
|
||||
"""Due detection gives the same result whether the task runs at 00:00:01 or 23:59:59."""
|
||||
app, rule, iteration = self._create_rule_and_iteration(interval="days=1")
|
||||
opened_on = dt.datetime(2025, 6, 14, 18, 0, 0, tzinfo=dt.UTC)
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(opened_on=opened_on)
|
||||
for task_time in [
|
||||
dt.datetime(2025, 6, 15, 0, 0, 1, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 12, 0, 0, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 23, 59, 59, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(task_time=task_time):
|
||||
with patch("django.utils.timezone.now", return_value=task_time):
|
||||
self.assertIn(app, list(rule._get_newly_due_objects()))
|
||||
|
||||
def test_due_boundary_multi_day_interval(self):
|
||||
"""interval=30 days: due after 30 full days, not after 29."""
|
||||
app, rule, iteration = self._create_rule_and_iteration(interval="days=30")
|
||||
fixed_now = dt.datetime(2025, 6, 15, 14, 30, 0, tzinfo=dt.UTC)
|
||||
|
||||
# Previous review opened 30 days ago (May 16), review is due for the object
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=dt.datetime(2025, 5, 16, 12, 0, 0, tzinfo=dt.UTC)
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertIn(app, list(rule._get_newly_due_objects()))
|
||||
|
||||
# Previous review opened 29 days ago (May 17), new review is NOT due
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=dt.datetime(2025, 5, 17, 12, 0, 0, tzinfo=dt.UTC)
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=fixed_now):
|
||||
self.assertNotIn(app, list(rule._get_newly_due_objects()))
|
||||
|
||||
def test_apply_overdue_at_boundary(self):
|
||||
"""apply() marks iteration overdue when grace period just expired,
|
||||
regardless of what time the daily task runs."""
|
||||
_, rule, iteration = self._create_rule_and_iteration(
|
||||
grace_period="days=1", interval="days=365"
|
||||
)
|
||||
opened_on = dt.datetime(2025, 6, 14, 20, 0, 0, tzinfo=dt.UTC)
|
||||
for task_time in [
|
||||
dt.datetime(2025, 6, 15, 0, 0, 1, tzinfo=dt.UTC),
|
||||
dt.datetime(2025, 6, 15, 23, 59, 59, tzinfo=dt.UTC),
|
||||
]:
|
||||
with self.subTest(task_time=task_time):
|
||||
LifecycleIteration.objects.filter(pk=iteration.pk).update(
|
||||
opened_on=opened_on, state=ReviewState.PENDING
|
||||
)
|
||||
with patch("django.utils.timezone.now", return_value=task_time):
|
||||
rule.apply()
|
||||
iteration.refresh_from_db()
|
||||
self.assertEqual(iteration.state, ReviewState.OVERDUE)
|
||||
@@ -1,11 +0,0 @@
|
||||
"""API URLs"""
|
||||
|
||||
from authentik.enterprise.lifecycle.api.iterations import IterationViewSet
|
||||
from authentik.enterprise.lifecycle.api.reviews import ReviewViewSet
|
||||
from authentik.enterprise.lifecycle.api.rules import LifecycleRuleViewSet
|
||||
|
||||
api_urlpatterns = [
|
||||
("lifecycle/iterations", IterationViewSet),
|
||||
("lifecycle/reviews", ReviewViewSet),
|
||||
("lifecycle/rules", LifecycleRuleViewSet),
|
||||
]
|
||||
@@ -1,75 +0,0 @@
|
||||
from datetime import datetime
|
||||
from urllib import parse
|
||||
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.db.models import Model
|
||||
from django.urls import reverse
|
||||
from rest_framework.serializers import ChoiceField, Serializer, UUIDField
|
||||
|
||||
from authentik.core.api.utils import ModelSerializer
|
||||
from authentik.core.models import Application, Group, User
|
||||
from authentik.rbac.models import Role
|
||||
|
||||
|
||||
def parse_content_type(value: str) -> dict:
|
||||
app_label, model = value.split(".")
|
||||
return {"app_label": app_label, "model": model}
|
||||
|
||||
|
||||
def model_choices() -> list[tuple[str, str]]:
|
||||
return [
|
||||
("authentik_core.application", "Application"),
|
||||
("authentik_core.group", "Group"),
|
||||
("authentik_rbac.role", "Role"),
|
||||
]
|
||||
|
||||
|
||||
def admin_link_for_model(model: Model) -> str:
|
||||
if isinstance(model, Application):
|
||||
url = f"/core/applications/{model.slug}"
|
||||
elif isinstance(model, Group):
|
||||
url = f"/identity/groups/{model.pk}"
|
||||
elif isinstance(model, Role):
|
||||
url = f"/identity/roles/{model.pk}"
|
||||
else:
|
||||
raise TypeError("Unsupported model")
|
||||
return url + ";" + parse.quote('{"page":"page-lifecycle"}')
|
||||
|
||||
|
||||
def link_for_model(model: Model) -> str:
|
||||
return f"{reverse("authentik_core:if-admin")}#{admin_link_for_model(model)}"
|
||||
|
||||
|
||||
def start_of_day(dt: datetime) -> datetime:
|
||||
return dt.replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
|
||||
|
||||
class ContentTypeField(ChoiceField):
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(choices=model_choices(), **kwargs)
|
||||
|
||||
def to_representation(self, content_type: ContentType) -> str:
|
||||
return f"{content_type.app_label}.{content_type.model}"
|
||||
|
||||
def to_internal_value(self, data: str) -> ContentType:
|
||||
return ContentType.objects.get(**parse_content_type(data))
|
||||
|
||||
|
||||
class GenericForeignKeySerializer(Serializer):
|
||||
content_type = ContentTypeField()
|
||||
object_id = UUIDField()
|
||||
|
||||
|
||||
class ReviewerGroupSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = Group
|
||||
fields = [
|
||||
"pk",
|
||||
"name",
|
||||
]
|
||||
|
||||
|
||||
class ReviewerUserSerializer(ModelSerializer):
|
||||
class Meta:
|
||||
model = User
|
||||
fields = ["pk", "uuid", "username", "name"]
|
||||
@@ -78,8 +78,7 @@ class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProv
|
||||
def create(self, user: User):
|
||||
"""Create user from scratch and create a connection object"""
|
||||
microsoft_user = self.to_schema(user, None)
|
||||
if microsoft_user.user_principal_name:
|
||||
self.check_email_valid(microsoft_user.user_principal_name)
|
||||
self.check_email_valid(microsoft_user.user_principal_name)
|
||||
with transaction.atomic():
|
||||
try:
|
||||
response = self._request(self.client.users.post(microsoft_user))
|
||||
@@ -119,8 +118,7 @@ class MicrosoftEntraUserClient(MicrosoftEntraSyncClient[User, MicrosoftEntraProv
|
||||
def update(self, user: User, connection: MicrosoftEntraProviderUser):
|
||||
"""Update existing user"""
|
||||
microsoft_user = self.to_schema(user, connection)
|
||||
if microsoft_user.user_principal_name:
|
||||
self.check_email_valid(microsoft_user.user_principal_name)
|
||||
self.check_email_valid(microsoft_user.user_principal_name)
|
||||
response = self._request(
|
||||
self.client.users.by_user_id(connection.microsoft_id).patch(microsoft_user)
|
||||
)
|
||||
|
||||
@@ -2,10 +2,10 @@
|
||||
|
||||
from django.http import HttpRequest
|
||||
from django.urls import reverse
|
||||
from rest_framework.fields import CharField, SerializerMethodField, URLField
|
||||
from rest_framework.fields import SerializerMethodField, URLField
|
||||
|
||||
from authentik.core.api.providers import ProviderSerializer
|
||||
from authentik.core.models import Provider
|
||||
from authentik.core.models import Application
|
||||
from authentik.enterprise.api import EnterpriseRequiredMixin
|
||||
from authentik.enterprise.providers.ws_federation.models import WSFederationProvider
|
||||
from authentik.enterprise.providers.ws_federation.processors.metadata import MetadataProcessor
|
||||
@@ -16,31 +16,8 @@ class WSFederationProviderSerializer(EnterpriseRequiredMixin, SAMLProviderSerial
|
||||
"""WSFederationProvider Serializer"""
|
||||
|
||||
reply_url = URLField(source="acs_url")
|
||||
wtrealm = CharField(source="audience")
|
||||
url_wsfed = SerializerMethodField()
|
||||
|
||||
def get_url_download_metadata(self, instance: WSFederationProvider) -> str:
|
||||
"""Get metadata download URL"""
|
||||
if "request" not in self._context:
|
||||
return ""
|
||||
request: HttpRequest = self._context["request"]._request
|
||||
try:
|
||||
return request.build_absolute_uri(
|
||||
reverse(
|
||||
"authentik_providers_ws_federation:metadata-download",
|
||||
kwargs={"application_slug": instance.application.slug},
|
||||
)
|
||||
)
|
||||
except Provider.application.RelatedObjectDoesNotExist:
|
||||
return request.build_absolute_uri(
|
||||
reverse(
|
||||
"authentik_api:wsfederationprovider-metadata",
|
||||
kwargs={
|
||||
"pk": instance.pk,
|
||||
},
|
||||
)
|
||||
+ "?download"
|
||||
)
|
||||
wtrealm = SerializerMethodField()
|
||||
|
||||
def get_url_wsfed(self, instance: WSFederationProvider) -> str:
|
||||
"""Get WS-Fed url"""
|
||||
@@ -49,11 +26,16 @@ class WSFederationProviderSerializer(EnterpriseRequiredMixin, SAMLProviderSerial
|
||||
request: HttpRequest = self._context["request"]._request
|
||||
return request.build_absolute_uri(reverse("authentik_providers_ws_federation:wsfed"))
|
||||
|
||||
def get_wtrealm(self, instance: WSFederationProvider) -> str:
|
||||
try:
|
||||
return f"goauthentik.io://app/{instance.application.slug}"
|
||||
except Application.DoesNotExist:
|
||||
return None
|
||||
|
||||
class Meta(SAMLProviderSerializer.Meta):
|
||||
model = WSFederationProvider
|
||||
fields = ProviderSerializer.Meta.fields + [
|
||||
"reply_url",
|
||||
"wtrealm",
|
||||
"assertion_valid_not_before",
|
||||
"assertion_valid_not_on_or_after",
|
||||
"session_valid_not_on_or_after",
|
||||
@@ -69,6 +51,7 @@ class WSFederationProviderSerializer(EnterpriseRequiredMixin, SAMLProviderSerial
|
||||
"default_name_id_policy",
|
||||
"url_download_metadata",
|
||||
"url_wsfed",
|
||||
"wtrealm",
|
||||
]
|
||||
extra_kwargs = ProviderSerializer.Meta.extra_kwargs
|
||||
|
||||
|
||||
@@ -8,10 +8,6 @@ from authentik.providers.saml.models import SAMLProvider
|
||||
class WSFederationProvider(SAMLProvider):
|
||||
"""WS-Federation for applications which support WS-Fed."""
|
||||
|
||||
# Alias'd fields:
|
||||
# - acs_url -> reply_url
|
||||
# - audience -> realm / wtrealm
|
||||
|
||||
@property
|
||||
def serializer(self) -> type[Serializer]:
|
||||
from authentik.enterprise.providers.ws_federation.api.providers import (
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
from dataclasses import dataclass
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from django.http import HttpRequest
|
||||
from django.shortcuts import get_object_or_404
|
||||
@@ -36,6 +37,8 @@ class SignInRequest:
|
||||
wreply: str
|
||||
wctx: str | None
|
||||
|
||||
app_slug: str
|
||||
|
||||
@staticmethod
|
||||
def parse(request: HttpRequest) -> SignInRequest:
|
||||
action = request.GET.get("wa")
|
||||
@@ -44,26 +47,26 @@ class SignInRequest:
|
||||
realm = request.GET.get("wtrealm")
|
||||
if not realm:
|
||||
raise ValueError("Missing Realm")
|
||||
parsed = urlparse(realm)
|
||||
|
||||
req = SignInRequest(
|
||||
wa=action,
|
||||
wtrealm=realm,
|
||||
wreply=request.GET.get("wreply"),
|
||||
wctx=request.GET.get("wctx", ""),
|
||||
app_slug=parsed.path[1:],
|
||||
)
|
||||
|
||||
_, provider = req.get_app_provider()
|
||||
if not req.wreply:
|
||||
req.wreply = provider.acs_url
|
||||
if not req.wreply.startswith(provider.acs_url):
|
||||
raise ValueError("Invalid wreply")
|
||||
return req
|
||||
|
||||
def get_app_provider(self):
|
||||
application = get_object_or_404(Application, slug=self.app_slug)
|
||||
provider: WSFederationProvider = get_object_or_404(
|
||||
WSFederationProvider, audience=self.wtrealm
|
||||
WSFederationProvider, pk=application.provider_id
|
||||
)
|
||||
application = get_object_or_404(Application, provider=provider)
|
||||
return application, provider
|
||||
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
from dataclasses import dataclass
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from django.http import HttpRequest
|
||||
from django.shortcuts import get_object_or_404
|
||||
@@ -14,6 +15,8 @@ class SignOutRequest:
|
||||
wtrealm: str
|
||||
wreply: str
|
||||
|
||||
app_slug: str
|
||||
|
||||
@staticmethod
|
||||
def parse(request: HttpRequest) -> SignOutRequest:
|
||||
action = request.GET.get("wa")
|
||||
@@ -22,23 +25,23 @@ class SignOutRequest:
|
||||
realm = request.GET.get("wtrealm")
|
||||
if not realm:
|
||||
raise ValueError("Missing Realm")
|
||||
parsed = urlparse(realm)
|
||||
|
||||
req = SignOutRequest(
|
||||
wa=action,
|
||||
wtrealm=realm,
|
||||
wreply=request.GET.get("wreply"),
|
||||
app_slug=parsed.path[1:],
|
||||
)
|
||||
|
||||
_, provider = req.get_app_provider()
|
||||
if not req.wreply:
|
||||
req.wreply = provider.acs_url
|
||||
if not req.wreply.startswith(provider.acs_url):
|
||||
raise ValueError("Invalid wreply")
|
||||
return req
|
||||
|
||||
def get_app_provider(self):
|
||||
application = get_object_or_404(Application, slug=self.app_slug)
|
||||
provider: WSFederationProvider = get_object_or_404(
|
||||
WSFederationProvider, audience=self.wtrealm
|
||||
WSFederationProvider, pk=application.provider_id
|
||||
)
|
||||
application = get_object_or_404(Application, provider=provider)
|
||||
return application, provider
|
||||
|
||||
@@ -43,6 +43,7 @@ class TestWSFedSignIn(TestCase):
|
||||
wtrealm="",
|
||||
wreply="",
|
||||
wctx=None,
|
||||
app_slug="",
|
||||
),
|
||||
)
|
||||
token = proc.response()[WS_FED_POST_KEY_RESULT]
|
||||
@@ -64,6 +65,7 @@ class TestWSFedSignIn(TestCase):
|
||||
wtrealm="",
|
||||
wreply="",
|
||||
wctx=None,
|
||||
app_slug="",
|
||||
),
|
||||
)
|
||||
token = proc.response()[WS_FED_POST_KEY_RESULT]
|
||||
|
||||
@@ -4,7 +4,6 @@ from django.urls import path
|
||||
|
||||
from authentik.enterprise.providers.ws_federation.api.providers import WSFederationProviderViewSet
|
||||
from authentik.enterprise.providers.ws_federation.views import WSFedEntryView
|
||||
from authentik.providers.saml.views.metadata import MetadataDownload
|
||||
|
||||
urlpatterns = [
|
||||
path(
|
||||
@@ -12,12 +11,6 @@ urlpatterns = [
|
||||
WSFedEntryView.as_view(),
|
||||
name="wsfed",
|
||||
),
|
||||
# Metadata
|
||||
path(
|
||||
"<slug:application_slug>/metadata/",
|
||||
MetadataDownload.as_view(),
|
||||
name="metadata-download",
|
||||
),
|
||||
]
|
||||
|
||||
api_urlpatterns = [
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
from django.http import Http404, HttpRequest, HttpResponse
|
||||
from django.shortcuts import get_object_or_404, redirect
|
||||
from django.urls import reverse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils.translation import gettext as _
|
||||
from django.views import View
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.models import Application, AuthenticatedSession
|
||||
@@ -162,24 +160,3 @@ class WSFedFlowFinalView(ChallengeStageView):
|
||||
"attrs": response,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
class MetadataDownload(View):
|
||||
"""Redirect to metadata download"""
|
||||
|
||||
def dispatch(self, request: HttpRequest, application_slug: str) -> HttpResponse:
|
||||
app = Application.objects.filter(slug=application_slug).with_provider().first()
|
||||
if not app:
|
||||
raise Http404
|
||||
provider = app.get_provider()
|
||||
if not provider:
|
||||
raise Http404
|
||||
return redirect(
|
||||
reverse(
|
||||
"authentik_api:wsfederationprovider-metadata",
|
||||
kwargs={
|
||||
"pk": provider.pk,
|
||||
},
|
||||
)
|
||||
+ "?download"
|
||||
)
|
||||
|
||||
@@ -4,7 +4,6 @@ TENANT_APPS = [
|
||||
"authentik.enterprise.audit",
|
||||
"authentik.enterprise.endpoints.connectors.agent",
|
||||
"authentik.enterprise.endpoints.connectors.fleet",
|
||||
"authentik.enterprise.lifecycle",
|
||||
"authentik.enterprise.policies.unique_password",
|
||||
"authentik.enterprise.providers.google_workspace",
|
||||
"authentik.enterprise.providers.microsoft_entra",
|
||||
|
||||
@@ -1,54 +0,0 @@
|
||||
# Generated by Django 5.2.10 on 2026-02-03 09:52
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_events", "0015_alter_event_action_choices"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="event",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("secret_view", "Secret View"),
|
||||
("secret_rotate", "Secret Rotate"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("flow_execution", "Flow Execution"),
|
||||
("policy_execution", "Policy Execution"),
|
||||
("policy_exception", "Policy Exception"),
|
||||
("property_mapping_exception", "Property Mapping Exception"),
|
||||
("system_task_execution", "System Task Execution"),
|
||||
("system_task_exception", "System Task Exception"),
|
||||
("system_exception", "System Exception"),
|
||||
("configuration_error", "Configuration Error"),
|
||||
("configuration_warning", "Configuration Warning"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("email_sent", "Email Sent"),
|
||||
("update_available", "Update Available"),
|
||||
("export_ready", "Export Ready"),
|
||||
("review_initiated", "Review Initiated"),
|
||||
("review_overdue", "Review Overdue"),
|
||||
("review_attested", "Review Attested"),
|
||||
("review_completed", "Review Completed"),
|
||||
("custom_", "Custom Prefix"),
|
||||
]
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -123,11 +123,6 @@ class EventAction(models.TextChoices):
|
||||
|
||||
EXPORT_READY = "export_ready"
|
||||
|
||||
REVIEW_INITIATED = "review_initiated"
|
||||
REVIEW_OVERDUE = "review_overdue"
|
||||
REVIEW_ATTESTED = "review_attested"
|
||||
REVIEW_COMPLETED = "review_completed"
|
||||
|
||||
CUSTOM_PREFIX = "custom_"
|
||||
|
||||
|
||||
|
||||
@@ -31,9 +31,6 @@ class FlowLayout(models.TextChoices):
|
||||
SIDEBAR_LEFT = "sidebar_left"
|
||||
SIDEBAR_RIGHT = "sidebar_right"
|
||||
|
||||
SIDEBAR_LEFT_FRAME_BACKGROUND = "sidebar_left_frame_background"
|
||||
SIDEBAR_RIGHT_FRAME_BACKGROUND = "sidebar_right_frame_background"
|
||||
|
||||
|
||||
class ErrorDetailSerializer(PassiveSerializer):
|
||||
"""Serializer for rest_framework's error messages"""
|
||||
|
||||
@@ -1,29 +0,0 @@
|
||||
# Generated by Django 5.2.10 on 2026-01-16 17:50
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_flows", "0030_alter_flow_background"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="flow",
|
||||
name="layout",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("stacked", "Stacked"),
|
||||
("content_left", "Content Left"),
|
||||
("content_right", "Content Right"),
|
||||
("sidebar_left", "Sidebar Left"),
|
||||
("sidebar_right", "Sidebar Right"),
|
||||
("sidebar_left_frame_background", "Sidebar Left Frame Background"),
|
||||
("sidebar_right_frame_background", "Sidebar Right Frame Background"),
|
||||
],
|
||||
default="stacked",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -9,15 +9,7 @@
|
||||
{{ block.super }}
|
||||
<link rel="prefetch" href="{{ flow_background_url }}" />
|
||||
{% if flow.compatibility_mode and not inspector %}
|
||||
{% comment %}
|
||||
@see {@link web/types/webcomponents.d.ts} for type definitions.
|
||||
{% endcomment %}
|
||||
<script data-id="shady-dom">
|
||||
"use strict";
|
||||
|
||||
window.ShadyDOM = window.ShadyDOM || {}
|
||||
window.ShadyDOM.force = true
|
||||
</script>
|
||||
<script data-id="shady-dom">ShadyDOM = { force: true };</script>
|
||||
{% endif %}
|
||||
{% include "base/header_js.html" %}
|
||||
<script data-id="flow-config">
|
||||
@@ -53,11 +45,16 @@
|
||||
slug="{{ flow.slug }}"
|
||||
class="pf-c-login"
|
||||
data-layout="{{ flow.layout|default:'stacked' }}"
|
||||
loading
|
||||
>
|
||||
{% include "base/placeholder.html" %}
|
||||
|
||||
<ak-brand-links name="flow-links" slot="footer"></ak-brand-links>
|
||||
<ak-brand-links
|
||||
slot="footer"
|
||||
exportparts="list:brand-links-list, list-item:brand-links-list-item"
|
||||
role="contentinfo"
|
||||
aria-label="{% trans 'Site footer' %}"
|
||||
class="pf-c-login__footer {% if flow.layout == 'stacked' %}pf-m-dark{% endif %}"
|
||||
></ak-brand-links>
|
||||
</ak-flow-executor>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -42,7 +42,7 @@ ARG_SANITIZE = re.compile(r"[:.-]")
|
||||
|
||||
|
||||
def sanitize_arg(arg_name: str) -> str:
|
||||
return re.sub(ARG_SANITIZE, "_", slugify(arg_name))
|
||||
return re.sub(ARG_SANITIZE, "_", arg_name)
|
||||
|
||||
|
||||
class BaseEvaluator:
|
||||
@@ -311,9 +311,7 @@ class BaseEvaluator:
|
||||
|
||||
def wrap_expression(self, expression: str) -> str:
|
||||
"""Wrap expression in a function, call it, and save the result as `result`"""
|
||||
handler_signature = ",".join(
|
||||
[x for x in [sanitize_arg(x) for x in self._context.keys()] if x]
|
||||
)
|
||||
handler_signature = ",".join(sanitize_arg(x) for x in self._context.keys())
|
||||
full_expression = ""
|
||||
full_expression += f"def handler({handler_signature}):\n"
|
||||
full_expression += indent(expression, " ")
|
||||
|
||||
@@ -1,16 +1,10 @@
|
||||
"""Migration helpers"""
|
||||
|
||||
from collections.abc import Iterable
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from django.apps.registry import Apps
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
|
||||
from authentik.events.utils import cleanse_dict, sanitize_dict
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from authentik.events.models import EventAction
|
||||
|
||||
|
||||
def fallback_names(app: str, model: str, field: str):
|
||||
"""Factory function that checks all instances of `app`.`model` instance's `field`
|
||||
@@ -71,12 +65,3 @@ def progress_bar(iterable: Iterable):
|
||||
print_progress_bar(i + 1)
|
||||
# Print New Line on Complete
|
||||
print()
|
||||
|
||||
|
||||
def migration_event(
|
||||
apps: Apps, schema_editor: BaseDatabaseSchemaEditor, action: EventAction, **kwargs
|
||||
):
|
||||
db_alias = schema_editor.connection.alias
|
||||
Event = apps.get_model("authentik_events", "Event")
|
||||
event = Event(action=action, app="authentik", context=cleanse_dict(sanitize_dict(kwargs)))
|
||||
event.save(using=db_alias)
|
||||
|
||||
@@ -88,7 +88,7 @@ class DomainlessURLValidator(URLValidator):
|
||||
|
||||
def __call__(self, value: str):
|
||||
# Check if the scheme is valid.
|
||||
scheme = value.split("://", maxsplit=1)[0].lower()
|
||||
scheme = value.split("://")[0].lower()
|
||||
if scheme not in self.schemes:
|
||||
value = "default" + value
|
||||
super().__call__(value)
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
"""Test Evaluator base functions"""
|
||||
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.test import RequestFactory, TestCase
|
||||
@@ -354,18 +353,3 @@ class TestEvaluator(TestCase):
|
||||
self.assertEqual(message.to, ["to@example.com"])
|
||||
self.assertEqual(message.cc, ["cc1@example.com", "cc2@example.com"])
|
||||
self.assertEqual(message.bcc, ["bcc1@example.com", "bcc2@example.com"])
|
||||
|
||||
def test_expr_arg_escape(self):
|
||||
"""Test escaping of arguments"""
|
||||
eval = BaseEvaluator()
|
||||
eval._context = {
|
||||
'z=getattr(getattr(__import__("os"), "popen")("id > /tmp/test"), "read")()': "bar",
|
||||
"@@": "baz",
|
||||
"{{": "baz",
|
||||
"aa@@": "baz",
|
||||
}
|
||||
res = eval.evaluate("return locals()")
|
||||
self.assertEqual(
|
||||
res, {"zgetattrgetattr__import__os_popenid_tmptest_read": "bar", "aa": "baz"}
|
||||
)
|
||||
self.assertFalse(Path("/tmp/test").exists())
|
||||
|
||||
@@ -12,9 +12,9 @@ HEADER = "### Managed by authentik"
|
||||
FOOTER = "### End Managed by authentik"
|
||||
|
||||
|
||||
def opener(path: Path | str, flags: int):
|
||||
"""File opener to create files as 600 perms"""
|
||||
return os.open(path, flags, 0o600)
|
||||
def opener(path, flags):
|
||||
"""File opener to create files as 700 perms"""
|
||||
return os.open(path, flags, 0o700)
|
||||
|
||||
|
||||
class SSHManagedExternallyException(DockerException):
|
||||
|
||||
@@ -7,7 +7,6 @@ from tempfile import gettempdir
|
||||
from docker.tls import TLSConfig
|
||||
|
||||
from authentik.crypto.models import CertificateKeyPair
|
||||
from authentik.outposts.docker_ssh import opener
|
||||
|
||||
|
||||
class DockerInlineTLS:
|
||||
@@ -30,7 +29,7 @@ class DockerInlineTLS:
|
||||
def write_file(self, name: str, contents: str) -> str:
|
||||
"""Wrapper for mkstemp that uses fdopen"""
|
||||
path = Path(gettempdir(), name)
|
||||
with open(path, "w", encoding="utf8", opener=opener) as _file:
|
||||
with open(path, "w", encoding="utf8") as _file:
|
||||
_file.write(contents)
|
||||
self._paths.append(str(path))
|
||||
return str(path)
|
||||
|
||||
@@ -132,14 +132,9 @@ class PolicyEngine:
|
||||
# If we didn't find any static bindings, do nothing
|
||||
return
|
||||
self.logger.debug("P_ENG: Found static bindings", **matched_bindings)
|
||||
if self.mode == PolicyEngineMode.MODE_ANY:
|
||||
if matched_bindings.get("passing", 0) > 0:
|
||||
# Any passing static binding -> passing
|
||||
passing = True
|
||||
elif self.mode == PolicyEngineMode.MODE_ALL:
|
||||
if matched_bindings.get("passing", 0) == matched_bindings["total"]:
|
||||
# All static bindings are passing -> passing
|
||||
passing = True
|
||||
if matched_bindings.get("passing", 0) > 0:
|
||||
# Any passing static binding -> passing
|
||||
passing = True
|
||||
elif matched_bindings["total"] > 0 and matched_bindings.get("passing", 0) < 1:
|
||||
# No matching static bindings but at least one is configured -> not passing
|
||||
passing = False
|
||||
@@ -190,16 +185,6 @@ class PolicyEngine:
|
||||
# Only call .recv() if no result is saved, otherwise we just deadlock here
|
||||
if not proc_info.result:
|
||||
proc_info.result = proc_info.connection.recv()
|
||||
if proc_info.result and proc_info.result._exec_time:
|
||||
HIST_POLICIES_EXECUTION_TIME.labels(
|
||||
binding_order=proc_info.binding.order,
|
||||
binding_target_type=proc_info.binding.target_type,
|
||||
binding_target_name=proc_info.binding.target_name,
|
||||
object_type=(
|
||||
class_to_path(self.request.obj.__class__) if self.request.obj else ""
|
||||
),
|
||||
mode="execute_process",
|
||||
).observe(proc_info.result._exec_time)
|
||||
return self
|
||||
|
||||
@property
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
# Generated by Django 5.2.11 on 2026-02-04 18:28
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_policies_event_matcher", "0025_alter_eventmatcherpolicy_action"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="eventmatcherpolicy",
|
||||
name="action",
|
||||
field=models.TextField(
|
||||
choices=[
|
||||
("login", "Login"),
|
||||
("login_failed", "Login Failed"),
|
||||
("logout", "Logout"),
|
||||
("user_write", "User Write"),
|
||||
("suspicious_request", "Suspicious Request"),
|
||||
("password_set", "Password Set"),
|
||||
("secret_view", "Secret View"),
|
||||
("secret_rotate", "Secret Rotate"),
|
||||
("invitation_used", "Invite Used"),
|
||||
("authorize_application", "Authorize Application"),
|
||||
("source_linked", "Source Linked"),
|
||||
("impersonation_started", "Impersonation Started"),
|
||||
("impersonation_ended", "Impersonation Ended"),
|
||||
("flow_execution", "Flow Execution"),
|
||||
("policy_execution", "Policy Execution"),
|
||||
("policy_exception", "Policy Exception"),
|
||||
("property_mapping_exception", "Property Mapping Exception"),
|
||||
("system_task_execution", "System Task Execution"),
|
||||
("system_task_exception", "System Task Exception"),
|
||||
("system_exception", "System Exception"),
|
||||
("configuration_error", "Configuration Error"),
|
||||
("configuration_warning", "Configuration Warning"),
|
||||
("model_created", "Model Created"),
|
||||
("model_updated", "Model Updated"),
|
||||
("model_deleted", "Model Deleted"),
|
||||
("email_sent", "Email Sent"),
|
||||
("update_available", "Update Available"),
|
||||
("export_ready", "Export Ready"),
|
||||
("review_initiated", "Review Initiated"),
|
||||
("review_overdue", "Review Overdue"),
|
||||
("review_attested", "Review Attested"),
|
||||
("review_completed", "Review Completed"),
|
||||
("custom_", "Custom Prefix"),
|
||||
],
|
||||
default=None,
|
||||
help_text="Match created events with this action type. When left empty, all action types will be matched.",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
from multiprocessing import get_context
|
||||
from multiprocessing.connection import Connection
|
||||
from time import perf_counter
|
||||
|
||||
from django.core.cache import cache
|
||||
from sentry_sdk import start_span
|
||||
@@ -12,6 +11,8 @@ from structlog.stdlib import get_logger
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.utils.errors import exception_to_dict
|
||||
from authentik.lib.utils.reflection import class_to_path
|
||||
from authentik.policies.apps import HIST_POLICIES_EXECUTION_TIME
|
||||
from authentik.policies.exceptions import PolicyException
|
||||
from authentik.policies.models import PolicyBinding
|
||||
from authentik.policies.types import CACHE_PREFIX, PolicyRequest, PolicyResult
|
||||
@@ -122,9 +123,18 @@ class PolicyProcess(PROCESS_CLASS):
|
||||
|
||||
def profiling_wrapper(self):
|
||||
"""Run with profiling enabled"""
|
||||
with start_span(
|
||||
op="authentik.policy.process.execute",
|
||||
) as span:
|
||||
with (
|
||||
start_span(
|
||||
op="authentik.policy.process.execute",
|
||||
) as span,
|
||||
HIST_POLICIES_EXECUTION_TIME.labels(
|
||||
binding_order=self.binding.order,
|
||||
binding_target_type=self.binding.target_type,
|
||||
binding_target_name=self.binding.target_name,
|
||||
object_type=class_to_path(self.request.obj.__class__) if self.request.obj else "",
|
||||
mode="execute_process",
|
||||
).time(),
|
||||
):
|
||||
span: Span
|
||||
span.set_data("policy", self.binding.policy)
|
||||
span.set_data("request", self.request)
|
||||
@@ -132,14 +142,8 @@ class PolicyProcess(PROCESS_CLASS):
|
||||
|
||||
def run(self): # pragma: no cover
|
||||
"""Task wrapper to run policy checking"""
|
||||
result = None
|
||||
try:
|
||||
start = perf_counter()
|
||||
result = self.profiling_wrapper()
|
||||
end = perf_counter()
|
||||
result._exec_time = max((end - start), 0)
|
||||
self.connection.send(self.profiling_wrapper())
|
||||
except Exception as exc: # noqa
|
||||
LOGGER.warning("Policy failed to run", exc=exc)
|
||||
result = PolicyResult(False, str(exc))
|
||||
finally:
|
||||
self.connection.send(result)
|
||||
self.connection.send(PolicyResult(False, str(exc)))
|
||||
|
||||
@@ -33,9 +33,6 @@ class TestPolicyEngine(TestCase):
|
||||
self.policy_raises = ExpressionPolicy.objects.create(
|
||||
name=generate_id(), expression="{{ 0/0 }}"
|
||||
)
|
||||
self.group_member = Group.objects.create(name=generate_id())
|
||||
self.user.groups.add(self.group_member)
|
||||
self.group_non_member = Group.objects.create(name=generate_id())
|
||||
|
||||
def test_engine_empty(self):
|
||||
"""Ensure empty policy list passes"""
|
||||
@@ -54,7 +51,7 @@ class TestPolicyEngine(TestCase):
|
||||
self.assertEqual(result.passing, True)
|
||||
self.assertEqual(result.messages, ("dummy",))
|
||||
|
||||
def test_engine_mode_all_dyn(self):
|
||||
def test_engine_mode_all(self):
|
||||
"""Ensure all policies passes with AND mode (false and true -> false)"""
|
||||
pbm = PolicyBindingModel.objects.create(policy_engine_mode=PolicyEngineMode.MODE_ALL)
|
||||
PolicyBinding.objects.create(target=pbm, policy=self.policy_false, order=0)
|
||||
@@ -70,7 +67,7 @@ class TestPolicyEngine(TestCase):
|
||||
),
|
||||
)
|
||||
|
||||
def test_engine_mode_any_dyn(self):
|
||||
def test_engine_mode_any(self):
|
||||
"""Ensure all policies passes with OR mode (false and true -> true)"""
|
||||
pbm = PolicyBindingModel.objects.create(policy_engine_mode=PolicyEngineMode.MODE_ANY)
|
||||
PolicyBinding.objects.create(target=pbm, policy=self.policy_false, order=0)
|
||||
@@ -86,26 +83,6 @@ class TestPolicyEngine(TestCase):
|
||||
),
|
||||
)
|
||||
|
||||
def test_engine_mode_all_static(self):
|
||||
"""Ensure all policies passes with OR mode (false and true -> true)"""
|
||||
pbm = PolicyBindingModel.objects.create(policy_engine_mode=PolicyEngineMode.MODE_ALL)
|
||||
PolicyBinding.objects.create(target=pbm, group=self.group_member, order=0)
|
||||
PolicyBinding.objects.create(target=pbm, group=self.group_non_member, order=1)
|
||||
engine = PolicyEngine(pbm, self.user)
|
||||
result = engine.build().result
|
||||
self.assertEqual(result.passing, False)
|
||||
self.assertEqual(result.messages, ())
|
||||
|
||||
def test_engine_mode_any_static(self):
|
||||
"""Ensure all policies passes with OR mode (false and true -> true)"""
|
||||
pbm = PolicyBindingModel.objects.create(policy_engine_mode=PolicyEngineMode.MODE_ANY)
|
||||
PolicyBinding.objects.create(target=pbm, group=self.group_member, order=0)
|
||||
PolicyBinding.objects.create(target=pbm, group=self.group_non_member, order=1)
|
||||
engine = PolicyEngine(pbm, self.user)
|
||||
result = engine.build().result
|
||||
self.assertEqual(result.passing, True)
|
||||
self.assertEqual(result.messages, ())
|
||||
|
||||
def test_engine_negate(self):
|
||||
"""Test negate flag"""
|
||||
pbm = PolicyBindingModel.objects.create()
|
||||
|
||||
@@ -77,8 +77,6 @@ class PolicyResult:
|
||||
|
||||
log_messages: list[LogEvent] | None
|
||||
|
||||
_exec_time: int | None
|
||||
|
||||
def __init__(self, passing: bool, *messages: str):
|
||||
self.passing = passing
|
||||
self.messages = messages
|
||||
@@ -86,7 +84,6 @@ class PolicyResult:
|
||||
self.source_binding = None
|
||||
self.source_results = []
|
||||
self.log_messages = []
|
||||
self._exec_time = None
|
||||
|
||||
def __repr__(self):
|
||||
return self.__str__()
|
||||
|
||||
@@ -68,8 +68,6 @@ class IDToken:
|
||||
at_hash: str | None = None
|
||||
# Session ID, https://openid.net/specs/openid-connect-frontchannel-1_0.html#ClaimsContents
|
||||
sid: str | None = None
|
||||
# JWT ID, https://www.rfc-editor.org/rfc/rfc7519.html#section-4.1.7
|
||||
jti: str | None = None
|
||||
|
||||
claims: dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
@@ -83,7 +81,6 @@ class IDToken:
|
||||
(token.expires if token.expires is not None else default_token_duration()).timestamp()
|
||||
)
|
||||
id_token.iss = provider.get_issuer(request)
|
||||
id_token.jti = generate_id()
|
||||
id_token.aud = provider.client_id
|
||||
id_token.claims = {}
|
||||
|
||||
|
||||
@@ -5,7 +5,6 @@ from urllib.parse import parse_qs, urlparse
|
||||
|
||||
from django.test import RequestFactory
|
||||
from django.urls import reverse
|
||||
from django.utils import translation
|
||||
from django.utils.timezone import now
|
||||
|
||||
from authentik.blueprints.tests import apply_blueprint
|
||||
@@ -691,21 +690,18 @@ class TestAuthorize(OAuthTestCase):
|
||||
Application.objects.create(name="app", slug="app", provider=provider)
|
||||
state = generate_id()
|
||||
self.client.logout()
|
||||
try:
|
||||
response = self.client.get(
|
||||
reverse("authentik_providers_oauth2:authorize"),
|
||||
data={
|
||||
"response_type": "code",
|
||||
"client_id": "test",
|
||||
"state": state,
|
||||
"redirect_uri": "foo://localhost",
|
||||
"ui_locales": "invalid fr",
|
||||
},
|
||||
)
|
||||
parsed = parse_qs(urlparse(response.url).query)
|
||||
self.assertEqual(parsed["locale"], ["fr"])
|
||||
finally:
|
||||
translation.deactivate()
|
||||
response = self.client.get(
|
||||
reverse("authentik_providers_oauth2:authorize"),
|
||||
data={
|
||||
"response_type": "code",
|
||||
"client_id": "test",
|
||||
"state": state,
|
||||
"redirect_uri": "foo://localhost",
|
||||
"ui_locales": "invalid fr",
|
||||
},
|
||||
)
|
||||
parsed = parse_qs(urlparse(response.url).query)
|
||||
self.assertEqual(parsed["locale"], ["fr"])
|
||||
|
||||
@apply_blueprint("default/flow-default-authentication-flow.yaml")
|
||||
def test_ui_locales_invalid(self):
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
"""Device backchannel tests"""
|
||||
|
||||
from base64 import b64encode
|
||||
from json import loads
|
||||
|
||||
from django.urls import reverse
|
||||
@@ -27,7 +26,7 @@ class TesOAuth2DeviceBackchannel(OAuthTestCase):
|
||||
provider=self.provider,
|
||||
)
|
||||
|
||||
def test_backchannel_invalid_client_id_via_post_body(self):
|
||||
def test_backchannel_invalid(self):
|
||||
"""Test backchannel"""
|
||||
res = self.client.post(
|
||||
reverse("authentik_providers_oauth2:device"),
|
||||
@@ -51,7 +50,7 @@ class TesOAuth2DeviceBackchannel(OAuthTestCase):
|
||||
)
|
||||
self.assertEqual(res.status_code, 400)
|
||||
|
||||
def test_backchannel_client_id_via_post_body(self):
|
||||
def test_backchannel(self):
|
||||
"""Test backchannel"""
|
||||
res = self.client.post(
|
||||
reverse("authentik_providers_oauth2:device"),
|
||||
@@ -62,37 +61,3 @@ class TesOAuth2DeviceBackchannel(OAuthTestCase):
|
||||
self.assertEqual(res.status_code, 200)
|
||||
body = loads(res.content.decode())
|
||||
self.assertEqual(body["expires_in"], 60)
|
||||
|
||||
def test_backchannel_invalid_client_id_via_auth_header(self):
|
||||
"""Test backchannel"""
|
||||
creds = b64encode(b"foo:").decode()
|
||||
res = self.client.post(
|
||||
reverse("authentik_providers_oauth2:device"),
|
||||
HTTP_AUTHORIZATION=f"Basic {creds}",
|
||||
)
|
||||
self.assertEqual(res.status_code, 400)
|
||||
res = self.client.post(
|
||||
reverse("authentik_providers_oauth2:device"),
|
||||
)
|
||||
self.assertEqual(res.status_code, 400)
|
||||
# test without application
|
||||
self.application.provider = None
|
||||
self.application.save()
|
||||
res = self.client.post(
|
||||
reverse("authentik_providers_oauth2:device"),
|
||||
data={
|
||||
"client_id": "test",
|
||||
},
|
||||
)
|
||||
self.assertEqual(res.status_code, 400)
|
||||
|
||||
def test_backchannel_client_id_via_auth_header(self):
|
||||
"""Test backchannel"""
|
||||
creds = b64encode(f"{self.provider.client_id}:".encode()).decode()
|
||||
res = self.client.post(
|
||||
reverse("authentik_providers_oauth2:device"),
|
||||
HTTP_AUTHORIZATION=f"Basic {creds}",
|
||||
)
|
||||
self.assertEqual(res.status_code, 200)
|
||||
body = loads(res.content.decode())
|
||||
self.assertEqual(body["expires_in"], 60)
|
||||
|
||||
@@ -16,7 +16,7 @@ from authentik.lib.config import CONFIG
|
||||
from authentik.lib.utils.time import timedelta_from_string
|
||||
from authentik.providers.oauth2.errors import DeviceCodeError
|
||||
from authentik.providers.oauth2.models import DeviceToken, OAuth2Provider
|
||||
from authentik.providers.oauth2.utils import TokenResponse, extract_client_auth
|
||||
from authentik.providers.oauth2.utils import TokenResponse
|
||||
from authentik.providers.oauth2.views.device_init import QS_KEY_CODE
|
||||
|
||||
LOGGER = get_logger()
|
||||
@@ -32,7 +32,7 @@ class DeviceView(View):
|
||||
|
||||
def parse_request(self):
|
||||
"""Parse incoming request"""
|
||||
client_id, _ = extract_client_auth(self.request)
|
||||
client_id = self.request.POST.get("client_id", None)
|
||||
if not client_id:
|
||||
raise DeviceCodeError("invalid_client")
|
||||
provider = OAuth2Provider.objects.filter(client_id=client_id).first()
|
||||
|
||||
@@ -38,7 +38,6 @@ from authentik.core.models import (
|
||||
USER_ATTRIBUTE_EXPIRES,
|
||||
USER_ATTRIBUTE_GENERATED,
|
||||
USER_PATH_SYSTEM_PREFIX,
|
||||
USERNAME_MAX_LENGTH,
|
||||
Application,
|
||||
Token,
|
||||
TokenIntents,
|
||||
@@ -503,7 +502,7 @@ class TokenParams:
|
||||
self.user, _ = User.objects.update_or_create(
|
||||
# trim username to ensure the entire username is max 150 chars
|
||||
# (22 chars being the length of the "template")
|
||||
username=f"ak-{self.provider.name[: USERNAME_MAX_LENGTH - 22]}-client_credentials",
|
||||
username=f"ak-{self.provider.name[:150-22]}-client_credentials",
|
||||
defaults={
|
||||
"last_login": timezone.now(),
|
||||
"name": f"Autogenerated user from application {app.name} (client credentials)",
|
||||
|
||||
@@ -434,16 +434,14 @@ class AssertionProcessor:
|
||||
def build_response(self) -> str:
|
||||
"""Build string XML Response and sign if signing is enabled."""
|
||||
root_response = self.get_response()
|
||||
# Sign assertion first (before encryption)
|
||||
if self.provider.signing_kp and self.provider.sign_assertion:
|
||||
assertion = root_response.xpath("//saml:Assertion", namespaces=NS_MAP)[0]
|
||||
self._sign(assertion)
|
||||
# Encrypt assertion (this replaces Assertion with EncryptedAssertion)
|
||||
if self.provider.signing_kp:
|
||||
if self.provider.sign_assertion:
|
||||
assertion = root_response.xpath("//saml:Assertion", namespaces=NS_MAP)[0]
|
||||
self._sign(assertion)
|
||||
if self.provider.sign_response:
|
||||
response = root_response.xpath("//samlp:Response", namespaces=NS_MAP)[0]
|
||||
self._sign(response)
|
||||
if self.provider.encryption_kp:
|
||||
assertion = root_response.xpath("//saml:Assertion", namespaces=NS_MAP)[0]
|
||||
self._encrypt(assertion, root_response)
|
||||
# Sign response AFTER encryption so signature covers the encrypted content
|
||||
if self.provider.signing_kp and self.provider.sign_response:
|
||||
response = root_response.xpath("//samlp:Response", namespaces=NS_MAP)[0]
|
||||
self._sign(response)
|
||||
return etree.tostring(root_response).decode("utf-8") # nosec
|
||||
|
||||
@@ -37,11 +37,6 @@ class ServiceProviderMetadata:
|
||||
name_id_policy: SAMLNameIDPolicy
|
||||
|
||||
signing_keypair: CertificateKeyPair | None = None
|
||||
encryption_keypair: CertificateKeyPair | None = None
|
||||
|
||||
# Single Logout Service (optional)
|
||||
sls_binding: str | None = None
|
||||
sls_location: str | None = None
|
||||
|
||||
def to_provider(
|
||||
self, name: str, authorization_flow: Flow, invalidation_flow: Flow
|
||||
@@ -55,19 +50,10 @@ class ServiceProviderMetadata:
|
||||
provider.sp_binding = self.acs_binding
|
||||
provider.acs_url = self.acs_location
|
||||
provider.default_name_id_policy = self.name_id_policy
|
||||
# Single Logout Service
|
||||
if self.sls_location:
|
||||
provider.sls_url = self.sls_location
|
||||
if self.sls_binding:
|
||||
provider.sls_binding = self.sls_binding
|
||||
if self.signing_keypair and self.auth_n_request_signed:
|
||||
self.signing_keypair.name = f"Provider {name} - SAML Signing Certificate"
|
||||
self.signing_keypair.save()
|
||||
provider.verification_kp = self.signing_keypair
|
||||
if self.encryption_keypair:
|
||||
self.encryption_keypair.name = f"Provider {name} - SAML Encryption Certificate"
|
||||
self.encryption_keypair.save()
|
||||
provider.encryption_kp = self.encryption_keypair
|
||||
if self.assertion_signed:
|
||||
provider.signing_kp = CertificateKeyPair.objects.exclude(key_data__iexact="").first()
|
||||
# Set all auto-generated Property-mappings as defaults
|
||||
@@ -81,7 +67,7 @@ class ServiceProviderMetadataParser:
|
||||
"""Service-Provider Metadata Parser"""
|
||||
|
||||
def get_signing_cert(self, root: etree.Element) -> CertificateKeyPair | None:
|
||||
"""Extract signing X509Certificate from metadata, when given."""
|
||||
"""Extract X509Certificate from metadata, when given."""
|
||||
signing_certs = root.xpath(
|
||||
'//md:SPSSODescriptor/md:KeyDescriptor[@use="signing"]//ds:X509Certificate/text()',
|
||||
namespaces=NS_MAP,
|
||||
@@ -95,21 +81,6 @@ class ServiceProviderMetadataParser:
|
||||
certificate_data=raw_cert,
|
||||
)
|
||||
|
||||
def get_encryption_cert(self, root: etree.Element) -> CertificateKeyPair | None:
|
||||
"""Extract encryption X509Certificate from metadata, when given."""
|
||||
encryption_certs = root.xpath(
|
||||
'//md:SPSSODescriptor/md:KeyDescriptor[@use="encryption"]//ds:X509Certificate/text()',
|
||||
namespaces=NS_MAP,
|
||||
)
|
||||
if len(encryption_certs) < 1:
|
||||
return None
|
||||
raw_cert = format_cert(encryption_certs[0])
|
||||
# sanity check, make sure the certificate is valid.
|
||||
load_pem_x509_certificate(raw_cert.encode("utf-8"), default_backend())
|
||||
return CertificateKeyPair(
|
||||
certificate_data=raw_cert,
|
||||
)
|
||||
|
||||
def check_signature(self, root: etree.Element, keypair: CertificateKeyPair):
|
||||
"""If Metadata is signed, check validity of signature"""
|
||||
xmlsec.tree.add_ids(root, ["ID"])
|
||||
@@ -166,25 +137,12 @@ class ServiceProviderMetadataParser:
|
||||
signing_keypair = self.get_signing_cert(root)
|
||||
if signing_keypair:
|
||||
self.check_signature(root, signing_keypair)
|
||||
encryption_keypair = self.get_encryption_cert(root)
|
||||
|
||||
name_id_format = descriptor.findall(f"{{{NS_SAML_METADATA}}}NameIDFormat")
|
||||
name_id_policy = SAMLNameIDPolicy.UNSPECIFIED
|
||||
if len(name_id_format) > 0:
|
||||
name_id_policy = SAMLNameIDPolicy(name_id_format[0].text)
|
||||
|
||||
# Parse SingleLogoutService (optional)
|
||||
sls_binding = None
|
||||
sls_location = None
|
||||
sls_services = descriptor.findall(f"{{{NS_SAML_METADATA}}}SingleLogoutService")
|
||||
if len(sls_services) > 0:
|
||||
sls_service = sls_services[0]
|
||||
sls_binding = {
|
||||
SAML_BINDING_REDIRECT: SAMLBindings.REDIRECT,
|
||||
SAML_BINDING_POST: SAMLBindings.POST,
|
||||
}.get(sls_service.attrib.get("Binding"))
|
||||
sls_location = sls_service.attrib.get("Location")
|
||||
|
||||
return ServiceProviderMetadata(
|
||||
entity_id=entity_id,
|
||||
acs_binding=acs_binding,
|
||||
@@ -192,8 +150,5 @@ class ServiceProviderMetadataParser:
|
||||
auth_n_request_signed=auth_n_request_signed,
|
||||
assertion_signed=assertion_signed,
|
||||
signing_keypair=signing_keypair,
|
||||
encryption_keypair=encryption_keypair,
|
||||
name_id_policy=name_id_policy,
|
||||
sls_binding=sls_binding,
|
||||
sls_location=sls_location,
|
||||
)
|
||||
|
||||
@@ -29,7 +29,7 @@ from authentik.providers.saml.models import SAMLPropertyMapping, SAMLProvider
|
||||
from authentik.providers.saml.processors.assertion import AssertionProcessor
|
||||
from authentik.providers.saml.processors.authn_request_parser import AuthNRequestParser
|
||||
from authentik.sources.saml.exceptions import MismatchedRequestID
|
||||
from authentik.sources.saml.models import SAMLBindingTypes, SAMLSource
|
||||
from authentik.sources.saml.models import SAMLSource
|
||||
from authentik.sources.saml.processors.request import SESSION_KEY_REQUEST_ID, RequestProcessor
|
||||
from authentik.sources.saml.processors.response import ResponseProcessor
|
||||
|
||||
@@ -104,7 +104,6 @@ class TestAuthNRequest(TestCase):
|
||||
signing_kp=self.cert,
|
||||
verification_kp=self.cert,
|
||||
signed_assertion=True,
|
||||
binding_type=SAMLBindingTypes.POST,
|
||||
)
|
||||
|
||||
def test_signed_valid(self):
|
||||
@@ -195,213 +194,6 @@ class TestAuthNRequest(TestCase):
|
||||
response_parser = ResponseProcessor(self.source, http_request)
|
||||
response_parser.parse()
|
||||
|
||||
def test_request_sign_response_and_encrypt(self):
|
||||
"""Test SAML with sign_response enabled AND encryption.
|
||||
|
||||
This tests the fix for signature invalidation when encryption is enabled.
|
||||
The response must be signed AFTER encryption, not before, because encryption
|
||||
replaces the Assertion with EncryptedAssertion which changes the response content.
|
||||
"""
|
||||
self.provider.sign_response = True
|
||||
self.provider.sign_assertion = False
|
||||
self.provider.encryption_kp = self.cert
|
||||
self.provider.save()
|
||||
self.source.encryption_kp = self.cert
|
||||
self.source.signed_response = True
|
||||
self.source.signed_assertion = False # Only response is signed, not assertion
|
||||
self.source.save()
|
||||
http_request = self.request_factory.get("/", user=get_anonymous_user())
|
||||
|
||||
# First create an AuthNRequest
|
||||
request_proc = RequestProcessor(self.source, http_request, "test_state")
|
||||
request = request_proc.build_auth_n()
|
||||
|
||||
# To get an assertion we need a parsed request (parsed by provider)
|
||||
parsed_request = AuthNRequestParser(self.provider).parse(
|
||||
b64encode(request.encode()).decode(), "test_state"
|
||||
)
|
||||
# Now create a response and convert it to string (provider)
|
||||
response_proc = AssertionProcessor(self.provider, http_request, parsed_request)
|
||||
response = response_proc.build_response()
|
||||
|
||||
# Verify the response contains EncryptedAssertion and a signature
|
||||
response_xml = fromstring(response)
|
||||
self.assertEqual(len(response_xml.xpath("//saml:EncryptedAssertion", namespaces=NS_MAP)), 1)
|
||||
self.assertEqual(
|
||||
len(response_xml.xpath("//samlp:Response/ds:Signature", namespaces=NS_MAP)), 1
|
||||
)
|
||||
|
||||
# Now parse the response (source) - this will verify the signature and decrypt
|
||||
http_request.POST = QueryDict(mutable=True)
|
||||
http_request.POST["SAMLResponse"] = b64encode(response.encode()).decode()
|
||||
|
||||
response_parser = ResponseProcessor(self.source, http_request)
|
||||
response_parser.parse()
|
||||
|
||||
def test_request_sign_assertion_and_encrypt(self):
|
||||
"""Test SAML with sign_assertion enabled AND encryption.
|
||||
|
||||
The assertion signature should be inside the encrypted content and
|
||||
remain valid after decryption.
|
||||
"""
|
||||
self.provider.sign_response = False
|
||||
self.provider.sign_assertion = True
|
||||
self.provider.encryption_kp = self.cert
|
||||
self.provider.save()
|
||||
self.source.encryption_kp = self.cert
|
||||
self.source.signed_assertion = True
|
||||
self.source.save()
|
||||
http_request = self.request_factory.get("/", user=get_anonymous_user())
|
||||
|
||||
# First create an AuthNRequest
|
||||
request_proc = RequestProcessor(self.source, http_request, "test_state")
|
||||
request = request_proc.build_auth_n()
|
||||
|
||||
# To get an assertion we need a parsed request (parsed by provider)
|
||||
parsed_request = AuthNRequestParser(self.provider).parse(
|
||||
b64encode(request.encode()).decode(), "test_state"
|
||||
)
|
||||
# Now create a response and convert it to string (provider)
|
||||
response_proc = AssertionProcessor(self.provider, http_request, parsed_request)
|
||||
response = response_proc.build_response()
|
||||
|
||||
# Verify the response contains EncryptedAssertion
|
||||
response_xml = fromstring(response)
|
||||
self.assertEqual(len(response_xml.xpath("//saml:EncryptedAssertion", namespaces=NS_MAP)), 1)
|
||||
|
||||
# Now parse the response (source) - this will decrypt and verify assertion signature
|
||||
http_request.POST = QueryDict(mutable=True)
|
||||
http_request.POST["SAMLResponse"] = b64encode(response.encode()).decode()
|
||||
|
||||
response_parser = ResponseProcessor(self.source, http_request)
|
||||
response_parser.parse()
|
||||
|
||||
def test_request_sign_both_and_encrypt(self):
|
||||
"""Test SAML with both sign_assertion and sign_response enabled AND encryption.
|
||||
|
||||
This is the most complex scenario: assertion is signed, then encrypted,
|
||||
then the response is signed. Both signatures should be valid.
|
||||
"""
|
||||
self.provider.sign_response = True
|
||||
self.provider.sign_assertion = True
|
||||
self.provider.encryption_kp = self.cert
|
||||
self.provider.save()
|
||||
self.source.encryption_kp = self.cert
|
||||
self.source.signed_assertion = True
|
||||
self.source.signed_response = True
|
||||
self.source.save()
|
||||
http_request = self.request_factory.get("/", user=get_anonymous_user())
|
||||
|
||||
# First create an AuthNRequest
|
||||
request_proc = RequestProcessor(self.source, http_request, "test_state")
|
||||
request = request_proc.build_auth_n()
|
||||
|
||||
# To get an assertion we need a parsed request (parsed by provider)
|
||||
parsed_request = AuthNRequestParser(self.provider).parse(
|
||||
b64encode(request.encode()).decode(), "test_state"
|
||||
)
|
||||
# Now create a response and convert it to string (provider)
|
||||
response_proc = AssertionProcessor(self.provider, http_request, parsed_request)
|
||||
response = response_proc.build_response()
|
||||
|
||||
# Verify the response contains EncryptedAssertion and response signature
|
||||
response_xml = fromstring(response)
|
||||
self.assertEqual(len(response_xml.xpath("//saml:EncryptedAssertion", namespaces=NS_MAP)), 1)
|
||||
self.assertEqual(
|
||||
len(response_xml.xpath("//samlp:Response/ds:Signature", namespaces=NS_MAP)), 1
|
||||
)
|
||||
|
||||
# Now parse the response (source) - this will verify response signature,
|
||||
# decrypt, then verify assertion signature
|
||||
http_request.POST = QueryDict(mutable=True)
|
||||
http_request.POST["SAMLResponse"] = b64encode(response.encode()).decode()
|
||||
|
||||
response_parser = ResponseProcessor(self.source, http_request)
|
||||
response_parser.parse()
|
||||
|
||||
def test_encrypted_assertion_namespace_preservation(self):
|
||||
"""Test that encrypted assertions include namespace declarations.
|
||||
|
||||
When an assertion is encrypted, the resulting decrypted XML must include
|
||||
the necessary namespace declarations (xmlns:saml) since it's now a standalone
|
||||
document fragment, no longer inheriting namespaces from the parent Response.
|
||||
"""
|
||||
self.provider.encryption_kp = self.cert
|
||||
self.provider.save()
|
||||
self.source.encryption_kp = self.cert
|
||||
self.source.save()
|
||||
http_request = self.request_factory.get("/", user=get_anonymous_user())
|
||||
|
||||
# First create an AuthNRequest
|
||||
request_proc = RequestProcessor(self.source, http_request, "test_state")
|
||||
request = request_proc.build_auth_n()
|
||||
|
||||
# To get an assertion we need a parsed request (parsed by provider)
|
||||
parsed_request = AuthNRequestParser(self.provider).parse(
|
||||
b64encode(request.encode()).decode(), "test_state"
|
||||
)
|
||||
# Now create a response and convert it to string (provider)
|
||||
response_proc = AssertionProcessor(self.provider, http_request, parsed_request)
|
||||
response = response_proc.build_response()
|
||||
|
||||
# Parse the encrypted response
|
||||
response_xml = fromstring(response)
|
||||
encrypted_assertion = response_xml.xpath("//saml:EncryptedAssertion", namespaces=NS_MAP)[0]
|
||||
encrypted_data = encrypted_assertion.xpath("//xenc:EncryptedData", namespaces=NS_MAP)[0]
|
||||
|
||||
# Decrypt the assertion manually to verify namespace is present
|
||||
import xmlsec
|
||||
|
||||
manager = xmlsec.KeysManager()
|
||||
key = xmlsec.Key.from_memory(self.cert.key_data, xmlsec.constants.KeyDataFormatPem, None)
|
||||
manager.add_key(key)
|
||||
enc_ctx = xmlsec.EncryptionContext(manager)
|
||||
decrypted = enc_ctx.decrypt(encrypted_data)
|
||||
|
||||
# The decrypted assertion should have xmlns:saml namespace declaration
|
||||
decrypted_str = etree.tostring(decrypted).decode()
|
||||
self.assertIn("xmlns:saml", decrypted_str)
|
||||
|
||||
# Also verify full round-trip works (source can parse it)
|
||||
http_request.POST = QueryDict(mutable=True)
|
||||
http_request.POST["SAMLResponse"] = b64encode(response.encode()).decode()
|
||||
|
||||
response_parser = ResponseProcessor(self.source, http_request)
|
||||
response_parser.parse()
|
||||
|
||||
def test_encrypted_response_schema_validation(self):
|
||||
"""Test that encrypted SAML responses validate against the SAML schema.
|
||||
|
||||
The response with EncryptedAssertion must be valid per saml-schema-protocol-2.0.xsd.
|
||||
This ensures we don't have invalid elements like EncryptedData inside Assertion.
|
||||
"""
|
||||
self.provider.encryption_kp = self.cert
|
||||
self.provider.save()
|
||||
http_request = self.request_factory.get("/", user=get_anonymous_user())
|
||||
|
||||
# First create an AuthNRequest
|
||||
request_proc = RequestProcessor(self.source, http_request, "test_state")
|
||||
request = request_proc.build_auth_n()
|
||||
|
||||
# To get an assertion we need a parsed request (parsed by provider)
|
||||
parsed_request = AuthNRequestParser(self.provider).parse(
|
||||
b64encode(request.encode()).decode(), "test_state"
|
||||
)
|
||||
# Now create a response and convert it to string (provider)
|
||||
response_proc = AssertionProcessor(self.provider, http_request, parsed_request)
|
||||
response = response_proc.build_response()
|
||||
|
||||
# Validate against SAML schema
|
||||
schema = etree.XMLSchema(
|
||||
etree.parse("schemas/saml-schema-protocol-2.0.xsd", parser=etree.XMLParser()) # nosec
|
||||
)
|
||||
self.assertTrue(schema.validate(lxml_from_string(response)))
|
||||
|
||||
# Verify structure: should have EncryptedAssertion, not Assertion with EncryptedData inside
|
||||
response_xml = fromstring(response)
|
||||
self.assertEqual(len(response_xml.xpath("//saml:EncryptedAssertion", namespaces=NS_MAP)), 1)
|
||||
self.assertEqual(len(response_xml.xpath("//saml:Assertion", namespaces=NS_MAP)), 0)
|
||||
|
||||
def test_request_signed(self):
|
||||
"""Test full SAML Request/Response flow, fully signed"""
|
||||
http_request = self.request_factory.get("/", user=get_anonymous_user())
|
||||
|
||||
@@ -12,7 +12,7 @@ from authentik.lib.xml import lxml_from_string
|
||||
from authentik.providers.saml.models import SAMLPropertyMapping, SAMLProvider
|
||||
from authentik.providers.saml.processors.assertion import AssertionProcessor
|
||||
from authentik.providers.saml.processors.authn_request_parser import AuthNRequestParser
|
||||
from authentik.sources.saml.models import SAMLBindingTypes, SAMLSource
|
||||
from authentik.sources.saml.models import SAMLSource
|
||||
from authentik.sources.saml.processors.request import RequestProcessor
|
||||
|
||||
|
||||
@@ -35,7 +35,6 @@ class TestSchema(TestCase):
|
||||
issuer="authentik",
|
||||
signing_kp=cert,
|
||||
pre_authentication_flow=create_test_flow(),
|
||||
binding_type=SAMLBindingTypes.POST,
|
||||
)
|
||||
self.request_factory = RequestFactory()
|
||||
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
"""authentik SAML IDP Views"""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from django.core.validators import URLValidator
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
from django.http.response import HttpResponseBadRequest
|
||||
@@ -37,8 +35,6 @@ REQUEST_KEY_SAML_SIG_ALG = "SigAlg"
|
||||
REQUEST_KEY_SAML_RESPONSE = "SAMLResponse"
|
||||
REQUEST_KEY_RELAY_STATE = "RelayState"
|
||||
|
||||
DEPRECATION_SP_BINDING_REDIRECT = "authentik.providers.saml.sp_binding_redirect"
|
||||
|
||||
PLAN_CONTEXT_SAML_AUTH_N_REQUEST = "authentik/providers/saml/authn_request"
|
||||
PLAN_CONTEXT_SAML_LOGOUT_REQUEST = "authentik/providers/saml/logout_request"
|
||||
PLAN_CONTEXT_SAML_LOGOUT_NATIVE_SESSIONS = "goauthentik.io/providers/saml/native_sessions"
|
||||
@@ -122,20 +118,6 @@ class SAMLFlowFinalView(ChallengeStageView):
|
||||
},
|
||||
)
|
||||
if provider.sp_binding == SAMLBindings.REDIRECT:
|
||||
if not Event.filter_not_expired(
|
||||
action=EventAction.CONFIGURATION_WARNING,
|
||||
context__deprecation=DEPRECATION_SP_BINDING_REDIRECT,
|
||||
).exists():
|
||||
event = Event.new(
|
||||
EventAction.CONFIGURATION_WARNING,
|
||||
deprecation=DEPRECATION_SP_BINDING_REDIRECT,
|
||||
message=(
|
||||
"Redirect binding for Service Provider binding is deprecated "
|
||||
"and will be removed in a future version. Use Post binding instead."
|
||||
),
|
||||
)
|
||||
event.expires = datetime.now() + timedelta(days=30)
|
||||
event.save()
|
||||
url_args = {
|
||||
REQUEST_KEY_SAML_RESPONSE: deflate_and_base64_encode(response),
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""Custom SCIM schemas"""
|
||||
|
||||
from enum import StrEnum
|
||||
from enum import Enum
|
||||
from typing import Self
|
||||
|
||||
from django.core.exceptions import ValidationError
|
||||
@@ -192,7 +192,7 @@ class ServiceProviderConfiguration(BaseServiceProviderConfiguration):
|
||||
)
|
||||
|
||||
|
||||
class PatchOp(StrEnum):
|
||||
class PatchOp(str, Enum):
|
||||
replace = "replace"
|
||||
remove = "remove"
|
||||
add = "add"
|
||||
|
||||
@@ -16,7 +16,7 @@ def make_many_groups(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
many-to-many relationship in SCIMProvider.group_filters"""
|
||||
db_alias = schema_editor.connection.alias
|
||||
|
||||
from authentik.lib.migrations import migration_event
|
||||
Event = apps.get_model("authentik_events", "Event")
|
||||
|
||||
SCIMProvider = apps.get_model("authentik_providers_scim", "scimprovider")
|
||||
|
||||
@@ -26,10 +26,8 @@ def make_many_groups(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
provider.group_filters.add(provider.filter_group)
|
||||
provider.dry_run = True
|
||||
provider.save(update_fields=["dry_run"])
|
||||
migration_event(
|
||||
apps,
|
||||
schema_editor,
|
||||
EventAction.CONFIGURATION_WARNING,
|
||||
event = Event.new(
|
||||
action=EventAction.CONFIGURATION_WARNING,
|
||||
message=(
|
||||
"SCIM Providers' `filter_group` has been removed in favor of `group_filters`. Your configuration has been migrated."
|
||||
"To prevent users/groups from being removed, the provider's dry-run mode has been enabled. Please review "
|
||||
@@ -37,6 +35,7 @@ def make_many_groups(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
),
|
||||
provider=provider,
|
||||
)
|
||||
event.save(using=db_alias)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
"""OpenID Type tests"""
|
||||
|
||||
import time
|
||||
|
||||
from django.test import RequestFactory, TestCase
|
||||
from jwt import encode
|
||||
from requests_mock import Mocker
|
||||
@@ -64,34 +62,23 @@ class TestTypeOpenID(TestCase):
|
||||
|
||||
@Mocker()
|
||||
def test_userinfo_jwt(self, mock: Mocker):
|
||||
"""Test id_token fallback when profile_url is empty"""
|
||||
"""Test userinfo API call"""
|
||||
jwks_cert = create_test_cert()
|
||||
client_id = generate_id()
|
||||
self.source.profile_url = ""
|
||||
self.source.consumer_key = client_id
|
||||
self.source.oidc_jwks = {"keys": [JWKSView.get_jwk_for_key(jwks_cert, "sig")]}
|
||||
self.source.save()
|
||||
token = generate_id()
|
||||
now = int(time.time())
|
||||
id_token_payload = {
|
||||
"iss": "https://example.com",
|
||||
"sub": OPENID_USER["sub"],
|
||||
"aud": client_id,
|
||||
"exp": now + 3600,
|
||||
"iat": now,
|
||||
"name": OPENID_USER["name"],
|
||||
"email": OPENID_USER["email"],
|
||||
"nickname": OPENID_USER["nickname"],
|
||||
}
|
||||
profile = (
|
||||
OpenIDConnectOAuth2Callback(request=self.factory.get("/"))
|
||||
.get_client(self.source)
|
||||
.get_profile_info(
|
||||
{
|
||||
"token_type": "Bearer",
|
||||
"token_type": "foo",
|
||||
"access_token": token,
|
||||
"id_token": encode(
|
||||
id_token_payload,
|
||||
{
|
||||
"foo": "bar",
|
||||
},
|
||||
key=jwks_cert.private_key,
|
||||
algorithm="RS256",
|
||||
headers={"kid": self.source.oidc_jwks["keys"][0]["kid"]},
|
||||
@@ -99,8 +86,9 @@ class TestTypeOpenID(TestCase):
|
||||
}
|
||||
)
|
||||
)
|
||||
self.assertEqual(profile["sub"], OPENID_USER["sub"])
|
||||
self.assertEqual(profile["name"], OPENID_USER["name"])
|
||||
self.assertEqual(profile["email"], OPENID_USER["email"])
|
||||
self.assertEqual(profile["aud"], client_id)
|
||||
self.assertEqual(profile["iss"], "https://example.com")
|
||||
self.assertEqual(
|
||||
profile,
|
||||
{
|
||||
"foo": "bar",
|
||||
},
|
||||
)
|
||||
|
||||
@@ -49,15 +49,8 @@ class OpenIDConnectClient(UserprofileHeaderAuthClient):
|
||||
raw = get_unverified_header(id_token)
|
||||
jwk = PyJWKSet.from_dict(self.source.oidc_jwks)
|
||||
key = [key for key in jwk.keys if key.key_id == raw["kid"]][0]
|
||||
return decode(
|
||||
id_token,
|
||||
key=key,
|
||||
algorithms=[raw["alg"]],
|
||||
audience=self.get_client_id(),
|
||||
options={"verify_iss": False},
|
||||
)
|
||||
except (PyJWTError, IndexError, ValueError) as exc:
|
||||
self.logger.warning("Failed to decode id_token", exc=exc)
|
||||
return decode(id_token, key=key, algorithms=raw["alg"])
|
||||
except PyJWTError, IndexError, ValueError:
|
||||
return None
|
||||
|
||||
|
||||
|
||||
@@ -7,7 +7,6 @@ from django.http import HttpRequest
|
||||
from django.templatetags.static import static
|
||||
from django.urls import reverse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from lxml.etree import _Element # nosec
|
||||
from rest_framework.serializers import Serializer
|
||||
|
||||
from authentik.common.saml.constants import (
|
||||
@@ -218,8 +217,9 @@ class SAMLSource(Source):
|
||||
def property_mapping_type(self) -> type[PropertyMapping]:
|
||||
return SAMLSourcePropertyMapping
|
||||
|
||||
def get_base_user_properties(self, root: _Element, assertion: _Element, name_id: Any, **kwargs):
|
||||
def get_base_user_properties(self, root: Any, name_id: Any, **kwargs):
|
||||
attributes = {}
|
||||
assertion = root.find(f"{{{NS_SAML_ASSERTION}}}Assertion")
|
||||
if assertion is None:
|
||||
raise ValueError("Assertion element not found")
|
||||
attribute_statement = assertion.find(f"{{{NS_SAML_ASSERTION}}}AttributeStatement")
|
||||
|
||||
@@ -20,7 +20,7 @@ from authentik.lib.xml import remove_xml_newlines
|
||||
from authentik.providers.saml.utils import get_random_id
|
||||
from authentik.providers.saml.utils.encoding import deflate_and_base64_encode
|
||||
from authentik.providers.saml.utils.time import get_time_string
|
||||
from authentik.sources.saml.models import SAMLBindingTypes, SAMLSource
|
||||
from authentik.sources.saml.models import SAMLSource
|
||||
|
||||
SESSION_KEY_REQUEST_ID = "authentik/sources/saml/request_id"
|
||||
|
||||
@@ -70,7 +70,7 @@ class RequestProcessor:
|
||||
# Create issuer object
|
||||
auth_n_request.append(self.get_issuer())
|
||||
|
||||
if self.source.signing_kp and self.source.binding_type != SAMLBindingTypes.REDIRECT:
|
||||
if self.source.signing_kp:
|
||||
sign_algorithm_transform = SIGN_ALGORITHM_TRANSFORM_MAP.get(
|
||||
self.source.signature_algorithm, xmlsec.constants.TransformRsaSha1
|
||||
)
|
||||
@@ -91,7 +91,7 @@ class RequestProcessor:
|
||||
(used for POST Bindings)"""
|
||||
auth_n_request = self.get_auth_n()
|
||||
|
||||
if self.source.signing_kp and self.source.binding_type != SAMLBindingTypes.REDIRECT:
|
||||
if self.source.signing_kp:
|
||||
xmlsec.tree.add_ids(auth_n_request, ["ID"])
|
||||
|
||||
ctx = xmlsec.SignatureContext()
|
||||
|
||||
@@ -23,14 +23,12 @@ from authentik.common.saml.constants import (
|
||||
SAML_NAME_ID_FORMAT_TRANSIENT,
|
||||
SAML_NAME_ID_FORMAT_WINDOWS,
|
||||
SAML_NAME_ID_FORMAT_X509,
|
||||
SAML_STATUS_SUCCESS,
|
||||
)
|
||||
from authentik.core.models import (
|
||||
USER_ATTRIBUTE_DELETE_ON_LOGOUT,
|
||||
USER_ATTRIBUTE_EXPIRES,
|
||||
USER_ATTRIBUTE_GENERATED,
|
||||
USER_ATTRIBUTE_SOURCES,
|
||||
USERNAME_MAX_LENGTH,
|
||||
User,
|
||||
)
|
||||
from authentik.core.sources.flow_manager import SourceFlowManager
|
||||
@@ -66,8 +64,6 @@ class ResponseProcessor:
|
||||
|
||||
_http_request: HttpRequest
|
||||
|
||||
_assertion: _Element | None = None
|
||||
|
||||
def __init__(self, source: SAMLSource, request: HttpRequest):
|
||||
self._source = source
|
||||
self._http_request = request
|
||||
@@ -124,7 +120,6 @@ class ResponseProcessor:
|
||||
index_of,
|
||||
decrypted_assertion,
|
||||
)
|
||||
self._assertion = decrypted_assertion
|
||||
|
||||
def _verify_signature(self, signature_node: _Element):
|
||||
"""Verify a single signature node"""
|
||||
@@ -165,10 +160,6 @@ class ResponseProcessor:
|
||||
raise InvalidSignature("No Signature exists in the Assertion element.")
|
||||
|
||||
self._verify_signature(signature_nodes[0])
|
||||
parent = signature_nodes[0].getparent()
|
||||
if parent is None or parent.tag != f"{{{NS_SAML_ASSERTION}}}Assertion":
|
||||
raise InvalidSignature("No Signature exists in the Assertion element.")
|
||||
self._assertion = parent
|
||||
|
||||
def _verify_request_id(self):
|
||||
if self._source.allow_idp_initiated:
|
||||
@@ -195,19 +186,9 @@ class ResponseProcessor:
|
||||
status = self._root.find(f"{{{NS_SAML_PROTOCOL}}}Status")
|
||||
if status is None:
|
||||
return
|
||||
status_code = status.find(f"{{{NS_SAML_PROTOCOL}}}StatusCode")
|
||||
message = status.find(f"{{{NS_SAML_PROTOCOL}}}StatusMessage")
|
||||
message_text = message.text if message is not None else None
|
||||
detail = status.find(f"{{{NS_SAML_PROTOCOL}}}StatusDetail")
|
||||
detail_text = etree.tostring(detail, encoding="unicode") if detail is not None else None
|
||||
if status_code.attrib.get("Value") != SAML_STATUS_SUCCESS:
|
||||
if detail_text and message_text:
|
||||
raise ValueError(f"{message_text}: {detail_text}")
|
||||
raise ValueError(
|
||||
detail_text or message_text or f"SAML Status: {status_code.attrib.get('Value')}"
|
||||
)
|
||||
if message_text or detail_text:
|
||||
LOGGER.debug("SAML Status message", message=message_text, detail=detail_text)
|
||||
if message is not None:
|
||||
raise ValueError(message.text)
|
||||
|
||||
def _handle_name_id_transient(self) -> SourceFlowManager:
|
||||
"""Handle a NameID with the Format of Transient. This is a bit more complex than other
|
||||
@@ -216,14 +197,11 @@ class ResponseProcessor:
|
||||
on logout and periodically."""
|
||||
# Create a temporary User
|
||||
name_id = self._get_name_id()
|
||||
username = name_id.text
|
||||
# trim username to ensure it is max 150 chars
|
||||
username = f"ak-{username[: USERNAME_MAX_LENGTH - 14]}-transient"
|
||||
expiry = mktime(
|
||||
(now() + timedelta_from_string(self._source.temporary_user_delete_after)).timetuple()
|
||||
)
|
||||
user: User = User.objects.create(
|
||||
username=username,
|
||||
username=name_id.text,
|
||||
attributes={
|
||||
USER_ATTRIBUTE_GENERATED: True,
|
||||
USER_ATTRIBUTE_SOURCES: [
|
||||
@@ -246,21 +224,14 @@ class ResponseProcessor:
|
||||
identifier=str(name_id.text),
|
||||
user_info={
|
||||
"root": self._root,
|
||||
"assertion": self.get_assertion(),
|
||||
"name_id": name_id,
|
||||
},
|
||||
policy_context={},
|
||||
)
|
||||
|
||||
def get_assertion(self) -> Element | None:
|
||||
"""Get assertion element, if we have a signed assertion"""
|
||||
if self._assertion is not None:
|
||||
return self._assertion
|
||||
return self._root.find(f"{{{NS_SAML_ASSERTION}}}Assertion")
|
||||
|
||||
def _get_name_id(self) -> Element:
|
||||
"""Get NameID Element"""
|
||||
assertion = self.get_assertion()
|
||||
assertion = self._root.find(f"{{{NS_SAML_ASSERTION}}}Assertion")
|
||||
if assertion is None:
|
||||
raise ValueError("Assertion element not found")
|
||||
subject = assertion.find(f"{{{NS_SAML_ASSERTION}}}Subject")
|
||||
@@ -313,7 +284,6 @@ class ResponseProcessor:
|
||||
identifier=str(name_id.text),
|
||||
user_info={
|
||||
"root": self._root,
|
||||
"assertion": self.get_assertion(),
|
||||
"name_id": name_id,
|
||||
},
|
||||
policy_context={
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<saml2p:Response xmlns:saml2p="urn:oasis:names:tc:SAML:2.0:protocol" Destination="https://127.0.0.1:9443/source/saml/google/acs/" ID="_ee7a8865ac457e7b22cb4f16b39ceca9" IssueInstant="2022-10-14T13:52:04.479Z" Version="2.0">
|
||||
<saml2:Issuer xmlns:saml2="urn:oasis:names:tc:SAML:2.0:assertion">https://accounts.google.com/o/saml2?idpid=</saml2:Issuer>
|
||||
<saml2p:Status>
|
||||
<saml2p:StatusCode Value="urn:oasis:names:tc:SAML:2.0:status:Requester">
|
||||
<saml2p:StatusCode Value="urn:oasis:names:tc:SAML:2.0:status:RequestDenied"></saml2p:StatusCode>
|
||||
</saml2p:StatusCode>
|
||||
<saml2p:StatusMessage>Authentication failed</saml2p:StatusMessage>
|
||||
<saml2p:StatusDetail>
|
||||
<Cause>User account is disabled</Cause>
|
||||
</saml2p:StatusDetail>
|
||||
</saml2p:Status>
|
||||
</saml2p:Response>
|
||||
@@ -1,68 +0,0 @@
|
||||
<samlp:Response xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol" xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion" ID="_8e8dc5f69a98cc4c1ff3427e5ce34606fd672f91e6" Version="2.0" IssueInstant="2014-07-17T01:01:48Z" Destination="http://sp.example.com/demo1/index.php?acs" InResponseTo="ONELOGIN_4fee3b046395c4e751011e97f8900b5273d56685">
|
||||
<saml:Issuer>http://idp.example.com/metadata.php</saml:Issuer>
|
||||
<samlp:Status>
|
||||
<samlp:StatusCode Value="urn:oasis:names:tc:SAML:2.0:status:Success"/>
|
||||
</samlp:Status>
|
||||
<saml:Assertion xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xs="http://www.w3.org/2001/XMLSchema" ID="_other_id_pfxa06693ef-cec7-f4a6-cb7f-ad074445a1a3" Version="2.0" IssueInstant="2014-07-17T01:01:48Z">
|
||||
<saml:Issuer>http://idp.example.com/metadata.php</saml:Issuer>
|
||||
<saml:Subject>
|
||||
<saml:NameID SPNameQualifier="http://sp.example.com/demo1/metadata.php" Format="urn:oasis:names:tc:SAML:2.0:nameid-format:transient">bad</saml:NameID>
|
||||
<saml:SubjectConfirmation Method="urn:oasis:names:tc:SAML:2.0:cm:bearer">
|
||||
<saml:SubjectConfirmationData NotOnOrAfter="2024-01-18T06:21:48Z" Recipient="http://sp.example.com/demo1/index.php?acs" InResponseTo="ONELOGIN_4fee3b046395c4e751011e97f8900b5273d56685"/>
|
||||
</saml:SubjectConfirmation>
|
||||
</saml:Subject>
|
||||
<saml:Conditions NotBefore="2014-07-17T01:01:18Z" NotOnOrAfter="2024-01-18T06:21:48Z">
|
||||
<saml:AudienceRestriction>
|
||||
<saml:Audience>http://sp.example.com/demo1/metadata.php</saml:Audience>
|
||||
</saml:AudienceRestriction>
|
||||
</saml:Conditions>
|
||||
<saml:AuthnStatement AuthnInstant="2014-07-17T01:01:48Z" SessionNotOnOrAfter="2024-07-17T09:01:48Z" SessionIndex="_be9967abd904ddcae3c0eb4189adbe3f71e327cf93">
|
||||
<saml:AuthnContext>
|
||||
<saml:AuthnContextClassRef>urn:oasis:names:tc:SAML:2.0:ac:classes:Password</saml:AuthnContextClassRef>
|
||||
</saml:AuthnContext>
|
||||
</saml:AuthnStatement>
|
||||
<saml:AttributeStatement>
|
||||
<saml:Attribute Name="uid" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
|
||||
<saml:AttributeValue xsi:type="xs:string">bad</saml:AttributeValue>
|
||||
</saml:Attribute>
|
||||
<saml:Attribute Name="mail" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
|
||||
<saml:AttributeValue xsi:type="xs:string">bad</saml:AttributeValue>
|
||||
</saml:Attribute>
|
||||
</saml:AttributeStatement>
|
||||
</saml:Assertion>
|
||||
<saml:Assertion xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xs="http://www.w3.org/2001/XMLSchema" ID="pfxa06693ef-cec7-f4a6-cb7f-ad074445a1a3" Version="2.0" IssueInstant="2014-07-17T01:01:48Z">
|
||||
<saml:Issuer>http://idp.example.com/metadata.php</saml:Issuer><ds:Signature xmlns:ds="http://www.w3.org/2000/09/xmldsig#">
|
||||
<ds:SignedInfo><ds:CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/>
|
||||
<ds:SignatureMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1"/>
|
||||
<ds:Reference URI="#pfxa06693ef-cec7-f4a6-cb7f-ad074445a1a3"><ds:Transforms><ds:Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"/><ds:Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/></ds:Transforms><ds:DigestMethod Algorithm="http://www.w3.org/2000/09/xmldsig#sha1"/><ds:DigestValue>zNDuGxwP4gVkv/Dzt7kiKo/4gzk=</ds:DigestValue></ds:Reference></ds:SignedInfo><ds:SignatureValue>GLP/vE8uxerB0uDpPslUgLPBL6ePQB619MoQ0I2Y5lAtFE6CB1zh8BnzChRx/bFjNy4byfOe8mFfM0r7WUi1PJOFWyUPoatdLl7wHHBIRTnPpYmu3Tb2Gz0sOP0F8wW7JkBft5gJfVw49nk5si9/3Q3o52jnJZ7dPtqfIOh8uNeopikK0HLF6sU05qCCtjcXfniEnLQFNBFMo9uY5GQqmR5n3nqPz1wYyyfFOAbVmGgBIoO2PfGX2GVLQhltc9qf2JMhks4jgZsZ8iLUIiH1lcLGWZEEs94k8k0P6gSv1uZ7Vbhksd/N9Jq9pCVuEJ/jRPcAdVjzbxqKQAj6ELwr8O6fepTzA+CAdwEolBnx/C6TmSbVZ+IWk6QUGe4x4+IAukC+0hkKENlO0ELOScksvyhpgHbxNA4rp+DhGupCaO/I2RrsQkmvavbqm+wSEspK7scK112SDunjDvqPHsPYgukD33T/97PxTLorg2kKP9HHJwPJKoXXeyOGcA6vwK+RqrAlZ2dLGAgcXo+sJcdCLuvxDNz9VXofBjBZIKVKdmYhm0QJaPYHtuQsAyFavQhdOBOmGHb7QX3YE3Xy4dX4LymtT+Jlb1I4FJSht/9HUIHW1FdhfDak4f7gUgjuMamMddLD0jVgeESupSREzFv/gj2IrctkbgjAO0iuuiBgKMg=</ds:SignatureValue>
|
||||
<ds:KeyInfo><ds:X509Data><ds:X509Certificate>MIIFUzCCAzugAwIBAgIRAL6tbNcE9Ej9gNlbGKswfFMwDQYJKoZIhvcNAQELBQAwHTEbMBkGA1UEAwwSYXV0aGVudGlrIDIwMjUuNi4zMB4XDTI1MDcxNTE4MDQzNloXDTI2MDcxNjE4MDQzNlowVjEqMCgGA1UEAwwhYXV0aGVudGlrIFNlbGYtc2lnbmVkIENlcnRpZmljYXRlMRIwEAYDVQQKDAlhdXRoZW50aWsxFDASBgNVBAsMC1NlbGYtc2lnbmVkMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjmut/+bBRLlyrbf+WIfg8ZTw9t6VnsiU1n04nPTulpRAz4nBOoOHNRIruSpZyFeFa6x9jwn4Ma5EFUH7HqnRvhoujm8U17OglXWZt0DLCZ6S5xPmdMogFXjJDmg9okIcI/cb9VbR6I8uvm1oiaOWCr36RTiqZ6rmdjQcuUPLr1+V/LxWQI463S+5QA2HZxAGalp45MJAz2sa9iczktKMgyYlfjj1cruFARxxeheu5qIK7aQWfyPj1QlMb9mi4VQaxUwGrAui4Tq614ivRJY2SkZb0Aq/LLSQoQWYHtYyQIasrOXJm0JuPDqhINPBDowyhu8DihC3uzOpmTXLKc5UoIQk+Q1h5iH74A3/kxOJUw13FXzRiDxC/yGthPYLyFHsDiJolscMKSCqlDvEMcpM4mxFeud9sKUb71SZr8sqmJl3qtvZmKpkR4y8pN2c00p10t0htqONmr5kyPxmhz0HCrosiPYB4olNjaydKviNTtPJ7TtnPyeA3iXGzCP1e80XzUoJrDqON5/GcpYgqsP/kGj8Qvqesa4Fez+1+5pAGHN2VzQbkHAgK3s4YRXrGLTs7wg27F9T0RE28Mm0RYBkYpdp4/5PuTTulthB9mkUBSJMgENmQAYkapvonFDsJkTi39qnsddbZusOLT4z3hsA38eFEwRqnbNZVUGPIp/O1SsCAwEAAaNVMFMwUQYDVR0RAQH/BEcwRYJDRUZ4QXVLRzV6SlVUTWpWNTJoMkRJMUQ5MXdLblZKaXFwNmpwRTRTTy5zZWxmLXNpZ25lZC5nb2F1dGhlbnRpay5pbzANBgkqhkiG9w0BAQsFAAOCAgEAYLThxDVpA1OIAVK/buueRJExIWr6y4s6NtpuR8UQEcfq5hfoc4zMFGHR5+u1WFIb5siK25xh/OnS7bLdLic6AkjZSrx91+0v2Jn9gfUqbs5AJ040XzAAdx/Mb4s0+537yhB+/JXPylR1QxhGbO7koXQ5JDhAXWKCw2O1C+80mN8dbhQvDkEtsXrHrtXclcqf2TT89XAzc5HAC8NmP4SF+FafAREQB1KdaG4QAbc/gnjsX2YJD89SDL+3jMp6F7R1Ym+bWt5oWqx2tkm6HGXd3fbpfQlnfrRN60tMjjLmw1cDMhOhpdragY5zokniEUL2pKVtrxFp7V1ZpoMI0Kt5MKkOXrezi542NWSgkGehlsDLD9wtuCNem2arR0mNnMLdYkMG7G0dpAq3Tl32dgfMfyKnNyE2O/6/EeEuzUH2NfTU1p7AUQfLrf4rtNcJEs9OAPuC9vy7w9YEpF997T+FhR2Ub1C423NQj4bwlS/9f7MIBkSi1EgnQuiSGB5epxAKI3oOVrmzOpTuvr6wZXV9pM3zdfbcoGuFWP6Ix7W8G5vg+0WvoSjc2fwGXYlidEK3xlQSMAaQ4CMClpPsKLScRq1nrQGzPYoiL1DYubsOWx9ohll6+jNjKI6f79WwbHYrW4EeRIOz38+m46EDjAWZBMgrE7J/3DhgeLEVJYBA5K0=</ds:X509Certificate></ds:X509Data></ds:KeyInfo></ds:Signature>
|
||||
<saml:Subject>
|
||||
<saml:NameID SPNameQualifier="http://sp.example.com/demo1/metadata.php" Format="urn:oasis:names:tc:SAML:2.0:nameid-format:transient">_ce3d2948b4cf20146dee0a0b3dd6f69b6cf86f62d7</saml:NameID>
|
||||
<saml:SubjectConfirmation Method="urn:oasis:names:tc:SAML:2.0:cm:bearer">
|
||||
<saml:SubjectConfirmationData NotOnOrAfter="2024-01-18T06:21:48Z" Recipient="http://sp.example.com/demo1/index.php?acs" InResponseTo="ONELOGIN_4fee3b046395c4e751011e97f8900b5273d56685"/>
|
||||
</saml:SubjectConfirmation>
|
||||
</saml:Subject>
|
||||
<saml:Conditions NotBefore="2014-07-17T01:01:18Z" NotOnOrAfter="2024-01-18T06:21:48Z">
|
||||
<saml:AudienceRestriction>
|
||||
<saml:Audience>http://sp.example.com/demo1/metadata.php</saml:Audience>
|
||||
</saml:AudienceRestriction>
|
||||
</saml:Conditions>
|
||||
<saml:AuthnStatement AuthnInstant="2014-07-17T01:01:48Z" SessionNotOnOrAfter="2024-07-17T09:01:48Z" SessionIndex="_be9967abd904ddcae3c0eb4189adbe3f71e327cf93">
|
||||
<saml:AuthnContext>
|
||||
<saml:AuthnContextClassRef>urn:oasis:names:tc:SAML:2.0:ac:classes:Password</saml:AuthnContextClassRef>
|
||||
</saml:AuthnContext>
|
||||
</saml:AuthnStatement>
|
||||
<saml:AttributeStatement>
|
||||
<saml:Attribute Name="uid" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
|
||||
<saml:AttributeValue xsi:type="xs:string">test</saml:AttributeValue>
|
||||
</saml:Attribute>
|
||||
<saml:Attribute Name="mail" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
|
||||
<saml:AttributeValue xsi:type="xs:string">test@example.com</saml:AttributeValue>
|
||||
</saml:Attribute>
|
||||
<saml:Attribute Name="eduPersonAffiliation" NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:basic">
|
||||
<saml:AttributeValue xsi:type="xs:string">users</saml:AttributeValue>
|
||||
<saml:AttributeValue xsi:type="xs:string">examplerole1</saml:AttributeValue>
|
||||
</saml:Attribute>
|
||||
</saml:AttributeStatement>
|
||||
</saml:Assertion>
|
||||
</samlp:Response>
|
||||
@@ -1,44 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<saml2p:Response xmlns:saml2p="urn:oasis:names:tc:SAML:2.0:protocol" Destination="https://127.0.0.1:9443/source/saml/google/acs/" ID="_1e17063957f10819a5a8e147971fec22" IssueInstant="2022-10-14T14:11:49.590Z" Version="2.0">
|
||||
<saml2:Issuer xmlns:saml2="urn:oasis:names:tc:SAML:2.0:assertion">https://accounts.google.com/o/saml2?idpid=</saml2:Issuer>
|
||||
<saml2p:Status>
|
||||
<saml2p:StatusCode Value="urn:oasis:names:tc:SAML:2.0:status:Success"></saml2p:StatusCode>
|
||||
<saml2p:StatusMessage>Login successful</saml2p:StatusMessage>
|
||||
<saml2p:StatusDetail>
|
||||
<Detail>Additional info from IdP</Detail>
|
||||
</saml2p:StatusDetail>
|
||||
</saml2p:Status>
|
||||
<saml2:Assertion xmlns:saml2="urn:oasis:names:tc:SAML:2.0:assertion" ID="_346001c5708ffd118c40edbc0c72fc60" IssueInstant="2022-10-14T14:11:49.590Z" Version="2.0">
|
||||
<saml2:Issuer>https://accounts.google.com/o/saml2?idpid=</saml2:Issuer>
|
||||
<saml2:Subject>
|
||||
<saml2:NameID Format="urn:oasis:names:tc:SAML:2.0:nameid-format:persistent">jens@goauthentik.io</saml2:NameID>
|
||||
<saml2:SubjectConfirmation Method="urn:oasis:names:tc:SAML:2.0:cm:bearer">
|
||||
<saml2:SubjectConfirmationData NotOnOrAfter="2022-10-14T14:16:49.590Z" Recipient="https://127.0.0.1:9443/source/saml/google/acs/"></saml2:SubjectConfirmationData>
|
||||
</saml2:SubjectConfirmation>
|
||||
</saml2:Subject>
|
||||
<saml2:Conditions NotBefore="2022-10-14T14:06:49.590Z" NotOnOrAfter="2022-10-14T14:16:49.590Z">
|
||||
<saml2:AudienceRestriction>
|
||||
<saml2:Audience>https://accounts.google.com/o/saml2?idpid=</saml2:Audience>
|
||||
</saml2:AudienceRestriction>
|
||||
</saml2:Conditions>
|
||||
<saml2:AttributeStatement>
|
||||
<saml2:Attribute Name="name">
|
||||
<saml2:AttributeValue xmlns:xs="http://www.w3.org/2001/XMLSchema"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="xs:anyType">foo</saml2:AttributeValue>
|
||||
</saml2:Attribute>
|
||||
<saml2:Attribute Name="sn">
|
||||
<saml2:AttributeValue xmlns:xs="http://www.w3.org/2001/XMLSchema"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="xs:anyType">bar</saml2:AttributeValue>
|
||||
</saml2:Attribute>
|
||||
<saml2:Attribute Name="email">
|
||||
<saml2:AttributeValue xmlns:xs="http://www.w3.org/2001/XMLSchema"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="xs:anyType">foo@bar.baz</saml2:AttributeValue>
|
||||
</saml2:Attribute>
|
||||
</saml2:AttributeStatement>
|
||||
<saml2:AuthnStatement AuthnInstant="2022-10-14T12:16:21.000Z" SessionIndex="_346001c5708ffd118c40edbc0c72fc60">
|
||||
<saml2:AuthnContext>
|
||||
<saml2:AuthnContextClassRef>urn:oasis:names:tc:SAML:2.0:ac:classes:unspecified</saml2:AuthnContextClassRef>
|
||||
</saml2:AuthnContext>
|
||||
</saml2:AuthnStatement>
|
||||
</saml2:Assertion>
|
||||
</saml2p:Response>
|
||||
@@ -36,9 +36,7 @@ class TestPropertyMappings(TestCase):
|
||||
|
||||
def test_user_base_properties(self):
|
||||
"""Test user base properties"""
|
||||
properties = self.source.get_base_user_properties(
|
||||
root=ROOT, assertion=ROOT.find(f"{{{NS_SAML_ASSERTION}}}Assertion"), name_id=NAME_ID
|
||||
)
|
||||
properties = self.source.get_base_user_properties(root=ROOT, name_id=NAME_ID)
|
||||
self.assertEqual(
|
||||
properties,
|
||||
{
|
||||
@@ -51,11 +49,7 @@ class TestPropertyMappings(TestCase):
|
||||
|
||||
def test_group_base_properties(self):
|
||||
"""Test group base properties"""
|
||||
properties = self.source.get_base_user_properties(
|
||||
root=ROOT_GROUPS,
|
||||
assertion=ROOT_GROUPS.find(f"{{{NS_SAML_ASSERTION}}}Assertion"),
|
||||
name_id=NAME_ID,
|
||||
)
|
||||
properties = self.source.get_base_user_properties(root=ROOT_GROUPS, name_id=NAME_ID)
|
||||
self.assertEqual(properties["groups"], ["group 1", "group 2"])
|
||||
for group_id in ["group 1", "group 2"]:
|
||||
properties = self.source.get_base_group_properties(root=ROOT, group_id=group_id)
|
||||
|
||||
@@ -72,39 +72,6 @@ class TestResponseProcessor(TestCase):
|
||||
},
|
||||
)
|
||||
|
||||
def test_success_with_status_message_and_detail(self):
|
||||
"""Test success with StatusMessage and StatusDetail present (should not raise error)"""
|
||||
request = self.factory.post(
|
||||
"/",
|
||||
data={
|
||||
"SAMLResponse": b64encode(
|
||||
load_fixture("fixtures/response_success_with_message.xml").encode()
|
||||
).decode()
|
||||
},
|
||||
)
|
||||
|
||||
parser = ResponseProcessor(self.source, request)
|
||||
parser.parse()
|
||||
sfm = parser.prepare_flow_manager()
|
||||
self.assertEqual(sfm.user_properties["username"], "jens@goauthentik.io")
|
||||
|
||||
def test_error_with_message_and_detail(self):
|
||||
"""Test error status with StatusMessage and StatusDetail includes both in error"""
|
||||
request = self.factory.post(
|
||||
"/",
|
||||
data={
|
||||
"SAMLResponse": b64encode(
|
||||
load_fixture("fixtures/response_error_with_detail.xml").encode()
|
||||
).decode()
|
||||
},
|
||||
)
|
||||
|
||||
with self.assertRaises(ValueError) as ctx:
|
||||
ResponseProcessor(self.source, request).parse()
|
||||
# Should contain both detail and message
|
||||
self.assertIn("User account is disabled", str(ctx.exception))
|
||||
self.assertIn("Authentication failed", str(ctx.exception))
|
||||
|
||||
def test_encrypted_correct(self):
|
||||
"""Test encrypted"""
|
||||
key = load_fixture("fixtures/encrypted-key.pem")
|
||||
@@ -164,31 +131,6 @@ class TestResponseProcessor(TestCase):
|
||||
parser = ResponseProcessor(self.source, request)
|
||||
parser.parse()
|
||||
|
||||
def test_verification_assertion_duplicate(self):
|
||||
"""Test verifying signature inside assertion, where the response has another assertion
|
||||
before our signed assertion"""
|
||||
key = load_fixture("fixtures/signature_cert.pem")
|
||||
kp = CertificateKeyPair.objects.create(
|
||||
name=generate_id(),
|
||||
certificate_data=key,
|
||||
)
|
||||
self.source.verification_kp = kp
|
||||
self.source.signed_assertion = True
|
||||
self.source.signed_response = False
|
||||
request = self.factory.post(
|
||||
"/",
|
||||
data={
|
||||
"SAMLResponse": b64encode(
|
||||
load_fixture("fixtures/response_signed_assertion_dup.xml").encode()
|
||||
).decode()
|
||||
},
|
||||
)
|
||||
|
||||
parser = ResponseProcessor(self.source, request)
|
||||
parser.parse()
|
||||
self.assertNotEqual(parser._get_name_id().text, "bad")
|
||||
self.assertEqual(parser._get_name_id().text, "_ce3d2948b4cf20146dee0a0b3dd6f69b6cf86f62d7")
|
||||
|
||||
def test_verification_response(self):
|
||||
"""Test verifying signature inside response"""
|
||||
key = load_fixture("fixtures/signature_cert.pem")
|
||||
|
||||
@@ -156,7 +156,7 @@ class ACSView(View):
|
||||
processor = ResponseProcessor(source, request)
|
||||
try:
|
||||
processor.parse()
|
||||
except (InvalidSignature, MissingSAMLResponse, VerificationError, ValueError) as exc:
|
||||
except (InvalidSignature, MissingSAMLResponse, VerificationError) as exc:
|
||||
return bad_request_message(request, str(exc))
|
||||
|
||||
try:
|
||||
|
||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -6,7 +6,6 @@ from django.contrib.auth.views import redirect_to_login
|
||||
from django.http.request import HttpRequest
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.middleware import get_user
|
||||
from authentik.core.models import Session
|
||||
from authentik.events.context_processors.asn import ASN_CONTEXT_PROCESSOR
|
||||
from authentik.events.context_processors.geoip import GEOIP_CONTEXT_PROCESSOR
|
||||
@@ -55,13 +54,11 @@ class SessionBindingBroken(SentryIgnoredException):
|
||||
|
||||
def logout_extra(request: HttpRequest, exc: SessionBindingBroken):
|
||||
"""Similar to django's logout method, but able to carry more info to the signal"""
|
||||
# Since this middleware runs before the AuthenticationMiddleware, we can't use `request.user`
|
||||
# as it hasn't been populated yet.
|
||||
user = get_user(request)
|
||||
if not getattr(user, "is_authenticated", True):
|
||||
user = None
|
||||
# Dispatch the signal before the user is logged out so the receivers have a
|
||||
# chance to find out *who* logged out.
|
||||
user = getattr(request, "user", None)
|
||||
if not getattr(user, "is_authenticated", True):
|
||||
user = None
|
||||
user_logged_out.send(
|
||||
sender=user.__class__, request=request, user=user, event_extra=exc.to_event()
|
||||
)
|
||||
|
||||
@@ -10,8 +10,6 @@ from django.utils.timezone import now
|
||||
from authentik.blueprints.tests import apply_blueprint
|
||||
from authentik.core.models import AuthenticatedSession, Session
|
||||
from authentik.core.tests.utils import create_test_flow, create_test_user
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.events.utils import get_user
|
||||
from authentik.flows.markers import StageMarker
|
||||
from authentik.flows.models import FlowDesignation, FlowStageBinding
|
||||
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlan
|
||||
@@ -272,7 +270,6 @@ class TestUserLoginStage(FlowTestCase):
|
||||
|
||||
def test_session_binding_broken(self):
|
||||
"""Test session binding"""
|
||||
Event.objects.all().delete()
|
||||
self.client.force_login(self.user)
|
||||
session = self.client.session
|
||||
session[Session.Keys.LAST_IP] = "192.0.2.1"
|
||||
@@ -288,5 +285,3 @@ class TestUserLoginStage(FlowTestCase):
|
||||
)
|
||||
+ f"?{NEXT_ARG_NAME}={reverse("authentik_api:user-me")}",
|
||||
)
|
||||
event = Event.objects.filter(action=EventAction.LOGOUT).first()
|
||||
self.assertEqual(event.user, get_user(self.user))
|
||||
|
||||
@@ -13,9 +13,6 @@ from django_dramatiq_postgres.middleware import HTTPServer
|
||||
from django_dramatiq_postgres.middleware import (
|
||||
MetricsMiddleware as BaseMetricsMiddleware,
|
||||
)
|
||||
from django_dramatiq_postgres.middleware import (
|
||||
_MetricsHandler as BaseMetricsHandler,
|
||||
)
|
||||
from dramatiq.broker import Broker
|
||||
from dramatiq.message import Message
|
||||
from dramatiq.middleware import Middleware
|
||||
@@ -26,7 +23,6 @@ from authentik import authentik_full_version
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.lib.sentry import should_ignore_exception
|
||||
from authentik.lib.utils.reflection import class_to_path
|
||||
from authentik.root.monitoring import monitoring_set
|
||||
from authentik.root.signals import post_startup, pre_startup, startup
|
||||
from authentik.tasks.models import Task, TaskLog, TaskStatus, WorkerStatus
|
||||
from authentik.tenants.models import Tenant
|
||||
@@ -264,15 +260,7 @@ class WorkerStatusMiddleware(Middleware):
|
||||
sleep(30)
|
||||
|
||||
|
||||
class _MetricsHandler(BaseMetricsHandler):
|
||||
def do_GET(self) -> None:
|
||||
monitoring_set.send_robust(self)
|
||||
return super().do_GET()
|
||||
|
||||
|
||||
class MetricsMiddleware(BaseMetricsMiddleware):
|
||||
handler_class = _MetricsHandler
|
||||
|
||||
@property
|
||||
def forks(self):
|
||||
from authentik.tasks.forks import worker_metrics
|
||||
|
||||
@@ -1,18 +1,14 @@
|
||||
"""admin signals"""
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
import pglock
|
||||
from django.db.models import Count
|
||||
from django.dispatch import receiver
|
||||
from django.utils.timezone import now
|
||||
from django_dramatiq_postgres.models import TaskState
|
||||
from django.utils.timezone import now, timedelta
|
||||
from packaging.version import parse
|
||||
from prometheus_client import Gauge
|
||||
|
||||
from authentik import authentik_full_version
|
||||
from authentik.root.monitoring import monitoring_set
|
||||
from authentik.tasks.models import Task, WorkerStatus
|
||||
from authentik.tasks.models import WorkerStatus
|
||||
|
||||
OLD_GAUGE_WORKERS = Gauge(
|
||||
"authentik_admin_workers",
|
||||
@@ -24,11 +20,6 @@ GAUGE_WORKERS = Gauge(
|
||||
"Currently connected workers, their versions and if they are the same version as authentik",
|
||||
["version", "version_matched"],
|
||||
)
|
||||
GAUGE_TASKS_QUEUED = Gauge(
|
||||
"authentik_tasks_queued",
|
||||
"The number of tasks in queue.",
|
||||
["queue_name", "actor_name"],
|
||||
)
|
||||
|
||||
|
||||
_version = parse(authentik_full_version())
|
||||
@@ -52,16 +43,3 @@ def monitoring_set_workers(sender, **kwargs):
|
||||
for version, stats in worker_version_count.items():
|
||||
OLD_GAUGE_WORKERS.labels(version, stats["matching"]).set(stats["count"])
|
||||
GAUGE_WORKERS.labels(version, stats["matching"]).set(stats["count"])
|
||||
|
||||
|
||||
@receiver(monitoring_set)
|
||||
def monitoring_set_queued_tasks(sender, **kwargs):
|
||||
"""Set number of queued tasks"""
|
||||
for stats in Task.objects.values("queue_name", "actor_name").distinct():
|
||||
GAUGE_TASKS_QUEUED.labels(stats["queue_name"], stats["actor_name"]).set(0)
|
||||
for stats in (
|
||||
Task.objects.filter(state=TaskState.QUEUED)
|
||||
.values("queue_name", "actor_name")
|
||||
.annotate(count=Count("pk"))
|
||||
):
|
||||
GAUGE_TASKS_QUEUED.labels(stats["queue_name"], stats["actor_name"]).set(stats["count"])
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"$schema": "http://json-schema.org/draft-07/schema",
|
||||
"$id": "https://goauthentik.io/blueprints/schema.json",
|
||||
"type": "object",
|
||||
"title": "authentik 2026.2.0 Blueprint schema",
|
||||
"title": "authentik 2026.2.0-rc1 Blueprint schema",
|
||||
"required": [
|
||||
"version",
|
||||
"entries"
|
||||
@@ -696,126 +696,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"required": [
|
||||
"model",
|
||||
"identifiers"
|
||||
],
|
||||
"properties": {
|
||||
"model": {
|
||||
"const": "authentik_lifecycle.lifecycleiteration"
|
||||
},
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"state": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"absent",
|
||||
"created",
|
||||
"must_created",
|
||||
"present"
|
||||
],
|
||||
"default": "present"
|
||||
},
|
||||
"conditions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"permissions": {
|
||||
"$ref": "#/$defs/model_authentik_lifecycle.lifecycleiteration_permissions"
|
||||
},
|
||||
"attrs": {
|
||||
"$ref": "#/$defs/model_authentik_lifecycle.lifecycleiteration"
|
||||
},
|
||||
"identifiers": {
|
||||
"$ref": "#/$defs/model_authentik_lifecycle.lifecycleiteration"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"required": [
|
||||
"model",
|
||||
"identifiers"
|
||||
],
|
||||
"properties": {
|
||||
"model": {
|
||||
"const": "authentik_lifecycle.lifecyclerule"
|
||||
},
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"state": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"absent",
|
||||
"created",
|
||||
"must_created",
|
||||
"present"
|
||||
],
|
||||
"default": "present"
|
||||
},
|
||||
"conditions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"permissions": {
|
||||
"$ref": "#/$defs/model_authentik_lifecycle.lifecyclerule_permissions"
|
||||
},
|
||||
"attrs": {
|
||||
"$ref": "#/$defs/model_authentik_lifecycle.lifecyclerule"
|
||||
},
|
||||
"identifiers": {
|
||||
"$ref": "#/$defs/model_authentik_lifecycle.lifecyclerule"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"required": [
|
||||
"model",
|
||||
"identifiers"
|
||||
],
|
||||
"properties": {
|
||||
"model": {
|
||||
"const": "authentik_lifecycle.review"
|
||||
},
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"state": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"absent",
|
||||
"created",
|
||||
"must_created",
|
||||
"present"
|
||||
],
|
||||
"default": "present"
|
||||
},
|
||||
"conditions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"permissions": {
|
||||
"$ref": "#/$defs/model_authentik_lifecycle.review_permissions"
|
||||
},
|
||||
"attrs": {
|
||||
"$ref": "#/$defs/model_authentik_lifecycle.review"
|
||||
},
|
||||
"identifiers": {
|
||||
"$ref": "#/$defs/model_authentik_lifecycle.review"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"required": [
|
||||
@@ -5682,18 +5562,6 @@
|
||||
"authentik_flows.view_flowstagebinding",
|
||||
"authentik_flows.view_flowtoken",
|
||||
"authentik_flows.view_stage",
|
||||
"authentik_lifecycle.add_lifecycleiteration",
|
||||
"authentik_lifecycle.add_lifecyclerule",
|
||||
"authentik_lifecycle.add_review",
|
||||
"authentik_lifecycle.change_lifecycleiteration",
|
||||
"authentik_lifecycle.change_lifecyclerule",
|
||||
"authentik_lifecycle.change_review",
|
||||
"authentik_lifecycle.delete_lifecycleiteration",
|
||||
"authentik_lifecycle.delete_lifecyclerule",
|
||||
"authentik_lifecycle.delete_review",
|
||||
"authentik_lifecycle.view_lifecycleiteration",
|
||||
"authentik_lifecycle.view_lifecyclerule",
|
||||
"authentik_lifecycle.view_review",
|
||||
"authentik_outposts.add_dockerserviceconnection",
|
||||
"authentik_outposts.add_kubernetesserviceconnection",
|
||||
"authentik_outposts.add_outpost",
|
||||
@@ -6770,192 +6638,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"model_authentik_lifecycle.lifecycleiteration": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"content_type": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"authentik_core.application",
|
||||
"authentik_core.group",
|
||||
"authentik_rbac.role"
|
||||
],
|
||||
"title": "Content type"
|
||||
}
|
||||
},
|
||||
"required": []
|
||||
},
|
||||
"model_authentik_lifecycle.lifecycleiteration_permissions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"permission"
|
||||
],
|
||||
"properties": {
|
||||
"permission": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"add_lifecycleiteration",
|
||||
"change_lifecycleiteration",
|
||||
"delete_lifecycleiteration",
|
||||
"view_lifecycleiteration"
|
||||
]
|
||||
},
|
||||
"user": {
|
||||
"type": "integer"
|
||||
},
|
||||
"role": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"model_authentik_lifecycle.lifecyclerule": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"title": "Name"
|
||||
},
|
||||
"content_type": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"authentik_core.application",
|
||||
"authentik_core.group",
|
||||
"authentik_rbac.role"
|
||||
],
|
||||
"title": "Content type"
|
||||
},
|
||||
"object_id": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
],
|
||||
"minLength": 1,
|
||||
"title": "Object id"
|
||||
},
|
||||
"interval": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"title": "Interval"
|
||||
},
|
||||
"grace_period": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"title": "Grace period"
|
||||
},
|
||||
"reviewer_groups": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"title": "Reviewer groups"
|
||||
},
|
||||
"min_reviewers": {
|
||||
"type": "integer",
|
||||
"minimum": 0,
|
||||
"maximum": 32767,
|
||||
"title": "Min reviewers"
|
||||
},
|
||||
"min_reviewers_is_per_group": {
|
||||
"type": "boolean",
|
||||
"title": "Min reviewers is per group"
|
||||
},
|
||||
"reviewers": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"pattern": "^[-a-zA-Z0-9_]+$"
|
||||
},
|
||||
"title": "Reviewers"
|
||||
},
|
||||
"notification_transports": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "Select which transports should be used to notify the reviewers. If none are selected, the notification will only be shown in the authentik UI."
|
||||
},
|
||||
"title": "Notification transports",
|
||||
"description": "Select which transports should be used to notify the reviewers. If none are selected, the notification will only be shown in the authentik UI."
|
||||
}
|
||||
},
|
||||
"required": []
|
||||
},
|
||||
"model_authentik_lifecycle.lifecyclerule_permissions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"permission"
|
||||
],
|
||||
"properties": {
|
||||
"permission": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"add_lifecyclerule",
|
||||
"change_lifecyclerule",
|
||||
"delete_lifecyclerule",
|
||||
"view_lifecyclerule"
|
||||
]
|
||||
},
|
||||
"user": {
|
||||
"type": "integer"
|
||||
},
|
||||
"role": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"model_authentik_lifecycle.review": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"iteration": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"title": "Iteration"
|
||||
},
|
||||
"note": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
],
|
||||
"minLength": 1,
|
||||
"title": "Note"
|
||||
}
|
||||
},
|
||||
"required": []
|
||||
},
|
||||
"model_authentik_lifecycle.review_permissions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"permission"
|
||||
],
|
||||
"properties": {
|
||||
"permission": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"add_review",
|
||||
"change_review",
|
||||
"delete_review",
|
||||
"view_review"
|
||||
]
|
||||
},
|
||||
"user": {
|
||||
"type": "integer"
|
||||
},
|
||||
"role": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"model_authentik_enterprise.license": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -7483,11 +7165,6 @@
|
||||
"minLength": 1,
|
||||
"title": "Reply url"
|
||||
},
|
||||
"wtrealm": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"title": "Wtrealm"
|
||||
},
|
||||
"assertion_valid_not_before": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
@@ -7853,10 +7530,6 @@
|
||||
"email_sent",
|
||||
"update_available",
|
||||
"export_ready",
|
||||
"review_initiated",
|
||||
"review_overdue",
|
||||
"review_attested",
|
||||
"review_completed",
|
||||
"custom_"
|
||||
],
|
||||
"title": "Action"
|
||||
@@ -7975,10 +7648,6 @@
|
||||
"email_sent",
|
||||
"update_available",
|
||||
"export_ready",
|
||||
"review_initiated",
|
||||
"review_overdue",
|
||||
"review_attested",
|
||||
"review_completed",
|
||||
"custom_"
|
||||
],
|
||||
"title": "Action"
|
||||
@@ -8297,9 +7966,7 @@
|
||||
"content_left",
|
||||
"content_right",
|
||||
"sidebar_left",
|
||||
"sidebar_right",
|
||||
"sidebar_left_frame_background",
|
||||
"sidebar_right_frame_background"
|
||||
"sidebar_right"
|
||||
],
|
||||
"title": "Layout"
|
||||
},
|
||||
@@ -8728,10 +8395,6 @@
|
||||
"email_sent",
|
||||
"update_available",
|
||||
"export_ready",
|
||||
"review_initiated",
|
||||
"review_overdue",
|
||||
"review_attested",
|
||||
"review_completed",
|
||||
"custom_"
|
||||
],
|
||||
"title": "Action",
|
||||
@@ -8819,7 +8482,6 @@
|
||||
"authentik.enterprise.audit",
|
||||
"authentik.enterprise.endpoints.connectors.agent",
|
||||
"authentik.enterprise.endpoints.connectors.fleet",
|
||||
"authentik.enterprise.lifecycle",
|
||||
"authentik.enterprise.policies.unique_password",
|
||||
"authentik.enterprise.providers.google_workspace",
|
||||
"authentik.enterprise.providers.microsoft_entra",
|
||||
@@ -8949,9 +8611,6 @@
|
||||
"authentik_brands.brand",
|
||||
"authentik_blueprints.blueprintinstance",
|
||||
"authentik_endpoints_connectors_fleet.fleetconnector",
|
||||
"authentik_lifecycle.lifecyclerule",
|
||||
"authentik_lifecycle.lifecycleiteration",
|
||||
"authentik_lifecycle.review",
|
||||
"authentik_policies_unique_password.uniquepasswordpolicy",
|
||||
"authentik_providers_google_workspace.googleworkspaceprovider",
|
||||
"authentik_providers_google_workspace.googleworkspaceprovidermapping",
|
||||
@@ -11270,18 +10929,6 @@
|
||||
"authentik_flows.view_flowstagebinding",
|
||||
"authentik_flows.view_flowtoken",
|
||||
"authentik_flows.view_stage",
|
||||
"authentik_lifecycle.add_lifecycleiteration",
|
||||
"authentik_lifecycle.add_lifecyclerule",
|
||||
"authentik_lifecycle.add_review",
|
||||
"authentik_lifecycle.change_lifecycleiteration",
|
||||
"authentik_lifecycle.change_lifecyclerule",
|
||||
"authentik_lifecycle.change_review",
|
||||
"authentik_lifecycle.delete_lifecycleiteration",
|
||||
"authentik_lifecycle.delete_lifecyclerule",
|
||||
"authentik_lifecycle.delete_review",
|
||||
"authentik_lifecycle.view_lifecycleiteration",
|
||||
"authentik_lifecycle.view_lifecyclerule",
|
||||
"authentik_lifecycle.view_review",
|
||||
"authentik_outposts.add_dockerserviceconnection",
|
||||
"authentik_outposts.add_kubernetesserviceconnection",
|
||||
"authentik_outposts.add_outpost",
|
||||
|
||||
@@ -29,7 +29,7 @@ entries:
|
||||
password=request.user.password
|
||||
)
|
||||
# ...otherwise we set an immutable ID based on the user's UID
|
||||
user["on_premises_immutable_id"] = request.user.uid
|
||||
user["on_premises_immutable_id"] = request.user.uid,
|
||||
return user
|
||||
- identifiers:
|
||||
managed: goauthentik.io/providers/microsoft_entra/group
|
||||
|
||||
6
go.mod
6
go.mod
@@ -23,16 +23,16 @@ require (
|
||||
github.com/jellydator/ttlcache/v3 v3.4.0
|
||||
github.com/mitchellh/mapstructure v1.5.0
|
||||
github.com/nmcclain/asn1-ber v0.0.0-20170104154839-2661553a0484
|
||||
github.com/pires/go-proxyproto v0.10.0
|
||||
github.com/pires/go-proxyproto v0.9.2
|
||||
github.com/prometheus/client_golang v1.23.2
|
||||
github.com/sethvargo/go-envconfig v1.3.0
|
||||
github.com/sirupsen/logrus v1.9.4
|
||||
github.com/spf13/cobra v1.10.2
|
||||
github.com/stretchr/testify v1.11.1
|
||||
github.com/wwt/guac v1.3.2
|
||||
goauthentik.io/api/v3 v3.2026020.17-0.20260210174940-ae049de99535
|
||||
goauthentik.io/api/v3 v3.2026020.17-0.20260203144237-cf0a7b7393e7
|
||||
golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab
|
||||
golang.org/x/oauth2 v0.35.0
|
||||
golang.org/x/oauth2 v0.34.0
|
||||
golang.org/x/sync v0.19.0
|
||||
gopkg.in/yaml.v2 v2.4.0
|
||||
gorm.io/driver/postgres v1.6.0
|
||||
|
||||
16
go.sum
16
go.sum
@@ -158,8 +158,8 @@ github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=
|
||||
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||
github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4=
|
||||
github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8=
|
||||
github.com/pires/go-proxyproto v0.10.0 h1:08wrdt9NQYTjLWeag3EBIS7ZNi6Vwl3rGsEjVLaAhvU=
|
||||
github.com/pires/go-proxyproto v0.10.0/go.mod h1:ZKAAyp3cgy5Y5Mo4n9AlScrkCZwUy0g3Jf+slqQVcuU=
|
||||
github.com/pires/go-proxyproto v0.9.2 h1:H1UdHn695zUVVmB0lQ354lOWHOy6TZSpzBl3tgN0s1U=
|
||||
github.com/pires/go-proxyproto v0.9.2/go.mod h1:ZKAAyp3cgy5Y5Mo4n9AlScrkCZwUy0g3Jf+slqQVcuU=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
@@ -214,10 +214,10 @@ go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
|
||||
go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
|
||||
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
|
||||
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
|
||||
goauthentik.io/api/v3 v3.2026020.17-0.20260205232234-280022b0a8de h1:X1ELA34R1N+S+EWR8mcZRTwyZTze3bVKJh4cmeppxIY=
|
||||
goauthentik.io/api/v3 v3.2026020.17-0.20260205232234-280022b0a8de/go.mod h1:uYa+yGMglhJy8ymyUQ8KQiJjOb3UZTuPQ24Ot2s9BCo=
|
||||
goauthentik.io/api/v3 v3.2026020.17-0.20260210174940-ae049de99535 h1:DPk8z6SGesp0gbmaD2zTAKVSd/NQ++Nu+lu3UrCkNvE=
|
||||
goauthentik.io/api/v3 v3.2026020.17-0.20260210174940-ae049de99535/go.mod h1:uYa+yGMglhJy8ymyUQ8KQiJjOb3UZTuPQ24Ot2s9BCo=
|
||||
goauthentik.io/api/v3 v3.2026020.17-0.20260126165226-52b0b9497497 h1:uebnevXt0MnVIdmBPh39hCggT5Mz/DW8diDvv1n9W50=
|
||||
goauthentik.io/api/v3 v3.2026020.17-0.20260126165226-52b0b9497497/go.mod h1:uYa+yGMglhJy8ymyUQ8KQiJjOb3UZTuPQ24Ot2s9BCo=
|
||||
goauthentik.io/api/v3 v3.2026020.17-0.20260203144237-cf0a7b7393e7 h1:0dDYUvv3LXNgYgY0uSpws78J0EPBWGyk6hw45OZwFmY=
|
||||
goauthentik.io/api/v3 v3.2026020.17-0.20260203144237-cf0a7b7393e7/go.mod h1:uYa+yGMglhJy8ymyUQ8KQiJjOb3UZTuPQ24Ot2s9BCo=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
|
||||
@@ -234,8 +234,8 @@ golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
||||
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
|
||||
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
|
||||
golang.org/x/oauth2 v0.35.0 h1:Mv2mzuHuZuY2+bkyWXIHMfhNdJAdwW3FuWeCPYN5GVQ=
|
||||
golang.org/x/oauth2 v0.35.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
|
||||
golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw=
|
||||
golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
|
||||
@@ -1 +1 @@
|
||||
2026.2.0
|
||||
2026.2.0-rc1
|
||||
@@ -1,7 +1,6 @@
|
||||
package application
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
@@ -20,41 +19,8 @@ func (a *Application) handleAuthStart(rw http.ResponseWriter, r *http.Request, f
|
||||
state, err := a.createState(r, rw, fwd)
|
||||
if err != nil {
|
||||
a.log.WithError(err).Warning("failed to create state")
|
||||
if !strings.HasPrefix(err.Error(), "failed to get session") {
|
||||
rw.WriteHeader(400)
|
||||
return
|
||||
}
|
||||
|
||||
// Client has a cookie but we're unable to load the session from
|
||||
// storage (TMPDIR=/dev/shm). This can happen if the session file
|
||||
// was deleted due to container restart or session invalidation
|
||||
// (e.g., logout on auth server).
|
||||
//
|
||||
// Re-save an empty session and try again.
|
||||
|
||||
session, err := a.sessions.Get(r, a.SessionName())
|
||||
if err != nil && !strings.HasSuffix(err.Error(), "no such file or directory") {
|
||||
a.log.WithError(err).Warning("failed to get session")
|
||||
rw.WriteHeader(400)
|
||||
return
|
||||
}
|
||||
err = a.sessions.Save(r, rw, session)
|
||||
if err != nil {
|
||||
a.log.WithError(err).Warning("failed to save session")
|
||||
rw.WriteHeader(400)
|
||||
return
|
||||
}
|
||||
|
||||
// The registry caches the previous attempt to open the session so it
|
||||
// needs to be cleared in order to get the session in createState().
|
||||
*r = *r.WithContext(context.Background())
|
||||
|
||||
state, err = a.createState(r, rw, fwd)
|
||||
if err != nil {
|
||||
a.log.WithError(err).Warning("failed to create state on retry")
|
||||
rw.WriteHeader(400)
|
||||
return
|
||||
}
|
||||
rw.WriteHeader(400)
|
||||
return
|
||||
}
|
||||
http.Redirect(rw, r, a.oauthConfig.AuthCodeURL(state), http.StatusFound)
|
||||
}
|
||||
@@ -76,7 +42,7 @@ func (a *Application) redirectToStart(rw http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
}
|
||||
|
||||
redirectUrl := urlJoin(a.proxyConfig.ExternalHost, r.URL.EscapedPath())
|
||||
redirectUrl := urlJoin(a.proxyConfig.ExternalHost, r.URL.Path)
|
||||
|
||||
if a.Mode() == api.PROXYMODE_FORWARD_DOMAIN {
|
||||
dom := strings.TrimPrefix(*a.proxyConfig.CookieDomain, ".")
|
||||
|
||||
@@ -27,24 +27,6 @@ func TestRedirectToStart_Proxy(t *testing.T) {
|
||||
assert.Equal(t, "https://test.goauthentik.io/foo/bar/baz", s.Values[constants.SessionRedirect])
|
||||
}
|
||||
|
||||
func TestRedirectToStart_Proxy_EncodedSlash(t *testing.T) {
|
||||
a := newTestApplication()
|
||||
a.proxyConfig.Mode = api.PROXYMODE_PROXY.Ptr()
|
||||
a.proxyConfig.ExternalHost = "https://test.goauthentik.io"
|
||||
// %2F is a URL-encoded forward slash, used by apps like RabbitMQ in queue paths
|
||||
req, _ := http.NewRequest("GET", "/api/queues/%2F/MYChannelCreated", nil)
|
||||
|
||||
rr := httptest.NewRecorder()
|
||||
a.redirectToStart(rr, req)
|
||||
|
||||
assert.Equal(t, http.StatusFound, rr.Code)
|
||||
loc, _ := rr.Result().Location()
|
||||
assert.Contains(t, loc.String(), "%252F", "encoded slash %2F must be preserved in redirect URL")
|
||||
|
||||
s, _ := a.sessions.Get(req, a.SessionName())
|
||||
assert.Contains(t, s.Values[constants.SessionRedirect].(string), "%2F", "encoded slash %2F must be preserved in session redirect")
|
||||
}
|
||||
|
||||
func TestRedirectToStart_Forward(t *testing.T) {
|
||||
a := newTestApplication()
|
||||
a.proxyConfig.Mode = api.PROXYMODE_FORWARD_SINGLE.Ptr()
|
||||
|
||||
8
lifecycle/aws/package-lock.json
generated
8
lifecycle/aws/package-lock.json
generated
@@ -9,7 +9,7 @@
|
||||
"version": "0.0.0",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"aws-cdk": "^2.1105.0",
|
||||
"aws-cdk": "^2.1104.0",
|
||||
"cross-env": "^10.1.0"
|
||||
},
|
||||
"engines": {
|
||||
@@ -25,9 +25,9 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/aws-cdk": {
|
||||
"version": "2.1105.0",
|
||||
"resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.1105.0.tgz",
|
||||
"integrity": "sha512-1RY2UZJv31XYobEGFHQEb7c2HXNzDbHuHqdnfdYyygvZW4Nrm8MJCW42lqItQCn+wF52Ixc7r2VR5eR4YGtVhA==",
|
||||
"version": "2.1104.0",
|
||||
"resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.1104.0.tgz",
|
||||
"integrity": "sha512-TGIK2Mpfqi0BA6Np9aJz0d5HAvTxWd17FrwtXlJuwqdQbR9R/IRqsabF6xRAuhFTz7/YrrHHU9H4VK/Xfnh7Vg==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
"aws-cfn": "cross-env CI=false cdk synth --version-reporting=false > template.yaml"
|
||||
},
|
||||
"devDependencies": {
|
||||
"aws-cdk": "^2.1105.0",
|
||||
"aws-cdk": "^2.1104.0",
|
||||
"cross-env": "^10.1.0"
|
||||
},
|
||||
"engines": {
|
||||
|
||||
@@ -18,7 +18,7 @@ Parameters:
|
||||
Description: authentik Docker image
|
||||
AuthentikVersion:
|
||||
Type: String
|
||||
Default: 2026.2.0
|
||||
Default: 2026.2.0-rc1
|
||||
Description: authentik Docker image tag
|
||||
AuthentikServerCPU:
|
||||
Type: Number
|
||||
|
||||
@@ -31,7 +31,7 @@ services:
|
||||
AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS}
|
||||
AUTHENTIK_POSTGRESQL__USER: ${PG_USER:-authentik}
|
||||
AUTHENTIK_SECRET_KEY: ${AUTHENTIK_SECRET_KEY:?secret key required}
|
||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2026.2.0}
|
||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2026.2.0-rc1}
|
||||
ports:
|
||||
- ${COMPOSE_PORT_HTTP:-9000}:9000
|
||||
- ${COMPOSE_PORT_HTTPS:-9443}:9443
|
||||
@@ -53,7 +53,7 @@ services:
|
||||
AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS}
|
||||
AUTHENTIK_POSTGRESQL__USER: ${PG_USER:-authentik}
|
||||
AUTHENTIK_SECRET_KEY: ${AUTHENTIK_SECRET_KEY:?secret key required}
|
||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2026.2.0}
|
||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2026.2.0-rc1}
|
||||
restart: unless-stopped
|
||||
shm_size: 512mb
|
||||
user: root
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user