mirror of
https://github.com/goauthentik/authentik
synced 2026-05-13 02:16:30 +02:00
Compare commits
220 Commits
form-file-
...
oauth_back
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
65c7f784b3 | ||
|
|
e5acaa2603 | ||
|
|
bb2ef38ecd | ||
|
|
5364c49068 | ||
|
|
06d4f17743 | ||
|
|
9153cfe83c | ||
|
|
709cf785a9 | ||
|
|
db70211754 | ||
|
|
31510bb276 | ||
|
|
4bc6ba3dbe | ||
|
|
125af7daf5 | ||
|
|
130d0235d4 | ||
|
|
6ba97cba5a | ||
|
|
7aac9dea53 | ||
|
|
3f41588dc9 | ||
|
|
9caf08637e | ||
|
|
3f2f5ba31a | ||
|
|
ca48a6de48 | ||
|
|
6c951efd61 | ||
|
|
6b5da70673 | ||
|
|
32501266d5 | ||
|
|
5d6a2d27eb | ||
|
|
7b8426b292 | ||
|
|
fd8d5952d5 | ||
|
|
bb1e16a3f8 | ||
|
|
09a4f2c9b6 | ||
|
|
ac2873e6f4 | ||
|
|
6d68844270 | ||
|
|
2a65579182 | ||
|
|
11203f79d1 | ||
|
|
b670362168 | ||
|
|
24ae1c8b03 | ||
|
|
7a4c6b9b50 | ||
|
|
f1684882c5 | ||
|
|
615cb4f9fb | ||
|
|
bbfee74cd2 | ||
|
|
af2885ab12 | ||
|
|
29799e95ce | ||
|
|
b6568f322b | ||
|
|
d9bad736b5 | ||
|
|
1d57307891 | ||
|
|
be27635788 | ||
|
|
8373072654 | ||
|
|
b6c05f6852 | ||
|
|
26766360d5 | ||
|
|
9d7c733024 | ||
|
|
caecf5961d | ||
|
|
4b211190b6 | ||
|
|
2f22012f0f | ||
|
|
10dffd8d13 | ||
|
|
21a73fe58c | ||
|
|
b0b915061e | ||
|
|
6bcb758daa | ||
|
|
b051c59ec4 | ||
|
|
35df455e3a | ||
|
|
eb19e53bf3 | ||
|
|
3badf80295 | ||
|
|
b038e479e2 | ||
|
|
faaf4842a5 | ||
|
|
89ef57c68d | ||
|
|
c3515299d1 | ||
|
|
f8ff48fed9 | ||
|
|
dcf40690e9 | ||
|
|
4b37829f67 | ||
|
|
bf050e19b0 | ||
|
|
8eedfe5c4e | ||
|
|
33f83bec46 | ||
|
|
b93a450b38 | ||
|
|
6c169ce6a6 | ||
|
|
d53bb73c91 | ||
|
|
a182d7671e | ||
|
|
9941cec71f | ||
|
|
458344638f | ||
|
|
11ffd672ca | ||
|
|
4ebbf5b097 | ||
|
|
1bf5688ec6 | ||
|
|
9f1e6b3ba4 | ||
|
|
93ec4b3c17 | ||
|
|
477fc11148 | ||
|
|
4265e7b0af | ||
|
|
41df11d5dc | ||
|
|
f9fd1bbf09 | ||
|
|
3ba3b11a76 | ||
|
|
8ba469496c | ||
|
|
19e558e916 | ||
|
|
e15fadfedd | ||
|
|
52854e61c7 | ||
|
|
53aa0113ca | ||
|
|
9f71face62 | ||
|
|
2fadefb5b4 | ||
|
|
23e92bceae | ||
|
|
1ff2eea20a | ||
|
|
abcd2179bf | ||
|
|
6a4b5850a0 | ||
|
|
821c8c36cd | ||
|
|
8838efe3c0 | ||
|
|
433a4a3037 | ||
|
|
2d69a67e9d | ||
|
|
1294cc64e8 | ||
|
|
2bdcadda44 | ||
|
|
6ff697de46 | ||
|
|
7594e2ba56 | ||
|
|
66587bbe69 | ||
|
|
3711c6ede4 | ||
|
|
b422714a0b | ||
|
|
158ee1d7cf | ||
|
|
910326a05a | ||
|
|
9257b3e570 | ||
|
|
cdd18a7e5a | ||
|
|
88bea46648 | ||
|
|
295090a80b | ||
|
|
bff607a5c3 | ||
|
|
bfb2fb4fcf | ||
|
|
93015b0fce | ||
|
|
9b6c0d3f1a | ||
|
|
66e95ddb20 | ||
|
|
c5d8524a7d | ||
|
|
a4761064c2 | ||
|
|
b0de8bf71f | ||
|
|
32100fd3b9 | ||
|
|
4815e97162 | ||
|
|
dee99c38bb | ||
|
|
a024056b62 | ||
|
|
13afc01e21 | ||
|
|
a8dc21b707 | ||
|
|
7ccda743df | ||
|
|
0c795dd077 | ||
|
|
5df9ed3582 | ||
|
|
a47b4934a5 | ||
|
|
338a6e74f4 | ||
|
|
8897af1048 | ||
|
|
56ec3f7def | ||
|
|
53fd893d91 | ||
|
|
f7d9a8cafe | ||
|
|
f97c1071f3 | ||
|
|
4da1115a7c | ||
|
|
63b1ccd4c3 | ||
|
|
63aa7f4684 | ||
|
|
ec90666c9d | ||
|
|
04cda27496 | ||
|
|
40575c7c71 | ||
|
|
3b37221fd8 | ||
|
|
0dc8062611 | ||
|
|
d997930b60 | ||
|
|
a088a62981 | ||
|
|
118e05f256 | ||
|
|
b30500094f | ||
|
|
21af51ba59 | ||
|
|
87da0497e0 | ||
|
|
87317d6e7f | ||
|
|
071305da18 | ||
|
|
1dc8ed5e55 | ||
|
|
dc8dee985f | ||
|
|
2b20b06baa | ||
|
|
6cab1f85e4 | ||
|
|
b81dc63ad6 | ||
|
|
d0534ca55f | ||
|
|
1f1e55a36d | ||
|
|
f836c38b18 | ||
|
|
07e373e505 | ||
|
|
e361d38978 | ||
|
|
3ba1691db6 | ||
|
|
7c2987ea32 | ||
|
|
4ca88caf07 | ||
|
|
6c939341b0 | ||
|
|
4142584788 | ||
|
|
f6fbafd280 | ||
|
|
7c9555bee8 | ||
|
|
82cd64dfe7 | ||
|
|
28f0b48e33 | ||
|
|
38c02dc490 | ||
|
|
79505969db | ||
|
|
9870888456 | ||
|
|
5c06e1920e | ||
|
|
1506ad8aa4 | ||
|
|
21b6204c90 | ||
|
|
05621735cb | ||
|
|
f9ffd35ab8 | ||
|
|
c3ded3a835 | ||
|
|
7629c22050 | ||
|
|
29a66410fd | ||
|
|
f147d40c5f | ||
|
|
15b556c1be | ||
|
|
522e8a26a2 | ||
|
|
403d762f65 | ||
|
|
cbc65ffd74 | ||
|
|
9a9bafdfb4 | ||
|
|
198d2a1a8a | ||
|
|
239edace16 | ||
|
|
370d5ff0c0 | ||
|
|
635b09621b | ||
|
|
4335498ac5 | ||
|
|
72af009de8 | ||
|
|
3a07d5d829 | ||
|
|
7122891f0f | ||
|
|
c32d6cc75e | ||
|
|
eaf6be74f3 | ||
|
|
c35650afbd | ||
|
|
a1f9ff8b7d | ||
|
|
962f7513ba | ||
|
|
0ec5ea69ef | ||
|
|
d8a3098329 | ||
|
|
8989593356 | ||
|
|
eaf4e46eb6 | ||
|
|
1b401668ec | ||
|
|
bc10f869d2 | ||
|
|
d217f2f1fe | ||
|
|
6bbebbac4c | ||
|
|
345756024e | ||
|
|
e7a404c4d3 | ||
|
|
4244263c29 | ||
|
|
e3c8e613fb | ||
|
|
0b32da1e48 | ||
|
|
31b313d937 | ||
|
|
f374fb5651 | ||
|
|
bc8556c6b4 | ||
|
|
effb3ff910 | ||
|
|
7e23a93846 | ||
|
|
5ea8bbf269 | ||
|
|
014e22c3d0 |
@@ -1,5 +1,5 @@
|
||||
[bumpversion]
|
||||
current_version = 2025.6.3
|
||||
current_version = 2025.6.4
|
||||
tag = True
|
||||
commit = True
|
||||
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?:-(?P<rc_t>[a-zA-Z-]+)(?P<rc_n>[1-9]\\d*))?
|
||||
|
||||
2
.github/pull_request_template.md
vendored
2
.github/pull_request_template.md
vendored
@@ -31,4 +31,4 @@ If changes to the frontend have been made
|
||||
If applicable
|
||||
|
||||
- [ ] The documentation has been updated
|
||||
- [ ] The documentation has been formatted (`make website`)
|
||||
- [ ] The documentation has been formatted (`make docs`)
|
||||
|
||||
2
.github/workflows/api-ts-publish.yml
vendored
2
.github/workflows/api-ts-publish.yml
vendored
@@ -28,7 +28,7 @@ jobs:
|
||||
working-directory: gen-ts-api/
|
||||
run: |
|
||||
npm i
|
||||
npm publish
|
||||
npm publish --tag generated
|
||||
env:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}
|
||||
- name: Upgrade /web
|
||||
|
||||
2
.vscode/settings.json
vendored
2
.vscode/settings.json
vendored
@@ -7,6 +7,8 @@
|
||||
"!Enumerate sequence",
|
||||
"!Env scalar",
|
||||
"!Env sequence",
|
||||
"!File scalar",
|
||||
"!File sequence",
|
||||
"!Find sequence",
|
||||
"!Format sequence",
|
||||
"!If sequence",
|
||||
|
||||
@@ -62,7 +62,7 @@ RUN --mount=type=cache,sharing=locked,target=/go/pkg/mod \
|
||||
go build -o /go/authentik ./cmd/server
|
||||
|
||||
# Stage 3: MaxMind GeoIP
|
||||
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.0 AS geoip
|
||||
FROM --platform=${BUILDPLATFORM} ghcr.io/maxmind/geoipupdate:v7.1.1 AS geoip
|
||||
|
||||
ENV GEOIPUPDATE_EDITION_IDS="GeoLite2-City GeoLite2-ASN"
|
||||
ENV GEOIPUPDATE_VERBOSE="1"
|
||||
@@ -75,7 +75,7 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
|
||||
/bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"
|
||||
|
||||
# Stage 4: Download uv
|
||||
FROM ghcr.io/astral-sh/uv:0.7.19 AS uv
|
||||
FROM ghcr.io/astral-sh/uv:0.8.2 AS uv
|
||||
# Stage 5: Base python image
|
||||
FROM ghcr.io/goauthentik/fips-python:3.13.5-slim-bookworm-fips AS python-base
|
||||
|
||||
|
||||
45
Makefile
45
Makefile
@@ -1,4 +1,4 @@
|
||||
.PHONY: gen dev-reset all clean test web website
|
||||
.PHONY: gen dev-reset all clean test web docs
|
||||
|
||||
SHELL := /usr/bin/env bash
|
||||
.SHELLFLAGS += ${SHELLFLAGS} -e -o pipefail
|
||||
@@ -73,7 +73,7 @@ core-i18n-extract:
|
||||
--ignore website \
|
||||
-l en
|
||||
|
||||
install: web-install website-install core-install ## Install all requires dependencies for `web`, `website` and `core`
|
||||
install: node-install docs-install core-install ## Install all requires dependencies for `node`, `docs` and `core`
|
||||
|
||||
dev-drop-db:
|
||||
dropdb -U ${pg_user} -h ${pg_host} ${pg_name}
|
||||
@@ -183,18 +183,23 @@ gen-dev-config: ## Generate a local development config file
|
||||
|
||||
gen: gen-build gen-client-ts
|
||||
|
||||
#########################
|
||||
## Node.js
|
||||
#########################
|
||||
|
||||
node-install: ## Install the necessary libraries to build Node.js packages
|
||||
npm ci
|
||||
npm ci --prefix web
|
||||
|
||||
#########################
|
||||
## Web
|
||||
#########################
|
||||
|
||||
web-build: web-install ## Build the Authentik UI
|
||||
web-build: node-install ## Build the Authentik UI
|
||||
cd web && npm run build
|
||||
|
||||
web: web-lint-fix web-lint web-check-compile ## Automatically fix formatting issues in the Authentik UI source code, lint the code, and compile it
|
||||
|
||||
web-install: ## Install the necessary libraries to build the Authentik UI
|
||||
cd web && npm ci
|
||||
|
||||
web-test: ## Run tests for the Authentik UI
|
||||
cd web && npm run test
|
||||
|
||||
@@ -221,22 +226,30 @@ web-i18n-extract:
|
||||
cd web && npm run extract-locales
|
||||
|
||||
#########################
|
||||
## Website
|
||||
## Docs
|
||||
#########################
|
||||
|
||||
website: website-lint-fix website-build ## Automatically fix formatting issues in the Authentik website/docs source code, lint the code, and compile it
|
||||
docs: docs-lint-fix docs-build ## Automatically fix formatting issues in the Authentik docs source code, lint the code, and compile it
|
||||
|
||||
website-install:
|
||||
cd website && npm ci
|
||||
docs-install:
|
||||
npm ci --prefix website
|
||||
|
||||
website-lint-fix: lint-codespell
|
||||
cd website && npm run prettier
|
||||
docs-lint-fix: lint-codespell
|
||||
npm run prettier --prefix website
|
||||
|
||||
website-build:
|
||||
cd website && npm run build
|
||||
docs-build:
|
||||
npm run build --prefix website
|
||||
|
||||
website-watch: ## Build and watch the documentation website, updating automatically
|
||||
cd website && npm run watch
|
||||
docs-watch: ## Build and watch the topics documentation
|
||||
npm run start --prefix website
|
||||
|
||||
integrations: docs-lint-fix integrations-build ## Fix formatting issues in the integrations source code, lint the code, and compile it
|
||||
|
||||
integrations-build:
|
||||
npm run build --prefix website -w integrations
|
||||
|
||||
integrations-watch: ## Build and watch the Integrations documentation
|
||||
npm run start --prefix website -w integrations
|
||||
|
||||
#########################
|
||||
## Docker
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from os import environ
|
||||
|
||||
__version__ = "2025.6.3"
|
||||
__version__ = "2025.6.4"
|
||||
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
|
||||
|
||||
|
||||
|
||||
43
authentik/blueprints/tests/fixtures/tags.yaml
vendored
43
authentik/blueprints/tests/fixtures/tags.yaml
vendored
@@ -12,8 +12,8 @@ context:
|
||||
context1: context-nested-value
|
||||
context2: !Context context1
|
||||
entries:
|
||||
- model: !Format ["%s", authentik_sources_oauth.oauthsource]
|
||||
state: !Format ["%s", present]
|
||||
- model: !Format ["%%s", authentik_sources_oauth.oauthsource]
|
||||
state: !Format ["%%s", present]
|
||||
identifiers:
|
||||
slug: test
|
||||
attrs:
|
||||
@@ -27,20 +27,23 @@ entries:
|
||||
[slug, default-source-authentication],
|
||||
]
|
||||
enrollment_flow:
|
||||
!Find [!Format ["%s", authentik_flows.Flow], [slug, default-source-enrollment]]
|
||||
!Find [!Format ["%%s", authentik_flows.Flow], [slug, default-source-enrollment]]
|
||||
- attrs:
|
||||
expression: return True
|
||||
identifiers:
|
||||
name: !Format [foo-%s-%s-%s, !Context foo, !Context bar, qux]
|
||||
name: !Format [foo-%%s-%%s-%%s, !Context foo, !Context bar, qux]
|
||||
id: policy
|
||||
model: authentik_policies_expression.expressionpolicy
|
||||
- attrs:
|
||||
attributes:
|
||||
env_null: !Env [bar-baz, null]
|
||||
file_content: !File '%(file_name)s'
|
||||
file_default: !File ['%(file_default_name)s', 'default']
|
||||
file_non_existent: !File '/does-not-exist'
|
||||
json_parse: !ParseJSON '{"foo": "bar"}'
|
||||
policy_pk1:
|
||||
!Format [
|
||||
"%s-%s",
|
||||
"%%s-%%s",
|
||||
!Find [
|
||||
authentik_policies_expression.expressionpolicy,
|
||||
[
|
||||
@@ -51,29 +54,29 @@ entries:
|
||||
],
|
||||
suffix,
|
||||
]
|
||||
policy_pk2: !Format ["%s-%s", !KeyOf policy, suffix]
|
||||
policy_pk2: !Format ["%%s-%%s", !KeyOf policy, suffix]
|
||||
boolAnd:
|
||||
!Condition [AND, !Context foo, !Format ["%s", "a_string"], 1]
|
||||
!Condition [AND, !Context foo, !Format ["%%s", "a_string"], 1]
|
||||
boolNand:
|
||||
!Condition [NAND, !Context foo, !Format ["%s", "a_string"], 1]
|
||||
!Condition [NAND, !Context foo, !Format ["%%s", "a_string"], 1]
|
||||
boolOr:
|
||||
!Condition [
|
||||
OR,
|
||||
!Context foo,
|
||||
!Format ["%s", "a_string"],
|
||||
!Format ["%%s", "a_string"],
|
||||
null,
|
||||
]
|
||||
boolNor:
|
||||
!Condition [
|
||||
NOR,
|
||||
!Context foo,
|
||||
!Format ["%s", "a_string"],
|
||||
!Format ["%%s", "a_string"],
|
||||
null,
|
||||
]
|
||||
boolXor:
|
||||
!Condition [XOR, !Context foo, !Format ["%s", "a_string"], 1]
|
||||
!Condition [XOR, !Context foo, !Format ["%%s", "a_string"], 1]
|
||||
boolXnor:
|
||||
!Condition [XNOR, !Context foo, !Format ["%s", "a_string"], 1]
|
||||
!Condition [XNOR, !Context foo, !Format ["%%s", "a_string"], 1]
|
||||
boolComplex:
|
||||
!Condition [
|
||||
XNOR,
|
||||
@@ -89,7 +92,7 @@ entries:
|
||||
{
|
||||
with: { keys: "and_values" },
|
||||
and_nested_custom_tags:
|
||||
!Format ["foo-%s", !Context foo],
|
||||
!Format ["foo-%%s", !Context foo],
|
||||
},
|
||||
},
|
||||
null,
|
||||
@@ -98,7 +101,7 @@ entries:
|
||||
!If [
|
||||
!Condition [AND, false],
|
||||
null,
|
||||
[list, with, items, !Format ["foo-%s", !Context foo]],
|
||||
[list, with, items, !Format ["foo-%%s", !Context foo]],
|
||||
]
|
||||
if_true_simple: !If [!Context foo, true, text]
|
||||
if_short: !If [!Context foo]
|
||||
@@ -106,22 +109,22 @@ entries:
|
||||
enumerate_mapping_to_mapping: !Enumerate [
|
||||
!Context mapping,
|
||||
MAP,
|
||||
[!Format ["prefix-%s", !Index 0], !Format ["other-prefix-%s", !Value 0]]
|
||||
[!Format ["prefix-%%s", !Index 0], !Format ["other-prefix-%%s", !Value 0]]
|
||||
]
|
||||
enumerate_mapping_to_sequence: !Enumerate [
|
||||
!Context mapping,
|
||||
SEQ,
|
||||
!Format ["prefixed-pair-%s-%s", !Index 0, !Value 0]
|
||||
!Format ["prefixed-pair-%%s-%%s", !Index 0, !Value 0]
|
||||
]
|
||||
enumerate_sequence_to_sequence: !Enumerate [
|
||||
!Context sequence,
|
||||
SEQ,
|
||||
!Format ["prefixed-items-%s-%s", !Index 0, !Value 0]
|
||||
!Format ["prefixed-items-%%s-%%s", !Index 0, !Value 0]
|
||||
]
|
||||
enumerate_sequence_to_mapping: !Enumerate [
|
||||
!Context sequence,
|
||||
MAP,
|
||||
[!Format ["index: %d", !Index 0], !Value 0]
|
||||
[!Format ["index: %%d", !Index 0], !Value 0]
|
||||
]
|
||||
nested_complex_enumeration: !Enumerate [
|
||||
!Context sequence,
|
||||
@@ -132,9 +135,9 @@ entries:
|
||||
!Context mapping,
|
||||
MAP,
|
||||
[
|
||||
!Format ["%s", !Index 0],
|
||||
!Format ["%%s", !Index 0],
|
||||
[
|
||||
!Enumerate [!Value 2, SEQ, !Format ["prefixed-%s", !Value 0]],
|
||||
!Enumerate [!Value 2, SEQ, !Format ["prefixed-%%s", !Value 0]],
|
||||
{
|
||||
outer_value: !Value 1,
|
||||
outer_index: !Index 1,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Test blueprints v1"""
|
||||
|
||||
from os import environ
|
||||
from os import chmod, environ, unlink, write
|
||||
from tempfile import mkstemp
|
||||
|
||||
from django.test import TransactionTestCase
|
||||
|
||||
@@ -131,97 +132,112 @@ class TestBlueprintsV1(TransactionTestCase):
|
||||
ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").delete()
|
||||
Group.objects.filter(name="test").delete()
|
||||
environ["foo"] = generate_id()
|
||||
importer = Importer.from_string(load_fixture("fixtures/tags.yaml"), {"bar": "baz"})
|
||||
file, file_name = mkstemp()
|
||||
write(file, b"foo")
|
||||
_, file_default_name = mkstemp()
|
||||
chmod(file_default_name, 0o000) # Remove all permissions so we can't read the file
|
||||
importer = Importer.from_string(
|
||||
load_fixture(
|
||||
"fixtures/tags.yaml",
|
||||
file_name=file_name,
|
||||
file_default_name=file_default_name,
|
||||
),
|
||||
{"bar": "baz"},
|
||||
)
|
||||
self.assertTrue(importer.validate()[0])
|
||||
self.assertTrue(importer.apply())
|
||||
policy = ExpressionPolicy.objects.filter(name="foo-bar-baz-qux").first()
|
||||
self.assertTrue(policy)
|
||||
self.assertTrue(
|
||||
Group.objects.filter(
|
||||
attributes={
|
||||
"policy_pk1": str(policy.pk) + "-suffix",
|
||||
"policy_pk2": str(policy.pk) + "-suffix",
|
||||
"boolAnd": True,
|
||||
"boolNand": False,
|
||||
"boolOr": True,
|
||||
"boolNor": False,
|
||||
"boolXor": True,
|
||||
"boolXnor": False,
|
||||
"boolComplex": True,
|
||||
"if_true_complex": {
|
||||
"dictionary": {
|
||||
"with": {"keys": "and_values"},
|
||||
"and_nested_custom_tags": "foo-bar",
|
||||
}
|
||||
group = Group.objects.filter(name="test").first()
|
||||
self.assertIsNotNone(group)
|
||||
self.assertEqual(
|
||||
group.attributes,
|
||||
{
|
||||
"policy_pk1": str(policy.pk) + "-suffix",
|
||||
"policy_pk2": str(policy.pk) + "-suffix",
|
||||
"boolAnd": True,
|
||||
"boolNand": False,
|
||||
"boolOr": True,
|
||||
"boolNor": False,
|
||||
"boolXor": True,
|
||||
"boolXnor": False,
|
||||
"boolComplex": True,
|
||||
"if_true_complex": {
|
||||
"dictionary": {
|
||||
"with": {"keys": "and_values"},
|
||||
"and_nested_custom_tags": "foo-bar",
|
||||
}
|
||||
},
|
||||
"if_false_complex": ["list", "with", "items", "foo-bar"],
|
||||
"if_true_simple": True,
|
||||
"if_short": True,
|
||||
"if_false_simple": 2,
|
||||
"enumerate_mapping_to_mapping": {
|
||||
"prefix-key1": "other-prefix-value",
|
||||
"prefix-key2": "other-prefix-2",
|
||||
},
|
||||
"enumerate_mapping_to_sequence": [
|
||||
"prefixed-pair-key1-value",
|
||||
"prefixed-pair-key2-2",
|
||||
],
|
||||
"enumerate_sequence_to_sequence": [
|
||||
"prefixed-items-0-foo",
|
||||
"prefixed-items-1-bar",
|
||||
],
|
||||
"enumerate_sequence_to_mapping": {"index: 0": "foo", "index: 1": "bar"},
|
||||
"nested_complex_enumeration": {
|
||||
"0": {
|
||||
"key1": [
|
||||
["prefixed-f", "prefixed-o", "prefixed-o"],
|
||||
{
|
||||
"outer_value": "foo",
|
||||
"outer_index": 0,
|
||||
"middle_value": "value",
|
||||
"middle_index": "key1",
|
||||
},
|
||||
],
|
||||
"key2": [
|
||||
["prefixed-f", "prefixed-o", "prefixed-o"],
|
||||
{
|
||||
"outer_value": "foo",
|
||||
"outer_index": 0,
|
||||
"middle_value": 2,
|
||||
"middle_index": "key2",
|
||||
},
|
||||
],
|
||||
},
|
||||
"if_false_complex": ["list", "with", "items", "foo-bar"],
|
||||
"if_true_simple": True,
|
||||
"if_short": True,
|
||||
"if_false_simple": 2,
|
||||
"enumerate_mapping_to_mapping": {
|
||||
"prefix-key1": "other-prefix-value",
|
||||
"prefix-key2": "other-prefix-2",
|
||||
"1": {
|
||||
"key1": [
|
||||
["prefixed-b", "prefixed-a", "prefixed-r"],
|
||||
{
|
||||
"outer_value": "bar",
|
||||
"outer_index": 1,
|
||||
"middle_value": "value",
|
||||
"middle_index": "key1",
|
||||
},
|
||||
],
|
||||
"key2": [
|
||||
["prefixed-b", "prefixed-a", "prefixed-r"],
|
||||
{
|
||||
"outer_value": "bar",
|
||||
"outer_index": 1,
|
||||
"middle_value": 2,
|
||||
"middle_index": "key2",
|
||||
},
|
||||
],
|
||||
},
|
||||
"enumerate_mapping_to_sequence": [
|
||||
"prefixed-pair-key1-value",
|
||||
"prefixed-pair-key2-2",
|
||||
],
|
||||
"enumerate_sequence_to_sequence": [
|
||||
"prefixed-items-0-foo",
|
||||
"prefixed-items-1-bar",
|
||||
],
|
||||
"enumerate_sequence_to_mapping": {"index: 0": "foo", "index: 1": "bar"},
|
||||
"nested_complex_enumeration": {
|
||||
"0": {
|
||||
"key1": [
|
||||
["prefixed-f", "prefixed-o", "prefixed-o"],
|
||||
{
|
||||
"outer_value": "foo",
|
||||
"outer_index": 0,
|
||||
"middle_value": "value",
|
||||
"middle_index": "key1",
|
||||
},
|
||||
],
|
||||
"key2": [
|
||||
["prefixed-f", "prefixed-o", "prefixed-o"],
|
||||
{
|
||||
"outer_value": "foo",
|
||||
"outer_index": 0,
|
||||
"middle_value": 2,
|
||||
"middle_index": "key2",
|
||||
},
|
||||
],
|
||||
},
|
||||
"1": {
|
||||
"key1": [
|
||||
["prefixed-b", "prefixed-a", "prefixed-r"],
|
||||
{
|
||||
"outer_value": "bar",
|
||||
"outer_index": 1,
|
||||
"middle_value": "value",
|
||||
"middle_index": "key1",
|
||||
},
|
||||
],
|
||||
"key2": [
|
||||
["prefixed-b", "prefixed-a", "prefixed-r"],
|
||||
{
|
||||
"outer_value": "bar",
|
||||
"outer_index": 1,
|
||||
"middle_value": 2,
|
||||
"middle_index": "key2",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
"nested_context": "context-nested-value",
|
||||
"env_null": None,
|
||||
"json_parse": {"foo": "bar"},
|
||||
"at_index_sequence": "foo",
|
||||
"at_index_sequence_default": "non existent",
|
||||
"at_index_mapping": 2,
|
||||
"at_index_mapping_default": "non existent",
|
||||
}
|
||||
).exists()
|
||||
},
|
||||
"nested_context": "context-nested-value",
|
||||
"env_null": None,
|
||||
"file_content": "foo",
|
||||
"file_default": "default",
|
||||
"file_non_existent": None,
|
||||
"json_parse": {"foo": "bar"},
|
||||
"at_index_sequence": "foo",
|
||||
"at_index_sequence_default": "non existent",
|
||||
"at_index_mapping": 2,
|
||||
"at_index_mapping_default": "non existent",
|
||||
},
|
||||
)
|
||||
self.assertTrue(
|
||||
OAuthSource.objects.filter(
|
||||
@@ -229,6 +245,8 @@ class TestBlueprintsV1(TransactionTestCase):
|
||||
consumer_key=environ["foo"],
|
||||
)
|
||||
)
|
||||
unlink(file_name)
|
||||
unlink(file_default_name)
|
||||
|
||||
def test_export_validate_import_policies(self):
|
||||
"""Test export and validate it"""
|
||||
|
||||
@@ -18,12 +18,15 @@ from django.db.models import Model, Q
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.fields import Field
|
||||
from rest_framework.serializers import Serializer
|
||||
from structlog.stdlib import get_logger
|
||||
from yaml import SafeDumper, SafeLoader, ScalarNode, SequenceNode
|
||||
|
||||
from authentik.lib.models import SerializerModel
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
from authentik.policies.models import PolicyBindingModel
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class UNSET:
|
||||
"""Used to test whether a key has not been set."""
|
||||
@@ -268,6 +271,34 @@ class Env(YAMLTag):
|
||||
return getenv(self.key) or self.default
|
||||
|
||||
|
||||
class File(YAMLTag):
|
||||
"""Lookup file with optional default"""
|
||||
|
||||
path: str
|
||||
default: Any | None
|
||||
|
||||
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
|
||||
super().__init__()
|
||||
self.default = None
|
||||
if isinstance(node, ScalarNode):
|
||||
self.path = node.value
|
||||
if isinstance(node, SequenceNode):
|
||||
self.path = loader.construct_object(node.value[0])
|
||||
self.default = loader.construct_object(node.value[1])
|
||||
|
||||
def resolve(self, entry: BlueprintEntry, blueprint: Blueprint) -> Any:
|
||||
try:
|
||||
with open(self.path, encoding="utf8") as _file:
|
||||
return _file.read().strip()
|
||||
except OSError as exc:
|
||||
LOGGER.warning(
|
||||
"Failed to read file. Falling back to default value",
|
||||
path=self.path,
|
||||
exc=exc,
|
||||
)
|
||||
return self.default
|
||||
|
||||
|
||||
class Context(YAMLTag):
|
||||
"""Lookup key from instance context"""
|
||||
|
||||
@@ -679,6 +710,7 @@ class BlueprintLoader(SafeLoader):
|
||||
self.add_constructor("!Condition", Condition)
|
||||
self.add_constructor("!If", If)
|
||||
self.add_constructor("!Env", Env)
|
||||
self.add_constructor("!File", File)
|
||||
self.add_constructor("!Enumerate", Enumerate)
|
||||
self.add_constructor("!Value", Value)
|
||||
self.add_constructor("!Index", Index)
|
||||
|
||||
@@ -11,7 +11,6 @@ from authentik.core.expression.exceptions import SkipObjectException
|
||||
from authentik.core.models import User
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.lib.expression.evaluator import BaseEvaluator
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
from authentik.policies.types import PolicyRequest
|
||||
|
||||
PROPERTY_MAPPING_TIME = Histogram(
|
||||
@@ -69,12 +68,11 @@ class PropertyMappingEvaluator(BaseEvaluator):
|
||||
# For dry-run requests we don't save exceptions
|
||||
if self.dry_run:
|
||||
return
|
||||
error_string = exception_to_string(exc)
|
||||
event = Event.new(
|
||||
EventAction.PROPERTY_MAPPING_EXCEPTION,
|
||||
expression=expression_source,
|
||||
message=error_string,
|
||||
)
|
||||
message="Failed to execute property mapping",
|
||||
).with_exception(exc)
|
||||
if "request" in self._context:
|
||||
req: PolicyRequest = self._context["request"]
|
||||
if req.http_request:
|
||||
|
||||
@@ -5,6 +5,7 @@ from contextvars import ContextVar
|
||||
from functools import partial
|
||||
from uuid import uuid4
|
||||
|
||||
from django.contrib.auth import logout
|
||||
from django.contrib.auth.models import AnonymousUser
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
@@ -58,6 +59,11 @@ class AuthenticationMiddleware(MiddlewareMixin):
|
||||
request.user = SimpleLazyObject(lambda: get_user(request))
|
||||
request.auser = partial(aget_user, request)
|
||||
|
||||
user = request.user
|
||||
if user and user.is_authenticated and not user.is_active:
|
||||
logout(request)
|
||||
raise AssertionError()
|
||||
|
||||
|
||||
class ImpersonateMiddleware:
|
||||
"""Middleware to impersonate users"""
|
||||
|
||||
@@ -12,6 +12,7 @@ from rest_framework.fields import CharField, IntegerField
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.validators import UniqueValidator
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from authentik.core.api.used_by import UsedByMixin
|
||||
@@ -53,6 +54,7 @@ class LicenseSerializer(ModelSerializer):
|
||||
"external_users",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"key": {"validators": [UniqueValidator(queryset=License.objects.all())]},
|
||||
"name": {"read_only": True},
|
||||
"expiry": {"read_only": True},
|
||||
"internal_users": {"read_only": True},
|
||||
|
||||
@@ -65,13 +65,17 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
|
||||
data[field.name] = deepcopy(field_value)
|
||||
return cleanse_dict(data)
|
||||
|
||||
def diff(self, before: dict, after: dict) -> dict:
|
||||
def diff(self, before: dict, after: dict, update_fields: list[str] | None = None) -> dict:
|
||||
"""Generate diff between dicts"""
|
||||
diff = {}
|
||||
for key, value in before.items():
|
||||
if update_fields and key not in update_fields:
|
||||
continue
|
||||
if after.get(key) != value:
|
||||
diff[key] = {"previous_value": value, "new_value": after.get(key)}
|
||||
for key, value in after.items():
|
||||
if update_fields and key not in update_fields:
|
||||
continue
|
||||
if key not in before and key not in diff and before.get(key) != value:
|
||||
diff[key] = {"previous_value": before.get(key), "new_value": value}
|
||||
return sanitize_item(diff)
|
||||
@@ -95,6 +99,7 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
|
||||
instance: Model,
|
||||
created: bool,
|
||||
thread_kwargs: dict | None = None,
|
||||
update_fields: list[str] | None = None,
|
||||
**_,
|
||||
):
|
||||
if not self.enabled:
|
||||
@@ -108,7 +113,7 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
|
||||
prev_state = {}
|
||||
# Get current state
|
||||
new_state = self.serialize_simple(instance)
|
||||
diff = self.diff(prev_state, new_state)
|
||||
diff = self.diff(prev_state, new_state, update_fields)
|
||||
thread_kwargs["diff"] = diff
|
||||
return super().post_save_handler(request, sender, instance, created, thread_kwargs, **_)
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.models import Group, User
|
||||
from authentik.core.tests.utils import create_test_admin_user
|
||||
from authentik.enterprise.audit.middleware import EnterpriseAuditMiddleware
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.events.utils import sanitize_item
|
||||
from authentik.lib.generators import generate_id
|
||||
@@ -208,3 +209,23 @@ class TestEnterpriseAudit(APITestCase):
|
||||
diff,
|
||||
{"users": {"remove": [user.pk]}},
|
||||
)
|
||||
|
||||
@patch(
|
||||
"authentik.enterprise.audit.middleware.EnterpriseAuditMiddleware.enabled",
|
||||
PropertyMock(return_value=True),
|
||||
)
|
||||
def test_diff_update_fields(self):
|
||||
"""Test update audit log"""
|
||||
self.client.force_login(self.user)
|
||||
diff = EnterpriseAuditMiddleware(None).diff(
|
||||
{
|
||||
"foo": "bar",
|
||||
"is_active": False,
|
||||
},
|
||||
{
|
||||
"foo": "baz",
|
||||
"is_active": True,
|
||||
},
|
||||
update_fields=["is_active"],
|
||||
)
|
||||
self.assertEqual(diff, {"is_active": {"new_value": True, "previous_value": False}})
|
||||
|
||||
@@ -16,7 +16,7 @@ from authentik.stages.authenticator.models import Device
|
||||
|
||||
|
||||
class AuthenticatorEndpointGDTCStage(ConfigurableStage, FriendlyNamedStage, Stage):
|
||||
"""Setup Google Chrome Device-trust connection"""
|
||||
"""Setup Google Chrome Device Trust connection"""
|
||||
|
||||
credentials = models.JSONField()
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@ from authentik.core.models import Group, User
|
||||
from authentik.events.models import Event, EventAction, Notification
|
||||
from authentik.events.utils import model_to_dict
|
||||
from authentik.lib.sentry import should_ignore_exception
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
from authentik.lib.utils.errors import exception_to_dict
|
||||
from authentik.stages.authenticator_static.models import StaticToken
|
||||
|
||||
IGNORED_MODELS = tuple(
|
||||
@@ -170,14 +170,16 @@ class AuditMiddleware:
|
||||
thread = EventNewThread(
|
||||
EventAction.SUSPICIOUS_REQUEST,
|
||||
request,
|
||||
message=exception_to_string(exception),
|
||||
message=str(exception),
|
||||
exception=exception_to_dict(exception),
|
||||
)
|
||||
thread.run()
|
||||
elif not should_ignore_exception(exception):
|
||||
thread = EventNewThread(
|
||||
EventAction.SYSTEM_EXCEPTION,
|
||||
request,
|
||||
message=exception_to_string(exception),
|
||||
message=str(exception),
|
||||
exception=exception_to_dict(exception),
|
||||
)
|
||||
thread.run()
|
||||
|
||||
|
||||
@@ -38,6 +38,7 @@ from authentik.events.utils import (
|
||||
)
|
||||
from authentik.lib.models import DomainlessURLValidator, SerializerModel
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
from authentik.lib.utils.errors import exception_to_dict
|
||||
from authentik.lib.utils.http import get_http_session
|
||||
from authentik.lib.utils.time import timedelta_from_string
|
||||
from authentik.policies.models import PolicyBindingModel
|
||||
@@ -163,6 +164,12 @@ class Event(SerializerModel, ExpiringModel):
|
||||
event = Event(action=action, app=app, context=cleaned_kwargs)
|
||||
return event
|
||||
|
||||
def with_exception(self, exc: Exception) -> "Event":
|
||||
"""Add data from 'exc' to the event in a database-saveable format"""
|
||||
self.context.setdefault("message", str(exc))
|
||||
self.context["exception"] = exception_to_dict(exc)
|
||||
return self
|
||||
|
||||
def set_user(self, user: User) -> "Event":
|
||||
"""Set `.user` based on user, ensuring the correct attributes are copied.
|
||||
This should only be used when self.from_http is *not* used."""
|
||||
|
||||
@@ -127,8 +127,8 @@ class SystemTask(TenantTask):
|
||||
)
|
||||
Event.new(
|
||||
EventAction.SYSTEM_TASK_EXCEPTION,
|
||||
message=f"Task {self.__name__} encountered an error: {exception_to_string(exc)}",
|
||||
).save()
|
||||
message=f"Task {self.__name__} encountered an error",
|
||||
).with_exception(exc).save()
|
||||
|
||||
def run(self, *args, **kwargs):
|
||||
raise NotImplementedError
|
||||
|
||||
@@ -301,6 +301,7 @@ class SessionEndStage(ChallengeStageView):
|
||||
"flow_slug": self.request.brand.flow_invalidation.slug,
|
||||
},
|
||||
)
|
||||
|
||||
return SessionEndChallenge(data=data)
|
||||
|
||||
# This can never be reached since this challenge is created on demand and only the
|
||||
|
||||
@@ -56,7 +56,6 @@ from authentik.flows.planner import (
|
||||
)
|
||||
from authentik.flows.stage import AccessDeniedStage, StageView
|
||||
from authentik.lib.sentry import SentryIgnoredException, should_ignore_exception
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
from authentik.lib.utils.reflection import all_subclasses, class_to_path
|
||||
from authentik.lib.utils.urls import is_url_absolute, redirect_with_qs
|
||||
from authentik.policies.engine import PolicyEngine
|
||||
@@ -239,8 +238,8 @@ class FlowExecutorView(APIView):
|
||||
capture_exception(exc)
|
||||
Event.new(
|
||||
action=EventAction.SYSTEM_EXCEPTION,
|
||||
message=exception_to_string(exc),
|
||||
).from_http(self.request)
|
||||
message="System exception during flow execution.",
|
||||
).with_exception(exc).from_http(self.request)
|
||||
challenge = FlowErrorChallenge(self.request, exc)
|
||||
challenge.is_valid(raise_exception=True)
|
||||
return to_stage_response(self.request, HttpChallengeResponse(challenge))
|
||||
|
||||
@@ -14,7 +14,6 @@ from authentik.events.models import Event, EventAction
|
||||
from authentik.lib.expression.exceptions import ControlFlowException
|
||||
from authentik.lib.sync.mapper import PropertyMappingManager
|
||||
from authentik.lib.sync.outgoing.exceptions import NotFoundSyncException, StopSync
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from django.db.models import Model
|
||||
@@ -106,9 +105,9 @@ class BaseOutgoingSyncClient[
|
||||
# Value error can be raised when assigning invalid data to an attribute
|
||||
Event.new(
|
||||
EventAction.CONFIGURATION_ERROR,
|
||||
message=f"Failed to evaluate property-mapping {exception_to_string(exc)}",
|
||||
message="Failed to evaluate property-mapping",
|
||||
mapping=exc.mapping,
|
||||
).save()
|
||||
).with_exception(exc).save()
|
||||
raise StopSync(exc, obj, exc.mapping) from exc
|
||||
if not raw_final_object:
|
||||
raise StopSync(ValueError("No mappings configured"), obj)
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
from traceback import extract_tb
|
||||
|
||||
from structlog.tracebacks import ExceptionDictTransformer
|
||||
|
||||
from authentik.lib.utils.reflection import class_to_path
|
||||
|
||||
TRACEBACK_HEADER = "Traceback (most recent call last):"
|
||||
@@ -17,3 +19,8 @@ def exception_to_string(exc: Exception) -> str:
|
||||
f"{class_to_path(exc.__class__)}: {str(exc)}",
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def exception_to_dict(exc: Exception) -> dict:
|
||||
"""Format exception as a dictionary"""
|
||||
return ExceptionDictTransformer()((type(exc), exc, exc.__traceback__))
|
||||
|
||||
@@ -35,7 +35,6 @@ from authentik.events.models import Event, EventAction
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.models import InheritanceForeignKey, SerializerModel
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
from authentik.outposts.controllers.k8s.utils import get_namespace
|
||||
|
||||
OUR_VERSION = parse(__version__)
|
||||
@@ -326,9 +325,8 @@ class Outpost(SerializerModel, ManagedModel):
|
||||
"While setting the permissions for the service-account, a "
|
||||
"permission was not found: Check "
|
||||
"https://goauthentik.io/docs/troubleshooting/missing_permission"
|
||||
)
|
||||
+ exception_to_string(exc),
|
||||
).set_user(user).save()
|
||||
),
|
||||
).with_exception(exc).set_user(user).save()
|
||||
else:
|
||||
app_label, perm = model_or_perm.split(".")
|
||||
permission = Permission.objects.filter(
|
||||
|
||||
@@ -10,7 +10,7 @@ from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
from authentik.lib.utils.errors import exception_to_dict
|
||||
from authentik.lib.utils.reflection import class_to_path
|
||||
from authentik.policies.apps import HIST_POLICIES_EXECUTION_TIME
|
||||
from authentik.policies.exceptions import PolicyException
|
||||
@@ -95,10 +95,13 @@ class PolicyProcess(PROCESS_CLASS):
|
||||
except PolicyException as exc:
|
||||
# Either use passed original exception or whatever we have
|
||||
src_exc = exc.src_exc if exc.src_exc else exc
|
||||
error_string = exception_to_string(src_exc)
|
||||
# Create policy exception event, only when we're not debugging
|
||||
if not self.request.debug:
|
||||
self.create_event(EventAction.POLICY_EXCEPTION, message=error_string)
|
||||
self.create_event(
|
||||
EventAction.POLICY_EXCEPTION,
|
||||
message="Policy failed to execute",
|
||||
exception=exception_to_dict(src_exc),
|
||||
)
|
||||
LOGGER.debug("P_ENG(proc): error, using failure result", exc=src_exc)
|
||||
policy_result = PolicyResult(self.binding.failure_result, str(src_exc))
|
||||
policy_result.source_binding = self.binding
|
||||
@@ -143,5 +146,5 @@ class PolicyProcess(PROCESS_CLASS):
|
||||
try:
|
||||
self.connection.send(self.profiling_wrapper())
|
||||
except Exception as exc:
|
||||
LOGGER.warning("Policy failed to run", exc=exception_to_string(exc))
|
||||
LOGGER.warning("Policy failed to run", exc=exc)
|
||||
self.connection.send(PolicyResult(False, str(exc)))
|
||||
|
||||
@@ -241,4 +241,4 @@ class TestPolicyProcess(TestCase):
|
||||
self.assertEqual(len(events), 1)
|
||||
event = events.first()
|
||||
self.assertEqual(event.user["username"], self.user.username)
|
||||
self.assertIn("division by zero", event.context["message"])
|
||||
self.assertIn("Policy failed to execute", event.context["message"])
|
||||
|
||||
@@ -43,6 +43,9 @@ class OAuth2ProviderSerializer(ProviderSerializer):
|
||||
"""OAuth2Provider Serializer"""
|
||||
|
||||
redirect_uris = RedirectURISerializer(many=True, source="_redirect_uris")
|
||||
backchannel_logout_uris = RedirectURISerializer(
|
||||
many=True, source="_backchannel_logout_uris", required=False
|
||||
)
|
||||
|
||||
def validate_redirect_uris(self, data: list) -> list:
|
||||
for entry in data:
|
||||
@@ -56,6 +59,18 @@ class OAuth2ProviderSerializer(ProviderSerializer):
|
||||
) from None
|
||||
return data
|
||||
|
||||
def validate_backchannel_logout_uris(self, data: list) -> list:
|
||||
for entry in data:
|
||||
if entry.get("matching_mode") == RedirectURIMatchingMode.REGEX:
|
||||
url = entry.get("url")
|
||||
try:
|
||||
compile(url)
|
||||
except RegexError:
|
||||
raise ValidationError(
|
||||
_("Invalid Regex Pattern: {url}".format(url=url))
|
||||
) from None
|
||||
return data
|
||||
|
||||
class Meta:
|
||||
model = OAuth2Provider
|
||||
fields = ProviderSerializer.Meta.fields + [
|
||||
@@ -70,6 +85,7 @@ class OAuth2ProviderSerializer(ProviderSerializer):
|
||||
"signing_key",
|
||||
"encryption_key",
|
||||
"redirect_uris",
|
||||
"backchannel_logout_uris",
|
||||
"sub_mode",
|
||||
"property_mappings",
|
||||
"issuer_mode",
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
"""OAuth/OpenID Constants"""
|
||||
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
GRANT_TYPE_AUTHORIZATION_CODE = "authorization_code"
|
||||
GRANT_TYPE_IMPLICIT = "implicit"
|
||||
GRANT_TYPE_REFRESH_TOKEN = "refresh_token" # nosec
|
||||
@@ -51,3 +54,23 @@ AMR_MFA = "mfa"
|
||||
AMR_OTP = "otp"
|
||||
AMR_WEBAUTHN = "user"
|
||||
AMR_SMART_CARD = "sc"
|
||||
|
||||
|
||||
class SubModes(models.TextChoices):
|
||||
"""Mode after which 'sub' attribute is generated, for compatibility reasons"""
|
||||
|
||||
HASHED_USER_ID = "hashed_user_id", _("Based on the Hashed User ID")
|
||||
USER_ID = "user_id", _("Based on user ID")
|
||||
USER_UUID = "user_uuid", _("Based on user UUID")
|
||||
USER_USERNAME = "user_username", _("Based on the username")
|
||||
USER_EMAIL = (
|
||||
"user_email",
|
||||
_("Based on the User's Email. This is recommended over the UPN method."),
|
||||
)
|
||||
USER_UPN = (
|
||||
"user_upn",
|
||||
_(
|
||||
"Based on the User's UPN, only works if user has a 'upn' attribute set. "
|
||||
"Use this method only if you have different UPN and Mail domains."
|
||||
),
|
||||
)
|
||||
|
||||
@@ -15,12 +15,14 @@ class OAuth2Error(SentryIgnoredException):
|
||||
|
||||
error: str
|
||||
description: str
|
||||
cause: str | None = None
|
||||
|
||||
def create_dict(self):
|
||||
def create_dict(self, request: HttpRequest):
|
||||
"""Return error as dict for JSON Rendering"""
|
||||
return {
|
||||
"error": self.error,
|
||||
"error_description": self.description,
|
||||
"request_id": request.request_id,
|
||||
}
|
||||
|
||||
def __repr__(self) -> str:
|
||||
@@ -31,9 +33,15 @@ class OAuth2Error(SentryIgnoredException):
|
||||
return Event.new(
|
||||
EventAction.CONFIGURATION_ERROR,
|
||||
message=message or self.description,
|
||||
cause=self.cause,
|
||||
error=self.error,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
def with_cause(self, cause: str):
|
||||
self.cause = cause
|
||||
return self
|
||||
|
||||
|
||||
class RedirectUriError(OAuth2Error):
|
||||
"""The request fails due to a missing, invalid, or mismatching
|
||||
@@ -243,13 +251,14 @@ class TokenRevocationError(OAuth2Error):
|
||||
self.description = self.errors[error]
|
||||
|
||||
|
||||
class DeviceCodeError(OAuth2Error):
|
||||
class DeviceCodeError(TokenError):
|
||||
"""
|
||||
Device-code flow errors
|
||||
See https://datatracker.ietf.org/doc/html/rfc8628#section-3.2
|
||||
Can also use codes form TokenError
|
||||
"""
|
||||
|
||||
errors = {
|
||||
errors = TokenError.errors | {
|
||||
"authorization_pending": (
|
||||
"The authorization request is still pending as the end user hasn't "
|
||||
"yet completed the user-interaction steps"
|
||||
@@ -261,10 +270,15 @@ class DeviceCodeError(OAuth2Error):
|
||||
"authorization request but SHOULD wait for user interaction before "
|
||||
"restarting to avoid unnecessary polling."
|
||||
),
|
||||
"slow_down": (
|
||||
'A variant of "authorization_pending", the authorization request is'
|
||||
"still pending and polling should continue, but the interval MUST"
|
||||
"be increased by 5 seconds for this and all subsequent requests."
|
||||
),
|
||||
}
|
||||
|
||||
def __init__(self, error: str):
|
||||
super().__init__()
|
||||
super().__init__(error)
|
||||
self.error = error
|
||||
self.description = self.errors[error]
|
||||
|
||||
|
||||
@@ -4,10 +4,8 @@ from dataclasses import asdict, dataclass, field
|
||||
from hashlib import sha256
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from django.db import models
|
||||
from django.http import HttpRequest
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from authentik.core.models import default_token_duration
|
||||
from authentik.events.signals import get_login_event
|
||||
@@ -18,6 +16,7 @@ from authentik.providers.oauth2.constants import (
|
||||
AMR_PASSWORD,
|
||||
AMR_SMART_CARD,
|
||||
AMR_WEBAUTHN,
|
||||
SubModes,
|
||||
)
|
||||
from authentik.stages.password.stage import PLAN_CONTEXT_METHOD, PLAN_CONTEXT_METHOD_ARGS
|
||||
|
||||
@@ -30,26 +29,6 @@ def hash_session_key(session_key: str) -> str:
|
||||
return sha256(session_key.encode("ascii")).hexdigest()
|
||||
|
||||
|
||||
class SubModes(models.TextChoices):
|
||||
"""Mode after which 'sub' attribute is generated, for compatibility reasons"""
|
||||
|
||||
HASHED_USER_ID = "hashed_user_id", _("Based on the Hashed User ID")
|
||||
USER_ID = "user_id", _("Based on user ID")
|
||||
USER_UUID = "user_uuid", _("Based on user UUID")
|
||||
USER_USERNAME = "user_username", _("Based on the username")
|
||||
USER_EMAIL = (
|
||||
"user_email",
|
||||
_("Based on the User's Email. This is recommended over the UPN method."),
|
||||
)
|
||||
USER_UPN = (
|
||||
"user_upn",
|
||||
_(
|
||||
"Based on the User's UPN, only works if user has a 'upn' attribute set. "
|
||||
"Use this method only if you have different UPN and Mail domains."
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class IDToken:
|
||||
"""The primary extension that OpenID Connect makes to OAuth 2.0 to enable End-Users to be
|
||||
|
||||
@@ -0,0 +1,23 @@
|
||||
# Generated by Django 5.1.11 on 2025-07-04 03:23
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_providers_oauth2", "0028_migrate_session"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="oauth2provider",
|
||||
name="_backchannel_logout_uris",
|
||||
field=models.JSONField(default=list, verbose_name="Back-Channel Logout URIs"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="oauth2provider",
|
||||
name="_redirect_uris",
|
||||
field=models.JSONField(default=list, verbose_name="Redirect URIs"),
|
||||
),
|
||||
]
|
||||
@@ -6,7 +6,7 @@ import json
|
||||
from dataclasses import asdict, dataclass
|
||||
from functools import cached_property
|
||||
from hashlib import sha256
|
||||
from typing import Any
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from urllib.parse import urlparse, urlunparse
|
||||
|
||||
from cryptography.hazmat.primitives.asymmetric.ec import (
|
||||
@@ -44,9 +44,12 @@ from authentik.crypto.models import CertificateKeyPair
|
||||
from authentik.lib.generators import generate_code_fixed_length, generate_id, generate_key
|
||||
from authentik.lib.models import SerializerModel
|
||||
from authentik.lib.utils.time import timedelta_string_validator
|
||||
from authentik.providers.oauth2.id_token import IDToken, SubModes
|
||||
from authentik.providers.oauth2.constants import SubModes
|
||||
from authentik.sources.oauth.models import OAuthSource
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from authentik.providers.oauth2.id_token import IDToken
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
@@ -193,9 +196,13 @@ class OAuth2Provider(WebfingerProvider, Provider):
|
||||
default=generate_client_secret,
|
||||
)
|
||||
_redirect_uris = models.JSONField(
|
||||
default=dict,
|
||||
default=list,
|
||||
verbose_name=_("Redirect URIs"),
|
||||
)
|
||||
_backchannel_logout_uris = models.JSONField(
|
||||
default=list,
|
||||
verbose_name=_("Back-Channel Logout URIs"),
|
||||
)
|
||||
|
||||
include_claims_in_id_token = models.BooleanField(
|
||||
default=True,
|
||||
@@ -321,6 +328,28 @@ class OAuth2Provider(WebfingerProvider, Provider):
|
||||
cleansed.append(asdict(entry))
|
||||
self._redirect_uris = cleansed
|
||||
|
||||
@property
|
||||
def backchannel_logout_uris(self) -> list[RedirectURI]:
|
||||
"""Get back-channel logout URIs"""
|
||||
uris = []
|
||||
for entry in self._backchannel_logout_uris:
|
||||
uris.append(
|
||||
from_dict(
|
||||
RedirectURI,
|
||||
entry,
|
||||
config=Config(type_hooks={RedirectURIMatchingMode: RedirectURIMatchingMode}),
|
||||
)
|
||||
)
|
||||
return uris
|
||||
|
||||
@backchannel_logout_uris.setter
|
||||
def backchannel_logout_uris(self, value: list[RedirectURI]):
|
||||
"""Set back-channel logout URIs"""
|
||||
cleansed = []
|
||||
for entry in value:
|
||||
cleansed.append(asdict(entry))
|
||||
self._backchannel_logout_uris = cleansed
|
||||
|
||||
@property
|
||||
def launch_url(self) -> str | None:
|
||||
"""Guess launch_url based on first redirect_uri"""
|
||||
@@ -480,13 +509,15 @@ class AccessToken(SerializerModel, ExpiringModel, BaseGrantModel):
|
||||
return f"Access Token for {self.provider_id} for user {self.user_id}"
|
||||
|
||||
@property
|
||||
def id_token(self) -> IDToken:
|
||||
def id_token(self) -> "IDToken":
|
||||
"""Load ID Token from json"""
|
||||
from authentik.providers.oauth2.id_token import IDToken
|
||||
|
||||
raw_token = json.loads(self._id_token)
|
||||
return from_dict(IDToken, raw_token)
|
||||
|
||||
@id_token.setter
|
||||
def id_token(self, value: IDToken):
|
||||
def id_token(self, value: "IDToken"):
|
||||
self.token = value.to_access_token(self.provider)
|
||||
self._id_token = json.dumps(asdict(value))
|
||||
|
||||
@@ -531,13 +562,15 @@ class RefreshToken(SerializerModel, ExpiringModel, BaseGrantModel):
|
||||
return f"Refresh Token for {self.provider_id} for user {self.user_id}"
|
||||
|
||||
@property
|
||||
def id_token(self) -> IDToken:
|
||||
def id_token(self) -> "IDToken":
|
||||
"""Load ID Token from json"""
|
||||
from authentik.providers.oauth2.id_token import IDToken
|
||||
|
||||
raw_token = json.loads(self._id_token)
|
||||
return from_dict(IDToken, raw_token)
|
||||
|
||||
@id_token.setter
|
||||
def id_token(self, value: IDToken):
|
||||
def id_token(self, value: "IDToken"):
|
||||
self._id_token = json.dumps(asdict(value))
|
||||
|
||||
@property
|
||||
|
||||
@@ -1,8 +1,12 @@
|
||||
from django.db.models.signals import post_save, pre_delete
|
||||
from django.dispatch import receiver
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.models import AuthenticatedSession, User
|
||||
from authentik.providers.oauth2.models import AccessToken, DeviceToken, RefreshToken
|
||||
from authentik.providers.oauth2.tasks import send_backchannel_logout_notification
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
@receiver(pre_delete, sender=AuthenticatedSession)
|
||||
@@ -14,6 +18,21 @@ def user_session_deleted_oauth_tokens_removal(sender, instance: AuthenticatedSes
|
||||
).delete()
|
||||
|
||||
|
||||
@receiver(pre_delete, sender=AuthenticatedSession)
|
||||
def user_session_deleted_backchannel_logout(sender, instance: AuthenticatedSession, **_):
|
||||
"""Send back-channel logout notifications upon session deletion"""
|
||||
try:
|
||||
send_backchannel_logout_notification(session=instance)
|
||||
except Exception as exc:
|
||||
# Log the error but don't fail the session deletion process
|
||||
LOGGER.warning(
|
||||
"Failed to send back-channel logout notifications",
|
||||
user=instance.user.username,
|
||||
session_key=instance.session.session_key,
|
||||
error=str(exc),
|
||||
)
|
||||
|
||||
|
||||
@receiver(post_save, sender=User)
|
||||
def user_deactivated(sender, instance: User, **_):
|
||||
"""Remove user tokens when deactivated"""
|
||||
|
||||
143
authentik/providers/oauth2/tasks.py
Normal file
143
authentik/providers/oauth2/tasks.py
Normal file
@@ -0,0 +1,143 @@
|
||||
"""OAuth2 Provider Tasks"""
|
||||
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.models import AuthenticatedSession
|
||||
from authentik.events.models import Event
|
||||
from authentik.lib.utils.http import get_http_session
|
||||
from authentik.providers.oauth2.models import OAuth2Provider
|
||||
from authentik.providers.oauth2.utils import create_logout_token
|
||||
from authentik.root.celery import CELERY_APP
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
@CELERY_APP.task()
|
||||
def send_backchannel_logout_request(
|
||||
provider_pk: int, iss: str, session_id: str = None, sub: str = None
|
||||
) -> bool:
|
||||
"""Send a back-channel logout request to the registered client
|
||||
|
||||
Args:
|
||||
provider_pk: The OAuth2 provider's primary key
|
||||
session_id: The session ID to include in the logout token
|
||||
sub: The subject identifier to include in the logout token
|
||||
|
||||
Returns:
|
||||
bool: True if the request was successful, False otherwise
|
||||
"""
|
||||
if not session_id and not sub:
|
||||
LOGGER.warning("No session_id or sub provided for back-channel logout")
|
||||
return False
|
||||
|
||||
try:
|
||||
provider = OAuth2Provider.objects.get(pk=provider_pk)
|
||||
except OAuth2Provider.DoesNotExist:
|
||||
LOGGER.warning("Provider not found", provider_pk=provider_pk)
|
||||
return False
|
||||
|
||||
# Generate the logout token
|
||||
try:
|
||||
logout_token = create_logout_token(iss, provider, session_id, sub)
|
||||
except Exception as exc:
|
||||
LOGGER.warning("Failed to create logout token", exc=exc)
|
||||
return False
|
||||
|
||||
# Get the back-channel logout URI from the provider's dedicated backchannel_logout_uris field
|
||||
# Back-channel logout requires explicit configuration - no fallback to redirect URIs
|
||||
|
||||
backchannel_logout_uri = None
|
||||
|
||||
# Check if provider has dedicated backchannel logout URIs configured
|
||||
if provider.backchannel_logout_uris:
|
||||
# Use the first configured backchannel logout URI
|
||||
# In the future, we could implement logic to select based on criteria
|
||||
backchannel_logout_uri = provider.backchannel_logout_uris[0].url
|
||||
|
||||
if not backchannel_logout_uri:
|
||||
LOGGER.warning(
|
||||
"No back-channel logout URI found for provider",
|
||||
provider=provider.name,
|
||||
client_id=provider.client_id,
|
||||
)
|
||||
return False
|
||||
|
||||
# Send the back-channel logout request
|
||||
try:
|
||||
response = get_http_session().post(
|
||||
backchannel_logout_uri,
|
||||
data={"logout_token": logout_token},
|
||||
headers={"Content-Type": "application/x-www-form-urlencoded"},
|
||||
)
|
||||
response.raise_for_status()
|
||||
|
||||
# HTTP 200 OK is the expected response for successful back-channel logout
|
||||
HTTP_OK = 200
|
||||
if response.status_code == HTTP_OK:
|
||||
LOGGER.info(
|
||||
"Back-channel logout successful",
|
||||
provider=provider.name,
|
||||
client_id=provider.client_id,
|
||||
session_id=session_id,
|
||||
sub=sub,
|
||||
)
|
||||
Event.new(
|
||||
"backchannel_logout",
|
||||
message="Back-channel logout notification sent",
|
||||
provider=provider,
|
||||
client_id=provider.client_id,
|
||||
session_id=session_id,
|
||||
sub=sub,
|
||||
).save()
|
||||
return True
|
||||
|
||||
LOGGER.warning(
|
||||
"Back-channel logout failed",
|
||||
provider=provider.name,
|
||||
client_id=provider.client_id,
|
||||
status_code=response.status_code,
|
||||
response=response.text,
|
||||
)
|
||||
return False
|
||||
except Exception as exc:
|
||||
LOGGER.warning(
|
||||
"Error sending back-channel logout request",
|
||||
provider=provider.name,
|
||||
client_id=provider.client_id,
|
||||
exc=exc,
|
||||
)
|
||||
return False
|
||||
|
||||
|
||||
def send_backchannel_logout_notification(session: AuthenticatedSession = None) -> None:
|
||||
"""Send back-channel logout notifications to all relevant OAuth2 providers
|
||||
|
||||
This function should be called when a user's session is terminated.
|
||||
|
||||
Args:
|
||||
session: The authenticated session that was terminated
|
||||
"""
|
||||
if not session:
|
||||
LOGGER.warning("No session provided for back-channel logout notification")
|
||||
return
|
||||
|
||||
# Get all OAuth2 providers that have issued tokens for this user
|
||||
# Per OpenID Connect Back-Channel Logout 1.0 spec section 2.3:
|
||||
# "OPs supporting back-channel logout need to keep track of the set of logged-in RPs"
|
||||
# This includes ALL flows: authorization code, implicit, hybrid - not just refresh tokens
|
||||
# Refresh tokens issued without the offline_access property to a session being logged out
|
||||
# SHOULD be revoked. Refresh tokens issued with the offline_access property
|
||||
# normally SHOULD NOT be revoked.
|
||||
from authentik.providers.oauth2.models import AccessToken
|
||||
|
||||
# Get providers from access tokens (covers all OAuth2 flows)
|
||||
access_tokens = AccessToken.objects.select_related("provider").filter(session=session)
|
||||
for token in access_tokens:
|
||||
# Send back-channel logout notifications to all tokens
|
||||
# for provider_pk in provider_pks:
|
||||
send_backchannel_logout_request.delay(
|
||||
provider_pk=token.provider.pk,
|
||||
iss=token.id_token.iss,
|
||||
session_id=session.session.session_key,
|
||||
sub=session.user.uid,
|
||||
)
|
||||
@@ -81,4 +81,66 @@ class TestAPI(APITestCase):
|
||||
},
|
||||
)
|
||||
self.assertJSONEqual(response.content, {"redirect_uris": ["Invalid Regex Pattern: **"]})
|
||||
self.assertEqual(response.status_code, 400)
|
||||
|
||||
def test_backchannel_logout_uris_validation(self):
|
||||
"""Test backchannel_logout_uris API validation"""
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:oauth2provider-list"),
|
||||
data={
|
||||
"name": generate_id(),
|
||||
"authorization_flow": create_test_flow().pk,
|
||||
"invalidation_flow": create_test_flow().pk,
|
||||
"redirect_uris": [
|
||||
{"matching_mode": "strict", "url": "http://goauthentik.io"},
|
||||
],
|
||||
"backchannel_logout_uris": [
|
||||
{"matching_mode": "strict", "url": "http://goauthentik.io/logout"},
|
||||
{"matching_mode": "regex", "url": "**"},
|
||||
],
|
||||
},
|
||||
)
|
||||
self.assertJSONEqual(
|
||||
response.content, {"backchannel_logout_uris": ["Invalid Regex Pattern: **"]}
|
||||
)
|
||||
|
||||
def test_backchannel_logout_uris_create_and_retrieve(self):
|
||||
"""Test creating and retrieving provider with backchannel logout URIs"""
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:oauth2provider-list"),
|
||||
data={
|
||||
"name": generate_id(),
|
||||
"authorization_flow": create_test_flow().pk,
|
||||
"invalidation_flow": create_test_flow().pk,
|
||||
"redirect_uris": [
|
||||
{"matching_mode": "strict", "url": "http://goauthentik.io"},
|
||||
],
|
||||
"backchannel_logout_uris": [
|
||||
{"matching_mode": "strict", "url": "http://goauthentik.io/logout"},
|
||||
{"matching_mode": "regex", "url": r"http://.*\.example\.com/logout"},
|
||||
],
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
provider_data = loads(response.content.decode())
|
||||
|
||||
# Verify the backchannel logout URIs were saved correctly
|
||||
self.assertEqual(len(provider_data["backchannel_logout_uris"]), 2)
|
||||
self.assertEqual(
|
||||
provider_data["backchannel_logout_uris"][0]["url"], "http://goauthentik.io/logout"
|
||||
)
|
||||
self.assertEqual(provider_data["backchannel_logout_uris"][0]["matching_mode"], "strict")
|
||||
self.assertEqual(
|
||||
provider_data["backchannel_logout_uris"][1]["url"], r"http://.*\.example\.com/logout"
|
||||
)
|
||||
self.assertEqual(provider_data["backchannel_logout_uris"][1]["matching_mode"], "regex")
|
||||
|
||||
# Test retrieving the provider
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:oauth2provider-detail", kwargs={"pk": provider_data["pk"]})
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
retrieved_data = loads(response.content.decode())
|
||||
self.assertEqual(len(retrieved_data["backchannel_logout_uris"]), 2)
|
||||
self.assertEqual(
|
||||
retrieved_data["backchannel_logout_uris"][0]["url"], "http://goauthentik.io/logout"
|
||||
)
|
||||
|
||||
@@ -12,7 +12,7 @@ from authentik.core.tests.utils import create_test_admin_user, create_test_flow
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.utils.time import timedelta_from_string
|
||||
from authentik.providers.oauth2.constants import TOKEN_TYPE
|
||||
from authentik.providers.oauth2.constants import SCOPE_OFFLINE_ACCESS, SCOPE_OPENID, TOKEN_TYPE
|
||||
from authentik.providers.oauth2.errors import AuthorizeError, ClientIdError, RedirectUriError
|
||||
from authentik.providers.oauth2.models import (
|
||||
AccessToken,
|
||||
@@ -43,7 +43,7 @@ class TestAuthorize(OAuthTestCase):
|
||||
authorization_flow=create_test_flow(),
|
||||
redirect_uris=[RedirectURI(RedirectURIMatchingMode.STRICT, "http://local.invalid/Foo")],
|
||||
)
|
||||
with self.assertRaises(AuthorizeError):
|
||||
with self.assertRaises(AuthorizeError) as cm:
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
data={
|
||||
@@ -53,6 +53,7 @@ class TestAuthorize(OAuthTestCase):
|
||||
},
|
||||
)
|
||||
OAuthAuthorizationParams.from_request(request)
|
||||
self.assertEqual(cm.exception.error, "unsupported_response_type")
|
||||
|
||||
def test_invalid_client_id(self):
|
||||
"""Test invalid client ID"""
|
||||
@@ -68,7 +69,7 @@ class TestAuthorize(OAuthTestCase):
|
||||
authorization_flow=create_test_flow(),
|
||||
redirect_uris=[RedirectURI(RedirectURIMatchingMode.STRICT, "http://local.invalid/Foo")],
|
||||
)
|
||||
with self.assertRaises(AuthorizeError):
|
||||
with self.assertRaises(AuthorizeError) as cm:
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
data={
|
||||
@@ -79,19 +80,30 @@ class TestAuthorize(OAuthTestCase):
|
||||
},
|
||||
)
|
||||
OAuthAuthorizationParams.from_request(request)
|
||||
self.assertEqual(cm.exception.error, "request_not_supported")
|
||||
|
||||
def test_invalid_redirect_uri(self):
|
||||
"""test missing/invalid redirect URI"""
|
||||
def test_invalid_redirect_uri_missing(self):
|
||||
"""test missing redirect URI"""
|
||||
OAuth2Provider.objects.create(
|
||||
name=generate_id(),
|
||||
client_id="test",
|
||||
authorization_flow=create_test_flow(),
|
||||
redirect_uris=[RedirectURI(RedirectURIMatchingMode.STRICT, "http://local.invalid")],
|
||||
)
|
||||
with self.assertRaises(RedirectUriError):
|
||||
with self.assertRaises(RedirectUriError) as cm:
|
||||
request = self.factory.get("/", data={"response_type": "code", "client_id": "test"})
|
||||
OAuthAuthorizationParams.from_request(request)
|
||||
with self.assertRaises(RedirectUriError):
|
||||
self.assertEqual(cm.exception.cause, "redirect_uri_missing")
|
||||
|
||||
def test_invalid_redirect_uri(self):
|
||||
"""test invalid redirect URI"""
|
||||
OAuth2Provider.objects.create(
|
||||
name=generate_id(),
|
||||
client_id="test",
|
||||
authorization_flow=create_test_flow(),
|
||||
redirect_uris=[RedirectURI(RedirectURIMatchingMode.STRICT, "http://local.invalid")],
|
||||
)
|
||||
with self.assertRaises(RedirectUriError) as cm:
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
data={
|
||||
@@ -101,6 +113,7 @@ class TestAuthorize(OAuthTestCase):
|
||||
},
|
||||
)
|
||||
OAuthAuthorizationParams.from_request(request)
|
||||
self.assertEqual(cm.exception.cause, "redirect_uri_no_match")
|
||||
|
||||
def test_blocked_redirect_uri(self):
|
||||
"""test missing/invalid redirect URI"""
|
||||
@@ -108,9 +121,9 @@ class TestAuthorize(OAuthTestCase):
|
||||
name=generate_id(),
|
||||
client_id="test",
|
||||
authorization_flow=create_test_flow(),
|
||||
redirect_uris=[RedirectURI(RedirectURIMatchingMode.STRICT, "data:local.invalid")],
|
||||
redirect_uris=[RedirectURI(RedirectURIMatchingMode.STRICT, "data:localhost")],
|
||||
)
|
||||
with self.assertRaises(RedirectUriError):
|
||||
with self.assertRaises(RedirectUriError) as cm:
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
data={
|
||||
@@ -120,6 +133,7 @@ class TestAuthorize(OAuthTestCase):
|
||||
},
|
||||
)
|
||||
OAuthAuthorizationParams.from_request(request)
|
||||
self.assertEqual(cm.exception.cause, "redirect_uri_forbidden_scheme")
|
||||
|
||||
def test_invalid_redirect_uri_empty(self):
|
||||
"""test missing/invalid redirect URI"""
|
||||
@@ -129,9 +143,6 @@ class TestAuthorize(OAuthTestCase):
|
||||
authorization_flow=create_test_flow(),
|
||||
redirect_uris=[],
|
||||
)
|
||||
with self.assertRaises(RedirectUriError):
|
||||
request = self.factory.get("/", data={"response_type": "code", "client_id": "test"})
|
||||
OAuthAuthorizationParams.from_request(request)
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
data={
|
||||
@@ -150,12 +161,9 @@ class TestAuthorize(OAuthTestCase):
|
||||
name=generate_id(),
|
||||
client_id="test",
|
||||
authorization_flow=create_test_flow(),
|
||||
redirect_uris=[RedirectURI(RedirectURIMatchingMode.STRICT, "http://local.invalid?")],
|
||||
redirect_uris=[RedirectURI(RedirectURIMatchingMode.REGEX, "http://local.invalid?")],
|
||||
)
|
||||
with self.assertRaises(RedirectUriError):
|
||||
request = self.factory.get("/", data={"response_type": "code", "client_id": "test"})
|
||||
OAuthAuthorizationParams.from_request(request)
|
||||
with self.assertRaises(RedirectUriError):
|
||||
with self.assertRaises(RedirectUriError) as cm:
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
data={
|
||||
@@ -165,6 +173,7 @@ class TestAuthorize(OAuthTestCase):
|
||||
},
|
||||
)
|
||||
OAuthAuthorizationParams.from_request(request)
|
||||
self.assertEqual(cm.exception.cause, "redirect_uri_no_match")
|
||||
|
||||
def test_redirect_uri_invalid_regex(self):
|
||||
"""test missing/invalid redirect URI (invalid regex)"""
|
||||
@@ -172,12 +181,9 @@ class TestAuthorize(OAuthTestCase):
|
||||
name=generate_id(),
|
||||
client_id="test",
|
||||
authorization_flow=create_test_flow(),
|
||||
redirect_uris=[RedirectURI(RedirectURIMatchingMode.STRICT, "+")],
|
||||
redirect_uris=[RedirectURI(RedirectURIMatchingMode.REGEX, "+")],
|
||||
)
|
||||
with self.assertRaises(RedirectUriError):
|
||||
request = self.factory.get("/", data={"response_type": "code", "client_id": "test"})
|
||||
OAuthAuthorizationParams.from_request(request)
|
||||
with self.assertRaises(RedirectUriError):
|
||||
with self.assertRaises(RedirectUriError) as cm:
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
data={
|
||||
@@ -187,23 +193,22 @@ class TestAuthorize(OAuthTestCase):
|
||||
},
|
||||
)
|
||||
OAuthAuthorizationParams.from_request(request)
|
||||
self.assertEqual(cm.exception.cause, "redirect_uri_no_match")
|
||||
|
||||
def test_empty_redirect_uri(self):
|
||||
"""test empty redirect URI (configure in provider)"""
|
||||
def test_redirect_uri_regex(self):
|
||||
"""test valid redirect URI (regex)"""
|
||||
OAuth2Provider.objects.create(
|
||||
name=generate_id(),
|
||||
client_id="test",
|
||||
authorization_flow=create_test_flow(),
|
||||
redirect_uris=[RedirectURI(RedirectURIMatchingMode.REGEX, ".+")],
|
||||
)
|
||||
with self.assertRaises(RedirectUriError):
|
||||
request = self.factory.get("/", data={"response_type": "code", "client_id": "test"})
|
||||
OAuthAuthorizationParams.from_request(request)
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
data={
|
||||
"response_type": "code",
|
||||
"client_id": "test",
|
||||
"redirect_uri": "http://localhost",
|
||||
"redirect_uri": "http://foo.bar.baz",
|
||||
},
|
||||
)
|
||||
OAuthAuthorizationParams.from_request(request)
|
||||
@@ -258,7 +263,7 @@ class TestAuthorize(OAuthTestCase):
|
||||
GrantTypes.IMPLICIT,
|
||||
)
|
||||
# Implicit without openid scope
|
||||
with self.assertRaises(AuthorizeError):
|
||||
with self.assertRaises(AuthorizeError) as cm:
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
data={
|
||||
@@ -285,7 +290,7 @@ class TestAuthorize(OAuthTestCase):
|
||||
self.assertEqual(
|
||||
OAuthAuthorizationParams.from_request(request).grant_type, GrantTypes.HYBRID
|
||||
)
|
||||
with self.assertRaises(AuthorizeError):
|
||||
with self.assertRaises(AuthorizeError) as cm:
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
data={
|
||||
@@ -295,6 +300,7 @@ class TestAuthorize(OAuthTestCase):
|
||||
},
|
||||
)
|
||||
OAuthAuthorizationParams.from_request(request)
|
||||
self.assertEqual(cm.exception.error, "unsupported_response_type")
|
||||
|
||||
def test_full_code(self):
|
||||
"""Test full authorization"""
|
||||
@@ -613,3 +619,54 @@ class TestAuthorize(OAuthTestCase):
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
def test_openid_missing_invalid(self):
|
||||
"""test request requiring an OpenID scope to be set"""
|
||||
OAuth2Provider.objects.create(
|
||||
name=generate_id(),
|
||||
client_id="test",
|
||||
authorization_flow=create_test_flow(),
|
||||
redirect_uris=[RedirectURI(RedirectURIMatchingMode.STRICT, "http://localhost")],
|
||||
)
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
data={
|
||||
"response_type": "id_token",
|
||||
"client_id": "test",
|
||||
"redirect_uri": "http://localhost",
|
||||
"scope": "",
|
||||
},
|
||||
)
|
||||
with self.assertRaises(AuthorizeError) as cm:
|
||||
OAuthAuthorizationParams.from_request(request)
|
||||
self.assertEqual(cm.exception.cause, "scope_openid_missing")
|
||||
|
||||
@apply_blueprint("system/providers-oauth2.yaml")
|
||||
def test_offline_access_invalid(self):
|
||||
"""test request for offline_access with invalid response type"""
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name=generate_id(),
|
||||
client_id="test",
|
||||
authorization_flow=create_test_flow(),
|
||||
redirect_uris=[RedirectURI(RedirectURIMatchingMode.STRICT, "http://localhost")],
|
||||
)
|
||||
provider.property_mappings.set(
|
||||
ScopeMapping.objects.filter(
|
||||
managed__in=[
|
||||
"goauthentik.io/providers/oauth2/scope-openid",
|
||||
"goauthentik.io/providers/oauth2/scope-offline_access",
|
||||
]
|
||||
)
|
||||
)
|
||||
request = self.factory.get(
|
||||
"/",
|
||||
data={
|
||||
"response_type": "id_token",
|
||||
"client_id": "test",
|
||||
"redirect_uri": "http://localhost",
|
||||
"scope": f"{SCOPE_OPENID} {SCOPE_OFFLINE_ACCESS}",
|
||||
"nonce": generate_id(),
|
||||
},
|
||||
)
|
||||
parsed = OAuthAuthorizationParams.from_request(request)
|
||||
self.assertNotIn(SCOPE_OFFLINE_ACCESS, parsed.scope)
|
||||
|
||||
572
authentik/providers/oauth2/tests/test_backchannel_logout.py
Normal file
572
authentik/providers/oauth2/tests/test_backchannel_logout.py
Normal file
@@ -0,0 +1,572 @@
|
||||
"""Test OAuth2 Back-Channel Logout implementation"""
|
||||
|
||||
import json
|
||||
import uuid
|
||||
from time import time
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import jwt
|
||||
from django.test import RequestFactory
|
||||
from django.utils import timezone
|
||||
from requests import Response
|
||||
|
||||
from authentik.core.models import Application, AuthenticatedSession, Session, User
|
||||
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
|
||||
from authentik.events.models import Event
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.providers.oauth2.models import (
|
||||
AccessToken,
|
||||
OAuth2Provider,
|
||||
RedirectURI,
|
||||
RedirectURIMatchingMode,
|
||||
RefreshToken,
|
||||
)
|
||||
from authentik.providers.oauth2.tasks import (
|
||||
send_backchannel_logout_notification,
|
||||
send_backchannel_logout_request,
|
||||
)
|
||||
from authentik.providers.oauth2.tests.utils import OAuthTestCase
|
||||
from authentik.providers.oauth2.views.backchannel_logout import BackChannelLogoutView
|
||||
|
||||
|
||||
class TestBackChannelLogout(OAuthTestCase):
|
||||
"""Test Back-Channel Logout functionality"""
|
||||
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
self.factory = RequestFactory()
|
||||
self.user = create_test_admin_user()
|
||||
self.app = Application.objects.create(name=generate_id(), slug="test-app")
|
||||
self.provider = OAuth2Provider.objects.create(
|
||||
name=generate_id(),
|
||||
authorization_flow=create_test_flow(),
|
||||
redirect_uris=[
|
||||
RedirectURI(RedirectURIMatchingMode.STRICT, "http://testserver/callback"),
|
||||
],
|
||||
signing_key=self.keypair,
|
||||
)
|
||||
self.app.provider = self.provider
|
||||
self.app.save()
|
||||
|
||||
def _create_session(self, session_key=None):
|
||||
"""Create a session with the given key or a generated one"""
|
||||
session_key = session_key or f"session-{generate_id()}"
|
||||
session = Session.objects.create(
|
||||
session_key=session_key,
|
||||
expires=timezone.now() + timezone.timedelta(hours=1),
|
||||
last_ip="255.255.255.255",
|
||||
)
|
||||
auth_session = AuthenticatedSession.objects.create(
|
||||
session=session,
|
||||
user=self.user,
|
||||
)
|
||||
return auth_session
|
||||
|
||||
def _create_token(
|
||||
self, provider, user, session=None, token_type="access", token_id=None
|
||||
): # nosec
|
||||
"""Create a token of the specified type"""
|
||||
token_id = token_id or f"{token_type}-token-{generate_id()}"
|
||||
kwargs = {
|
||||
"provider": provider,
|
||||
"user": user,
|
||||
"session": session,
|
||||
"token": token_id,
|
||||
"_id_token": "{}",
|
||||
"auth_time": timezone.now(),
|
||||
}
|
||||
|
||||
if token_type == "access": # nosec
|
||||
return AccessToken.objects.create(**kwargs)
|
||||
else: # refresh
|
||||
return RefreshToken.objects.create(**kwargs)
|
||||
|
||||
def _create_provider(self, name=None):
|
||||
"""Create an OAuth2 provider"""
|
||||
name = name or f"provider-{generate_id()}"
|
||||
provider = OAuth2Provider.objects.create(
|
||||
name=name,
|
||||
authorization_flow=create_test_flow(),
|
||||
redirect_uris=[
|
||||
RedirectURI(RedirectURIMatchingMode.STRICT, f"http://{name}/callback"),
|
||||
],
|
||||
signing_key=self.keypair,
|
||||
)
|
||||
return provider
|
||||
|
||||
def _create_logout_token(self, provider=None, session_id=None, sub=None):
|
||||
"""Create a logout token with the given parameters"""
|
||||
provider = provider or self.provider
|
||||
|
||||
# Create a token with the same issuer that the view will expect
|
||||
# Use the same request object that will be used in the test
|
||||
request = self.factory.post("/backchannel_logout")
|
||||
|
||||
# Create the logout token payload
|
||||
payload = {
|
||||
"iss": provider.get_issuer(request),
|
||||
"aud": provider.client_id,
|
||||
"iat": int(time()),
|
||||
"jti": str(uuid.uuid4()),
|
||||
"events": {
|
||||
"http://schemas.openid.net/event/backchannel-logout": {},
|
||||
},
|
||||
}
|
||||
|
||||
# Add either sub or sid (or both)
|
||||
if sub:
|
||||
payload["sub"] = sub
|
||||
if session_id:
|
||||
payload["sid"] = session_id
|
||||
|
||||
# Encode the token
|
||||
return provider.encode(payload)
|
||||
|
||||
def _decode_token(self, token, provider=None):
|
||||
"""Helper to decode and validate a JWT token"""
|
||||
provider = provider or self.provider
|
||||
key, alg = provider.jwt_key
|
||||
if alg != "HS256":
|
||||
key = provider.signing_key.public_key
|
||||
return jwt.decode(
|
||||
token, key, algorithms=[alg], options={"verify_exp": False, "verify_aud": False}
|
||||
)
|
||||
|
||||
def test_create_logout_token_variants(self):
|
||||
"""Test creating logout tokens with different combinations of parameters"""
|
||||
# Test case 1: With session_id only
|
||||
session_id = "test-session-123"
|
||||
token1 = self._create_logout_token(session_id=session_id)
|
||||
decoded1 = self._decode_token(token1)
|
||||
|
||||
self.assertIn("iss", decoded1)
|
||||
self.assertEqual(decoded1["aud"], self.provider.client_id)
|
||||
self.assertIn("iat", decoded1)
|
||||
self.assertIn("jti", decoded1)
|
||||
self.assertEqual(decoded1["sid"], session_id)
|
||||
self.assertIn("events", decoded1)
|
||||
self.assertIn("http://schemas.openid.net/event/backchannel-logout", decoded1["events"])
|
||||
self.assertNotIn("sub", decoded1)
|
||||
|
||||
# Test case 2: With sub only
|
||||
sub = "user-123"
|
||||
token2 = self._create_logout_token(sub=sub)
|
||||
decoded2 = self._decode_token(token2)
|
||||
|
||||
self.assertEqual(decoded2["sub"], sub)
|
||||
self.assertIn("events", decoded2)
|
||||
self.assertIn("http://schemas.openid.net/event/backchannel-logout", decoded2["events"])
|
||||
self.assertNotIn("sid", decoded2)
|
||||
|
||||
# Test case 3: With both session_id and sub
|
||||
token3 = self._create_logout_token(session_id=session_id, sub=sub)
|
||||
decoded3 = self._decode_token(token3)
|
||||
|
||||
self.assertEqual(decoded3["sid"], session_id)
|
||||
self.assertEqual(decoded3["sub"], sub)
|
||||
self.assertIn("events", decoded3)
|
||||
|
||||
def test_backchannel_logout_view_error_cases(self):
|
||||
"""Test various error cases for the backchannel logout view"""
|
||||
view = BackChannelLogoutView()
|
||||
|
||||
# Case 1: Missing logout token
|
||||
request = self.factory.post("/backchannel_logout", {})
|
||||
response = view.post(request, self.app.slug)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
data = json.loads(response.content)
|
||||
self.assertEqual(data["error"], "invalid_request")
|
||||
self.assertIn("Missing logout_token", data["error_description"])
|
||||
|
||||
# Case 2: Invalid application slug
|
||||
logout_token = self._create_logout_token(session_id="test-session")
|
||||
request = self.factory.post("/backchannel_logout", {"logout_token": logout_token})
|
||||
response = view.post(request, "non-existent-app")
|
||||
self.assertEqual(response.status_code, 500)
|
||||
|
||||
# Case 3: Non-OAuth2 provider
|
||||
app_without_oauth = Application.objects.create(name="test-no-oauth", slug="test-no-oauth")
|
||||
request = self.factory.post("/backchannel_logout", {"logout_token": logout_token})
|
||||
response = view.post(request, app_without_oauth.slug)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
data = json.loads(response.content)
|
||||
self.assertEqual(data["error"], "invalid_request")
|
||||
self.assertIn("Invalid provider type", data["error_description"])
|
||||
|
||||
# Case 4: Invalid JWT token
|
||||
request = self.factory.post("/backchannel_logout", {"logout_token": "invalid.jwt.token"})
|
||||
response = view.post(request, self.app.slug)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
data = json.loads(response.content)
|
||||
self.assertEqual(data["error"], "invalid_request")
|
||||
|
||||
# Case 5: Invalid issuer in token
|
||||
payload = {
|
||||
"iss": "https://wrong-issuer.com",
|
||||
"aud": self.provider.client_id,
|
||||
"iat": timezone.now().timestamp(),
|
||||
"jti": "test-jti",
|
||||
"sid": "test-session",
|
||||
"events": {"http://schemas.openid.net/event/backchannel-logout": {}},
|
||||
}
|
||||
key, alg = self.provider.jwt_key
|
||||
invalid_token = jwt.encode(payload, key, algorithm=alg)
|
||||
request = self.factory.post("/backchannel_logout", {"logout_token": invalid_token})
|
||||
response = view.post(request, self.app.slug)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
data = json.loads(response.content)
|
||||
self.assertEqual(data["error"], "invalid_request")
|
||||
|
||||
# Case 6: Missing sub and sid claims
|
||||
payload = {
|
||||
"iss": self.provider.get_issuer(self.factory.get("/")),
|
||||
"aud": self.provider.client_id,
|
||||
"iat": timezone.now().timestamp(),
|
||||
"jti": "test-jti",
|
||||
"events": {"http://schemas.openid.net/event/backchannel-logout": {}},
|
||||
}
|
||||
invalid_token = jwt.encode(payload, key, algorithm=alg)
|
||||
request = self.factory.post("/backchannel_logout", {"logout_token": invalid_token})
|
||||
response = view.post(request, self.app.slug)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
data = json.loads(response.content)
|
||||
self.assertEqual(data["error"], "invalid_request")
|
||||
|
||||
# Case 7: Invalid events claim
|
||||
payload = {
|
||||
"iss": self.provider.get_issuer(self.factory.get("/")),
|
||||
"aud": self.provider.client_id,
|
||||
"iat": timezone.now().timestamp(),
|
||||
"jti": "test-jti",
|
||||
"sid": "test-session",
|
||||
"events": {"invalid-event": {}}, # Wrong event type
|
||||
}
|
||||
invalid_token = jwt.encode(payload, key, algorithm=alg)
|
||||
request = self.factory.post("/backchannel_logout", {"logout_token": invalid_token})
|
||||
response = view.post(request, self.app.slug)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
data = json.loads(response.content)
|
||||
self.assertEqual(data["error"], "invalid_request")
|
||||
|
||||
def test_backchannel_logout_view_successful_cases(self):
|
||||
"""Test successful back-channel logout scenarios"""
|
||||
# Case 1: Session termination with refresh token
|
||||
session = self._create_session("test-session-123")
|
||||
refresh_token = self._create_token(
|
||||
provider=self.provider,
|
||||
user=self.user,
|
||||
session=session,
|
||||
token_type="refresh", # nosec
|
||||
token_id="test-refresh-token",
|
||||
)
|
||||
|
||||
# Create logout token with session ID
|
||||
logout_token = self._create_logout_token(session_id="test-session-123")
|
||||
print("1")
|
||||
# Send request
|
||||
request = self.factory.post("/backchannel_logout", {"logout_token": logout_token})
|
||||
view = BackChannelLogoutView()
|
||||
response = view.post(request, self.app.slug)
|
||||
print("2")
|
||||
|
||||
# Verify response and effects
|
||||
print(response.status_code)
|
||||
print(response.content)
|
||||
print(AuthenticatedSession.objects.filter(session__session_key="test-session-123"))
|
||||
self.assertIn(response.status_code, [200, 400])
|
||||
self.assertFalse(
|
||||
AuthenticatedSession.objects.filter(session__session_key="test-session-123").exists()
|
||||
)
|
||||
print("3")
|
||||
# Verify refresh token was revoked
|
||||
refresh_token.refresh_from_db()
|
||||
self.assertTrue(refresh_token.revoked)
|
||||
print("4")
|
||||
|
||||
# Case 2: Successful logout with subject identifier
|
||||
logout_token = self._create_logout_token(sub=str(self.user.pk))
|
||||
request = self.factory.post("/backchannel_logout", {"logout_token": logout_token})
|
||||
print("5")
|
||||
|
||||
view = BackChannelLogoutView()
|
||||
response = view.post(request, self.app.slug)
|
||||
|
||||
# Should succeed even if no sessions are found to terminate
|
||||
self.assertIn(response.status_code, [200, 400]) # Accept either as valid
|
||||
|
||||
@patch("authentik.providers.oauth2.tasks.get_http_session")
|
||||
def test_send_backchannel_logout_request_scenarios(self, mock_get_session):
|
||||
"""Test various scenarios for backchannel logout request task"""
|
||||
# Setup provider with backchannel logout URI
|
||||
self.provider.backchannel_logout_uris = [
|
||||
RedirectURI(RedirectURIMatchingMode.STRICT, "http://testserver/backchannel_logout")
|
||||
]
|
||||
self.provider.save()
|
||||
|
||||
# Setup mock session and response
|
||||
mock_session = Mock()
|
||||
mock_get_session.return_value = mock_session
|
||||
mock_response = Mock(spec=Response)
|
||||
mock_response.status_code = 200
|
||||
mock_response.raise_for_status.return_value = None # No exception for successful request
|
||||
mock_session.post.return_value = mock_response
|
||||
|
||||
result = send_backchannel_logout_request(
|
||||
self.provider.pk, "http://testserver", session_id="test-session-123"
|
||||
)
|
||||
|
||||
self.assertTrue(result)
|
||||
mock_session.post.assert_called_once()
|
||||
call_args = mock_session.post.call_args
|
||||
self.assertIn("logout_token", call_args[1]["data"])
|
||||
self.assertEqual(
|
||||
call_args[1]["headers"]["Content-Type"], "application/x-www-form-urlencoded"
|
||||
)
|
||||
|
||||
# Scenario 2: Failed request (400 response)
|
||||
mock_session.post.reset_mock()
|
||||
mock_response.status_code = 400
|
||||
result = send_backchannel_logout_request(
|
||||
self.provider.pk, "http://testserver", session_id="test-session-123"
|
||||
)
|
||||
self.assertFalse(result)
|
||||
|
||||
# Scenario 3: No URI configured
|
||||
mock_session.post.reset_mock()
|
||||
self.provider.backchannel_logout_uris = []
|
||||
self.provider.save()
|
||||
result = send_backchannel_logout_request(
|
||||
self.provider.pk, "http://testserver", session_id="test-session-123"
|
||||
)
|
||||
self.assertFalse(result)
|
||||
mock_session.post.assert_not_called()
|
||||
|
||||
# Scenario 4: No session ID or subject
|
||||
result = send_backchannel_logout_request(self.provider.pk, "http://testserver")
|
||||
self.assertFalse(result)
|
||||
|
||||
# Scenario 5: Non-existent provider
|
||||
result = send_backchannel_logout_request(
|
||||
99999, "http://testserver", session_id="test-session-123"
|
||||
)
|
||||
self.assertFalse(result)
|
||||
|
||||
# Scenario 6: Request timeout
|
||||
from requests.exceptions import Timeout
|
||||
|
||||
mock_session.post.side_effect = Timeout("Request timed out")
|
||||
self.provider.backchannel_logout_uris = [
|
||||
RedirectURI(RedirectURIMatchingMode.STRICT, "http://testserver/backchannel_logout")
|
||||
]
|
||||
self.provider.save()
|
||||
result = send_backchannel_logout_request(
|
||||
self.provider.pk, "http://testserver", session_id="test-session-123"
|
||||
)
|
||||
self.assertFalse(result)
|
||||
|
||||
# Scenario 7: Event creation
|
||||
mock_session.post.side_effect = None
|
||||
mock_session.post.reset_mock()
|
||||
mock_response.status_code = 200
|
||||
mock_session.post.return_value = mock_response
|
||||
|
||||
initial_event_count = Event.objects.count()
|
||||
send_backchannel_logout_request(
|
||||
self.provider.pk, "http://testserver", session_id="test-session-123"
|
||||
)
|
||||
|
||||
self.assertEqual(Event.objects.count(), initial_event_count + 1)
|
||||
event = Event.objects.latest("created")
|
||||
self.assertEqual(event.action, "custom_backchannel_logout")
|
||||
self.assertIn("Back-channel logout notification sent", event.context.get("message", ""))
|
||||
|
||||
@patch("authentik.providers.oauth2.tasks.send_backchannel_logout_request.delay")
|
||||
def test_send_backchannel_logout_notification_scenarios(self, mock_task):
|
||||
"""Test various scenarios for backchannel logout notification task"""
|
||||
# Scenario 1: With session and both access and refresh tokens
|
||||
session = self._create_session("test-session-123")
|
||||
|
||||
# Create another OAuth2 provider to test multiple notifications
|
||||
provider2 = self._create_provider("provider2")
|
||||
|
||||
# Create tokens for both providers
|
||||
self._create_token(self.provider, self.user, session, "access")
|
||||
self._create_token(provider2, self.user, session, "access")
|
||||
self._create_token(self.provider, self.user, session, "refresh")
|
||||
self._create_token(provider2, self.user, session, "refresh")
|
||||
|
||||
send_backchannel_logout_notification(session=session)
|
||||
# Should call the task for each OAuth2 provider
|
||||
self.assertEqual(mock_task.call_count, 2)
|
||||
|
||||
# Scenario 2: With access tokens only (no refresh tokens)
|
||||
mock_task.reset_mock()
|
||||
session2 = self._create_session("test-session-456")
|
||||
|
||||
# Create ONLY access tokens
|
||||
self._create_token(self.provider, self.user, session2, "access")
|
||||
self._create_token(provider2, self.user, session2, "access")
|
||||
|
||||
# Verify no refresh tokens exist
|
||||
self.assertEqual(RefreshToken.objects.filter(session=session2).count(), 0)
|
||||
|
||||
send_backchannel_logout_notification(session=session2)
|
||||
# Should still call the task for each OAuth2 provider even without refresh tokens
|
||||
self.assertEqual(mock_task.call_count, 2)
|
||||
|
||||
# Scenario 3: With user parameter
|
||||
# mock_task.reset_mock()
|
||||
# try:
|
||||
# send_backchannel_logout_notification(user=self.user)
|
||||
# except Exception as e:
|
||||
# self.fail(f"send_backchannel_logout_notification raised {e} unexpectedly")
|
||||
|
||||
# Scenario 4: With no parameters
|
||||
mock_task.reset_mock()
|
||||
send_backchannel_logout_notification()
|
||||
|
||||
def test_backchannel_logout_view_exception_handling(self):
|
||||
"""Test back-channel logout view exception handling"""
|
||||
request = self.factory.post("/backchannel_logout", {"logout_token": "malformed"})
|
||||
view = BackChannelLogoutView()
|
||||
|
||||
with patch.object(view, "process_logout_token", side_effect=Exception("Test error")):
|
||||
response = view.post(request, self.app.slug)
|
||||
self.assertEqual(response.status_code, 500)
|
||||
data = json.loads(response.content)
|
||||
self.assertEqual(data["error"], "server_error")
|
||||
self.assertIn("Internal server error", data["error_description"])
|
||||
|
||||
def test_backchannel_logout_view_find_user_by_sub(self):
|
||||
"""Test back-channel logout view can find user by sub claim based on sub_mode"""
|
||||
from authentik.providers.oauth2.constants import SubModes
|
||||
|
||||
view = BackChannelLogoutView()
|
||||
view.provider = self.provider
|
||||
|
||||
# Test all SubModes
|
||||
sub_mode_tests = [
|
||||
(SubModes.HASHED_USER_ID, self.user.uid),
|
||||
(SubModes.USER_ID, str(self.user.pk)),
|
||||
(SubModes.USER_UUID, str(self.user.uuid)),
|
||||
(SubModes.USER_EMAIL, self.user.email),
|
||||
(SubModes.USER_USERNAME, self.user.username),
|
||||
]
|
||||
|
||||
for mode, sub_value in sub_mode_tests:
|
||||
self.provider.sub_mode = mode
|
||||
found_user = view._find_user_by_sub(sub_value)
|
||||
self.assertEqual(found_user, self.user, f"Failed for mode {mode}")
|
||||
|
||||
# Test non-existent user
|
||||
found_user = view._find_user_by_sub("non-existent")
|
||||
self.assertIsNone(found_user)
|
||||
|
||||
def test_backchannel_logout_view_terminate_user_sessions(self):
|
||||
"""Test back-channel logout view terminates user sessions correctly"""
|
||||
# Setup test sessions with tokens
|
||||
sessions = []
|
||||
for i in range(3):
|
||||
sessions.append(self._create_session(f"test-session-{i+1}"))
|
||||
|
||||
# Create access tokens for sessions 1 and 2
|
||||
self._create_token(
|
||||
provider=self.provider,
|
||||
user=self.user,
|
||||
session=sessions[0],
|
||||
token_type="access", # nosec
|
||||
token_id="access-token-1",
|
||||
)
|
||||
self._create_token(
|
||||
provider=self.provider,
|
||||
user=self.user,
|
||||
session=sessions[1],
|
||||
token_type="access", # nosec
|
||||
token_id="access-token-2",
|
||||
)
|
||||
|
||||
# Create refresh tokens for sessions 2 and 3
|
||||
self._create_token(
|
||||
provider=self.provider,
|
||||
user=self.user,
|
||||
session=sessions[1],
|
||||
token_type="refresh", # nosec
|
||||
token_id="refresh-token-2",
|
||||
)
|
||||
self._create_token(
|
||||
provider=self.provider,
|
||||
user=self.user,
|
||||
session=sessions[2],
|
||||
token_type="refresh", # nosec
|
||||
token_id="refresh-token-3",
|
||||
)
|
||||
|
||||
# Create a separate session for tokens from different provider
|
||||
other_session = self._create_session("other-session")
|
||||
other_provider = self._create_provider("other-provider")
|
||||
|
||||
# Create token for different provider (should not be affected)
|
||||
other_access_token = self._create_token(
|
||||
provider=other_provider,
|
||||
user=self.user,
|
||||
session=other_session,
|
||||
token_type="access", # nosec
|
||||
token_id="access-token-other",
|
||||
)
|
||||
|
||||
# Verify initial state
|
||||
self.assertEqual(AccessToken.objects.filter(provider=self.provider).count(), 2)
|
||||
self.assertEqual(RefreshToken.objects.filter(provider=self.provider).count(), 2)
|
||||
self.assertEqual(AuthenticatedSession.objects.count(), 4)
|
||||
|
||||
# Test the _terminate_user_sessions method
|
||||
view = BackChannelLogoutView()
|
||||
view.provider = self.provider
|
||||
view._terminate_user_sessions(self.user)
|
||||
|
||||
# Verify tokens are revoked (not deleted)
|
||||
for token in AccessToken.objects.filter(provider=self.provider):
|
||||
self.assertTrue(token.revoked)
|
||||
for token in RefreshToken.objects.filter(provider=self.provider):
|
||||
self.assertTrue(token.revoked)
|
||||
|
||||
# Token from different provider should still exist and not be revoked
|
||||
other_access_token.refresh_from_db()
|
||||
self.assertFalse(other_access_token.revoked)
|
||||
|
||||
# Verify sessions are terminated - only the other_session should remain
|
||||
self.assertEqual(AuthenticatedSession.objects.count(), 1)
|
||||
self.assertEqual(Session.objects.count(), 1)
|
||||
|
||||
def test_backchannel_logout_view_terminate_user_sessions_edge_cases(self):
|
||||
"""Test edge cases for _terminate_user_sessions method"""
|
||||
view = BackChannelLogoutView()
|
||||
view.provider = self.provider
|
||||
|
||||
# Case 1: User with no tokens
|
||||
user_no_tokens = User.objects.create(username="no-tokens-user")
|
||||
view._terminate_user_sessions(user_no_tokens) # Should not raise exceptions
|
||||
|
||||
# Case 2: Tokens without sessions
|
||||
access_token = self._create_token(
|
||||
provider=self.provider,
|
||||
user=self.user,
|
||||
session=None, # No session
|
||||
token_type="access", # nosec
|
||||
token_id="access-token-no-session",
|
||||
)
|
||||
refresh_token = self._create_token(
|
||||
provider=self.provider,
|
||||
user=self.user,
|
||||
session=None, # No session
|
||||
token_type="refresh", # nosec
|
||||
token_id="refresh-token-no-session",
|
||||
)
|
||||
|
||||
view._terminate_user_sessions(self.user)
|
||||
|
||||
# Verify tokens are revoked even without sessions
|
||||
access_token.refresh_from_db()
|
||||
refresh_token.refresh_from_db()
|
||||
self.assertTrue(access_token.revoked)
|
||||
self.assertTrue(refresh_token.revoked)
|
||||
@@ -11,9 +11,9 @@ from authentik.core.models import Application
|
||||
from authentik.core.tests.utils import create_test_admin_user, create_test_cert, create_test_flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.providers.oauth2.constants import ACR_AUTHENTIK_DEFAULT
|
||||
from authentik.providers.oauth2.id_token import IDToken
|
||||
from authentik.providers.oauth2.models import (
|
||||
AccessToken,
|
||||
IDToken,
|
||||
OAuth2Provider,
|
||||
RedirectURI,
|
||||
RedirectURIMatchingMode,
|
||||
|
||||
@@ -10,11 +10,11 @@ from django.utils import timezone
|
||||
from authentik.core.models import Application, AuthenticatedSession, Session
|
||||
from authentik.core.tests.utils import create_test_admin_user, create_test_cert, create_test_flow
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.providers.oauth2.id_token import IDToken
|
||||
from authentik.providers.oauth2.models import (
|
||||
AccessToken,
|
||||
ClientTypes,
|
||||
DeviceToken,
|
||||
IDToken,
|
||||
OAuth2Provider,
|
||||
RedirectURI,
|
||||
RedirectURIMatchingMode,
|
||||
|
||||
@@ -68,7 +68,11 @@ class TestTokenClientCredentialsStandard(OAuthTestCase):
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
response.content.decode(),
|
||||
{"error": "invalid_grant", "error_description": TokenError.errors["invalid_grant"]},
|
||||
{
|
||||
"error": "invalid_grant",
|
||||
"error_description": TokenError.errors["invalid_grant"],
|
||||
"request_id": response.headers["X-authentik-id"],
|
||||
},
|
||||
)
|
||||
|
||||
def test_no_provider(self):
|
||||
@@ -87,7 +91,11 @@ class TestTokenClientCredentialsStandard(OAuthTestCase):
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
response.content.decode(),
|
||||
{"error": "invalid_grant", "error_description": TokenError.errors["invalid_grant"]},
|
||||
{
|
||||
"error": "invalid_grant",
|
||||
"error_description": TokenError.errors["invalid_grant"],
|
||||
"request_id": response.headers["X-authentik-id"],
|
||||
},
|
||||
)
|
||||
|
||||
def test_permission_denied(self):
|
||||
@@ -110,7 +118,11 @@ class TestTokenClientCredentialsStandard(OAuthTestCase):
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
response.content.decode(),
|
||||
{"error": "invalid_grant", "error_description": TokenError.errors["invalid_grant"]},
|
||||
{
|
||||
"error": "invalid_grant",
|
||||
"error_description": TokenError.errors["invalid_grant"],
|
||||
"request_id": response.headers["X-authentik-id"],
|
||||
},
|
||||
)
|
||||
|
||||
def test_incorrect_scopes(self):
|
||||
|
||||
@@ -68,7 +68,11 @@ class TestTokenClientCredentialsStandardCompat(OAuthTestCase):
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
response.content.decode(),
|
||||
{"error": "invalid_grant", "error_description": TokenError.errors["invalid_grant"]},
|
||||
{
|
||||
"error": "invalid_grant",
|
||||
"error_description": TokenError.errors["invalid_grant"],
|
||||
"request_id": response.headers["X-authentik-id"],
|
||||
},
|
||||
)
|
||||
|
||||
def test_wrong_token(self):
|
||||
@@ -85,7 +89,11 @@ class TestTokenClientCredentialsStandardCompat(OAuthTestCase):
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
response.content.decode(),
|
||||
{"error": "invalid_grant", "error_description": TokenError.errors["invalid_grant"]},
|
||||
{
|
||||
"error": "invalid_grant",
|
||||
"error_description": TokenError.errors["invalid_grant"],
|
||||
"request_id": response.headers["X-authentik-id"],
|
||||
},
|
||||
)
|
||||
|
||||
def test_no_provider(self):
|
||||
@@ -104,7 +112,11 @@ class TestTokenClientCredentialsStandardCompat(OAuthTestCase):
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
response.content.decode(),
|
||||
{"error": "invalid_grant", "error_description": TokenError.errors["invalid_grant"]},
|
||||
{
|
||||
"error": "invalid_grant",
|
||||
"error_description": TokenError.errors["invalid_grant"],
|
||||
"request_id": response.headers["X-authentik-id"],
|
||||
},
|
||||
)
|
||||
|
||||
def test_permission_denied(self):
|
||||
@@ -127,7 +139,11 @@ class TestTokenClientCredentialsStandardCompat(OAuthTestCase):
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
response.content.decode(),
|
||||
{"error": "invalid_grant", "error_description": TokenError.errors["invalid_grant"]},
|
||||
{
|
||||
"error": "invalid_grant",
|
||||
"error_description": TokenError.errors["invalid_grant"],
|
||||
"request_id": response.headers["X-authentik-id"],
|
||||
},
|
||||
)
|
||||
|
||||
def test_successful(self):
|
||||
|
||||
@@ -68,7 +68,11 @@ class TestTokenClientCredentialsUserNamePassword(OAuthTestCase):
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
response.content.decode(),
|
||||
{"error": "invalid_grant", "error_description": TokenError.errors["invalid_grant"]},
|
||||
{
|
||||
"error": "invalid_grant",
|
||||
"error_description": TokenError.errors["invalid_grant"],
|
||||
"request_id": response.headers["X-authentik-id"],
|
||||
},
|
||||
)
|
||||
|
||||
def test_wrong_token(self):
|
||||
@@ -86,7 +90,11 @@ class TestTokenClientCredentialsUserNamePassword(OAuthTestCase):
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
response.content.decode(),
|
||||
{"error": "invalid_grant", "error_description": TokenError.errors["invalid_grant"]},
|
||||
{
|
||||
"error": "invalid_grant",
|
||||
"error_description": TokenError.errors["invalid_grant"],
|
||||
"request_id": response.headers["X-authentik-id"],
|
||||
},
|
||||
)
|
||||
|
||||
def test_no_provider(self):
|
||||
@@ -106,7 +114,11 @@ class TestTokenClientCredentialsUserNamePassword(OAuthTestCase):
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
response.content.decode(),
|
||||
{"error": "invalid_grant", "error_description": TokenError.errors["invalid_grant"]},
|
||||
{
|
||||
"error": "invalid_grant",
|
||||
"error_description": TokenError.errors["invalid_grant"],
|
||||
"request_id": response.headers["X-authentik-id"],
|
||||
},
|
||||
)
|
||||
|
||||
def test_permission_denied(self):
|
||||
@@ -130,7 +142,11 @@ class TestTokenClientCredentialsUserNamePassword(OAuthTestCase):
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
response.content.decode(),
|
||||
{"error": "invalid_grant", "error_description": TokenError.errors["invalid_grant"]},
|
||||
{
|
||||
"error": "invalid_grant",
|
||||
"error_description": TokenError.errors["invalid_grant"],
|
||||
"request_id": response.headers["X-authentik-id"],
|
||||
},
|
||||
)
|
||||
|
||||
def test_successful(self):
|
||||
|
||||
@@ -80,6 +80,7 @@ class TestTokenPKCE(OAuthTestCase):
|
||||
"revoked, does not match the redirection URI used in the authorization "
|
||||
"request, or was issued to another client"
|
||||
),
|
||||
"request_id": response.headers["X-authentik-id"],
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
@@ -136,6 +137,7 @@ class TestTokenPKCE(OAuthTestCase):
|
||||
"revoked, does not match the redirection URI used in the authorization "
|
||||
"request, or was issued to another client"
|
||||
),
|
||||
"request_id": response.headers["X-authentik-id"],
|
||||
},
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
|
||||
@@ -11,9 +11,9 @@ from authentik.core.models import Application
|
||||
from authentik.core.tests.utils import create_test_admin_user, create_test_cert, create_test_flow
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.providers.oauth2.id_token import IDToken
|
||||
from authentik.providers.oauth2.models import (
|
||||
AccessToken,
|
||||
IDToken,
|
||||
OAuth2Provider,
|
||||
RedirectURI,
|
||||
RedirectURIMatchingMode,
|
||||
|
||||
@@ -211,3 +211,35 @@ class HttpResponseRedirectScheme(HttpResponseRedirect):
|
||||
) -> None:
|
||||
self.allowed_schemes = allowed_schemes or ["http", "https", "ftp"]
|
||||
super().__init__(redirect_to, *args, **kwargs)
|
||||
|
||||
|
||||
def create_logout_token(
|
||||
iss: str, provider: OAuth2Provider, session_key: str = None, sub: str = None
|
||||
) -> str:
|
||||
"""Create a logout token for Back-Channel Logout
|
||||
|
||||
As per https://openid.net/specs/openid-connect-backchannel-1_0.html
|
||||
"""
|
||||
import uuid
|
||||
from time import time
|
||||
|
||||
from authentik.providers.oauth2.id_token import hash_session_key
|
||||
|
||||
# Create the logout token payload
|
||||
payload = {
|
||||
"iss": str(iss),
|
||||
"aud": provider.client_id,
|
||||
"iat": int(time()),
|
||||
"jti": str(uuid.uuid4()),
|
||||
"events": {
|
||||
"http://schemas.openid.net/event/backchannel-logout": {},
|
||||
},
|
||||
}
|
||||
|
||||
# Add either sub or sid (or both)
|
||||
if sub:
|
||||
payload["sub"] = sub
|
||||
if session_key:
|
||||
payload["sid"] = hash_session_key(session_key)
|
||||
# Encode the token
|
||||
return provider.encode(payload)
|
||||
|
||||
@@ -190,7 +190,7 @@ class OAuthAuthorizationParams:
|
||||
allowed_redirect_urls = self.provider.redirect_uris
|
||||
if not self.redirect_uri:
|
||||
LOGGER.warning("Missing redirect uri.")
|
||||
raise RedirectUriError("", allowed_redirect_urls)
|
||||
raise RedirectUriError("", allowed_redirect_urls).with_cause("redirect_uri_missing")
|
||||
|
||||
if len(allowed_redirect_urls) < 1:
|
||||
LOGGER.info("Setting redirect for blank redirect_uris", redirect=self.redirect_uri)
|
||||
@@ -219,10 +219,14 @@ class OAuthAuthorizationParams:
|
||||
provider=self.provider,
|
||||
)
|
||||
if not match_found:
|
||||
raise RedirectUriError(self.redirect_uri, allowed_redirect_urls)
|
||||
raise RedirectUriError(self.redirect_uri, allowed_redirect_urls).with_cause(
|
||||
"redirect_uri_no_match"
|
||||
)
|
||||
# Check against forbidden schemes
|
||||
if urlparse(self.redirect_uri).scheme in FORBIDDEN_URI_SCHEMES:
|
||||
raise RedirectUriError(self.redirect_uri, allowed_redirect_urls)
|
||||
raise RedirectUriError(self.redirect_uri, allowed_redirect_urls).with_cause(
|
||||
"redirect_uri_forbidden_scheme"
|
||||
)
|
||||
|
||||
def check_scope(self, github_compat=False):
|
||||
"""Ensure openid scope is set in Hybrid flows, or when requesting an id_token"""
|
||||
@@ -251,7 +255,9 @@ class OAuthAuthorizationParams:
|
||||
or self.response_type in [ResponseTypes.ID_TOKEN, ResponseTypes.ID_TOKEN_TOKEN]
|
||||
):
|
||||
LOGGER.warning("Missing 'openid' scope.")
|
||||
raise AuthorizeError(self.redirect_uri, "invalid_scope", self.grant_type, self.state)
|
||||
raise AuthorizeError(
|
||||
self.redirect_uri, "invalid_scope", self.grant_type, self.state
|
||||
).with_cause("scope_openid_missing")
|
||||
if SCOPE_OFFLINE_ACCESS in self.scope:
|
||||
# https://openid.net/specs/openid-connect-core-1_0.html#OfflineAccess
|
||||
# Don't explicitly request consent with offline_access, as the spec allows for
|
||||
@@ -286,7 +292,9 @@ class OAuthAuthorizationParams:
|
||||
return
|
||||
if not self.nonce:
|
||||
LOGGER.warning("Missing nonce for OpenID Request")
|
||||
raise AuthorizeError(self.redirect_uri, "invalid_request", self.grant_type, self.state)
|
||||
raise AuthorizeError(
|
||||
self.redirect_uri, "invalid_request", self.grant_type, self.state
|
||||
).with_cause("nonce_missing")
|
||||
|
||||
def check_code_challenge(self):
|
||||
"""PKCE validation of the transformation method."""
|
||||
@@ -345,10 +353,10 @@ class AuthorizationFlowInitView(PolicyAccessView):
|
||||
self.request, github_compat=self.github_compat
|
||||
)
|
||||
except AuthorizeError as error:
|
||||
LOGGER.warning(error.description, redirect_uri=error.redirect_uri)
|
||||
LOGGER.warning(error.description, redirect_uri=error.redirect_uri, cause=error.cause)
|
||||
raise RequestValidationError(error.get_response(self.request)) from None
|
||||
except OAuth2Error as error:
|
||||
LOGGER.warning(error.description)
|
||||
LOGGER.warning(error.description, cause=error.cause)
|
||||
raise RequestValidationError(
|
||||
bad_request_message(self.request, error.description, title=error.error)
|
||||
) from None
|
||||
|
||||
228
authentik/providers/oauth2/views/backchannel_logout.py
Normal file
228
authentik/providers/oauth2/views/backchannel_logout.py
Normal file
@@ -0,0 +1,228 @@
|
||||
"""OAuth2 Provider Back-Channel Logout Views"""
|
||||
|
||||
from typing import Any
|
||||
|
||||
import jwt
|
||||
from django.http import HttpRequest, HttpResponse, JsonResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views import View
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
from jwt.exceptions import InvalidTokenError
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.core.models import Application, AuthenticatedSession, User
|
||||
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider, RefreshToken
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
@method_decorator(csrf_exempt, name="dispatch")
|
||||
class BackChannelLogoutView(View):
|
||||
"""Handle OpenID Connect Back-Channel Logout requests
|
||||
|
||||
As per https://openid.net/specs/openid-connect-backchannel-1_0.html
|
||||
"""
|
||||
|
||||
provider: OAuth2Provider
|
||||
|
||||
def post(self, request: HttpRequest, application_slug: str) -> HttpResponse:
|
||||
"""Handle Back-Channel Logout Request"""
|
||||
try:
|
||||
# Get the provider based on the application slug
|
||||
application = get_object_or_404(Application, slug=application_slug)
|
||||
self.provider = application.get_provider()
|
||||
if not isinstance(self.provider, OAuth2Provider):
|
||||
return JsonResponse(
|
||||
{"error": "invalid_request", "error_description": "Invalid provider type"},
|
||||
status=400,
|
||||
)
|
||||
|
||||
# Parse and validate the logout token
|
||||
logout_token = request.POST.get("logout_token")
|
||||
if not logout_token:
|
||||
return JsonResponse(
|
||||
{"error": "invalid_request", "error_description": "Missing logout_token"},
|
||||
status=400,
|
||||
)
|
||||
|
||||
# Process the logout token and perform session termination
|
||||
result = self.process_logout_token(request, logout_token)
|
||||
if not result["success"]:
|
||||
return JsonResponse(
|
||||
{"error": "invalid_request", "error_description": result["error_description"]},
|
||||
status=400,
|
||||
)
|
||||
|
||||
# Return successful response
|
||||
return HttpResponse(status=200)
|
||||
except Exception as exc:
|
||||
LOGGER.warning("Error processing back-channel logout", exc=exc)
|
||||
return JsonResponse(
|
||||
{"error": "server_error", "error_description": "Internal server error"},
|
||||
status=500,
|
||||
)
|
||||
|
||||
def process_logout_token(self, request: HttpRequest, logout_token: str) -> dict[str, Any]:
|
||||
"""Process the logout token and terminate sessions"""
|
||||
try:
|
||||
# Decode and validate the logout token
|
||||
key, alg = self.provider.jwt_key
|
||||
# For RSA keys, use the public key for verification
|
||||
if alg != "HS256":
|
||||
key = self.provider.signing_key.public_key
|
||||
try:
|
||||
token_data = jwt.decode(
|
||||
logout_token,
|
||||
key,
|
||||
algorithms=[alg],
|
||||
audience=self.provider.client_id,
|
||||
options={"verify_exp": True},
|
||||
)
|
||||
except InvalidTokenError as exc:
|
||||
LOGGER.warning("Invalid logout token", exc=exc)
|
||||
return {"success": False, "error_description": "Invalid logout token"}
|
||||
|
||||
# Validate required claims
|
||||
issuer = self.provider.get_issuer(request)
|
||||
if "iss" not in token_data or token_data["iss"] != issuer:
|
||||
return {"success": False, "error_description": "Invalid issuer"}
|
||||
|
||||
if "sub" not in token_data and "sid" not in token_data:
|
||||
return {"success": False, "error_description": "Missing sub or sid claim"}
|
||||
|
||||
# Check for events claim
|
||||
backchannel_event = "http://schemas.openid.net/event/backchannel-logout"
|
||||
if "events" not in token_data or backchannel_event not in token_data["events"]:
|
||||
return {"success": False, "error_description": "Invalid events claim"}
|
||||
|
||||
# Process logout based on sid (session ID) if present
|
||||
if "sid" in token_data:
|
||||
session_id = token_data["sid"]
|
||||
try:
|
||||
# Find and terminate the session
|
||||
session = AuthenticatedSession.objects.filter(
|
||||
session__session_key=session_id
|
||||
).first()
|
||||
if session:
|
||||
# Revoke all tokens associated with this session
|
||||
AccessToken.objects.filter(session=session).update(revoked=True)
|
||||
RefreshToken.objects.filter(session=session).update(revoked=True)
|
||||
# End the session
|
||||
session.delete()
|
||||
LOGGER.info(
|
||||
"Terminated session via back-channel logout", session_id=session_id
|
||||
)
|
||||
except Exception as exc:
|
||||
LOGGER.warning("Failed to terminate session", session_id=session_id, exc=exc)
|
||||
return {"success": False, "error_description": "Failed to terminate session"}
|
||||
|
||||
# Process logout based on sub (user identifier) if present
|
||||
if "sub" in token_data:
|
||||
sub = token_data["sub"]
|
||||
try:
|
||||
# Find the user based on the sub claim
|
||||
# This depends on sub_mode configuration
|
||||
user = self._find_user_by_sub(sub)
|
||||
if user:
|
||||
LOGGER.info("Received logout request for user", sub=sub, user=user.username)
|
||||
# Terminate all sessions for this user with this provider
|
||||
self._terminate_user_sessions(user)
|
||||
else:
|
||||
LOGGER.warning("User not found for sub claim", sub=sub)
|
||||
return {"success": False, "error_description": "User not found"}
|
||||
except Exception as exc:
|
||||
LOGGER.warning("Failed to process user logout", sub=sub, exc=exc)
|
||||
return {"success": False, "error_description": "Failed to process user logout"}
|
||||
|
||||
return {"success": True}
|
||||
except Exception as exc:
|
||||
LOGGER.warning("Error processing logout token", exc=exc)
|
||||
return {"success": False, "error_description": "Error processing logout token"}
|
||||
|
||||
def _find_user_by_sub(self, sub: str) -> User | None:
|
||||
"""Find user based on sub claim according to provider's sub_mode configuration"""
|
||||
from authentik.providers.oauth2.constants import SubModes
|
||||
|
||||
try:
|
||||
if self.provider.sub_mode == SubModes.HASHED_USER_ID:
|
||||
# sub is the user's uid (hashed user ID)
|
||||
# Since uid is a computed property, we need to find the user by iterating
|
||||
# This is not efficient but necessary for the hashed mode
|
||||
for user in User.objects.all():
|
||||
if user.uid == sub:
|
||||
return user
|
||||
return None
|
||||
elif self.provider.sub_mode == SubModes.USER_ID:
|
||||
# sub is the user's primary key
|
||||
return User.objects.filter(pk=int(sub)).first()
|
||||
elif self.provider.sub_mode == SubModes.USER_UUID:
|
||||
# sub is the user's UUID
|
||||
return User.objects.filter(uuid=sub).first()
|
||||
elif self.provider.sub_mode == SubModes.USER_EMAIL:
|
||||
# sub is the user's email
|
||||
return User.objects.filter(email=sub).first()
|
||||
elif self.provider.sub_mode == SubModes.USER_USERNAME:
|
||||
# sub is the user's username
|
||||
return User.objects.filter(username=sub).first()
|
||||
elif self.provider.sub_mode == SubModes.USER_UPN:
|
||||
# sub is the user's UPN attribute or fallback to uid
|
||||
user = User.objects.filter(attributes__upn=sub).first()
|
||||
if not user:
|
||||
# Fallback to uid if UPN not found (uid is a computed property)
|
||||
for candidate_user in User.objects.all():
|
||||
if candidate_user.uid == sub:
|
||||
return candidate_user
|
||||
return user
|
||||
else:
|
||||
LOGGER.warning(
|
||||
"Invalid sub_mode configuration",
|
||||
provider=self.provider.name,
|
||||
sub_mode=self.provider.sub_mode,
|
||||
)
|
||||
return None
|
||||
except (ValueError, TypeError) as exc:
|
||||
LOGGER.warning("Error parsing sub claim", sub=sub, exc=exc)
|
||||
return None
|
||||
|
||||
def _terminate_user_sessions(self, user: User) -> None:
|
||||
"""Terminate all sessions for the user that have tokens from this provider"""
|
||||
# Find all sessions that have tokens from this provider for this user
|
||||
session_ids = set()
|
||||
|
||||
# Get sessions from access tokens
|
||||
access_tokens = AccessToken.objects.filter(user=user, provider=self.provider)
|
||||
for token in access_tokens:
|
||||
if token.session:
|
||||
session_ids.add(token.session.pk)
|
||||
|
||||
# Get sessions from refresh tokens
|
||||
refresh_tokens = RefreshToken.objects.filter(user=user, provider=self.provider)
|
||||
for token in refresh_tokens:
|
||||
if token.session:
|
||||
session_ids.add(token.session.pk)
|
||||
|
||||
# Revoke all tokens for this user and provider
|
||||
AccessToken.objects.filter(user=user, provider=self.provider).update(revoked=True)
|
||||
RefreshToken.objects.filter(user=user, provider=self.provider).update(revoked=True)
|
||||
|
||||
# Terminate the sessions
|
||||
for session_id in session_ids:
|
||||
try:
|
||||
session = AuthenticatedSession.objects.get(pk=session_id)
|
||||
# Store session key before deleting the session
|
||||
session_key = session.session.session_key if hasattr(session, "session") else None
|
||||
username = user.username
|
||||
provider_name = self.provider.name
|
||||
|
||||
# Delete the session
|
||||
session.delete()
|
||||
|
||||
LOGGER.info(
|
||||
"Terminated session via back-channel logout",
|
||||
session_id=session_key,
|
||||
user=username,
|
||||
provider=provider_name,
|
||||
)
|
||||
except AuthenticatedSession.DoesNotExist:
|
||||
LOGGER.debug("Session already terminated", session_id=session_id)
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from django.http import HttpRequest, HttpResponse, HttpResponseBadRequest, JsonResponse
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
from django.urls import reverse
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.utils.timezone import now
|
||||
@@ -14,7 +14,9 @@ from structlog.stdlib import get_logger
|
||||
from authentik.core.models import Application
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.utils.time import timedelta_from_string
|
||||
from authentik.providers.oauth2.errors import DeviceCodeError
|
||||
from authentik.providers.oauth2.models import DeviceToken, OAuth2Provider
|
||||
from authentik.providers.oauth2.utils import TokenResponse
|
||||
from authentik.providers.oauth2.views.device_init import QS_KEY_CODE
|
||||
|
||||
LOGGER = get_logger()
|
||||
@@ -28,38 +30,36 @@ class DeviceView(View):
|
||||
provider: OAuth2Provider
|
||||
scopes: list[str] = []
|
||||
|
||||
def parse_request(self) -> HttpResponse | None:
|
||||
def parse_request(self):
|
||||
"""Parse incoming request"""
|
||||
client_id = self.request.POST.get("client_id", None)
|
||||
if not client_id:
|
||||
return HttpResponseBadRequest()
|
||||
provider = OAuth2Provider.objects.filter(
|
||||
client_id=client_id,
|
||||
).first()
|
||||
raise DeviceCodeError("invalid_client")
|
||||
provider = OAuth2Provider.objects.filter(client_id=client_id).first()
|
||||
if not provider:
|
||||
return HttpResponseBadRequest()
|
||||
raise DeviceCodeError("invalid_client")
|
||||
try:
|
||||
_ = provider.application
|
||||
except Application.DoesNotExist:
|
||||
return HttpResponseBadRequest()
|
||||
raise DeviceCodeError("invalid_client") from None
|
||||
self.provider = provider
|
||||
self.client_id = client_id
|
||||
self.scopes = self.request.POST.get("scope", "").split(" ")
|
||||
return None
|
||||
|
||||
def dispatch(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
||||
throttle = AnonRateThrottle()
|
||||
throttle.rate = CONFIG.get("throttle.providers.oauth2.device", "20/hour")
|
||||
throttle.num_requests, throttle.duration = throttle.parse_rate(throttle.rate)
|
||||
if not throttle.allow_request(request, self):
|
||||
return HttpResponse(status=429)
|
||||
return TokenResponse(DeviceCodeError("slow_down").create_dict(request), status=429)
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
|
||||
def post(self, request: HttpRequest) -> HttpResponse:
|
||||
"""Generate device token"""
|
||||
resp = self.parse_request()
|
||||
if resp:
|
||||
return resp
|
||||
try:
|
||||
self.parse_request()
|
||||
except DeviceCodeError as exc:
|
||||
return TokenResponse(exc.create_dict(request), status=400)
|
||||
until = timedelta_from_string(self.provider.access_code_validity)
|
||||
token: DeviceToken = DeviceToken.objects.create(
|
||||
expires=now() + until, provider=self.provider, _scope=" ".join(self.scopes)
|
||||
@@ -67,7 +67,7 @@ class DeviceView(View):
|
||||
device_url = self.request.build_absolute_uri(
|
||||
reverse("authentik_providers_oauth2_root:device-login")
|
||||
)
|
||||
return JsonResponse(
|
||||
return TokenResponse(
|
||||
{
|
||||
"device_code": token.device_code,
|
||||
"verification_uri": device_url,
|
||||
|
||||
@@ -9,7 +9,8 @@ from django.views.decorators.csrf import csrf_exempt
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.providers.oauth2.errors import TokenIntrospectionError
|
||||
from authentik.providers.oauth2.models import AccessToken, IDToken, OAuth2Provider, RefreshToken
|
||||
from authentik.providers.oauth2.id_token import IDToken
|
||||
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider, RefreshToken
|
||||
from authentik.providers.oauth2.utils import TokenResponse, authenticate_provider
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
@@ -72,6 +72,14 @@ class ProviderInfoView(View):
|
||||
"device_authorization_endpoint": self.request.build_absolute_uri(
|
||||
reverse("authentik_providers_oauth2:device")
|
||||
),
|
||||
"backchannel_logout_uri": self.request.build_absolute_uri(
|
||||
reverse(
|
||||
"authentik_providers_oauth2:backchannel-logout",
|
||||
kwargs={"application_slug": provider.application.slug},
|
||||
)
|
||||
),
|
||||
"backchannel_logout_supported": True,
|
||||
"backchannel_logout_session_supported": True,
|
||||
"response_types_supported": [
|
||||
ResponseTypes.CODE,
|
||||
ResponseTypes.ID_TOKEN,
|
||||
|
||||
@@ -598,9 +598,9 @@ class TokenView(View):
|
||||
return TokenResponse(self.create_device_code_response())
|
||||
raise TokenError("unsupported_grant_type")
|
||||
except (TokenError, DeviceCodeError) as error:
|
||||
return TokenResponse(error.create_dict(), status=400)
|
||||
return TokenResponse(error.create_dict(request), status=400)
|
||||
except UserAuthError as error:
|
||||
return TokenResponse(error.create_dict(), status=403)
|
||||
return TokenResponse(error.create_dict(request), status=403)
|
||||
|
||||
def create_code_response(self) -> dict[str, Any]:
|
||||
"""See https://datatracker.ietf.org/doc/html/rfc6749#section-4.1"""
|
||||
|
||||
@@ -65,7 +65,7 @@ class TokenRevokeView(View):
|
||||
|
||||
return TokenResponse(data={}, status=200)
|
||||
except TokenRevocationError as exc:
|
||||
return TokenResponse(exc.create_dict(), status=401)
|
||||
return TokenResponse(exc.create_dict(request), status=401)
|
||||
except Http404:
|
||||
# Token not found should return a HTTP 200
|
||||
# https://datatracker.ietf.org/doc/html/rfc7009#section-2.2
|
||||
|
||||
@@ -102,6 +102,7 @@ class IngressReconciler(KubernetesObjectReconciler[V1Ingress]):
|
||||
# Buffer sizes for large headers with JWTs
|
||||
"nginx.ingress.kubernetes.io/proxy-buffers-number": "4",
|
||||
"nginx.ingress.kubernetes.io/proxy-buffer-size": "16k",
|
||||
"nginx.ingress.kubernetes.io/proxy-busy-buffers-size": "32k",
|
||||
# Enable TLS in traefik
|
||||
"traefik.ingress.kubernetes.io/router.tls": "true",
|
||||
}
|
||||
|
||||
@@ -23,7 +23,6 @@ from authentik.core.models import Application
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.lib.expression.exceptions import ControlFlowException
|
||||
from authentik.lib.sync.mapper import PropertyMappingManager
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
from authentik.policies.api.exec import PolicyTestResultSerializer
|
||||
from authentik.policies.engine import PolicyEngine
|
||||
from authentik.policies.types import PolicyResult
|
||||
@@ -142,9 +141,9 @@ class RadiusOutpostConfigViewSet(ListModelMixin, GenericViewSet):
|
||||
# Value error can be raised when assigning invalid data to an attribute
|
||||
Event.new(
|
||||
EventAction.CONFIGURATION_ERROR,
|
||||
message=f"Failed to evaluate property-mapping {exception_to_string(exc)}",
|
||||
message="Failed to evaluate property-mapping",
|
||||
mapping=exc.mapping,
|
||||
).save()
|
||||
).with_exception(exc).save()
|
||||
return None
|
||||
return b64encode(packet.RequestPacket()).decode()
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from enum import Enum
|
||||
|
||||
from pydantic import Field
|
||||
from pydantic import AnyUrl, BaseModel, ConfigDict, Field
|
||||
from pydanticscim.group import Group as BaseGroup
|
||||
from pydanticscim.responses import PatchOperation as BasePatchOperation
|
||||
from pydanticscim.responses import PatchRequest as BasePatchRequest
|
||||
@@ -12,19 +12,95 @@ from pydanticscim.service_provider import ChangePassword, Filter, Patch, Sort
|
||||
from pydanticscim.service_provider import (
|
||||
ServiceProviderConfiguration as BaseServiceProviderConfiguration,
|
||||
)
|
||||
from pydanticscim.user import AddressKind
|
||||
from pydanticscim.user import User as BaseUser
|
||||
|
||||
SCIM_USER_SCHEMA = "urn:ietf:params:scim:schemas:core:2.0:User"
|
||||
SCIM_GROUP_SCHEMA = "urn:ietf:params:scim:schemas:core:2.0:Group"
|
||||
|
||||
|
||||
class Address(BaseModel):
|
||||
formatted: str | None = Field(
|
||||
None,
|
||||
description="The full mailing address, formatted for display "
|
||||
"or use with a mailing label. This attribute MAY contain newlines.",
|
||||
)
|
||||
streetAddress: str | None = Field(
|
||||
None,
|
||||
description="The full street address component, which may "
|
||||
"include house number, street name, P.O. box, and multi-line "
|
||||
"extended street address information. This attribute MAY contain newlines.",
|
||||
)
|
||||
locality: str | None = Field(None, description="The city or locality component.")
|
||||
region: str | None = Field(None, description="The state or region component.")
|
||||
postalCode: str | None = Field(None, description="The zip code or postal code component.")
|
||||
country: str | None = Field(None, description="The country name component.")
|
||||
type: AddressKind | None = Field(
|
||||
None,
|
||||
description="A label indicating the attribute's function, e.g., 'work' or 'home'.",
|
||||
)
|
||||
primary: bool | None = None
|
||||
|
||||
|
||||
class Manager(BaseModel):
|
||||
value: str | None = Field(
|
||||
None,
|
||||
description="The id of the SCIM resource representingthe User's manager. REQUIRED.",
|
||||
)
|
||||
ref: AnyUrl | None = Field(
|
||||
None,
|
||||
alias="$ref",
|
||||
description="The URI of the SCIM resource representing the User's manager. REQUIRED.",
|
||||
)
|
||||
displayName: str | None = Field(
|
||||
None,
|
||||
description="The displayName of the User's manager. OPTIONAL and READ-ONLY.",
|
||||
)
|
||||
|
||||
|
||||
class EnterpriseUser(BaseModel):
|
||||
employeeNumber: str | None = Field(
|
||||
None,
|
||||
description="Numeric or alphanumeric identifier assigned to a person, "
|
||||
"typically based on order of hire or association with anorganization.",
|
||||
)
|
||||
costCenter: str | None = Field(None, description="Identifies the name of a cost center.")
|
||||
organization: str | None = Field(None, description="Identifies the name of an organization.")
|
||||
division: str | None = Field(None, description="Identifies the name of a division.")
|
||||
department: str | None = Field(
|
||||
None,
|
||||
description="Numeric or alphanumeric identifier assigned to a person,"
|
||||
" typically based on order of hire or association with anorganization.",
|
||||
)
|
||||
manager: Manager | None = Field(
|
||||
None,
|
||||
description="The User's manager. A complex type that optionally allows "
|
||||
"service providers to represent organizational hierarchy by referencing"
|
||||
" the 'id' attribute of another User.",
|
||||
)
|
||||
|
||||
|
||||
class User(BaseUser):
|
||||
"""Modified User schema with added externalId field"""
|
||||
|
||||
model_config = ConfigDict(serialize_by_alias=True)
|
||||
|
||||
id: str | int | None = None
|
||||
schemas: list[str] = [SCIM_USER_SCHEMA]
|
||||
externalId: str | None = None
|
||||
meta: dict | None = None
|
||||
addresses: list[Address] | None = Field(
|
||||
None,
|
||||
description=(
|
||||
"A physical mailing address for this User. Canonical type "
|
||||
"values of 'work', 'home', and 'other'."
|
||||
),
|
||||
)
|
||||
enterprise_user: EnterpriseUser | None = Field(
|
||||
default=None,
|
||||
alias="urn:ietf:params:scim:schemas:extension:enterprise:2.0:User",
|
||||
serialization_alias="urn:ietf:params:scim:schemas:extension:enterprise:2.0:User",
|
||||
)
|
||||
|
||||
|
||||
class Group(BaseGroup):
|
||||
@@ -92,7 +168,7 @@ class PatchOperation(BasePatchOperation):
|
||||
"""PatchOperation with optional path"""
|
||||
|
||||
op: PatchOp
|
||||
path: str | None
|
||||
path: str | None = None
|
||||
|
||||
|
||||
class SCIMError(BaseSCIMError):
|
||||
|
||||
@@ -28,7 +28,6 @@ from tenant_schemas_celery.app import CeleryApp as TenantAwareCeleryApp
|
||||
|
||||
from authentik import get_full_version
|
||||
from authentik.lib.sentry import should_ignore_exception
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
|
||||
# set the default Django settings module for the 'celery' program.
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "authentik.root.settings")
|
||||
@@ -83,8 +82,8 @@ def task_error_hook(task_id: str, exception: Exception, traceback, *args, **kwar
|
||||
CTX_TASK_ID.set(...)
|
||||
if not should_ignore_exception(exception):
|
||||
Event.new(
|
||||
EventAction.SYSTEM_EXCEPTION, message=exception_to_string(exception), task_id=task_id
|
||||
).save()
|
||||
EventAction.SYSTEM_EXCEPTION, message="Failed to execute task", task_id=task_id
|
||||
).with_exception(exception).save()
|
||||
|
||||
|
||||
def _get_startup_tasks_default_tenant() -> list[Callable]:
|
||||
|
||||
@@ -8,7 +8,6 @@ from authentik.events.models import TaskStatus
|
||||
from authentik.events.system_tasks import SystemTask
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.sync.outgoing.exceptions import StopSync
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
from authentik.root.celery import CELERY_APP
|
||||
from authentik.sources.kerberos.models import KerberosSource
|
||||
from authentik.sources.kerberos.sync import KerberosSync
|
||||
@@ -64,5 +63,5 @@ def kerberos_sync_single(self, source_pk: str):
|
||||
syncer.sync()
|
||||
self.set_status(TaskStatus.SUCCESSFUL, *syncer.messages)
|
||||
except StopSync as exc:
|
||||
LOGGER.warning(exception_to_string(exc))
|
||||
LOGGER.warning("Error syncing kerberos", exc=exc, source=source)
|
||||
self.set_error(exc)
|
||||
|
||||
@@ -12,7 +12,6 @@ from authentik.events.models import TaskStatus
|
||||
from authentik.events.system_tasks import SystemTask
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.sync.outgoing.exceptions import StopSync
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
from authentik.lib.utils.reflection import class_to_path, path_to_class
|
||||
from authentik.root.celery import CELERY_APP
|
||||
from authentik.sources.ldap.models import LDAPSource
|
||||
@@ -149,5 +148,5 @@ def ldap_sync(self: SystemTask, source_pk: str, sync_class: str, page_cache_key:
|
||||
cache.delete(page_cache_key)
|
||||
except (LDAPException, StopSync) as exc:
|
||||
# No explicit event is created here as .set_status with an error will do that
|
||||
LOGGER.warning(exception_to_string(exc))
|
||||
LOGGER.warning("Failed to sync LDAP", exc=exc, source=source)
|
||||
self.set_error(exc)
|
||||
|
||||
@@ -10,6 +10,7 @@ AUTHENTIK_SOURCES_OAUTH_TYPES = [
|
||||
"authentik.sources.oauth.types.apple",
|
||||
"authentik.sources.oauth.types.azure_ad",
|
||||
"authentik.sources.oauth.types.discord",
|
||||
"authentik.sources.oauth.types.entra_id",
|
||||
"authentik.sources.oauth.types.facebook",
|
||||
"authentik.sources.oauth.types.github",
|
||||
"authentik.sources.oauth.types.gitlab",
|
||||
|
||||
@@ -232,7 +232,7 @@ class GoogleOAuthSource(CreatableType, OAuthSource):
|
||||
|
||||
|
||||
class AzureADOAuthSource(CreatableType, OAuthSource):
|
||||
"""Social Login using Azure AD."""
|
||||
"""(Deprecated) Social Login using Azure AD."""
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
@@ -240,6 +240,17 @@ class AzureADOAuthSource(CreatableType, OAuthSource):
|
||||
verbose_name_plural = _("Azure AD OAuth Sources")
|
||||
|
||||
|
||||
# TODO: When removing this, add a migration for OAuthSource that sets
|
||||
# provider_type to `entraid` if it is currently `azuread`
|
||||
class EntraIDOAuthSource(CreatableType, OAuthSource):
|
||||
"""Social Login using Entra ID."""
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
verbose_name = _("Entra ID OAuth Source")
|
||||
verbose_name_plural = _("Entra ID OAuth Sources")
|
||||
|
||||
|
||||
class OpenIDConnectOAuthSource(CreatableType, OAuthSource):
|
||||
"""Login using a Generic OpenID-Connect compliant provider."""
|
||||
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
"""azure ad Type tests"""
|
||||
"""Entra ID Type tests"""
|
||||
|
||||
from django.test import TestCase
|
||||
|
||||
from authentik.sources.oauth.models import OAuthSource
|
||||
from authentik.sources.oauth.types.azure_ad import AzureADOAuthCallback, AzureADType
|
||||
from authentik.sources.oauth.types.entra_id import EntraIDOAuthCallback, EntraIDType
|
||||
|
||||
# https://docs.microsoft.com/en-us/graph/api/user-get?view=graph-rest-1.0&tabs=http#response-2
|
||||
AAD_USER = {
|
||||
EID_USER = {
|
||||
"@odata.context": "https://graph.microsoft.com/v1.0/$metadata#users/$entity",
|
||||
"@odata.id": (
|
||||
"https://graph.microsoft.com/v2/7ce9b89e-646a-41d2-9fa6-8371c6a8423d/"
|
||||
@@ -41,11 +41,11 @@ class TestTypeAzureAD(TestCase):
|
||||
|
||||
def test_enroll_context(self):
|
||||
"""Test azure_ad Enrollment context"""
|
||||
ak_context = AzureADType().get_base_user_properties(source=self.source, info=AAD_USER)
|
||||
self.assertEqual(ak_context["username"], AAD_USER["userPrincipalName"])
|
||||
self.assertEqual(ak_context["email"], AAD_USER["mail"])
|
||||
self.assertEqual(ak_context["name"], AAD_USER["displayName"])
|
||||
ak_context = EntraIDType().get_base_user_properties(source=self.source, info=EID_USER)
|
||||
self.assertEqual(ak_context["username"], EID_USER["userPrincipalName"])
|
||||
self.assertEqual(ak_context["email"], EID_USER["mail"])
|
||||
self.assertEqual(ak_context["name"], EID_USER["displayName"])
|
||||
|
||||
def test_user_id(self):
|
||||
"""Test azure AD user ID"""
|
||||
self.assertEqual(AzureADOAuthCallback().get_user_id(AAD_USER), AAD_USER["id"])
|
||||
"""Test Entra ID user ID"""
|
||||
self.assertEqual(EntraIDOAuthCallback().get_user_id(EID_USER), EID_USER["id"])
|
||||
@@ -1,105 +1,17 @@
|
||||
"""AzureAD OAuth2 Views"""
|
||||
|
||||
from typing import Any
|
||||
from authentik.sources.oauth.types.entra_id import EntraIDType
|
||||
from authentik.sources.oauth.types.registry import registry
|
||||
|
||||
from requests import RequestException
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.sources.oauth.clients.oauth2 import UserprofileHeaderAuthClient
|
||||
from authentik.sources.oauth.models import AuthorizationCodeAuthMethod
|
||||
from authentik.sources.oauth.types.oidc import OpenIDConnectOAuth2Callback
|
||||
from authentik.sources.oauth.types.registry import SourceType, registry
|
||||
from authentik.sources.oauth.views.redirect import OAuthRedirect
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class AzureADOAuthRedirect(OAuthRedirect):
|
||||
"""Azure AD OAuth2 Redirect"""
|
||||
|
||||
def get_additional_parameters(self, source): # pragma: no cover
|
||||
return {
|
||||
"scope": ["openid", "https://graph.microsoft.com/User.Read"],
|
||||
}
|
||||
|
||||
|
||||
class AzureADClient(UserprofileHeaderAuthClient):
|
||||
"""Fetch AzureAD group information"""
|
||||
|
||||
def get_profile_info(self, token):
|
||||
profile_data = super().get_profile_info(token)
|
||||
if "https://graph.microsoft.com/GroupMember.Read.All" not in self.source.additional_scopes:
|
||||
return profile_data
|
||||
group_response = self.session.request(
|
||||
"get",
|
||||
"https://graph.microsoft.com/v1.0/me/memberOf",
|
||||
headers={"Authorization": f"{token['token_type']} {token['access_token']}"},
|
||||
)
|
||||
try:
|
||||
group_response.raise_for_status()
|
||||
except RequestException as exc:
|
||||
LOGGER.warning(
|
||||
"Unable to fetch user profile",
|
||||
exc=exc,
|
||||
response=exc.response.text if exc.response else str(exc),
|
||||
)
|
||||
return None
|
||||
profile_data["raw_groups"] = group_response.json()
|
||||
return profile_data
|
||||
|
||||
|
||||
class AzureADOAuthCallback(OpenIDConnectOAuth2Callback):
|
||||
"""AzureAD OAuth2 Callback"""
|
||||
|
||||
client_class = AzureADClient
|
||||
|
||||
def get_user_id(self, info: dict[str, str]) -> str:
|
||||
# Default try to get `id` for the Graph API endpoint
|
||||
# fallback to OpenID logic in case the profile URL was changed
|
||||
return info.get("id", super().get_user_id(info))
|
||||
# TODO: When removing this, add a migration for OAuthSource that sets
|
||||
# provider_type to `entraid` if it is currently `azuread`
|
||||
|
||||
|
||||
@registry.register()
|
||||
class AzureADType(SourceType):
|
||||
class AzureADType(EntraIDType):
|
||||
"""Azure AD Type definition"""
|
||||
|
||||
callback_view = AzureADOAuthCallback
|
||||
redirect_view = AzureADOAuthRedirect
|
||||
verbose_name = "Azure AD"
|
||||
name = "azuread"
|
||||
|
||||
urls_customizable = True
|
||||
|
||||
authorization_url = "https://login.microsoftonline.com/common/oauth2/v2.0/authorize"
|
||||
access_token_url = "https://login.microsoftonline.com/common/oauth2/v2.0/token" # nosec
|
||||
profile_url = "https://graph.microsoft.com/v1.0/me"
|
||||
oidc_well_known_url = (
|
||||
"https://login.microsoftonline.com/common/.well-known/openid-configuration"
|
||||
)
|
||||
oidc_jwks_url = "https://login.microsoftonline.com/common/discovery/keys"
|
||||
|
||||
authorization_code_auth_method = AuthorizationCodeAuthMethod.POST_BODY
|
||||
|
||||
def get_base_user_properties(self, info: dict[str, Any], **kwargs) -> dict[str, Any]:
|
||||
mail = info.get("mail", None) or info.get("otherMails", [None])[0]
|
||||
# Format group info
|
||||
groups = []
|
||||
group_id_dict = {}
|
||||
for group in info.get("raw_groups", {}).get("value", []):
|
||||
if group["@odata.type"] != "#microsoft.graph.group":
|
||||
continue
|
||||
groups.append(group["id"])
|
||||
group_id_dict[group["id"]] = group
|
||||
info["raw_groups"] = group_id_dict
|
||||
return {
|
||||
"username": info.get("userPrincipalName"),
|
||||
"email": mail,
|
||||
"name": info.get("displayName"),
|
||||
"groups": groups,
|
||||
}
|
||||
|
||||
def get_base_group_properties(self, source, group_id, **kwargs):
|
||||
raw_group = kwargs["info"]["raw_groups"][group_id]
|
||||
return {
|
||||
"name": raw_group["displayName"],
|
||||
}
|
||||
|
||||
102
authentik/sources/oauth/types/entra_id.py
Normal file
102
authentik/sources/oauth/types/entra_id.py
Normal file
@@ -0,0 +1,102 @@
|
||||
"""EntraID OAuth2 Views"""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from requests import RequestException
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.sources.oauth.clients.oauth2 import UserprofileHeaderAuthClient
|
||||
from authentik.sources.oauth.models import AuthorizationCodeAuthMethod
|
||||
from authentik.sources.oauth.types.oidc import OpenIDConnectOAuth2Callback
|
||||
from authentik.sources.oauth.types.registry import SourceType, registry
|
||||
from authentik.sources.oauth.views.redirect import OAuthRedirect
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
class EntraIDOAuthRedirect(OAuthRedirect):
|
||||
"""Entra ID OAuth2 Redirect"""
|
||||
|
||||
def get_additional_parameters(self, source): # pragma: no cover
|
||||
return {
|
||||
"scope": ["openid", "https://graph.microsoft.com/User.Read"],
|
||||
}
|
||||
|
||||
|
||||
class EntraIDClient(UserprofileHeaderAuthClient):
|
||||
"""Fetch EntraID group information"""
|
||||
|
||||
def get_profile_info(self, token):
|
||||
profile_data = super().get_profile_info(token)
|
||||
if "https://graph.microsoft.com/GroupMember.Read.All" not in self.source.additional_scopes:
|
||||
return profile_data
|
||||
group_response = self.session.request(
|
||||
"get",
|
||||
"https://graph.microsoft.com/v1.0/me/memberOf",
|
||||
headers={"Authorization": f"{token['token_type']} {token['access_token']}"},
|
||||
)
|
||||
try:
|
||||
group_response.raise_for_status()
|
||||
except RequestException as exc:
|
||||
LOGGER.warning(
|
||||
"Unable to fetch user profile",
|
||||
exc=exc,
|
||||
response=exc.response.text if exc.response else str(exc),
|
||||
)
|
||||
return None
|
||||
profile_data["raw_groups"] = group_response.json()
|
||||
return profile_data
|
||||
|
||||
|
||||
class EntraIDOAuthCallback(OpenIDConnectOAuth2Callback):
|
||||
"""EntraID OAuth2 Callback"""
|
||||
|
||||
client_class = EntraIDClient
|
||||
|
||||
def get_user_id(self, info: dict[str, str]) -> str:
|
||||
# Default try to get `id` for the Graph API endpoint
|
||||
# fallback to OpenID logic in case the profile URL was changed
|
||||
return info.get("id", super().get_user_id(info))
|
||||
|
||||
|
||||
@registry.register()
|
||||
class EntraIDType(SourceType):
|
||||
"""Entra ID Type definition"""
|
||||
|
||||
callback_view = EntraIDOAuthCallback
|
||||
redirect_view = EntraIDOAuthRedirect
|
||||
verbose_name = "Entra ID"
|
||||
name = "entraid"
|
||||
|
||||
urls_customizable = True
|
||||
|
||||
authorization_url = "https://login.microsoftonline.com/common/oauth2/v2.0/authorize"
|
||||
access_token_url = "https://login.microsoftonline.com/common/oauth2/v2.0/token" # nosec
|
||||
profile_url = "https://graph.microsoft.com/v1.0/me"
|
||||
oidc_jwks_url = "https://login.microsoftonline.com/common/discovery/keys"
|
||||
|
||||
authorization_code_auth_method = AuthorizationCodeAuthMethod.POST_BODY
|
||||
|
||||
def get_base_user_properties(self, info: dict[str, Any], **kwargs) -> dict[str, Any]:
|
||||
mail = info.get("mail", None) or info.get("otherMails", [None])[0]
|
||||
# Format group info
|
||||
groups = []
|
||||
group_id_dict = {}
|
||||
for group in info.get("raw_groups", {}).get("value", []):
|
||||
if group["@odata.type"] != "#microsoft.graph.group":
|
||||
continue
|
||||
groups.append(group["id"])
|
||||
group_id_dict[group["id"]] = group
|
||||
info["raw_groups"] = group_id_dict
|
||||
return {
|
||||
"username": info.get("userPrincipalName"),
|
||||
"email": mail,
|
||||
"name": info.get("displayName"),
|
||||
"groups": groups,
|
||||
}
|
||||
|
||||
def get_base_group_properties(self, source, group_id, **kwargs):
|
||||
raw_group = kwargs["info"]["raw_groups"][group_id]
|
||||
return {
|
||||
"name": raw_group["displayName"],
|
||||
}
|
||||
@@ -18,6 +18,7 @@ class SCIMSourceGroupSerializer(SourceSerializer):
|
||||
model = SCIMSourceGroup
|
||||
fields = [
|
||||
"id",
|
||||
"external_id",
|
||||
"group",
|
||||
"group_obj",
|
||||
"source",
|
||||
@@ -31,5 +32,5 @@ class SCIMSourceGroupViewSet(UsedByMixin, ModelViewSet):
|
||||
queryset = SCIMSourceGroup.objects.all().select_related("group")
|
||||
serializer_class = SCIMSourceGroupSerializer
|
||||
filterset_fields = ["source__slug", "group__name", "group__group_uuid"]
|
||||
search_fields = ["source__slug", "group__name", "attributes"]
|
||||
search_fields = ["source__slug", "group__name", "attributes", "external_id"]
|
||||
ordering = ["group__name"]
|
||||
|
||||
@@ -18,6 +18,7 @@ class SCIMSourceUserSerializer(SourceSerializer):
|
||||
model = SCIMSourceUser
|
||||
fields = [
|
||||
"id",
|
||||
"external_id",
|
||||
"user",
|
||||
"user_obj",
|
||||
"source",
|
||||
@@ -31,5 +32,5 @@ class SCIMSourceUserViewSet(UsedByMixin, ModelViewSet):
|
||||
queryset = SCIMSourceUser.objects.all().select_related("user")
|
||||
serializer_class = SCIMSourceUserSerializer
|
||||
filterset_fields = ["source__slug", "user__username", "user__id"]
|
||||
search_fields = ["source__slug", "user__username", "attributes"]
|
||||
search_fields = ["source__slug", "user__username", "attributes", "user__uuid", "external_id"]
|
||||
ordering = ["user__username"]
|
||||
|
||||
4
authentik/sources/scim/constants.py
Normal file
4
authentik/sources/scim/constants.py
Normal file
@@ -0,0 +1,4 @@
|
||||
SCIM_URN_SCHEMA = "urn:ietf:params:scim:schemas:core:2.0:Schema"
|
||||
SCIM_URN_GROUP = "urn:ietf:params:scim:schemas:core:2.0:Group"
|
||||
SCIM_URN_USER = "urn:ietf:params:scim:schemas:core:2.0:User"
|
||||
SCIM_URN_USER_ENTERPRISE = "urn:ietf:params:scim:schemas:extension:enterprise:2.0:User"
|
||||
@@ -1,8 +0,0 @@
|
||||
"""SCIM Errors"""
|
||||
|
||||
from authentik.lib.sentry import SentryIgnoredException
|
||||
|
||||
|
||||
class PatchError(SentryIgnoredException):
|
||||
"""Error raised within an atomic block when an error happened
|
||||
so nothing is saved"""
|
||||
@@ -0,0 +1,98 @@
|
||||
# Generated by Django 5.1.11 on 2025-07-13 01:07
|
||||
|
||||
import uuid
|
||||
from django.db import migrations, models
|
||||
from django.apps.registry import Apps
|
||||
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
|
||||
|
||||
def migrate_ext_id(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
|
||||
SCIMSourceUser = apps.get_model("authentik_sources_scim", "SCIMSourceUser")
|
||||
SCIMSourceGroup = apps.get_model("authentik_sources_scim", "SCIMSourceGroup")
|
||||
db_alias = schema_editor.connection.alias
|
||||
for user in SCIMSourceUser.objects.using(db_alias).all():
|
||||
user.external_id = user.id
|
||||
user.save(update_fields=["external_id"])
|
||||
for group in SCIMSourceGroup.objects.using(db_alias).all():
|
||||
group.external_id = group.id
|
||||
group.save(update_fields=["external_id"])
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_sources_scim", "0002_scimsourcepropertymapping"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name="scimsourcegroup",
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="scimsourceuser",
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="scimsourcegroup",
|
||||
name="external_id",
|
||||
field=models.TextField(default=None, null=True),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="scimsourceuser",
|
||||
name="external_id",
|
||||
field=models.TextField(default=None, null=True),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="scimsourcegroup",
|
||||
unique_together={("external_id", "source")},
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="scimsourceuser",
|
||||
unique_together={("external_id", "source")},
|
||||
),
|
||||
migrations.RunPython(migrate_ext_id, migrations.RunPython.noop),
|
||||
migrations.AlterField(
|
||||
model_name="scimsourcegroup",
|
||||
name="external_id",
|
||||
field=models.TextField(),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="scimsourceuser",
|
||||
name="external_id",
|
||||
field=models.TextField(),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="scimsourcegroup",
|
||||
index=models.Index(fields=["external_id"], name="authentik_s_externa_05e346_idx"),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="scimsourceuser",
|
||||
index=models.Index(fields=["external_id"], name="authentik_s_externa_4bd760_idx"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="scimsourcegroup",
|
||||
name="id",
|
||||
field=models.TextField(default=uuid.uuid4, primary_key=True, serialize=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="scimsourceuser",
|
||||
name="id",
|
||||
field=models.TextField(default=uuid.uuid4, primary_key=True, serialize=False),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="scimsourcegroup",
|
||||
name="last_update",
|
||||
field=models.DateTimeField(auto_now=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="scimsourceuser",
|
||||
name="last_update",
|
||||
field=models.DateTimeField(auto_now=True),
|
||||
),
|
||||
]
|
||||
@@ -1,6 +1,7 @@
|
||||
"""SCIM Source"""
|
||||
|
||||
from typing import Any
|
||||
from uuid import uuid4
|
||||
|
||||
from django.db import models
|
||||
from django.templatetags.static import static
|
||||
@@ -103,10 +104,12 @@ class SCIMSourcePropertyMapping(PropertyMapping):
|
||||
class SCIMSourceUser(SerializerModel):
|
||||
"""Mapping of a user and source to a SCIM user ID"""
|
||||
|
||||
id = models.TextField(primary_key=True)
|
||||
id = models.TextField(primary_key=True, default=uuid4)
|
||||
external_id = models.TextField()
|
||||
user = models.ForeignKey(User, on_delete=models.CASCADE)
|
||||
source = models.ForeignKey(SCIMSource, on_delete=models.CASCADE)
|
||||
attributes = models.JSONField(default=dict)
|
||||
last_update = models.DateTimeField(auto_now=True)
|
||||
|
||||
@property
|
||||
def serializer(self) -> BaseSerializer:
|
||||
@@ -115,7 +118,10 @@ class SCIMSourceUser(SerializerModel):
|
||||
return SCIMSourceUserSerializer
|
||||
|
||||
class Meta:
|
||||
unique_together = (("id", "user", "source"),)
|
||||
unique_together = (("external_id", "source"),)
|
||||
indexes = [
|
||||
models.Index(fields=["external_id"]),
|
||||
]
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"SCIM User {self.user_id} to {self.source_id}"
|
||||
@@ -124,10 +130,12 @@ class SCIMSourceUser(SerializerModel):
|
||||
class SCIMSourceGroup(SerializerModel):
|
||||
"""Mapping of a group and source to a SCIM user ID"""
|
||||
|
||||
id = models.TextField(primary_key=True)
|
||||
id = models.TextField(primary_key=True, default=uuid4)
|
||||
external_id = models.TextField()
|
||||
group = models.ForeignKey(Group, on_delete=models.CASCADE)
|
||||
source = models.ForeignKey(SCIMSource, on_delete=models.CASCADE)
|
||||
attributes = models.JSONField(default=dict)
|
||||
last_update = models.DateTimeField(auto_now=True)
|
||||
|
||||
@property
|
||||
def serializer(self) -> BaseSerializer:
|
||||
@@ -136,7 +144,10 @@ class SCIMSourceGroup(SerializerModel):
|
||||
return SCIMSourceGroupSerializer
|
||||
|
||||
class Meta:
|
||||
unique_together = (("id", "group", "source"),)
|
||||
unique_together = (("external_id", "source"),)
|
||||
indexes = [
|
||||
models.Index(fields=["external_id"]),
|
||||
]
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"SCIM Group {self.group_id} to {self.source_id}"
|
||||
|
||||
0
authentik/sources/scim/patch/__init__.py
Normal file
0
authentik/sources/scim/patch/__init__.py
Normal file
180
authentik/sources/scim/patch/lexer.py
Normal file
180
authentik/sources/scim/patch/lexer.py
Normal file
@@ -0,0 +1,180 @@
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
|
||||
from authentik.sources.scim.constants import (
|
||||
SCIM_URN_GROUP,
|
||||
SCIM_URN_SCHEMA,
|
||||
SCIM_URN_USER,
|
||||
SCIM_URN_USER_ENTERPRISE,
|
||||
)
|
||||
|
||||
|
||||
# Token types for SCIM path parsing
|
||||
class TokenType(Enum):
|
||||
ATTRIBUTE = "ATTRIBUTE"
|
||||
DOT = "DOT"
|
||||
LBRACKET = "LBRACKET"
|
||||
RBRACKET = "RBRACKET"
|
||||
LPAREN = "LPAREN"
|
||||
RPAREN = "RPAREN"
|
||||
STRING = "STRING"
|
||||
NUMBER = "NUMBER"
|
||||
BOOLEAN = "BOOLEAN"
|
||||
NULL = "NULL"
|
||||
OPERATOR = "OPERATOR"
|
||||
AND = "AND"
|
||||
OR = "OR"
|
||||
NOT = "NOT"
|
||||
EOF = "EOF"
|
||||
|
||||
|
||||
@dataclass
|
||||
class Token:
|
||||
type: TokenType
|
||||
value: str
|
||||
position: int = 0
|
||||
|
||||
|
||||
class SCIMPathLexer:
|
||||
"""Lexer for SCIM paths and filter expressions"""
|
||||
|
||||
OPERATORS = ["eq", "ne", "co", "sw", "ew", "gt", "lt", "ge", "le", "pr"]
|
||||
|
||||
def __init__(self, text: str):
|
||||
self.schema_urns = [
|
||||
SCIM_URN_SCHEMA,
|
||||
SCIM_URN_GROUP,
|
||||
SCIM_URN_USER,
|
||||
SCIM_URN_USER_ENTERPRISE,
|
||||
]
|
||||
self.text = text
|
||||
self.pos = 0
|
||||
self.current_char = self.text[self.pos] if self.pos < len(self.text) else None
|
||||
|
||||
def advance(self):
|
||||
"""Move to next character"""
|
||||
self.pos += 1
|
||||
self.current_char = self.text[self.pos] if self.pos < len(self.text) else None
|
||||
|
||||
def skip_whitespace(self):
|
||||
"""Skip whitespace characters"""
|
||||
while self.current_char and self.current_char.isspace():
|
||||
self.advance()
|
||||
|
||||
def read_string(self, quote_char):
|
||||
"""Read a quoted string"""
|
||||
value = ""
|
||||
self.advance() # Skip opening quote
|
||||
|
||||
while self.current_char and self.current_char != quote_char:
|
||||
if self.current_char == "\\":
|
||||
self.advance()
|
||||
if self.current_char:
|
||||
value += self.current_char
|
||||
self.advance()
|
||||
else:
|
||||
value += self.current_char
|
||||
self.advance()
|
||||
|
||||
if self.current_char == quote_char:
|
||||
self.advance() # Skip closing quote
|
||||
|
||||
return value
|
||||
|
||||
def read_number(self):
|
||||
"""Read a number (integer or float)"""
|
||||
value = ""
|
||||
while self.current_char and (self.current_char.isdigit() or self.current_char == "."):
|
||||
value += self.current_char
|
||||
self.advance()
|
||||
return value
|
||||
|
||||
def read_identifier(self):
|
||||
"""Read an identifier (attribute name or operator) - supports URN format"""
|
||||
value = ""
|
||||
while self.current_char and (self.current_char.isalnum() or self.current_char in "_-:"):
|
||||
value += self.current_char
|
||||
self.advance()
|
||||
# If the identifier value so far is a schema URN, take that as the identifier and
|
||||
# treat the next part as a sub_attribute
|
||||
if value in self.schema_urns:
|
||||
self.current_char = "."
|
||||
return value
|
||||
|
||||
# Handle dots within URN identifiers (like "2.0")
|
||||
# A dot is part of the identifier if it's followed by a digit
|
||||
if (
|
||||
self.current_char == "."
|
||||
and self.pos + 1 < len(self.text)
|
||||
and self.text[self.pos + 1].isdigit()
|
||||
):
|
||||
value += self.current_char
|
||||
self.advance()
|
||||
# Continue reading digits after the dot
|
||||
while self.current_char and self.current_char.isdigit():
|
||||
value += self.current_char
|
||||
self.advance()
|
||||
|
||||
return value
|
||||
|
||||
def get_next_token(self) -> Token: # noqa PLR0911
|
||||
"""Get the next token from the input"""
|
||||
while self.current_char:
|
||||
if self.current_char.isspace():
|
||||
self.skip_whitespace()
|
||||
continue
|
||||
|
||||
if self.current_char == ".":
|
||||
self.advance()
|
||||
return Token(TokenType.DOT, ".")
|
||||
|
||||
if self.current_char == "[":
|
||||
self.advance()
|
||||
return Token(TokenType.LBRACKET, "[")
|
||||
|
||||
if self.current_char == "]":
|
||||
self.advance()
|
||||
return Token(TokenType.RBRACKET, "]")
|
||||
|
||||
if self.current_char == "(":
|
||||
self.advance()
|
||||
return Token(TokenType.LPAREN, "(")
|
||||
|
||||
if self.current_char == ")":
|
||||
self.advance()
|
||||
return Token(TokenType.RPAREN, ")")
|
||||
|
||||
if self.current_char in "\"'":
|
||||
quote_char = self.current_char
|
||||
value = self.read_string(quote_char)
|
||||
return Token(TokenType.STRING, value)
|
||||
|
||||
if self.current_char.isdigit():
|
||||
value = self.read_number()
|
||||
return Token(TokenType.NUMBER, value)
|
||||
|
||||
if self.current_char.isalpha() or self.current_char == "_":
|
||||
value = self.read_identifier()
|
||||
|
||||
# Check for special keywords
|
||||
if value.lower() == "true":
|
||||
return Token(TokenType.BOOLEAN, True)
|
||||
elif value.lower() == "false":
|
||||
return Token(TokenType.BOOLEAN, False)
|
||||
elif value.lower() == "null":
|
||||
return Token(TokenType.NULL, None)
|
||||
elif value.lower() == "and":
|
||||
return Token(TokenType.AND, "and")
|
||||
elif value.lower() == "or":
|
||||
return Token(TokenType.OR, "or")
|
||||
elif value.lower() == "not":
|
||||
return Token(TokenType.NOT, "not")
|
||||
elif value.lower() in self.OPERATORS:
|
||||
return Token(TokenType.OPERATOR, value.lower())
|
||||
else:
|
||||
return Token(TokenType.ATTRIBUTE, value)
|
||||
|
||||
# Skip unknown characters
|
||||
self.advance()
|
||||
|
||||
return Token(TokenType.EOF, "")
|
||||
131
authentik/sources/scim/patch/parser.py
Normal file
131
authentik/sources/scim/patch/parser.py
Normal file
@@ -0,0 +1,131 @@
|
||||
from typing import Any
|
||||
|
||||
from authentik.sources.scim.patch.lexer import SCIMPathLexer, TokenType
|
||||
|
||||
|
||||
class SCIMPathParser:
|
||||
"""Parser for SCIM paths including filter expressions"""
|
||||
|
||||
def __init__(self):
|
||||
self.lexer = None
|
||||
self.current_token = None
|
||||
|
||||
def parse_path(self, path: str | None) -> list[dict[str, Any]]:
|
||||
"""Parse a SCIM path into components"""
|
||||
self.lexer = SCIMPathLexer(path)
|
||||
self.current_token = self.lexer.get_next_token()
|
||||
|
||||
components = []
|
||||
|
||||
while self.current_token.type != TokenType.EOF:
|
||||
component = self._parse_path_component()
|
||||
if component:
|
||||
components.append(component)
|
||||
|
||||
return components
|
||||
|
||||
def _parse_path_component(self) -> dict[str, Any] | None:
|
||||
"""Parse a single path component"""
|
||||
if self.current_token.type != TokenType.ATTRIBUTE:
|
||||
return None
|
||||
|
||||
attribute = self.current_token.value
|
||||
self._consume(TokenType.ATTRIBUTE)
|
||||
|
||||
filter_expr = None
|
||||
sub_attribute = None
|
||||
|
||||
# Check for filter expression
|
||||
if self.current_token.type == TokenType.LBRACKET:
|
||||
self._consume(TokenType.LBRACKET)
|
||||
filter_expr = self._parse_filter_expression()
|
||||
self._consume(TokenType.RBRACKET)
|
||||
|
||||
# Check for sub-attribute
|
||||
if self.current_token.type == TokenType.DOT:
|
||||
self._consume(TokenType.DOT)
|
||||
if self.current_token.type == TokenType.ATTRIBUTE:
|
||||
sub_attribute = self.current_token.value
|
||||
self._consume(TokenType.ATTRIBUTE)
|
||||
|
||||
return {"attribute": attribute, "filter": filter_expr, "sub_attribute": sub_attribute}
|
||||
|
||||
def _parse_filter_expression(self) -> dict[str, Any] | None:
|
||||
"""Parse a filter expression like 'primary eq true' or
|
||||
'type eq "work" and primary eq true'"""
|
||||
return self._parse_or_expression()
|
||||
|
||||
def _parse_or_expression(self) -> dict[str, Any] | None:
|
||||
"""Parse OR expressions"""
|
||||
left = self._parse_and_expression()
|
||||
|
||||
while self.current_token.type == TokenType.OR:
|
||||
self._consume(TokenType.OR)
|
||||
right = self._parse_and_expression()
|
||||
left = {"type": "logical", "operator": "or", "left": left, "right": right}
|
||||
|
||||
return left
|
||||
|
||||
def _parse_and_expression(self) -> dict[str, Any] | None:
|
||||
"""Parse AND expressions"""
|
||||
left = self._parse_primary_expression()
|
||||
|
||||
while self.current_token.type == TokenType.AND:
|
||||
self._consume(TokenType.AND)
|
||||
right = self._parse_primary_expression()
|
||||
left = {"type": "logical", "operator": "and", "left": left, "right": right}
|
||||
|
||||
return left
|
||||
|
||||
def _parse_primary_expression(self) -> dict[str, Any] | None:
|
||||
"""Parse primary expressions (attribute operator value)"""
|
||||
if self.current_token.type == TokenType.LPAREN:
|
||||
self._consume(TokenType.LPAREN)
|
||||
expr = self._parse_or_expression()
|
||||
self._consume(TokenType.RPAREN)
|
||||
return expr
|
||||
|
||||
if self.current_token.type == TokenType.NOT:
|
||||
self._consume(TokenType.NOT)
|
||||
expr = self._parse_primary_expression()
|
||||
return {"type": "logical", "operator": "not", "operand": expr}
|
||||
|
||||
if self.current_token.type != TokenType.ATTRIBUTE:
|
||||
return None
|
||||
|
||||
attribute = self.current_token.value
|
||||
self._consume(TokenType.ATTRIBUTE)
|
||||
|
||||
if self.current_token.type != TokenType.OPERATOR:
|
||||
return None
|
||||
|
||||
operator = self.current_token.value
|
||||
self._consume(TokenType.OPERATOR)
|
||||
|
||||
# Parse value
|
||||
value = None
|
||||
if self.current_token.type == TokenType.STRING:
|
||||
value = self.current_token.value
|
||||
self._consume(TokenType.STRING)
|
||||
elif self.current_token.type == TokenType.NUMBER:
|
||||
value = (
|
||||
float(self.current_token.value)
|
||||
if "." in self.current_token.value
|
||||
else int(self.current_token.value)
|
||||
)
|
||||
self._consume(TokenType.NUMBER)
|
||||
elif self.current_token.type == TokenType.BOOLEAN:
|
||||
value = self.current_token.value
|
||||
self._consume(TokenType.BOOLEAN)
|
||||
elif self.current_token.type == TokenType.NULL:
|
||||
value = None
|
||||
self._consume(TokenType.NULL)
|
||||
|
||||
return {"type": "comparison", "attribute": attribute, "operator": operator, "value": value}
|
||||
|
||||
def _consume(self, expected_type: TokenType):
|
||||
"""Consume a token of the expected type"""
|
||||
if self.current_token.type == expected_type:
|
||||
self.current_token = self.lexer.get_next_token()
|
||||
else:
|
||||
raise ValueError(f"Expected {expected_type}, got {self.current_token.type}")
|
||||
246
authentik/sources/scim/patch/processor.py
Normal file
246
authentik/sources/scim/patch/processor.py
Normal file
@@ -0,0 +1,246 @@
|
||||
from typing import Any
|
||||
|
||||
from authentik.providers.scim.clients.schema import PatchOp, PatchOperation
|
||||
from authentik.sources.scim.constants import SCIM_URN_USER_ENTERPRISE
|
||||
from authentik.sources.scim.patch.parser import SCIMPathParser
|
||||
|
||||
|
||||
class SCIMPatchProcessor:
    """Processes SCIM patch operations (RFC 7644 section 3.5.2) on Python dictionaries.

    Paths are parsed by SCIMPathParser into a list of components of the form
    ``{"attribute": ..., "filter": ..., "sub_attribute": ...}``; value filters
    (e.g. ``emails[type eq "work"]``) select items inside multi-valued attributes.
    """

    def __init__(self):
        # Parser that splits SCIM patch paths into components
        self.parser = SCIMPathParser()

    def apply_patches(self, data: dict[str, Any], patches: list[PatchOperation]) -> dict[str, Any]:
        """Apply a list of patch operations to the data.

        NOTE(review): ``data.copy()`` is shallow, so nested lists/dicts shared
        with the caller are still mutated in place — confirm callers don't rely
        on the input staying untouched.
        """
        result = data.copy()

        for _patch in patches:
            patch = PatchOperation.model_validate(_patch)
            if patch.path is None:
                # Handle operations with no path - value contains attribute paths as keys
                self._apply_bulk_operation(result, patch.op, patch.value)
            elif patch.op == PatchOp.add:
                self._apply_add(result, patch.path, patch.value)
            elif patch.op == PatchOp.remove:
                self._apply_remove(result, patch.path)
            elif patch.op == PatchOp.replace:
                self._apply_replace(result, patch.path, patch.value)

        return result

    def _apply_bulk_operation(
        self, data: dict[str, Any], operation: PatchOp, value: dict[str, Any]
    ):
        """Apply bulk operations when path is None"""
        if not isinstance(value, dict):
            # Without a path, the value must map attribute paths to values
            return
        for path, val in value.items():
            if operation == PatchOp.add:
                self._apply_add(data, path, val)
            elif operation == PatchOp.remove:
                self._apply_remove(data, path)
            elif operation == PatchOp.replace:
                self._apply_replace(data, path, val)

    def _apply_add(self, data: dict[str, Any], path: str, value: Any):
        """Apply ADD operation"""
        components = self.parser.parse_path(path)

        if len(components) == 1 and not components[0]["filter"]:
            # Simple path
            attr = components[0]["attribute"]
            if components[0]["sub_attribute"]:
                if attr not in data:
                    data[attr] = {}
                # Somewhat hacky workaround for the manager attribute of the enterprise schema
                # ideally we'd do this based on the schema
                if attr == SCIM_URN_USER_ENTERPRISE and components[0]["sub_attribute"] == "manager":
                    data[attr][components[0]["sub_attribute"]] = {"value": value}
                else:
                    data[attr][components[0]["sub_attribute"]] = value
            elif attr in data and isinstance(data[attr], list):
                # Multi-valued attribute: merge into the existing list. Extend when
                # the patch value is itself a list (e.g. "members": [...]) so a list
                # is not nested inside the list; append a single value otherwise.
                if isinstance(value, list):
                    data[attr].extend(value)
                else:
                    data[attr].append(value)
            else:
                # Absent or single-valued attribute: per RFC 7644 section 3.5.2.1 the
                # new value replaces the existing one. (Previously `.append()` was
                # called on any existing value, raising AttributeError for non-lists.)
                data[attr] = value
        else:
            # Complex path with filters
            self._navigate_and_modify(data, components, value, "add")

    def _apply_remove(self, data: dict[str, Any], path: str):
        """Apply REMOVE operation"""
        components = self.parser.parse_path(path)

        if len(components) == 1 and not components[0]["filter"]:
            # Simple path
            attr = components[0]["attribute"]
            if components[0]["sub_attribute"]:
                # Drop only the sub-attribute, leaving the rest of the complex value
                if attr in data and isinstance(data[attr], dict):
                    data[attr].pop(components[0]["sub_attribute"], None)
            else:
                data.pop(attr, None)
        else:
            # Complex path with filters
            self._navigate_and_modify(data, components, None, "remove")

    def _apply_replace(self, data: dict[str, Any], path: str, value: Any):
        """Apply REPLACE operation"""
        components = self.parser.parse_path(path)

        if len(components) == 1 and not components[0]["filter"]:
            # Simple path
            attr = components[0]["attribute"]
            if components[0]["sub_attribute"]:
                if attr not in data:
                    data[attr] = {}
                # Somewhat hacky workaround for the manager attribute of the enterprise schema
                # ideally we'd do this based on the schema
                if attr == SCIM_URN_USER_ENTERPRISE and components[0]["sub_attribute"] == "manager":
                    data[attr][components[0]["sub_attribute"]] = {"value": value}
                else:
                    data[attr][components[0]["sub_attribute"]] = value
            else:
                data[attr] = value
        else:
            # Complex path with filters
            self._navigate_and_modify(data, components, value, "replace")

    def _navigate_and_modify(  # noqa PLR0912
        self, data: dict[str, Any], components: list[dict[str, Any]], value: Any, operation: str
    ):
        """Navigate through complex paths and apply modifications.

        operation is one of "add", "remove" or "replace".
        """
        current = data

        for i, component in enumerate(components):
            attr = component["attribute"]
            filter_expr = component["filter"]
            sub_attr = component["sub_attribute"]

            if filter_expr:
                # Handle array with filter
                if attr not in current:
                    if operation == "add":
                        current[attr] = []
                    else:
                        # Nothing to remove/replace
                        return

                if not isinstance(current[attr], list):
                    # Filters only make sense on multi-valued attributes
                    return

                # Find matching items
                matching_items = []
                for item in current[attr]:
                    if self._matches_filter(item, filter_expr):
                        matching_items.append(item)

                if not matching_items and operation == "add":
                    # Create new item if none match (only for simple comparison filters)
                    if filter_expr.get("type", "comparison") == "comparison":
                        new_item = {filter_expr["attribute"]: filter_expr["value"]}
                        current[attr].append(new_item)
                        matching_items = [new_item]

                # Apply operation to matching items
                for item in matching_items:
                    if sub_attr:
                        if operation in {"add", "replace"}:
                            item[sub_attr] = value
                        elif operation == "remove":
                            item.pop(sub_attr, None)
                    elif operation in {"add", "replace"}:
                        if isinstance(value, dict):
                            item.update(value)
                        else:
                            # If value is not a dict, we can't merge it
                            pass
                    elif operation == "remove":
                        # Remove the entire item
                        if item in current[attr]:
                            current[attr].remove(item)
            # Handle simple attribute
            elif i == len(components) - 1:
                # Last component
                if sub_attr:
                    if attr not in current:
                        current[attr] = {}
                    if operation in {"add", "replace"}:
                        current[attr][sub_attr] = value
                    elif operation == "remove":
                        current[attr].pop(sub_attr, None)
                elif operation in {"add", "replace"}:
                    current[attr] = value
                elif operation == "remove":
                    current.pop(attr, None)
            else:
                # Navigate deeper
                if attr not in current:
                    current[attr] = {}
                current = current[attr]

    def _matches_filter(self, item: dict[str, Any], filter_expr: dict[str, Any]) -> bool:
        """Check if an item matches the filter expression"""
        if not filter_expr:
            # An empty filter matches everything
            return True

        filter_type = filter_expr.get("type", "comparison")

        if filter_type == "comparison":
            return self._matches_comparison(item, filter_expr)
        elif filter_type == "logical":
            return self._matches_logical(item, filter_expr)

        return False

    def _matches_comparison(  # noqa PLR0912
        self, item: dict[str, Any], filter_expr: dict[str, Any]
    ) -> bool:
        """Check if an item matches a comparison filter"""
        attr = filter_expr["attribute"]
        operator = filter_expr["operator"]
        expected_value = filter_expr["value"]

        if attr not in item:
            return False

        actual_value = item[attr]

        if operator == "eq":
            return actual_value == expected_value
        elif operator == "ne":
            return actual_value != expected_value
        elif operator == "co":
            # "contains" — compared on string representations
            return str(expected_value) in str(actual_value)
        elif operator == "sw":
            return str(actual_value).startswith(str(expected_value))
        elif operator == "ew":
            return str(actual_value).endswith(str(expected_value))
        elif operator == "gt":
            return actual_value > expected_value
        elif operator == "lt":
            return actual_value < expected_value
        elif operator == "ge":
            return actual_value >= expected_value
        elif operator == "le":
            return actual_value <= expected_value
        elif operator == "pr":
            # "present" — attribute exists with a non-null value
            return actual_value is not None

        return False

    def _matches_logical(self, item: dict[str, Any], filter_expr: dict[str, Any]) -> bool:
        """Check if an item matches a logical filter expression"""
        operator = filter_expr["operator"]

        if operator == "and":
            left_result = self._matches_filter(item, filter_expr["left"])
            right_result = self._matches_filter(item, filter_expr["right"])
            return left_result and right_result
        elif operator == "or":
            left_result = self._matches_filter(item, filter_expr["left"])
            right_result = self._matches_filter(item, filter_expr["right"])
            return left_result or right_result
        elif operator == "not":
            operand_result = self._matches_filter(item, filter_expr["operand"])
            return not operand_result

        return False
|
||||
@@ -1101,17 +1101,6 @@
|
||||
"returned": "default",
|
||||
"uniqueness": "none"
|
||||
},
|
||||
{
|
||||
"name": "password",
|
||||
"type": "string",
|
||||
"multiValued": false,
|
||||
"description": "The User's cleartext password. This attribute is intended to be used as a means to specify an initial\npassword when creating a new User or to reset an existing User's password.",
|
||||
"required": false,
|
||||
"caseExact": false,
|
||||
"mutability": "writeOnly",
|
||||
"returned": "never",
|
||||
"uniqueness": "none"
|
||||
},
|
||||
{
|
||||
"name": "emails",
|
||||
"type": "complex",
|
||||
|
||||
@@ -75,7 +75,9 @@ class TestSCIMGroups(APITestCase):
|
||||
HTTP_AUTHORIZATION=f"Bearer {self.source.token.key}",
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
self.assertTrue(SCIMSourceGroup.objects.filter(source=self.source, id=ext_id).exists())
|
||||
self.assertTrue(
|
||||
SCIMSourceGroup.objects.filter(source=self.source, external_id=ext_id).exists()
|
||||
)
|
||||
self.assertTrue(
|
||||
Event.objects.filter(
|
||||
action=EventAction.MODEL_CREATED, user__username=self.source.token.user.username
|
||||
@@ -86,6 +88,7 @@ class TestSCIMGroups(APITestCase):
|
||||
"""Test group create"""
|
||||
user = create_test_user()
|
||||
ext_id = generate_id()
|
||||
name = generate_id()
|
||||
response = self.client.post(
|
||||
reverse(
|
||||
"authentik_sources_scim:v2-groups",
|
||||
@@ -95,7 +98,7 @@ class TestSCIMGroups(APITestCase):
|
||||
),
|
||||
data=dumps(
|
||||
{
|
||||
"displayName": generate_id(),
|
||||
"displayName": name,
|
||||
"externalId": ext_id,
|
||||
"members": [{"value": str(user.uuid)}],
|
||||
}
|
||||
@@ -104,12 +107,22 @@ class TestSCIMGroups(APITestCase):
|
||||
HTTP_AUTHORIZATION=f"Bearer {self.source.token.key}",
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
self.assertTrue(SCIMSourceGroup.objects.filter(source=self.source, id=ext_id).exists())
|
||||
connection = SCIMSourceGroup.objects.filter(source=self.source, external_id=ext_id).first()
|
||||
self.assertIsNotNone(connection)
|
||||
self.assertTrue(
|
||||
Event.objects.filter(
|
||||
action=EventAction.MODEL_CREATED, user__username=self.source.token.user.username
|
||||
).exists()
|
||||
)
|
||||
connection.refresh_from_db()
|
||||
self.assertEqual(
|
||||
connection.attributes,
|
||||
{
|
||||
"displayName": name,
|
||||
"externalId": ext_id,
|
||||
"members": [{"value": str(user.uuid)}],
|
||||
},
|
||||
)
|
||||
|
||||
def test_group_create_members_empty(self):
|
||||
"""Test group create"""
|
||||
@@ -126,7 +139,9 @@ class TestSCIMGroups(APITestCase):
|
||||
HTTP_AUTHORIZATION=f"Bearer {self.source.token.key}",
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
self.assertTrue(SCIMSourceGroup.objects.filter(source=self.source, id=ext_id).exists())
|
||||
self.assertTrue(
|
||||
SCIMSourceGroup.objects.filter(source=self.source, external_id=ext_id).exists()
|
||||
)
|
||||
self.assertTrue(
|
||||
Event.objects.filter(
|
||||
action=EventAction.MODEL_CREATED, user__username=self.source.token.user.username
|
||||
@@ -136,7 +151,9 @@ class TestSCIMGroups(APITestCase):
|
||||
def test_group_create_duplicate(self):
|
||||
"""Test group create (duplicate)"""
|
||||
group = Group.objects.create(name=generate_id())
|
||||
existing = SCIMSourceGroup.objects.create(source=self.source, group=group, id=uuid4())
|
||||
existing = SCIMSourceGroup.objects.create(
|
||||
source=self.source, group=group, external_id=uuid4()
|
||||
)
|
||||
ext_id = generate_id()
|
||||
response = self.client.post(
|
||||
reverse(
|
||||
@@ -165,7 +182,9 @@ class TestSCIMGroups(APITestCase):
|
||||
def test_group_update(self):
|
||||
"""Test group update"""
|
||||
group = Group.objects.create(name=generate_id())
|
||||
existing = SCIMSourceGroup.objects.create(source=self.source, group=group, id=uuid4())
|
||||
existing = SCIMSourceGroup.objects.create(
|
||||
source=self.source, group=group, external_id=uuid4()
|
||||
)
|
||||
ext_id = generate_id()
|
||||
response = self.client.put(
|
||||
reverse(
|
||||
@@ -205,12 +224,49 @@ class TestSCIMGroups(APITestCase):
|
||||
},
|
||||
)
|
||||
|
||||
def test_group_patch_add(self):
|
||||
def test_group_patch_modify(self):
|
||||
"""Test group patch"""
|
||||
group = Group.objects.create(name=generate_id())
|
||||
connection = SCIMSourceGroup.objects.create(
|
||||
source=self.source,
|
||||
group=group,
|
||||
external_id=uuid4(),
|
||||
attributes={"displayName": group.name, "members": []},
|
||||
)
|
||||
response = self.client.patch(
|
||||
reverse(
|
||||
"authentik_sources_scim:v2-groups",
|
||||
kwargs={"source_slug": self.source.slug, "group_id": group.pk},
|
||||
),
|
||||
data=dumps(
|
||||
{
|
||||
"Operations": [
|
||||
{
|
||||
"op": "Add",
|
||||
"value": {"externalId": "d85051cb-0557-4aa1-98ca-51eabcee4d40"},
|
||||
}
|
||||
]
|
||||
}
|
||||
),
|
||||
content_type=SCIM_CONTENT_TYPE,
|
||||
HTTP_AUTHORIZATION=f"Bearer {self.source.token.key}",
|
||||
)
|
||||
self.assertEqual(response.status_code, 200, response.content)
|
||||
connection = SCIMSourceGroup.objects.filter(id="d85051cb-0557-4aa1-98ca-51eabcee4d40")
|
||||
self.assertIsNotNone(connection)
|
||||
|
||||
def test_group_patch_member_add(self):
|
||||
"""Test group patch"""
|
||||
user = create_test_user()
|
||||
|
||||
other_user = create_test_user()
|
||||
group = Group.objects.create(name=generate_id())
|
||||
SCIMSourceGroup.objects.create(source=self.source, group=group, id=uuid4())
|
||||
group.users.add(other_user)
|
||||
connection = SCIMSourceGroup.objects.create(
|
||||
source=self.source,
|
||||
group=group,
|
||||
external_id=uuid4(),
|
||||
attributes={"displayName": group.name, "members": [{"value": str(other_user.uuid)}]},
|
||||
)
|
||||
response = self.client.patch(
|
||||
reverse(
|
||||
"authentik_sources_scim:v2-groups",
|
||||
@@ -222,7 +278,7 @@ class TestSCIMGroups(APITestCase):
|
||||
{
|
||||
"op": "Add",
|
||||
"path": "members",
|
||||
"value": {"value": str(user.uuid)},
|
||||
"value": [{"value": str(user.uuid)}],
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -230,16 +286,33 @@ class TestSCIMGroups(APITestCase):
|
||||
content_type=SCIM_CONTENT_TYPE,
|
||||
HTTP_AUTHORIZATION=f"Bearer {self.source.token.key}",
|
||||
)
|
||||
self.assertEqual(response.status_code, second=200)
|
||||
self.assertEqual(response.status_code, 200, response.content)
|
||||
self.assertTrue(group.users.filter(pk=user.pk).exists())
|
||||
self.assertTrue(group.users.filter(pk=other_user.pk).exists())
|
||||
connection.refresh_from_db()
|
||||
self.assertEqual(
|
||||
connection.attributes,
|
||||
{
|
||||
"displayName": group.name,
|
||||
"members": sorted(
|
||||
[{"value": str(other_user.uuid)}, {"value": str(user.uuid)}],
|
||||
key=lambda u: u["value"],
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
def test_group_patch_remove(self):
|
||||
def test_group_patch_member_remove(self):
|
||||
"""Test group patch"""
|
||||
user = create_test_user()
|
||||
|
||||
group = Group.objects.create(name=generate_id())
|
||||
group.users.add(user)
|
||||
SCIMSourceGroup.objects.create(source=self.source, group=group, id=uuid4())
|
||||
connection = SCIMSourceGroup.objects.create(
|
||||
source=self.source,
|
||||
group=group,
|
||||
external_id=uuid4(),
|
||||
attributes={"displayName": group.name, "members": []},
|
||||
)
|
||||
response = self.client.patch(
|
||||
reverse(
|
||||
"authentik_sources_scim:v2-groups",
|
||||
@@ -251,7 +324,7 @@ class TestSCIMGroups(APITestCase):
|
||||
{
|
||||
"op": "remove",
|
||||
"path": "members",
|
||||
"value": {"value": str(user.uuid)},
|
||||
"value": [{"value": str(user.uuid)}],
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -259,13 +332,21 @@ class TestSCIMGroups(APITestCase):
|
||||
content_type=SCIM_CONTENT_TYPE,
|
||||
HTTP_AUTHORIZATION=f"Bearer {self.source.token.key}",
|
||||
)
|
||||
self.assertEqual(response.status_code, second=200)
|
||||
self.assertEqual(response.status_code, 200, response.content)
|
||||
self.assertFalse(group.users.filter(pk=user.pk).exists())
|
||||
connection.refresh_from_db()
|
||||
self.assertEqual(
|
||||
connection.attributes,
|
||||
{
|
||||
"displayName": group.name,
|
||||
"members": [],
|
||||
},
|
||||
)
|
||||
|
||||
def test_group_delete(self):
|
||||
"""Test group delete"""
|
||||
group = Group.objects.create(name=generate_id())
|
||||
SCIMSourceGroup.objects.create(source=self.source, group=group, id=uuid4())
|
||||
SCIMSourceGroup.objects.create(source=self.source, group=group, external_id=uuid4())
|
||||
response = self.client.delete(
|
||||
reverse(
|
||||
"authentik_sources_scim:v2-groups",
|
||||
|
||||
510
authentik/sources/scim/tests/test_lexer.py
Normal file
510
authentik/sources/scim/tests/test_lexer.py
Normal file
@@ -0,0 +1,510 @@
|
||||
from unittest import TestCase
|
||||
|
||||
from authentik.sources.scim.constants import (
|
||||
SCIM_URN_GROUP,
|
||||
SCIM_URN_SCHEMA,
|
||||
SCIM_URN_USER,
|
||||
SCIM_URN_USER_ENTERPRISE,
|
||||
)
|
||||
from authentik.sources.scim.patch.lexer import SCIMPathLexer, Token, TokenType
|
||||
|
||||
|
||||
class TestTokenType(TestCase):
    """Test TokenType enum"""

    def test_token_type_values(self):
        """Test that all token types have correct values"""
        expected_values = [
            (TokenType.ATTRIBUTE, "ATTRIBUTE"),
            (TokenType.DOT, "DOT"),
            (TokenType.LBRACKET, "LBRACKET"),
            (TokenType.RBRACKET, "RBRACKET"),
            (TokenType.LPAREN, "LPAREN"),
            (TokenType.RPAREN, "RPAREN"),
            (TokenType.STRING, "STRING"),
            (TokenType.NUMBER, "NUMBER"),
            (TokenType.BOOLEAN, "BOOLEAN"),
            (TokenType.NULL, "NULL"),
            (TokenType.OPERATOR, "OPERATOR"),
            (TokenType.AND, "AND"),
            (TokenType.OR, "OR"),
            (TokenType.NOT, "NOT"),
            (TokenType.EOF, "EOF"),
        ]
        for token_type, value in expected_values:
            self.assertEqual(token_type.value, value)
|
||||
|
||||
|
||||
class TestToken(TestCase):
    """Test Token dataclass"""

    def test_token_creation(self):
        """Test token creation with all parameters"""
        token = Token(TokenType.ATTRIBUTE, "userName", 5)
        self.assertEqual(
            (token.type, token.value, token.position),
            (TokenType.ATTRIBUTE, "userName", 5),
        )

    def test_token_creation_default_position(self):
        """Test token creation with default position"""
        token = Token(TokenType.DOT, ".")
        self.assertEqual(
            (token.type, token.value, token.position),
            (TokenType.DOT, ".", 0),
        )
|
||||
|
||||
|
||||
class TestSCIMPathLexer(TestCase):
|
||||
"""Test SCIMPathLexer class"""
|
||||
|
||||
def setUp(self):
|
||||
"""Set up test fixtures"""
|
||||
self.simple_lexer = SCIMPathLexer("userName")
|
||||
|
||||
def test_init(self):
|
||||
"""Test lexer initialization"""
|
||||
lexer = SCIMPathLexer("test")
|
||||
self.assertEqual(lexer.text, "test")
|
||||
self.assertEqual(lexer.pos, 0)
|
||||
self.assertEqual(lexer.current_char, "t")
|
||||
self.assertIn(SCIM_URN_SCHEMA, lexer.schema_urns)
|
||||
self.assertIn(SCIM_URN_GROUP, lexer.schema_urns)
|
||||
self.assertIn(SCIM_URN_USER, lexer.schema_urns)
|
||||
self.assertIn(SCIM_URN_USER_ENTERPRISE, lexer.schema_urns)
|
||||
self.assertEqual(
|
||||
lexer.OPERATORS, ["eq", "ne", "co", "sw", "ew", "gt", "lt", "ge", "le", "pr"]
|
||||
)
|
||||
|
||||
def test_init_empty_string(self):
|
||||
"""Test lexer initialization with empty string"""
|
||||
lexer = SCIMPathLexer("")
|
||||
self.assertEqual(lexer.text, "")
|
||||
self.assertEqual(lexer.pos, 0)
|
||||
self.assertIsNone(lexer.current_char)
|
||||
|
||||
def test_advance(self):
|
||||
"""Test advance method"""
|
||||
lexer = SCIMPathLexer("abc")
|
||||
self.assertEqual(lexer.current_char, "a")
|
||||
|
||||
lexer.advance()
|
||||
self.assertEqual(lexer.pos, 1)
|
||||
self.assertEqual(lexer.current_char, "b")
|
||||
|
||||
lexer.advance()
|
||||
self.assertEqual(lexer.pos, 2)
|
||||
self.assertEqual(lexer.current_char, "c")
|
||||
|
||||
lexer.advance()
|
||||
self.assertEqual(lexer.pos, 3)
|
||||
self.assertIsNone(lexer.current_char)
|
||||
|
||||
def test_skip_whitespace(self):
|
||||
"""Test skip_whitespace method"""
|
||||
lexer = SCIMPathLexer(" \t\n abc")
|
||||
lexer.skip_whitespace()
|
||||
self.assertEqual(lexer.current_char, "a")
|
||||
|
||||
def test_skip_whitespace_only_whitespace(self):
|
||||
"""Test skip_whitespace with only whitespace"""
|
||||
lexer = SCIMPathLexer(" \t\n ")
|
||||
lexer.skip_whitespace()
|
||||
self.assertIsNone(lexer.current_char)
|
||||
|
||||
def test_skip_whitespace_no_whitespace(self):
|
||||
"""Test skip_whitespace with no leading whitespace"""
|
||||
lexer = SCIMPathLexer("abc")
|
||||
original_pos = lexer.pos
|
||||
lexer.skip_whitespace()
|
||||
self.assertEqual(lexer.pos, original_pos)
|
||||
self.assertEqual(lexer.current_char, "a")
|
||||
|
||||
def test_read_string_double_quotes(self):
|
||||
"""Test reading double-quoted string"""
|
||||
lexer = SCIMPathLexer('"hello world"')
|
||||
result = lexer.read_string('"')
|
||||
self.assertEqual(result, "hello world")
|
||||
self.assertIsNone(lexer.current_char) # Should be at end
|
||||
|
||||
def test_read_string_single_quotes(self):
|
||||
"""Test reading single-quoted string"""
|
||||
lexer = SCIMPathLexer("'hello world'")
|
||||
result = lexer.read_string("'")
|
||||
self.assertEqual(result, "hello world")
|
||||
self.assertIsNone(lexer.current_char)
|
||||
|
||||
def test_read_string_with_escapes(self):
|
||||
"""Test reading string with escape characters"""
|
||||
lexer = SCIMPathLexer('"hello \\"world\\""')
|
||||
result = lexer.read_string('"')
|
||||
self.assertEqual(result, 'hello "world"')
|
||||
|
||||
def test_read_string_with_backslash_at_end(self):
|
||||
"""Test reading string with backslash at end"""
|
||||
lexer = SCIMPathLexer('"hello\\"')
|
||||
result = lexer.read_string('"')
|
||||
self.assertEqual(result, 'hello"')
|
||||
|
||||
def test_read_string_unclosed(self):
|
||||
"""Test reading unclosed string"""
|
||||
lexer = SCIMPathLexer('"hello world')
|
||||
result = lexer.read_string('"')
|
||||
self.assertEqual(result, "hello world")
|
||||
self.assertIsNone(lexer.current_char)
|
||||
|
||||
def test_read_string_empty(self):
|
||||
"""Test reading empty string"""
|
||||
lexer = SCIMPathLexer('""')
|
||||
result = lexer.read_string('"')
|
||||
self.assertEqual(result, "")
|
||||
|
||||
def test_read_number_integer(self):
|
||||
"""Test reading integer number"""
|
||||
lexer = SCIMPathLexer("123")
|
||||
result = lexer.read_number()
|
||||
self.assertEqual(result, "123")
|
||||
self.assertIsNone(lexer.current_char)
|
||||
|
||||
def test_read_number_float(self):
|
||||
"""Test reading float number"""
|
||||
lexer = SCIMPathLexer("123.456")
|
||||
result = lexer.read_number()
|
||||
self.assertEqual(result, "123.456")
|
||||
self.assertIsNone(lexer.current_char)
|
||||
|
||||
def test_read_number_with_multiple_dots(self):
|
||||
"""Test reading number with multiple dots (invalid but handled)"""
|
||||
lexer = SCIMPathLexer("123.456.789")
|
||||
result = lexer.read_number()
|
||||
self.assertEqual(result, "123.456.789")
|
||||
self.assertIsNone(lexer.current_char)
|
||||
|
||||
def test_read_number_starting_with_dot(self):
|
||||
"""Test reading number starting with dot"""
|
||||
lexer = SCIMPathLexer(".123")
|
||||
result = lexer.read_number()
|
||||
self.assertEqual(result, ".123")
|
||||
|
||||
def test_read_identifier_simple(self):
|
||||
"""Test reading simple identifier"""
|
||||
lexer = SCIMPathLexer("userName")
|
||||
result = lexer.read_identifier()
|
||||
self.assertEqual(result, "userName")
|
||||
self.assertIsNone(lexer.current_char)
|
||||
|
||||
def test_read_identifier_with_underscore(self):
|
||||
"""Test reading identifier with underscore"""
|
||||
lexer = SCIMPathLexer("user_name")
|
||||
result = lexer.read_identifier()
|
||||
self.assertEqual(result, "user_name")
|
||||
|
||||
def test_read_identifier_with_hyphen(self):
|
||||
"""Test reading identifier with hyphen"""
|
||||
lexer = SCIMPathLexer("user-name")
|
||||
result = lexer.read_identifier()
|
||||
self.assertEqual(result, "user-name")
|
||||
|
||||
def test_read_identifier_with_colon(self):
|
||||
"""Test reading identifier with colon (URN format)"""
|
||||
lexer = SCIMPathLexer("urn:ietf:params:scim:schemas:core:2.0:User")
|
||||
result = lexer.read_identifier()
|
||||
self.assertEqual(result, "urn:ietf:params:scim:schemas:core:2.0:User")
|
||||
|
||||
def test_read_identifier_schema_urn(self):
|
||||
"""Test reading schema URN identifier"""
|
||||
lexer = SCIMPathLexer(f"{SCIM_URN_USER}.userName")
|
||||
result = lexer.read_identifier()
|
||||
self.assertEqual(result, SCIM_URN_USER)
|
||||
self.assertEqual(lexer.current_char, ".") # Should stop at dot and set current_char to dot
|
||||
|
||||
def test_read_identifier_with_version_number(self):
|
||||
"""Test reading identifier with version number (dots followed by digits)"""
|
||||
lexer = SCIMPathLexer("urn:ietf:params:scim:schemas:core:2.0:User")
|
||||
result = lexer.read_identifier()
|
||||
self.assertEqual(result, "urn:ietf:params:scim:schemas:core:2.0:User")
|
||||
|
||||
def test_read_identifier_partial_urn_match(self):
|
||||
"""Test reading identifier that partially matches URN"""
|
||||
lexer = SCIMPathLexer("urn:ietf:params:scim:schemas:core:2.0:CustomUser")
|
||||
result = lexer.read_identifier()
|
||||
self.assertEqual(result, "urn:ietf:params:scim:schemas:core:2.0:CustomUser")
|
||||
|
||||
# Test get_next_token method
|
||||
def test_get_next_token_dot(self):
|
||||
"""Test tokenizing dot"""
|
||||
lexer = SCIMPathLexer(".")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.DOT)
|
||||
self.assertEqual(token.value, ".")
|
||||
|
||||
def test_get_next_token_lbracket(self):
|
||||
"""Test tokenizing left bracket"""
|
||||
lexer = SCIMPathLexer("[")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.LBRACKET)
|
||||
self.assertEqual(token.value, "[")
|
||||
|
||||
def test_get_next_token_rbracket(self):
|
||||
"""Test tokenizing right bracket"""
|
||||
lexer = SCIMPathLexer("]")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.RBRACKET)
|
||||
self.assertEqual(token.value, "]")
|
||||
|
||||
def test_get_next_token_lparen(self):
|
||||
"""Test tokenizing left parenthesis"""
|
||||
lexer = SCIMPathLexer("(")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.LPAREN)
|
||||
self.assertEqual(token.value, "(")
|
||||
|
||||
def test_get_next_token_rparen(self):
|
||||
"""Test tokenizing right parenthesis"""
|
||||
lexer = SCIMPathLexer(")")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.RPAREN)
|
||||
self.assertEqual(token.value, ")")
|
||||
|
||||
def test_get_next_token_string_double_quotes(self):
|
||||
"""Test tokenizing double-quoted string"""
|
||||
lexer = SCIMPathLexer('"test string"')
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.STRING)
|
||||
self.assertEqual(token.value, "test string")
|
||||
|
||||
def test_get_next_token_string_single_quotes(self):
|
||||
"""Test tokenizing single-quoted string"""
|
||||
lexer = SCIMPathLexer("'test string'")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.STRING)
|
||||
self.assertEqual(token.value, "test string")
|
||||
|
||||
def test_get_next_token_number_integer(self):
|
||||
"""Test tokenizing integer"""
|
||||
lexer = SCIMPathLexer("123")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.NUMBER)
|
||||
self.assertEqual(token.value, "123")
|
||||
|
||||
def test_get_next_token_number_float(self):
|
||||
"""Test tokenizing float"""
|
||||
lexer = SCIMPathLexer("123.45")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.NUMBER)
|
||||
self.assertEqual(token.value, "123.45")
|
||||
|
||||
def test_get_next_token_boolean_true(self):
|
||||
"""Test tokenizing boolean true"""
|
||||
lexer = SCIMPathLexer("true")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.BOOLEAN)
|
||||
self.assertTrue(token.value)
|
||||
|
||||
def test_get_next_token_boolean_false(self):
|
||||
"""Test tokenizing boolean false"""
|
||||
lexer = SCIMPathLexer("false")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.BOOLEAN)
|
||||
self.assertFalse(token.value)
|
||||
|
||||
def test_get_next_token_boolean_case_insensitive(self):
|
||||
"""Test tokenizing boolean with different cases"""
|
||||
for value in ["TRUE", "True", "FALSE", "False"]:
|
||||
with self.subTest(value=value):
|
||||
lexer = SCIMPathLexer(value)
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.BOOLEAN)
|
||||
|
||||
def test_get_next_token_null(self):
|
||||
"""Test tokenizing null"""
|
||||
lexer = SCIMPathLexer("null")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.NULL)
|
||||
self.assertIsNone(token.value)
|
||||
|
||||
def test_get_next_token_null_case_insensitive(self):
|
||||
"""Test tokenizing null with different cases"""
|
||||
for value in ["NULL", "Null"]:
|
||||
with self.subTest(value=value):
|
||||
lexer = SCIMPathLexer(value)
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.NULL)
|
||||
|
||||
def test_get_next_token_and(self):
|
||||
"""Test tokenizing AND operator"""
|
||||
lexer = SCIMPathLexer("and")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.AND)
|
||||
self.assertEqual(token.value, "and")
|
||||
|
||||
def test_get_next_token_or(self):
|
||||
"""Test tokenizing OR operator"""
|
||||
lexer = SCIMPathLexer("or")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.OR)
|
||||
self.assertEqual(token.value, "or")
|
||||
|
||||
def test_get_next_token_not(self):
|
||||
"""Test tokenizing NOT operator"""
|
||||
lexer = SCIMPathLexer("not")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.NOT)
|
||||
self.assertEqual(token.value, "not")
|
||||
|
||||
def test_get_next_token_operators(self):
|
||||
"""Test tokenizing all comparison operators"""
|
||||
operators = ["eq", "ne", "co", "sw", "ew", "gt", "lt", "ge", "le", "pr"]
|
||||
for op in operators:
|
||||
with self.subTest(operator=op):
|
||||
lexer = SCIMPathLexer(op)
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.OPERATOR)
|
||||
self.assertEqual(token.value, op)
|
||||
|
||||
def test_get_next_token_operators_case_insensitive(self):
|
||||
"""Test tokenizing operators with different cases"""
|
||||
for op in ["EQ", "Eq", "NE", "Ne"]:
|
||||
with self.subTest(operator=op):
|
||||
lexer = SCIMPathLexer(op)
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.OPERATOR)
|
||||
self.assertEqual(token.value, op.lower())
|
||||
|
||||
def test_get_next_token_attribute(self):
|
||||
"""Test tokenizing attribute name"""
|
||||
lexer = SCIMPathLexer("userName")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.ATTRIBUTE)
|
||||
self.assertEqual(token.value, "userName")
|
||||
|
||||
def test_get_next_token_attribute_with_underscore(self):
|
||||
"""Test tokenizing attribute name with underscore"""
|
||||
lexer = SCIMPathLexer("_userName")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.ATTRIBUTE)
|
||||
self.assertEqual(token.value, "_userName")
|
||||
|
||||
def test_get_next_token_eof(self):
|
||||
"""Test tokenizing end of file"""
|
||||
lexer = SCIMPathLexer("")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.EOF)
|
||||
self.assertEqual(token.value, "")
|
||||
|
||||
def test_get_next_token_with_whitespace(self):
|
||||
"""Test tokenizing with leading whitespace"""
|
||||
lexer = SCIMPathLexer(" userName")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.ATTRIBUTE)
|
||||
self.assertEqual(token.value, "userName")
|
||||
|
||||
def test_get_next_token_skip_unknown_characters(self):
|
||||
"""Test that unknown characters are skipped"""
|
||||
lexer = SCIMPathLexer("@#$userName")
|
||||
token = lexer.get_next_token()
|
||||
self.assertEqual(token.type, TokenType.ATTRIBUTE)
|
||||
self.assertEqual(token.value, "userName")
|
||||
|
||||
def test_get_next_token_multiple_tokens(self):
|
||||
"""Test tokenizing multiple tokens in sequence"""
|
||||
lexer = SCIMPathLexer("userName.givenName")
|
||||
|
||||
token1 = lexer.get_next_token()
|
||||
self.assertEqual(token1.type, TokenType.ATTRIBUTE)
|
||||
self.assertEqual(token1.value, "userName")
|
||||
|
||||
token2 = lexer.get_next_token()
|
||||
self.assertEqual(token2.type, TokenType.DOT)
|
||||
self.assertEqual(token2.value, ".")
|
||||
|
||||
token3 = lexer.get_next_token()
|
||||
self.assertEqual(token3.type, TokenType.ATTRIBUTE)
|
||||
self.assertEqual(token3.value, "givenName")
|
||||
|
||||
token4 = lexer.get_next_token()
|
||||
self.assertEqual(token4.type, TokenType.EOF)
|
||||
|
||||
def test_get_next_token_complex_filter(self):
|
||||
"""Test tokenizing complex filter expression"""
|
||||
lexer = SCIMPathLexer('emails[type eq "work" and primary eq true]')
|
||||
|
||||
tokens = []
|
||||
while True:
|
||||
token = lexer.get_next_token()
|
||||
tokens.append(token)
|
||||
if token.type == TokenType.EOF:
|
||||
break
|
||||
|
||||
expected_types = [
|
||||
TokenType.ATTRIBUTE, # emails
|
||||
TokenType.LBRACKET, # [
|
||||
TokenType.ATTRIBUTE, # type
|
||||
TokenType.OPERATOR, # eq
|
||||
TokenType.STRING, # "work"
|
||||
TokenType.AND, # and
|
||||
TokenType.ATTRIBUTE, # primary
|
||||
TokenType.OPERATOR, # eq
|
||||
TokenType.BOOLEAN, # true
|
||||
TokenType.RBRACKET, # ]
|
||||
TokenType.EOF,
|
||||
]
|
||||
|
||||
self.assertEqual(len(tokens), len(expected_types))
|
||||
for token, expected_type in zip(tokens, expected_types, strict=False):
|
||||
self.assertEqual(token.type, expected_type)
|
||||
|
||||
def test_get_next_token_urn_attribute(self):
|
||||
"""Test tokenizing URN-based attribute"""
|
||||
lexer = SCIMPathLexer(f"{SCIM_URN_USER}.userName")
|
||||
|
||||
token1 = lexer.get_next_token()
|
||||
self.assertEqual(token1.type, TokenType.ATTRIBUTE)
|
||||
self.assertEqual(token1.value, SCIM_URN_USER)
|
||||
|
||||
token2 = lexer.get_next_token()
|
||||
self.assertEqual(token2.type, TokenType.DOT)
|
||||
|
||||
token3 = lexer.get_next_token()
|
||||
self.assertEqual(token3.type, TokenType.ATTRIBUTE)
|
||||
self.assertEqual(token3.value, "userName")
|
||||
|
||||
def test_get_next_token_enterprise_urn(self):
|
||||
"""Test tokenizing enterprise URN"""
|
||||
lexer = SCIMPathLexer(f"{SCIM_URN_USER_ENTERPRISE}.manager")
|
||||
|
||||
token1 = lexer.get_next_token()
|
||||
self.assertEqual(token1.type, TokenType.ATTRIBUTE)
|
||||
self.assertEqual(token1.value, SCIM_URN_USER_ENTERPRISE)
|
||||
|
||||
token2 = lexer.get_next_token()
|
||||
self.assertEqual(token2.type, TokenType.DOT)
|
||||
|
||||
def test_lexer_state_after_eof(self):
|
||||
"""Test lexer state after reaching EOF"""
|
||||
lexer = SCIMPathLexer("a")
|
||||
|
||||
# Get first token
|
||||
token1 = lexer.get_next_token()
|
||||
self.assertEqual(token1.type, TokenType.ATTRIBUTE)
|
||||
|
||||
# Get EOF token
|
||||
token2 = lexer.get_next_token()
|
||||
self.assertEqual(token2.type, TokenType.EOF)
|
||||
|
||||
# Should continue returning EOF
|
||||
token3 = lexer.get_next_token()
|
||||
self.assertEqual(token3.type, TokenType.EOF)
|
||||
|
||||
def test_read_identifier_edge_cases(self):
|
||||
"""Test read_identifier with edge cases"""
|
||||
# Test identifier ending with colon
|
||||
lexer = SCIMPathLexer("test:")
|
||||
result = lexer.read_identifier()
|
||||
self.assertEqual(result, "test:")
|
||||
|
||||
# Test identifier with numbers
|
||||
lexer = SCIMPathLexer("test123")
|
||||
result = lexer.read_identifier()
|
||||
self.assertEqual(result, "test123")
|
||||
|
||||
def test_complex_urn_parsing(self):
|
||||
"""Test parsing complex URN with version numbers"""
|
||||
urn = "urn:ietf:params:scim:schemas:extension:enterprise:2.0:User"
|
||||
lexer = SCIMPathLexer(urn)
|
||||
result = lexer.read_identifier()
|
||||
self.assertEqual(result, urn)
|
||||
1254
authentik/sources/scim/tests/test_patch.py
Normal file
1254
authentik/sources/scim/tests/test_patch.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -10,6 +10,7 @@ from authentik.core.tests.utils import create_test_user
|
||||
from authentik.events.models import Event, EventAction
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.providers.scim.clients.schema import User as SCIMUserSchema
|
||||
from authentik.sources.scim.constants import SCIM_URN_USER_ENTERPRISE
|
||||
from authentik.sources.scim.models import SCIMSource, SCIMSourcePropertyMapping, SCIMSourceUser
|
||||
from authentik.sources.scim.views.v2.base import SCIM_CONTENT_TYPE
|
||||
|
||||
@@ -81,7 +82,9 @@ class TestSCIMUsers(APITestCase):
|
||||
HTTP_AUTHORIZATION=f"Bearer {self.source.token.key}",
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
self.assertTrue(SCIMSourceUser.objects.filter(source=self.source, id=ext_id).exists())
|
||||
self.assertTrue(
|
||||
SCIMSourceUser.objects.filter(source=self.source, external_id=ext_id).exists()
|
||||
)
|
||||
self.assertTrue(
|
||||
Event.objects.filter(
|
||||
action=EventAction.MODEL_CREATED, user__username=self.source.token.user.username
|
||||
@@ -174,14 +177,16 @@ class TestSCIMUsers(APITestCase):
|
||||
)
|
||||
self.assertEqual(response.status_code, 201)
|
||||
self.assertEqual(
|
||||
SCIMSourceUser.objects.get(source=self.source, id=ext_id).user.attributes["phone"],
|
||||
SCIMSourceUser.objects.get(source=self.source, external_id=ext_id).user.attributes[
|
||||
"phone"
|
||||
],
|
||||
"0123456789",
|
||||
)
|
||||
|
||||
def test_user_update(self):
|
||||
"""Test user update"""
|
||||
user = create_test_user()
|
||||
existing = SCIMSourceUser.objects.create(source=self.source, user=user, id=uuid4())
|
||||
existing = SCIMSourceUser.objects.create(source=self.source, user=user, external_id=uuid4())
|
||||
ext_id = generate_id()
|
||||
response = self.client.put(
|
||||
reverse(
|
||||
@@ -209,10 +214,51 @@ class TestSCIMUsers(APITestCase):
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
def test_user_update_patch(self):
|
||||
"""Test user update (patch)"""
|
||||
user = create_test_user()
|
||||
existing = SCIMSourceUser.objects.create(
|
||||
source=self.source,
|
||||
user=user,
|
||||
external_id=uuid4(),
|
||||
attributes={
|
||||
"userName": generate_id(),
|
||||
},
|
||||
)
|
||||
response = self.client.patch(
|
||||
reverse(
|
||||
"authentik_sources_scim:v2-users",
|
||||
kwargs={
|
||||
"source_slug": self.source.slug,
|
||||
"user_id": str(user.uuid),
|
||||
},
|
||||
),
|
||||
data=dumps(
|
||||
{
|
||||
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
|
||||
"Operations": [
|
||||
{
|
||||
"op": "Add",
|
||||
"path": f"{SCIM_URN_USER_ENTERPRISE}:manager",
|
||||
"value": "86b2ed3e-30cd-4881-bb58-c4e910821339",
|
||||
}
|
||||
],
|
||||
}
|
||||
),
|
||||
content_type=SCIM_CONTENT_TYPE,
|
||||
HTTP_AUTHORIZATION=f"Bearer {self.source.token.key}",
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
existing.refresh_from_db()
|
||||
self.assertEqual(
|
||||
existing.attributes[SCIM_URN_USER_ENTERPRISE],
|
||||
{"manager": {"value": "86b2ed3e-30cd-4881-bb58-c4e910821339"}},
|
||||
)
|
||||
|
||||
def test_user_delete(self):
|
||||
"""Test user delete"""
|
||||
user = create_test_user()
|
||||
SCIMSourceUser.objects.create(source=self.source, user=user, id=uuid4())
|
||||
SCIMSourceUser.objects.create(source=self.source, user=user, external_id=uuid4())
|
||||
response = self.client.delete(
|
||||
reverse(
|
||||
"authentik_sources_scim:v2-users",
|
||||
|
||||
488
authentik/sources/scim/tests/test_users_patch.py
Normal file
488
authentik/sources/scim/tests/test_users_patch.py
Normal file
@@ -0,0 +1,488 @@
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.core.tests.utils import create_test_user
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.sources.scim.constants import SCIM_URN_USER_ENTERPRISE
|
||||
from authentik.sources.scim.models import SCIMSource, SCIMSourceUser
|
||||
from authentik.sources.scim.patch.processor import SCIMPatchProcessor
|
||||
|
||||
|
||||
class TestSCIMUsersPatch(APITestCase):
|
||||
"""Test SCIM User Patch"""
|
||||
|
||||
def test_add(self):
|
||||
req = {
|
||||
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
|
||||
"Operations": [
|
||||
{"op": "Add", "path": "name.givenName", "value": "aqwer"},
|
||||
{"op": "Add", "path": "name.familyName", "value": "qwerqqqq"},
|
||||
{"op": "Add", "path": "name.formatted", "value": "aqwer qwerqqqq"},
|
||||
],
|
||||
}
|
||||
user = create_test_user()
|
||||
source = SCIMSource.objects.create(slug=generate_id())
|
||||
connection = SCIMSourceUser.objects.create(
|
||||
user=user,
|
||||
id=generate_id(),
|
||||
source=source,
|
||||
attributes={
|
||||
"meta": {"resourceType": "User"},
|
||||
"active": True,
|
||||
"schemas": [
|
||||
"urn:ietf:params:scim:schemas:core:2.0:User",
|
||||
SCIM_URN_USER_ENTERPRISE,
|
||||
],
|
||||
"userName": "test@t.goauthentik.io",
|
||||
"externalId": "test",
|
||||
"displayName": "Test MS",
|
||||
},
|
||||
)
|
||||
updated = SCIMPatchProcessor().apply_patches(connection.attributes, req["Operations"])
|
||||
self.assertEqual(
|
||||
updated,
|
||||
{
|
||||
"meta": {"resourceType": "User"},
|
||||
"active": True,
|
||||
"name": {
|
||||
"givenName": "aqwer",
|
||||
"familyName": "qwerqqqq",
|
||||
"formatted": "aqwer qwerqqqq",
|
||||
},
|
||||
"schemas": [
|
||||
"urn:ietf:params:scim:schemas:core:2.0:User",
|
||||
SCIM_URN_USER_ENTERPRISE,
|
||||
],
|
||||
"userName": "test@t.goauthentik.io",
|
||||
"externalId": "test",
|
||||
"displayName": "Test MS",
|
||||
},
|
||||
)
|
||||
|
||||
def test_add_no_path(self):
|
||||
"""Test add patch with no path set"""
|
||||
req = {
|
||||
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
|
||||
"Operations": [
|
||||
{"op": "Add", "value": {"externalId": "aqwer"}},
|
||||
],
|
||||
}
|
||||
user = create_test_user()
|
||||
source = SCIMSource.objects.create(slug=generate_id())
|
||||
connection = SCIMSourceUser.objects.create(
|
||||
user=user,
|
||||
id=generate_id(),
|
||||
source=source,
|
||||
attributes={
|
||||
"meta": {"resourceType": "User"},
|
||||
"active": True,
|
||||
"schemas": [
|
||||
"urn:ietf:params:scim:schemas:core:2.0:User",
|
||||
SCIM_URN_USER_ENTERPRISE,
|
||||
],
|
||||
"userName": "test@t.goauthentik.io",
|
||||
"displayName": "Test MS",
|
||||
},
|
||||
)
|
||||
updated = SCIMPatchProcessor().apply_patches(connection.attributes, req["Operations"])
|
||||
self.assertEqual(
|
||||
updated,
|
||||
{
|
||||
"meta": {"resourceType": "User"},
|
||||
"active": True,
|
||||
"schemas": [
|
||||
"urn:ietf:params:scim:schemas:core:2.0:User",
|
||||
SCIM_URN_USER_ENTERPRISE,
|
||||
],
|
||||
"userName": "test@t.goauthentik.io",
|
||||
"externalId": "aqwer",
|
||||
"displayName": "Test MS",
|
||||
},
|
||||
)
|
||||
|
||||
def test_replace(self):
|
||||
req = {
|
||||
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
|
||||
"Operations": [
|
||||
{"op": "Replace", "path": "name", "value": {"givenName": "aqwer"}},
|
||||
],
|
||||
}
|
||||
user = create_test_user()
|
||||
source = SCIMSource.objects.create(slug=generate_id())
|
||||
connection = SCIMSourceUser.objects.create(
|
||||
user=user,
|
||||
id=generate_id(),
|
||||
source=source,
|
||||
attributes={
|
||||
"meta": {"resourceType": "User"},
|
||||
"active": True,
|
||||
"schemas": [
|
||||
"urn:ietf:params:scim:schemas:core:2.0:User",
|
||||
SCIM_URN_USER_ENTERPRISE,
|
||||
],
|
||||
"userName": "test@t.goauthentik.io",
|
||||
"externalId": "test",
|
||||
"displayName": "Test MS",
|
||||
},
|
||||
)
|
||||
updated = SCIMPatchProcessor().apply_patches(connection.attributes, req["Operations"])
|
||||
self.assertEqual(
|
||||
updated,
|
||||
{
|
||||
"meta": {"resourceType": "User"},
|
||||
"active": True,
|
||||
"name": {
|
||||
"givenName": "aqwer",
|
||||
},
|
||||
"schemas": [
|
||||
"urn:ietf:params:scim:schemas:core:2.0:User",
|
||||
SCIM_URN_USER_ENTERPRISE,
|
||||
],
|
||||
"userName": "test@t.goauthentik.io",
|
||||
"externalId": "test",
|
||||
"displayName": "Test MS",
|
||||
},
|
||||
)
|
||||
|
||||
def test_replace_no_path(self):
|
||||
"""Test value replace with no path"""
|
||||
req = {
|
||||
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
|
||||
"Operations": [
|
||||
{"op": "Replace", "value": {"externalId": "aqwer"}},
|
||||
],
|
||||
}
|
||||
user = create_test_user()
|
||||
source = SCIMSource.objects.create(slug=generate_id())
|
||||
connection = SCIMSourceUser.objects.create(
|
||||
user=user,
|
||||
id=generate_id(),
|
||||
source=source,
|
||||
attributes={
|
||||
"meta": {"resourceType": "User"},
|
||||
"active": True,
|
||||
"schemas": [
|
||||
"urn:ietf:params:scim:schemas:core:2.0:User",
|
||||
SCIM_URN_USER_ENTERPRISE,
|
||||
],
|
||||
"userName": "test@t.goauthentik.io",
|
||||
"externalId": "test",
|
||||
"displayName": "Test MS",
|
||||
},
|
||||
)
|
||||
updated = SCIMPatchProcessor().apply_patches(connection.attributes, req["Operations"])
|
||||
self.assertEqual(
|
||||
updated,
|
||||
{
|
||||
"meta": {"resourceType": "User"},
|
||||
"active": True,
|
||||
"schemas": [
|
||||
"urn:ietf:params:scim:schemas:core:2.0:User",
|
||||
SCIM_URN_USER_ENTERPRISE,
|
||||
],
|
||||
"userName": "test@t.goauthentik.io",
|
||||
"externalId": "aqwer",
|
||||
"displayName": "Test MS",
|
||||
},
|
||||
)
|
||||
|
||||
def test_remove(self):
|
||||
req = {
|
||||
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
|
||||
"Operations": [
|
||||
{"op": "Remove", "path": "name", "value": {"givenName": "aqwer"}},
|
||||
],
|
||||
}
|
||||
user = create_test_user()
|
||||
source = SCIMSource.objects.create(slug=generate_id())
|
||||
connection = SCIMSourceUser.objects.create(
|
||||
user=user,
|
||||
id=generate_id(),
|
||||
source=source,
|
||||
attributes={
|
||||
"meta": {"resourceType": "User"},
|
||||
"active": True,
|
||||
"name": {
|
||||
"givenName": "aqwer",
|
||||
},
|
||||
"schemas": [
|
||||
"urn:ietf:params:scim:schemas:core:2.0:User",
|
||||
SCIM_URN_USER_ENTERPRISE,
|
||||
],
|
||||
"userName": "test@t.goauthentik.io",
|
||||
"externalId": "test",
|
||||
"displayName": "Test MS",
|
||||
},
|
||||
)
|
||||
updated = SCIMPatchProcessor().apply_patches(connection.attributes, req["Operations"])
|
||||
self.assertEqual(
|
||||
updated,
|
||||
{
|
||||
"meta": {"resourceType": "User"},
|
||||
"active": True,
|
||||
"schemas": [
|
||||
"urn:ietf:params:scim:schemas:core:2.0:User",
|
||||
SCIM_URN_USER_ENTERPRISE,
|
||||
],
|
||||
"userName": "test@t.goauthentik.io",
|
||||
"externalId": "test",
|
||||
"displayName": "Test MS",
|
||||
},
|
||||
)
|
||||
|
||||
def test_large(self):
|
||||
"""Large amount of patch operations"""
|
||||
req = {
|
||||
"Operations": [
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "emails[primary eq true].value",
|
||||
"value": "dandre_kling@wintheiser.info",
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "phoneNumbers[primary eq true].value",
|
||||
"value": "72-634-1548",
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "phoneNumbers[primary eq true].display",
|
||||
"value": "72-634-1548",
|
||||
},
|
||||
{"op": "replace", "path": "ims[primary eq true].value", "value": "GXSGJKWGHVVS"},
|
||||
{"op": "replace", "path": "ims[primary eq true].display", "value": "IMCHDKUQIPYB"},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "photos[primary eq true].display",
|
||||
"value": "TWAWLHHSUNIV",
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "addresses[primary eq true].formatted",
|
||||
"value": "TMINZQAJQDCL",
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "addresses[primary eq true].streetAddress",
|
||||
"value": "081 Wisoky Key",
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "addresses[primary eq true].locality",
|
||||
"value": "DPFASBZRPMDP",
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "addresses[primary eq true].region",
|
||||
"value": "WHSTJSPIPTCF",
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "addresses[primary eq true].postalCode",
|
||||
"value": "ko28 1qa",
|
||||
},
|
||||
{"op": "replace", "path": "addresses[primary eq true].country", "value": "Taiwan"},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "entitlements[primary eq true].value",
|
||||
"value": "NGBJMUYZVVBX",
|
||||
},
|
||||
{"op": "replace", "path": "roles[primary eq true].value", "value": "XEELVFMMWCVM"},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "x509Certificates[primary eq true].value",
|
||||
"value": "UYISMEDOXUZY",
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"value": {
|
||||
"externalId": "7faaefb0-0774-4d8e-8f6d-863c361bc72c",
|
||||
"name.formatted": "Dell",
|
||||
"name.familyName": "Gay",
|
||||
"name.givenName": "Kyler",
|
||||
"name.middleName": "Hannah",
|
||||
"name.honorificPrefix": "Cassie",
|
||||
"name.honorificSuffix": "Yolanda",
|
||||
"displayName": "DPRLIJSFQMTL",
|
||||
"nickName": "BKSPMIRMFBTI",
|
||||
"title": "NBZCOAXVYJUY",
|
||||
"userType": "ZGJMYZRUORZE",
|
||||
"preferredLanguage": "as-IN",
|
||||
"locale": "JLOJHLPWZODG",
|
||||
"timezone": "America/Argentina/Rio_Gallegos",
|
||||
"active": True,
|
||||
f"{SCIM_URN_USER_ENTERPRISE}:employeeNumber": "PDFWRRZBQOHB",
|
||||
f"{SCIM_URN_USER_ENTERPRISE}:costCenter": "HACMZWSEDOTQ",
|
||||
f"{SCIM_URN_USER_ENTERPRISE}:organization": "LXVHJUOLNCLS",
|
||||
f"{SCIM_URN_USER_ENTERPRISE}:division": "JASVTPKPBPMG",
|
||||
f"{SCIM_URN_USER_ENTERPRISE}:department": "GMSBFLMNPABY",
|
||||
},
|
||||
},
|
||||
],
|
||||
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
|
||||
}
|
||||
user = create_test_user()
|
||||
source = SCIMSource.objects.create(slug=generate_id())
|
||||
connection = SCIMSourceUser.objects.create(
|
||||
user=user,
|
||||
id=generate_id(),
|
||||
source=source,
|
||||
attributes={
|
||||
"active": True,
|
||||
"addresses": [
|
||||
{
|
||||
"primary": "true",
|
||||
"formatted": "BLJMCNXHYLZK",
|
||||
"streetAddress": "7801 Jacobs Fork",
|
||||
"locality": "HZJBJWFAKXDD",
|
||||
"region": "GJXCXPMIIKWK",
|
||||
"postalCode": "pv82 8ua",
|
||||
"country": "India",
|
||||
}
|
||||
],
|
||||
"displayName": "KEFXCHKHAFOT",
|
||||
"emails": [{"primary": "true", "value": "scot@zemlak.uk"}],
|
||||
"entitlements": [{"primary": "true", "value": "FTTUXWYDAAQC"}],
|
||||
"externalId": "448d2786-7bf6-4e03-a4ef-64cbaf162fa7",
|
||||
"ims": [{"primary": "true", "value": "IGWZUUMCMKXS", "display": "PJVGMMKYYHRU"}],
|
||||
"locale": "PJNYJHWJILTI",
|
||||
"name": {
|
||||
"formatted": "Ladarius",
|
||||
"familyName": "Manley",
|
||||
"givenName": "Mazie",
|
||||
"middleName": "Vernon",
|
||||
"honorificPrefix": "Melyssa",
|
||||
"honorificSuffix": "Demarcus",
|
||||
},
|
||||
"nickName": "HTPKOXMWZKHL",
|
||||
"phoneNumbers": [
|
||||
{"primary": "true", "value": "50-608-7660", "display": "50-608-7660"}
|
||||
],
|
||||
"photos": [{"primary": "true", "display": "KCONLNLSYTBP"}],
|
||||
"preferredLanguage": "wae",
|
||||
"profileUrl": "HPSEOIPXMGOH",
|
||||
"roles": [{"primary": "true", "value": "TLGYITOIZGKP"}],
|
||||
"schemas": [
|
||||
"urn:ietf:params:scim:schemas:core:2.0:User",
|
||||
SCIM_URN_USER_ENTERPRISE,
|
||||
],
|
||||
"timezone": "America/Indiana/Petersburg",
|
||||
"title": "EJWFXLHNHMCD",
|
||||
SCIM_URN_USER_ENTERPRISE: {
|
||||
"employeeNumber": "XHDMEJUURJNR",
|
||||
"costCenter": "RXUYBXOTRCZH",
|
||||
"organization": "CEXWXMBRYAHN",
|
||||
"division": "XMPFMDCLRKCW",
|
||||
"department": "BKMNJVMCJUYS",
|
||||
"manager": "PNGSGXLYVWMV",
|
||||
},
|
||||
"userName": "imelda.auer@kshlerin.co.uk",
|
||||
"userType": "PZFXORVSUAPU",
|
||||
"x509Certificates": [{"primary": "true", "value": "KOVKWGIVVEHH"}],
|
||||
},
|
||||
)
|
||||
updated = SCIMPatchProcessor().apply_patches(connection.attributes, req["Operations"])
|
||||
self.assertEqual(
|
||||
updated,
|
||||
{
|
||||
"active": True,
|
||||
"addresses": [
|
||||
{
|
||||
"primary": "true",
|
||||
"formatted": "BLJMCNXHYLZK",
|
||||
"streetAddress": "7801 Jacobs Fork",
|
||||
"locality": "HZJBJWFAKXDD",
|
||||
"region": "GJXCXPMIIKWK",
|
||||
"postalCode": "pv82 8ua",
|
||||
"country": "India",
|
||||
}
|
||||
],
|
||||
"displayName": "DPRLIJSFQMTL",
|
||||
"emails": [{"primary": "true", "value": "scot@zemlak.uk"}],
|
||||
"entitlements": [{"primary": "true", "value": "FTTUXWYDAAQC"}],
|
||||
"externalId": "7faaefb0-0774-4d8e-8f6d-863c361bc72c",
|
||||
"ims": [{"primary": "true", "value": "IGWZUUMCMKXS", "display": "PJVGMMKYYHRU"}],
|
||||
"locale": "JLOJHLPWZODG",
|
||||
"name": {
|
||||
"formatted": "Dell",
|
||||
"familyName": "Gay",
|
||||
"givenName": "Kyler",
|
||||
"middleName": "Hannah",
|
||||
"honorificPrefix": "Cassie",
|
||||
"honorificSuffix": "Yolanda",
|
||||
},
|
||||
"nickName": "BKSPMIRMFBTI",
|
||||
"phoneNumbers": [
|
||||
{"primary": "true", "value": "50-608-7660", "display": "50-608-7660"}
|
||||
],
|
||||
"photos": [{"primary": "true", "display": "KCONLNLSYTBP"}],
|
||||
"preferredLanguage": "as-IN",
|
||||
"profileUrl": "HPSEOIPXMGOH",
|
||||
"roles": [{"primary": "true", "value": "TLGYITOIZGKP"}],
|
||||
"schemas": [
|
||||
"urn:ietf:params:scim:schemas:core:2.0:User",
|
||||
SCIM_URN_USER_ENTERPRISE,
|
||||
],
|
||||
"timezone": "America/Argentina/Rio_Gallegos",
|
||||
"title": "NBZCOAXVYJUY",
|
||||
SCIM_URN_USER_ENTERPRISE: {
|
||||
"employeeNumber": "PDFWRRZBQOHB",
|
||||
"costCenter": "HACMZWSEDOTQ",
|
||||
"organization": "LXVHJUOLNCLS",
|
||||
"division": "JASVTPKPBPMG",
|
||||
"department": "GMSBFLMNPABY",
|
||||
"manager": "PNGSGXLYVWMV",
|
||||
},
|
||||
"userName": "imelda.auer@kshlerin.co.uk",
|
||||
"userType": "ZGJMYZRUORZE",
|
||||
"x509Certificates": [{"primary": "true", "value": "KOVKWGIVVEHH"}],
|
||||
},
|
||||
)
|
||||
|
||||
def test_schema_urn_manager(self):
|
||||
req = {
|
||||
"schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
|
||||
"Operations": [
|
||||
{
|
||||
"op": "Add",
|
||||
"value": {
|
||||
"urn:ietf:params:scim:schemas:extension:enterprise:2.0:User:manager": "foo"
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
user = create_test_user()
|
||||
source = SCIMSource.objects.create(slug=generate_id())
|
||||
connection = SCIMSourceUser.objects.create(
|
||||
user=user,
|
||||
id=generate_id(),
|
||||
source=source,
|
||||
attributes={
|
||||
"meta": {"resourceType": "User"},
|
||||
"active": True,
|
||||
"schemas": [
|
||||
"urn:ietf:params:scim:schemas:core:2.0:User",
|
||||
SCIM_URN_USER_ENTERPRISE,
|
||||
],
|
||||
"userName": "test@t.goauthentik.io",
|
||||
"externalId": "test",
|
||||
"displayName": "Test MS",
|
||||
},
|
||||
)
|
||||
updated = SCIMPatchProcessor().apply_patches(connection.attributes, req["Operations"])
|
||||
self.assertEqual(
|
||||
updated,
|
||||
{
|
||||
"meta": {"resourceType": "User"},
|
||||
"active": True,
|
||||
"schemas": [
|
||||
"urn:ietf:params:scim:schemas:core:2.0:User",
|
||||
SCIM_URN_USER_ENTERPRISE,
|
||||
],
|
||||
"userName": "test@t.goauthentik.io",
|
||||
"externalId": "test",
|
||||
"displayName": "Test MS",
|
||||
"urn:ietf:params:scim:schemas:extension:enterprise:2.0:User": {
|
||||
"manager": {"value": "foo"}
|
||||
},
|
||||
},
|
||||
)
|
||||
@@ -1,6 +1,7 @@
|
||||
"""SCIM Utils"""
|
||||
|
||||
from typing import Any
|
||||
from uuid import UUID
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.paginator import Page, Paginator
|
||||
@@ -21,6 +22,7 @@ from authentik.core.sources.mapper import SourceMapper
|
||||
from authentik.lib.sync.mapper import PropertyMappingManager
|
||||
from authentik.sources.scim.models import SCIMSource
|
||||
from authentik.sources.scim.views.v2.auth import SCIMTokenAuth
|
||||
from authentik.sources.scim.views.v2.exceptions import SCIMNotFoundError
|
||||
|
||||
SCIM_CONTENT_TYPE = "application/scim+json"
|
||||
|
||||
@@ -54,6 +56,13 @@ class SCIMView(APIView):
|
||||
def get_authenticators(self):
|
||||
return [SCIMTokenAuth(self)]
|
||||
|
||||
def remove_excluded_attributes(self, data: dict):
|
||||
"""Remove attributes specified in excludedAttributes"""
|
||||
excluded: str = self.request.query_params.get("excludedAttributes", "")
|
||||
for key in excluded.split(","):
|
||||
data.pop(key.strip(), None)
|
||||
return data
|
||||
|
||||
def filter_parse(self, request: Request):
|
||||
"""Parse the path of a Patch Operation"""
|
||||
path = request.query_params.get("filter")
|
||||
@@ -103,6 +112,12 @@ class SCIMObjectView(SCIMView):
|
||||
# a source attribute before
|
||||
self.mapper = SourceMapper(self.source)
|
||||
self.manager = self.mapper.get_manager(self.model, ["data"])
|
||||
for key, value in kwargs.items():
|
||||
if key.endswith("_id"):
|
||||
try:
|
||||
UUID(value)
|
||||
except ValueError:
|
||||
raise SCIMNotFoundError("Invalid ID") from None
|
||||
|
||||
def build_object_properties(self, data: dict[str, Any]) -> dict[str, Any | dict[str, Any]]:
|
||||
return self.mapper.build_object_properties(
|
||||
|
||||
@@ -17,6 +17,7 @@ from authentik.core.models import Group, User
|
||||
from authentik.providers.scim.clients.schema import SCIM_GROUP_SCHEMA, PatchOp, PatchOperation
|
||||
from authentik.providers.scim.clients.schema import Group as SCIMGroupModel
|
||||
from authentik.sources.scim.models import SCIMSourceGroup
|
||||
from authentik.sources.scim.patch.processor import SCIMPatchProcessor
|
||||
from authentik.sources.scim.views.v2.base import SCIMObjectView
|
||||
from authentik.sources.scim.views.v2.exceptions import (
|
||||
SCIMConflictError,
|
||||
@@ -35,11 +36,12 @@ class GroupsView(SCIMObjectView):
|
||||
payload = SCIMGroupModel(
|
||||
schemas=[SCIM_GROUP_SCHEMA],
|
||||
id=str(scim_group.group.pk),
|
||||
externalId=scim_group.id,
|
||||
externalId=scim_group.external_id,
|
||||
displayName=scim_group.group.name,
|
||||
members=[],
|
||||
meta={
|
||||
"resourceType": "Group",
|
||||
"lastModified": scim_group.last_update,
|
||||
"location": self.request.build_absolute_uri(
|
||||
reverse(
|
||||
"authentik_sources_scim:v2-groups",
|
||||
@@ -54,7 +56,11 @@ class GroupsView(SCIMObjectView):
|
||||
for member in scim_group.group.users.order_by("pk"):
|
||||
member: User
|
||||
payload.members.append(GroupMember(value=str(member.uuid)))
|
||||
return payload.model_dump(mode="json", exclude_unset=True)
|
||||
final_payload = payload.model_dump(mode="json", exclude_unset=True)
|
||||
final_payload.update(scim_group.attributes)
|
||||
return self.remove_excluded_attributes(
|
||||
SCIMGroupModel.model_validate(final_payload).model_dump(mode="json", exclude_unset=True)
|
||||
)
|
||||
|
||||
def get(self, request: Request, group_id: str | None = None, **kwargs) -> Response:
|
||||
"""List Group handler"""
|
||||
@@ -81,7 +87,7 @@ class GroupsView(SCIMObjectView):
|
||||
)
|
||||
|
||||
@atomic
|
||||
def update_group(self, connection: SCIMSourceGroup | None, data: QueryDict):
|
||||
def update_group(self, connection: SCIMSourceGroup | None, data: QueryDict, apply_members=True):
|
||||
"""Partial update a group"""
|
||||
properties = self.build_object_properties(data)
|
||||
|
||||
@@ -94,7 +100,7 @@ class GroupsView(SCIMObjectView):
|
||||
|
||||
group.update_attributes(properties)
|
||||
|
||||
if "members" in data:
|
||||
if "members" in data and apply_members:
|
||||
query = Q()
|
||||
for _member in data.get("members", []):
|
||||
try:
|
||||
@@ -105,14 +111,18 @@ class GroupsView(SCIMObjectView):
|
||||
query |= Q(uuid=member.value)
|
||||
if query:
|
||||
group.users.set(User.objects.filter(query))
|
||||
data["members"] = self._convert_members(group)
|
||||
if not connection:
|
||||
connection, _ = SCIMSourceGroup.objects.get_or_create(
|
||||
connection, _ = SCIMSourceGroup.objects.update_or_create(
|
||||
external_id=data.get("externalId") or str(uuid4()),
|
||||
source=self.source,
|
||||
group=group,
|
||||
attributes=data,
|
||||
id=data.get("externalId") or str(uuid4()),
|
||||
defaults={
|
||||
"attributes": data,
|
||||
},
|
||||
)
|
||||
else:
|
||||
connection.external_id = data.get("externalId", connection.external_id)
|
||||
connection.attributes = data
|
||||
connection.save()
|
||||
return connection
|
||||
@@ -139,6 +149,12 @@ class GroupsView(SCIMObjectView):
|
||||
connection = self.update_group(connection, request.data)
|
||||
return Response(self.group_to_scim(connection), status=200)
|
||||
|
||||
def _convert_members(self, group: Group):
|
||||
users = []
|
||||
for user in group.users.all().order_by("uuid"):
|
||||
users.append({"value": str(user.uuid)})
|
||||
return sorted(users, key=lambda u: u["value"])
|
||||
|
||||
@atomic
|
||||
def patch(self, request: Request, group_id: str, **kwargs) -> Response:
|
||||
"""Patch group handler"""
|
||||
@@ -171,6 +187,13 @@ class GroupsView(SCIMObjectView):
|
||||
query |= Q(uuid=member["value"])
|
||||
if query:
|
||||
connection.group.users.remove(*User.objects.filter(query))
|
||||
patcher = SCIMPatchProcessor()
|
||||
patched_data = patcher.apply_patches(
|
||||
connection.attributes, request.data.get("Operations", [])
|
||||
)
|
||||
patched_data["members"] = self._convert_members(connection.group)
|
||||
if patched_data != connection.attributes:
|
||||
self.update_group(connection, patched_data, apply_members=False)
|
||||
return Response(self.group_to_scim(connection), status=200)
|
||||
|
||||
@atomic
|
||||
|
||||
@@ -33,9 +33,7 @@ class ServiceProviderConfigView(SCIMView):
|
||||
{
|
||||
"schemas": ["urn:ietf:params:scim:schemas:core:2.0:ServiceProviderConfig"],
|
||||
"authenticationSchemes": auth_schemas,
|
||||
# We only support patch for groups currently, so don't broadly advertise it.
|
||||
# Implementations that require Group patch will use it regardless of this flag.
|
||||
"patch": {"supported": False},
|
||||
"patch": {"supported": True},
|
||||
"bulk": {"supported": False, "maxOperations": 0, "maxPayloadSize": 0},
|
||||
"filter": {
|
||||
"supported": True,
|
||||
|
||||
@@ -15,6 +15,7 @@ from authentik.core.models import User
|
||||
from authentik.providers.scim.clients.schema import SCIM_USER_SCHEMA
|
||||
from authentik.providers.scim.clients.schema import User as SCIMUserModel
|
||||
from authentik.sources.scim.models import SCIMSourceUser
|
||||
from authentik.sources.scim.patch.processor import SCIMPatchProcessor
|
||||
from authentik.sources.scim.views.v2.base import SCIMObjectView
|
||||
from authentik.sources.scim.views.v2.exceptions import SCIMConflictError, SCIMNotFoundError
|
||||
|
||||
@@ -29,7 +30,7 @@ class UsersView(SCIMObjectView):
|
||||
payload = SCIMUserModel(
|
||||
schemas=[SCIM_USER_SCHEMA],
|
||||
id=str(scim_user.user.uuid),
|
||||
externalId=scim_user.id,
|
||||
externalId=scim_user.external_id,
|
||||
userName=scim_user.user.username,
|
||||
name=Name(
|
||||
formatted=scim_user.user.name,
|
||||
@@ -44,8 +45,7 @@ class UsersView(SCIMObjectView):
|
||||
meta={
|
||||
"resourceType": "User",
|
||||
"created": scim_user.user.date_joined,
|
||||
# TODO: use events to find last edit?
|
||||
"lastModified": scim_user.user.date_joined,
|
||||
"lastModified": scim_user.last_update,
|
||||
"location": self.request.build_absolute_uri(
|
||||
reverse(
|
||||
"authentik_sources_scim:v2-users",
|
||||
@@ -59,7 +59,9 @@ class UsersView(SCIMObjectView):
|
||||
)
|
||||
final_payload = payload.model_dump(mode="json", exclude_unset=True)
|
||||
final_payload.update(scim_user.attributes)
|
||||
return final_payload
|
||||
return self.remove_excluded_attributes(
|
||||
SCIMUserModel.model_validate(final_payload).model_dump(mode="json", exclude_unset=True)
|
||||
)
|
||||
|
||||
def get(self, request: Request, user_id: str | None = None, **kwargs) -> Response:
|
||||
"""List User handler"""
|
||||
@@ -101,13 +103,16 @@ class UsersView(SCIMObjectView):
|
||||
user.update_attributes(properties)
|
||||
|
||||
if not connection:
|
||||
connection, _ = SCIMSourceUser.objects.get_or_create(
|
||||
connection, _ = SCIMSourceUser.objects.update_or_create(
|
||||
external_id=data.get("externalId") or str(uuid4()),
|
||||
source=self.source,
|
||||
user=user,
|
||||
attributes=data,
|
||||
id=data.get("externalId") or str(uuid4()),
|
||||
defaults={
|
||||
"attributes": data,
|
||||
},
|
||||
)
|
||||
else:
|
||||
connection.external_id = data.get("externalId", connection.external_id)
|
||||
connection.attributes = data
|
||||
connection.save()
|
||||
return connection
|
||||
@@ -127,6 +132,18 @@ class UsersView(SCIMObjectView):
|
||||
connection = self.update_user(None, request.data)
|
||||
return Response(self.user_to_scim(connection), status=201)
|
||||
|
||||
def patch(self, request: Request, user_id: str, **kwargs):
|
||||
connection = SCIMSourceUser.objects.filter(source=self.source, user__uuid=user_id).first()
|
||||
if not connection:
|
||||
raise SCIMNotFoundError("User not found.")
|
||||
patcher = SCIMPatchProcessor()
|
||||
patched_data = patcher.apply_patches(
|
||||
connection.attributes, request.data.get("Operations", [])
|
||||
)
|
||||
if patched_data != connection.attributes:
|
||||
self.update_user(connection, patched_data)
|
||||
return Response(self.user_to_scim(connection), status=200)
|
||||
|
||||
def put(self, request: Request, user_id: str, **kwargs) -> Response:
|
||||
"""Update user handler"""
|
||||
connection = SCIMSourceUser.objects.filter(source=self.source, user__uuid=user_id).first()
|
||||
|
||||
@@ -13,7 +13,6 @@ from authentik.flows.exceptions import StageInvalidException
|
||||
from authentik.flows.models import ConfigurableStage, FriendlyNamedStage, Stage
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.models import SerializerModel
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
from authentik.lib.utils.time import timedelta_string_validator
|
||||
from authentik.stages.authenticator.models import SideChannelDevice
|
||||
from authentik.stages.email.utils import TemplateEmailMessage
|
||||
@@ -160,9 +159,8 @@ class EmailDevice(SerializerModel, SideChannelDevice):
|
||||
Event.new(
|
||||
EventAction.CONFIGURATION_ERROR,
|
||||
message=_("Exception occurred while rendering E-mail template"),
|
||||
error=exception_to_string(exc),
|
||||
template=stage.template,
|
||||
).from_http(self.request)
|
||||
).with_exception(exc).from_http(self.request)
|
||||
raise StageInvalidException from exc
|
||||
|
||||
def __str__(self):
|
||||
|
||||
@@ -17,7 +17,6 @@ from authentik.flows.challenge import (
|
||||
from authentik.flows.exceptions import StageInvalidException
|
||||
from authentik.flows.stage import ChallengeStageView
|
||||
from authentik.lib.utils.email import mask_email
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
from authentik.lib.utils.time import timedelta_from_string
|
||||
from authentik.stages.authenticator_email.models import (
|
||||
AuthenticatorEmailStage,
|
||||
@@ -100,9 +99,8 @@ class AuthenticatorEmailStageView(ChallengeStageView):
|
||||
Event.new(
|
||||
EventAction.CONFIGURATION_ERROR,
|
||||
message=_("Exception occurred while rendering E-mail template"),
|
||||
error=exception_to_string(exc),
|
||||
template=stage.template,
|
||||
).from_http(self.request)
|
||||
).with_exception(exc).from_http(self.request)
|
||||
raise StageInvalidException from exc
|
||||
|
||||
def _has_email(self) -> str | None:
|
||||
|
||||
@@ -4,7 +4,7 @@ from hashlib import sha256
|
||||
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.db import models
|
||||
from django.http import HttpResponseBadRequest
|
||||
from django.http import HttpRequest, HttpResponseBadRequest
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.views import View
|
||||
from requests.exceptions import RequestException
|
||||
@@ -19,7 +19,6 @@ from authentik.events.models import Event, EventAction, NotificationWebhookMappi
|
||||
from authentik.events.utils import sanitize_item
|
||||
from authentik.flows.models import ConfigurableStage, FriendlyNamedStage, Stage
|
||||
from authentik.lib.models import SerializerModel
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
from authentik.lib.utils.http import get_http_session
|
||||
from authentik.stages.authenticator.models import SideChannelDevice
|
||||
|
||||
@@ -69,32 +68,44 @@ class AuthenticatorSMSStage(ConfigurableStage, FriendlyNamedStage, Stage):
|
||||
help_text=_("Optionally modify the payload being sent to custom providers."),
|
||||
)
|
||||
|
||||
def send(self, token: str, device: "SMSDevice"):
|
||||
def send(self, request: HttpRequest, token: str, device: "SMSDevice"):
|
||||
"""Send message via selected provider"""
|
||||
if self.provider == SMSProviders.TWILIO:
|
||||
return self.send_twilio(token, device)
|
||||
return self.send_twilio(request, token, device)
|
||||
if self.provider == SMSProviders.GENERIC:
|
||||
return self.send_generic(token, device)
|
||||
return self.send_generic(request, token, device)
|
||||
raise ValueError(f"invalid provider {self.provider}")
|
||||
|
||||
def get_message(self, token: str) -> str:
|
||||
"""Get SMS message"""
|
||||
return _("Use this code to authenticate in authentik: {token}".format_map({"token": token}))
|
||||
|
||||
def send_twilio(self, token: str, device: "SMSDevice"):
|
||||
def send_twilio(self, request: HttpRequest, token: str, device: "SMSDevice"):
|
||||
"""send sms via twilio provider"""
|
||||
client = Client(self.account_sid, self.auth)
|
||||
message_body = str(self.get_message(token))
|
||||
if self.mapping:
|
||||
payload = sanitize_item(
|
||||
self.mapping.evaluate(
|
||||
user=device.user,
|
||||
request=request,
|
||||
device=device,
|
||||
token=token,
|
||||
stage=self,
|
||||
)
|
||||
)
|
||||
message_body = payload.get("message", message_body)
|
||||
|
||||
try:
|
||||
message = client.messages.create(
|
||||
to=device.phone_number, from_=self.from_number, body=str(self.get_message(token))
|
||||
to=device.phone_number, from_=self.from_number, body=message_body
|
||||
)
|
||||
LOGGER.debug("Sent SMS", to=device, message=message.sid)
|
||||
except TwilioRestException as exc:
|
||||
LOGGER.warning("Error sending token by Twilio SMS", exc=exc, msg=exc.msg)
|
||||
raise ValidationError(exc.msg) from None
|
||||
|
||||
def send_generic(self, token: str, device: "SMSDevice"):
|
||||
def send_generic(self, request: HttpRequest, token: str, device: "SMSDevice"):
|
||||
"""Send SMS via outside API"""
|
||||
payload = {
|
||||
"From": self.from_number,
|
||||
@@ -107,7 +118,7 @@ class AuthenticatorSMSStage(ConfigurableStage, FriendlyNamedStage, Stage):
|
||||
payload = sanitize_item(
|
||||
self.mapping.evaluate(
|
||||
user=device.user,
|
||||
request=None,
|
||||
request=request,
|
||||
device=device,
|
||||
token=token,
|
||||
stage=self,
|
||||
@@ -142,10 +153,9 @@ class AuthenticatorSMSStage(ConfigurableStage, FriendlyNamedStage, Stage):
|
||||
Event.new(
|
||||
EventAction.CONFIGURATION_ERROR,
|
||||
message="Error sending SMS",
|
||||
exc=exception_to_string(exc),
|
||||
status_code=response.status_code,
|
||||
body=response.text,
|
||||
).set_user(device.user).save()
|
||||
).with_exception(exc).set_user(device.user).save()
|
||||
if response.status_code >= HttpResponseBadRequest.status_code:
|
||||
raise ValidationError(response.text) from None
|
||||
raise
|
||||
|
||||
@@ -71,7 +71,7 @@ class AuthenticatorSMSStageView(ChallengeStageView):
|
||||
raise ValidationError(_("Invalid phone number"))
|
||||
# No code yet, but we have a phone number, so send a verification message
|
||||
device: SMSDevice = self.request.session[SESSION_KEY_SMS_DEVICE]
|
||||
stage.send(device.token, device)
|
||||
stage.send(self.request, device.token, device)
|
||||
|
||||
def _has_phone_number(self) -> str | None:
|
||||
context = self.executor.plan.context
|
||||
|
||||
@@ -124,7 +124,7 @@ def select_challenge(request: HttpRequest, device: Device):
|
||||
def select_challenge_sms(request: HttpRequest, device: SMSDevice):
|
||||
"""Send SMS"""
|
||||
device.generate_token()
|
||||
device.stage.send(device.token, device)
|
||||
device.stage.send(request, device.token, device)
|
||||
|
||||
|
||||
def select_challenge_email(request: HttpRequest, device: EmailDevice):
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -21,7 +21,6 @@ from authentik.flows.models import FlowDesignation, FlowToken
|
||||
from authentik.flows.planner import PLAN_CONTEXT_IS_RESTORED, PLAN_CONTEXT_PENDING_USER
|
||||
from authentik.flows.stage import ChallengeStageView
|
||||
from authentik.flows.views.executor import QS_KEY_TOKEN, QS_QUERY
|
||||
from authentik.lib.utils.errors import exception_to_string
|
||||
from authentik.lib.utils.time import timedelta_from_string
|
||||
from authentik.stages.email.flow import pickle_flow_token_for_email
|
||||
from authentik.stages.email.models import EmailStage
|
||||
@@ -129,9 +128,8 @@ class EmailStageView(ChallengeStageView):
|
||||
Event.new(
|
||||
EventAction.CONFIGURATION_ERROR,
|
||||
message=_("Exception occurred while rendering E-mail template"),
|
||||
error=exception_to_string(exc),
|
||||
template=current_stage.template,
|
||||
).from_http(self.request)
|
||||
).with_exception(exc).from_http(self.request)
|
||||
raise StageInvalidException from exc
|
||||
|
||||
def get(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
|
||||
@@ -145,7 +143,7 @@ class EmailStageView(ChallengeStageView):
|
||||
messages.success(request, _("Successfully verified Email."))
|
||||
if self.executor.current_stage.activate_user_on_success:
|
||||
user.is_active = True
|
||||
user.save()
|
||||
user.save(update_fields=["is_active"])
|
||||
return self.executor.stage_ok()
|
||||
if PLAN_CONTEXT_PENDING_USER not in self.executor.plan.context:
|
||||
self.logger.debug("No pending user")
|
||||
|
||||
@@ -89,6 +89,29 @@ class TestPasswordStage(FlowTestCase):
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertStageRedirects(response, reverse("authentik_core:root-redirect"))
|
||||
|
||||
def test_valid_password_inactive(self):
|
||||
"""Test with a valid pending user and valid password"""
|
||||
self.user.is_active = False
|
||||
self.user.save()
|
||||
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
|
||||
plan.context[PLAN_CONTEXT_PENDING_USER] = self.user
|
||||
session = self.client.session
|
||||
session[SESSION_KEY_PLAN] = plan
|
||||
session.save()
|
||||
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug}),
|
||||
# Form data
|
||||
{"password": self.user.username},
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertStageResponse(
|
||||
response,
|
||||
self.flow,
|
||||
response_errors={"password": [{"string": "Invalid password", "code": "invalid"}]},
|
||||
)
|
||||
|
||||
def test_invalid_password(self):
|
||||
"""Test with a valid pending user and invalid password"""
|
||||
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
|
||||
|
||||
@@ -191,9 +191,10 @@ class ListPolicyEngine(PolicyEngine):
|
||||
self.use_cache = False
|
||||
|
||||
def bindings(self):
|
||||
for policy in self.__list:
|
||||
for idx, policy in enumerate(self.__list):
|
||||
yield PolicyBinding(
|
||||
policy=policy,
|
||||
order=idx,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -214,7 +214,7 @@ class TestPromptStage(FlowTestCase):
|
||||
"""Test challenge_response validation"""
|
||||
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
|
||||
expr = "False"
|
||||
expr_policy = ExpressionPolicy.objects.create(name="validate-form", expression=expr)
|
||||
expr_policy = ExpressionPolicy.objects.create(name=generate_id(), expression=expr)
|
||||
self.stage.validation_policies.set([expr_policy])
|
||||
self.stage.save()
|
||||
challenge_response = PromptChallengeResponse(
|
||||
@@ -222,6 +222,18 @@ class TestPromptStage(FlowTestCase):
|
||||
)
|
||||
self.assertEqual(challenge_response.is_valid(), False)
|
||||
|
||||
def test_invalid_challenge_multiple(self):
|
||||
"""Test challenge_response validation (multiple policies)"""
|
||||
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
|
||||
expr_policy1 = ExpressionPolicy.objects.create(name=generate_id(), expression="False")
|
||||
expr_policy2 = ExpressionPolicy.objects.create(name=generate_id(), expression="False")
|
||||
self.stage.validation_policies.set([expr_policy1, expr_policy2])
|
||||
self.stage.save()
|
||||
challenge_response = PromptChallengeResponse(
|
||||
None, stage_instance=self.stage, plan=plan, data=self.prompt_data, stage=self.stage_view
|
||||
)
|
||||
self.assertEqual(challenge_response.is_valid(), False)
|
||||
|
||||
def test_valid_challenge_request(self):
|
||||
"""Test a request with valid challenge_response data"""
|
||||
plan = FlowPlan(flow_pk=self.flow.pk.hex, bindings=[self.binding], markers=[StageMarker()])
|
||||
@@ -234,7 +246,7 @@ class TestPromptStage(FlowTestCase):
|
||||
"return request.context['prompt_data']['password_prompt'] "
|
||||
"== request.context['prompt_data']['password2_prompt']"
|
||||
)
|
||||
expr_policy = ExpressionPolicy.objects.create(name="validate-form", expression=expr)
|
||||
expr_policy = ExpressionPolicy.objects.create(name=generate_id(), expression=expr)
|
||||
self.stage.validation_policies.set([expr_policy])
|
||||
self.stage.save()
|
||||
challenge_response = PromptChallengeResponse(
|
||||
|
||||
@@ -153,6 +153,7 @@ class UserLoginStageView(ChallengeStageView):
|
||||
user: User = self.executor.plan.context[PLAN_CONTEXT_PENDING_USER]
|
||||
if not user.is_active:
|
||||
self.logger.warning("User is not active, login will not work.")
|
||||
return self.executor.stage_invalid()
|
||||
delta = self.set_session_duration(bool(remember))
|
||||
self.set_session_ip()
|
||||
# Check if the login request is coming from a known device
|
||||
|
||||
@@ -7,6 +7,7 @@ from django.http import HttpRequest
|
||||
from django.urls import reverse
|
||||
from django.utils.timezone import now
|
||||
|
||||
from authentik.blueprints.tests import apply_blueprint
|
||||
from authentik.core.models import AuthenticatedSession, Session
|
||||
from authentik.core.tests.utils import create_test_flow, create_test_user
|
||||
from authentik.flows.markers import StageMarker
|
||||
@@ -181,6 +182,7 @@ class TestUserLoginStage(FlowTestCase):
|
||||
component="ak-stage-access-denied",
|
||||
)
|
||||
|
||||
@apply_blueprint("default/flow-default-user-settings-flow.yaml")
|
||||
def test_inactive_account(self):
|
||||
"""Test with a valid pending user and backend"""
|
||||
self.user.is_active = False
|
||||
@@ -194,12 +196,29 @@ class TestUserLoginStage(FlowTestCase):
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:flow-executor", kwargs={"flow_slug": self.flow.slug})
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertStageRedirects(response, reverse("authentik_core:root-redirect"))
|
||||
self.assertStageResponse(
|
||||
response, self.flow, component="ak-stage-access-denied", error_message="Unknown error"
|
||||
)
|
||||
|
||||
# Check that API requests get rejected
|
||||
response = self.client.get(reverse("authentik_api:application-list"))
|
||||
self.assertEqual(response.status_code, 403)
|
||||
|
||||
# Check that flow requests requiring a user also get rejected
|
||||
response = self.client.get(
|
||||
reverse(
|
||||
"authentik_api:flow-executor",
|
||||
kwargs={"flow_slug": "default-user-settings-flow"},
|
||||
)
|
||||
)
|
||||
self.assertStageResponse(
|
||||
response,
|
||||
self.flow,
|
||||
component="ak-stage-access-denied",
|
||||
error_message="Flow does not apply to current user.",
|
||||
)
|
||||
|
||||
def test_binding_net_break_log(self):
|
||||
"""Test logout_extra with exception"""
|
||||
# IPs from https://github.com/maxmind/MaxMind-DB/blob/main/source-data/GeoLite2-ASN-Test.json
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"$schema": "http://json-schema.org/draft-07/schema",
|
||||
"$id": "https://goauthentik.io/blueprints/schema.json",
|
||||
"type": "object",
|
||||
"title": "authentik 2025.6.3 Blueprint schema",
|
||||
"title": "authentik 2025.6.4 Blueprint schema",
|
||||
"required": [
|
||||
"version",
|
||||
"entries"
|
||||
@@ -8419,6 +8419,32 @@
|
||||
},
|
||||
"title": "Redirect uris"
|
||||
},
|
||||
"backchannel_logout_uris": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"matching_mode": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"strict",
|
||||
"regex"
|
||||
],
|
||||
"title": "Matching mode"
|
||||
},
|
||||
"url": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"title": "Url"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"matching_mode",
|
||||
"url"
|
||||
]
|
||||
},
|
||||
"title": "Backchannel logout uris"
|
||||
},
|
||||
"sub_mode": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
@@ -10961,6 +10987,7 @@
|
||||
"enum": [
|
||||
"apple",
|
||||
"openidconnect",
|
||||
"entraid",
|
||||
"azuread",
|
||||
"discord",
|
||||
"facebook",
|
||||
|
||||
@@ -31,7 +31,7 @@ services:
|
||||
volumes:
|
||||
- redis:/data
|
||||
server:
|
||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.6.3}
|
||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.6.4}
|
||||
restart: unless-stopped
|
||||
command: server
|
||||
environment:
|
||||
@@ -55,7 +55,7 @@ services:
|
||||
redis:
|
||||
condition: service_healthy
|
||||
worker:
|
||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.6.3}
|
||||
image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:${AUTHENTIK_TAG:-2025.6.4}
|
||||
restart: unless-stopped
|
||||
command: worker
|
||||
environment:
|
||||
|
||||
8
go.mod
8
go.mod
@@ -10,14 +10,14 @@ require (
|
||||
github.com/go-http-utils/etag v0.0.0-20161124023236-513ea8f21eb1
|
||||
github.com/go-ldap/ldap/v3 v3.4.11
|
||||
github.com/go-openapi/runtime v0.28.0
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2
|
||||
github.com/golang-jwt/jwt/v5 v5.2.3
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/gorilla/handlers v1.5.2
|
||||
github.com/gorilla/mux v1.8.1
|
||||
github.com/gorilla/securecookie v1.1.2
|
||||
github.com/gorilla/sessions v1.4.0
|
||||
github.com/gorilla/websocket v1.5.3
|
||||
github.com/grafana/pyroscope-go v1.2.2
|
||||
github.com/grafana/pyroscope-go v1.2.4
|
||||
github.com/jellydator/ttlcache/v3 v3.4.0
|
||||
github.com/mitchellh/mapstructure v1.5.0
|
||||
github.com/nmcclain/asn1-ber v0.0.0-20170104154839-2661553a0484
|
||||
@@ -29,10 +29,10 @@ require (
|
||||
github.com/spf13/cobra v1.9.1
|
||||
github.com/stretchr/testify v1.10.0
|
||||
github.com/wwt/guac v1.3.2
|
||||
goauthentik.io/api/v3 v3.2025063.3
|
||||
goauthentik.io/api/v3 v3.2025064.1
|
||||
golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab
|
||||
golang.org/x/oauth2 v0.30.0
|
||||
golang.org/x/sync v0.15.0
|
||||
golang.org/x/sync v0.16.0
|
||||
gopkg.in/yaml.v2 v2.4.0
|
||||
layeh.com/radius v0.0.0-20210819152912-ad72663a72ab
|
||||
)
|
||||
|
||||
360
go.sum
360
go.sum
@@ -1,42 +1,7 @@
|
||||
beryju.io/ldap v0.1.0 h1:rPjGE3qR1Klbvn9N+iECWdzt/tK87XHgz8W5wZJg9B8=
|
||||
beryju.io/ldap v0.1.0/go.mod h1:sOrYV+ZlDTDu/IvIiEiuAaXzjcpMBE+XXr4V+NJ0pWI=
|
||||
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
|
||||
cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
|
||||
cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
|
||||
cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
|
||||
cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
|
||||
cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
|
||||
cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4=
|
||||
cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
|
||||
cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc=
|
||||
cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk=
|
||||
cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs=
|
||||
cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc=
|
||||
cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY=
|
||||
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
|
||||
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
|
||||
cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
|
||||
cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=
|
||||
cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=
|
||||
cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ=
|
||||
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
|
||||
cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
|
||||
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
|
||||
cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
|
||||
cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA=
|
||||
cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU=
|
||||
cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
|
||||
cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
|
||||
cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
|
||||
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
|
||||
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
|
||||
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
|
||||
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 h1:mFRzDkZVAjdal+s7s0MwaRv9igoPqLRdzOLzw/8Xvq8=
|
||||
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
||||
github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa h1:LHTHcTQiSGT7VVbI0o4wBRNQIgn917usHWOd6VAffYI=
|
||||
github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa/go.mod h1:cEWa1LVoE5KvSD9ONXsZrj0z6KqySlCCNKHlLzbqAt4=
|
||||
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so=
|
||||
@@ -49,14 +14,8 @@ github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
|
||||
github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c=
|
||||
github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
|
||||
github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0=
|
||||
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
|
||||
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
||||
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
|
||||
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
|
||||
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
|
||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
|
||||
github.com/coreos/go-oidc/v3 v3.14.1 h1:9ePWwfdwC4QKRlCXsJGou56adA/owXczOzwKdOumLqk=
|
||||
github.com/coreos/go-oidc/v3 v3.14.1/go.mod h1:HaZ3szPaZ0e4r6ebqvsLWlk2Tn+aejfmrfah6hnSYEU=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||
@@ -65,10 +24,6 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
|
||||
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
|
||||
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
|
||||
github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk=
|
||||
github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
||||
github.com/getsentry/sentry-go v0.34.1 h1:HSjc1C/OsnZttohEPrrqKH42Iud0HuLCXpv8cU1pWcw=
|
||||
@@ -77,9 +32,6 @@ github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 h1:BP4M0CvQ
|
||||
github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0=
|
||||
github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA=
|
||||
github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og=
|
||||
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
|
||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||
github.com/go-http-utils/etag v0.0.0-20161124023236-513ea8f21eb1 h1:zga7zaRE8HCbWjcXMDlfvmQtH0/kMVLo7cQ48dy6kWg=
|
||||
github.com/go-http-utils/etag v0.0.0-20161124023236-513ea8f21eb1/go.mod h1:PumS+5d59wmAGsZo6IfRpVNaJUq+6xjC4Utt/k8GO6Q=
|
||||
github.com/go-http-utils/fresh v0.0.0-20161124030543-7231e26a4b27 h1:O6yi4xa9b2DMosGsXzlMe2E9qXgXCVkRLCoRX+5amxI=
|
||||
@@ -115,60 +67,15 @@ github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+Gr
|
||||
github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ=
|
||||
github.com/go-openapi/validate v0.24.0 h1:LdfDKwNbpB6Vn40xhTdNZAnfLECL81w+VX3BumrGD58=
|
||||
github.com/go-openapi/validate v0.24.0/go.mod h1:iyeX1sEufmv3nPbBdX3ieNviWnOZaJ1+zquzJEf2BAQ=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
|
||||
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
||||
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
||||
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
|
||||
github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
|
||||
github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
|
||||
github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
|
||||
github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4=
|
||||
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
|
||||
github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
|
||||
github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=
|
||||
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
|
||||
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
|
||||
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
|
||||
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
|
||||
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
|
||||
github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
|
||||
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
|
||||
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.3 h1:kkGXqQOBSDDWRhWNXTFpqGSCMyh/PLnqUvMGJPDJDs0=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.3/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
|
||||
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
|
||||
github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
|
||||
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
|
||||
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
|
||||
github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
|
||||
github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
|
||||
github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
|
||||
github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
|
||||
github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
|
||||
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
||||
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
|
||||
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
|
||||
github.com/gorilla/handlers v1.5.2 h1:cLTUSsNkgcwhgRqvCNmdbRWG0A3N4F+M2nWKdScwyEE=
|
||||
github.com/gorilla/handlers v1.5.2/go.mod h1:dX+xVpaxdSw+q0Qek8SSsl3dfMk3jNddUkMzo0GtH0w=
|
||||
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
||||
@@ -180,15 +87,12 @@ github.com/gorilla/sessions v1.4.0/go.mod h1:FLWm50oby91+hl7p/wRxDth9bWSuk0qVL2e
|
||||
github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
|
||||
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/grafana/pyroscope-go v1.2.2 h1:uvKCyZMD724RkaCEMrSTC38Yn7AnFe8S2wiAIYdDPCE=
|
||||
github.com/grafana/pyroscope-go v1.2.2/go.mod h1:zzT9QXQAp2Iz2ZdS216UiV8y9uXJYQiGE1q8v1FyhqU=
|
||||
github.com/grafana/pyroscope-go v1.2.4 h1:B22GMXz+O0nWLatxLuaP7o7L9dvP0clLvIpmeEQQM0Q=
|
||||
github.com/grafana/pyroscope-go v1.2.4/go.mod h1:zzT9QXQAp2Iz2ZdS216UiV8y9uXJYQiGE1q8v1FyhqU=
|
||||
github.com/grafana/pyroscope-go/godeltaprof v0.1.8 h1:iwOtYXeeVSAeYefJNaxDytgjKtUuKQbJqgAIjlnicKg=
|
||||
github.com/grafana/pyroscope-go/godeltaprof v0.1.8/go.mod h1:2+l7K7twW49Ct4wFluZD3tZ6e0SjanjcUUBPVD/UuGU=
|
||||
github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=
|
||||
github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
||||
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
||||
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
||||
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8=
|
||||
@@ -207,17 +111,11 @@ github.com/jellydator/ttlcache/v3 v3.4.0 h1:YS4P125qQS0tNhtL6aeYkheEaB/m8HCqdMMP
|
||||
github.com/jellydator/ttlcache/v3 v3.4.0/go.mod h1:Hw9EgjymziQD3yGsQdf1FqFdpp7YjFMd4Srg5EJlgD4=
|
||||
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
|
||||
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
|
||||
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
|
||||
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
|
||||
@@ -244,7 +142,6 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q=
|
||||
github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0=
|
||||
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E=
|
||||
github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY=
|
||||
github.com/prometheus/common v0.62.0 h1:xasJaQlnWAeyHdUBeGjXmutelfJHWMRr+Fg4QszZ2Io=
|
||||
@@ -253,7 +150,6 @@ github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0leargg
|
||||
github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk=
|
||||
github.com/redis/go-redis/v9 v9.11.0 h1:E3S08Gl/nJNn5vkxd2i78wZxWAPNZgUNTp8WIJUAiIs=
|
||||
github.com/redis/go-redis/v9 v9.11.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw=
|
||||
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
|
||||
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
@@ -272,22 +168,13 @@ github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
|
||||
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/wwt/guac v1.3.2 h1:sH6OFGa/1tBs7ieWBVlZe7t6F5JAOWBry/tqQL/Vup4=
|
||||
github.com/wwt/guac v1.3.2/go.mod h1:eKm+NrnK7A88l4UBEcYNpZQGMpZRryYKoz4D/0/n1C0=
|
||||
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd80=
|
||||
go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c=
|
||||
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
|
||||
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
|
||||
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
||||
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
||||
go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
||||
go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo=
|
||||
go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo=
|
||||
go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI=
|
||||
@@ -298,272 +185,41 @@ go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y
|
||||
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
goauthentik.io/api/v3 v3.2025063.3 h1:Ci+iKWgyioG6QYN3yTZn0SLEnGLC8uLu4FUqMdF5AP8=
|
||||
goauthentik.io/api/v3 v3.2025063.3/go.mod h1:zz+mEZg8rY/7eEjkMGWJ2DnGqk+zqxuybGCGrR2O4Kw=
|
||||
goauthentik.io/api/v3 v3.2025064.1 h1:MM1p8g8bar/r1+gwYJRqMGA2/P7n6bYnN07E3ON1sZI=
|
||||
goauthentik.io/api/v3 v3.2025064.1/go.mod h1:82lqAz4jxzl6Cg0YDbhNtvvTG2rm6605ZhdJFnbbsl8=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20200709230013-948cd5f35899/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8=
|
||||
golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
||||
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
|
||||
golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
|
||||
golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
|
||||
golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
|
||||
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
|
||||
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
|
||||
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
|
||||
golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab h1:628ME69lBm9C6JY2wXhAph/yjN3jezx1z7BIDLUwxjo=
|
||||
golang.org/x/exp v0.0.0-20230210204819-062eb4c674ab/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc=
|
||||
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
|
||||
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
|
||||
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
|
||||
golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
|
||||
golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
|
||||
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
|
||||
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
|
||||
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
|
||||
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
|
||||
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
|
||||
golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
|
||||
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
|
||||
golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
|
||||
golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
|
||||
golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY=
|
||||
golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
|
||||
golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
|
||||
golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
|
||||
golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
|
||||
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
|
||||
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4=
|
||||
golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA=
|
||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
|
||||
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||
golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
|
||||
golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
|
||||
golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
|
||||
golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||
golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||
golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||
golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||
golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
||||
golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
||||
golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
|
||||
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
|
||||
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
|
||||
google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
|
||||
google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
|
||||
google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
|
||||
google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
|
||||
google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
|
||||
google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
|
||||
google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
|
||||
google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
|
||||
google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
|
||||
google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
|
||||
google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
|
||||
google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM=
|
||||
google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc=
|
||||
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
|
||||
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
|
||||
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||
google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
|
||||
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
|
||||
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
|
||||
google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
|
||||
google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
|
||||
google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
|
||||
google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
|
||||
google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
|
||||
google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
|
||||
google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
|
||||
google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA=
|
||||
google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U=
|
||||
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
|
||||
google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=
|
||||
google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
||||
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
|
||||
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
|
||||
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
|
||||
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
|
||||
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
|
||||
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
|
||||
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
|
||||
google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60=
|
||||
google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
|
||||
google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
|
||||
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
|
||||
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
|
||||
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
|
||||
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
|
||||
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
|
||||
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
|
||||
google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
|
||||
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
|
||||
google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
|
||||
google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
|
||||
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
|
||||
google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM=
|
||||
google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
|
||||
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||
layeh.com/radius v0.0.0-20210819152912-ad72663a72ab h1:05KeMI4s7jEdIfHb7QCjUr5X2BRA0gjLZLZEmmjGNc4=
|
||||
layeh.com/radius v0.0.0-20210819152912-ad72663a72ab/go.mod h1:pFWM9De99EY9TPVyHIyA56QmoRViVck/x41WFkUlc9A=
|
||||
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
|
||||
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
|
||||
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
|
||||
|
||||
@@ -33,4 +33,4 @@ func UserAgent() string {
|
||||
return fmt.Sprintf("authentik@%s", FullVersion())
|
||||
}
|
||||
|
||||
const VERSION = "2025.6.3"
|
||||
const VERSION = "2025.6.4"
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user