Mirror of https://github.com/goauthentik/authentik (synced 2026-05-09 00:22:24 +02:00)

Compare commits: version/20 ... version/20 (208 commits)
| SHA1 |
|---|
| 6760f4c5d3 |
| 8710474c11 |
| cf5623526a |
| 6aef323784 |
| 4f58a76a52 |
| a5d1fce1ef |
| a109c9959c |
| 47ec6b220f |
| cbcd6196f5 |
| eaea324844 |
| 970f5d7dfb |
| bce6560989 |
| f6f2f6ceab |
| 6585bdad4d |
| adfab8e322 |
| 644e8e6915 |
| f4848883fe |
| 29e23ce08c |
| 0c95d5bbe3 |
| 15c4de7c5b |
| a4187baa10 |
| ff42054d9d |
| 7b0a6b4282 |
| 8a55050da5 |
| 87d7ebcfdf |
| 6ad4bbefcf |
| 1538e42f3d |
| 73ac3f6336 |
| eb127fd39d |
| 12978bd87d |
| 4d8ba745b0 |
| 90f2a01451 |
| 177ebe06b2 |
| f6a5ddd367 |
| dbec7ead5d |
| a1e2a50037 |
| f06d36e48f |
| dd2ad94971 |
| bfcdc9ea2f |
| b4beb1de9c |
| 22d09744e0 |
| e7d09e820f |
| f47749ab60 |
| b4f7455f21 |
| 3beef73f82 |
| 7ee1fbf267 |
| ac0501fb06 |
| 2b1bfbbb54 |
| e9719cf7d5 |
| e924a37985 |
| 3f9ca19d35 |
| 50e55eea08 |
| 383d3b89f2 |
| 8cc768f973 |
| 03d21be201 |
| 7d8465bdb5 |
| a9b46a4943 |
| d3c052559d |
| b73b6dcdd3 |
| e37bdc6a1d |
| b3f1c4736d |
| 5fd5f3d6ff |
| 5dbcf6c484 |
| 056e2c8571 |
| 0f58a567ce |
| 502e037d04 |
| 9b6fae0749 |
| dc2332a316 |
| c39414f558 |
| aac1acfebd |
| 4d881bb3d2 |
| 852d392158 |
| 76b26ea288 |
| a1f1378814 |
| afc2be6b68 |
| c45985e9d0 |
| 7221ed1ce6 |
| 123fd3dfb8 |
| 59c292ca21 |
| 2b247b60cf |
| 359a3b9768 |
| 2c84d73353 |
| 56ba055857 |
| 4b9775d9fe |
| d06091e226 |
| f715e7a537 |
| 1068dfcc28 |
| 9a6f66b23c |
| 853a367325 |
| 09cdcd1892 |
| bed6407b52 |
| 3936a4e09a |
| ad818a2880 |
| f8f049f080 |
| 434e8203de |
| 7715ce1a90 |
| c735dd67a2 |
| 1b5962be60 |
| 796d130ea4 |
| 6c8b502a5b |
| 674d681f98 |
| 8c6d3e131d |
| b689debfed |
| 03e4297824 |
| c4e0a02837 |
| 4586ed0735 |
| 59ef6bb6ea |
| 6ce812b01f |
| 87d08dc164 |
| c41883b8ea |
| 6e9d510c9e |
| d09ed8e8f0 |
| 8fe8b1e803 |
| 66438f3780 |
| 46f446fd0e |
| f83d3a19d0 |
| ef59ff1856 |
| 4966225282 |
| 2b8765d0aa |
| d60d06f958 |
| 1a3f268476 |
| 515a855c40 |
| 16d65b8d12 |
| bfe928df18 |
| c447bbe6c8 |
| 1c0a3f95df |
| 8a6116ab79 |
| 430010fbea |
| 079b575a45 |
| b2ca887d59 |
| d7b30ad0d7 |
| b084ace1dd |
| b3e45cdf1a |
| 8132e1f7d9 |
| 149dccf244 |
| b5e4797761 |
| be670d6253 |
| 71060ea4e7 |
| f60f38280c |
| 418deeb332 |
| 619c77c27e |
| ddfddb49da |
| dbbb1870b7 |
| 5b43301206 |
| d915d1a94a |
| 786497790a |
| 56c899cf21 |
| 943f22e5a9 |
| 11b45689f4 |
| 87f443532f |
| 0c672a0c37 |
| dfd11ceb57 |
| d865b7fd87 |
| aa8a6b9c43 |
| fe5313f42e |
| 499f739e2b |
| 4e0e738823 |
| 24360bf306 |
| 6fad3c2bbd |
| 2cf20de7ec |
| 3d8d3bb8ce |
| 80bcbe4885 |
| 32e4782ed8 |
| 613a51bdbb |
| 1c6de43701 |
| 6771530025 |
| 5876f367bc |
| e263af2dd9 |
| 3a59911a2b |
| bbf31e99c3 |
| 9d5bd42f3e |
| e721dae6da |
| af3106b144 |
| 5b55103575 |
| ee4ecf929f |
| 8336556a6f |
| 709aad1d3b |
| fb7ab4937c |
| 5df1726d80 |
| 9fdb568843 |
| 8e76f56f89 |
| 05d3791577 |
| d00dd7eb90 |
| 8d2e404017 |
| 95eb2af25e |
| cbc00a501b |
| 480645d897 |
| 997c767c95 |
| 5a54e1dc9a |
| 49b1952566 |
| e73edc2fce |
| 409652e874 |
| 1d3fb6431f |
| 76cfada60f |
| ac45f80551 |
| 5ea85f086a |
| e3f657746c |
| 001b56e2cc |
| ecbfd2f0de |
| 45753397e1 |
| dc6fe1dafe |
| d5e8f2f416 |
| d73af5a2b4 |
| 7042f2bba8 |
| efeb260fa8 |
| 29e90092ea |
| 0abe865023 |
| 220c65a41a |
.github/actions/cherry-pick/action.yml (vendored, 6 changed lines)

@@ -215,9 +215,6 @@ runs:
--head "$CHERRY_PICK_BRANCH" \
--label "cherry-pick")

# Assign the PR to the original author
gh pr edit "$NEW_PR" --add-assignee "$PR_AUTHOR" || true

echo "✅ Created cherry-pick PR $NEW_PR for $TARGET_BRANCH"

# Comment on original PR

@@ -257,9 +254,6 @@ runs:
--head "$CHERRY_PICK_BRANCH" \
--label "cherry-pick")

# Assign the PR to the original author
gh pr edit "$NEW_PR" --add-assignee "$PR_AUTHOR" || true

echo "⚠️ Created conflict resolution PR $NEW_PR for $TARGET_BRANCH"

# Comment on original PR
.github/actions/setup/action.yml (vendored, 14 changed lines)

@@ -22,12 +22,12 @@ runs:
sudo rm -rf /usr/local/lib/android
- name: Install uv
if: ${{ contains(inputs.dependencies, 'python') }}
uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v5
uses: astral-sh/setup-uv@ed21f2f24f8dd64503750218de024bcf64c7250a # v5
with:
enable-cache: true
- name: Setup python
if: ${{ contains(inputs.dependencies, 'python') }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v5
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v5
with:
python-version-file: "pyproject.toml"
- name: Install Python deps

@@ -36,7 +36,7 @@
run: uv sync --all-extras --dev --frozen
- name: Setup node
if: ${{ contains(inputs.dependencies, 'node') }}
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v4
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v4
with:
node-version-file: web/package.json
cache: "npm"

@@ -44,21 +44,21 @@
registry-url: 'https://registry.npmjs.org'
- name: Setup go
if: ${{ contains(inputs.dependencies, 'go') }}
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v5
uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v5
with:
go-version-file: "go.mod"
- name: Setup docker cache
if: ${{ contains(inputs.dependencies, 'runtime') }}
uses: AndreKurait/docker-cache@0fe76702a40db986d9663c24954fc14c6a6031b7
with:
key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
key: docker-images-${{ runner.os }}-${{ hashFiles('.github/actions/setup/docker-compose.yml', 'Makefile') }}-${{ inputs.postgresql_version }}
- name: Setup dependencies
if: ${{ contains(inputs.dependencies, 'runtime') }}
shell: bash
run: |
export PSQL_TAG=${{ inputs.postgresql_version }}
docker compose -f .github/actions/setup/compose.yml up -d
cd web && npm ci
docker compose -f .github/actions/setup/docker-compose.yml up -d
cd web && npm i
- name: Generate config
if: ${{ contains(inputs.dependencies, 'python') }}
shell: uv run python {0}
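Nearly every change in this comparison follows the same pattern: third-party actions are pinned to a full commit SHA, with the intended release tag kept in a trailing comment, and a bump shows up as an old/new pair like the ones above. Below is a minimal sketch of that pinning pattern, assuming a hypothetical workflow; the two SHAs are the ones visible in the setup action hunk, everything else (workflow name, trigger, job and step layout) is illustrative.

```yaml
# Minimal sketch of the SHA-pinning pattern used throughout these workflows.
# The workflow/job names are assumptions; the action refs below are copied
# from the setup action diff above.
name: example-pinned-setup
on: [push]
jobs:
  setup:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
      - uses: astral-sh/setup-uv@ed21f2f24f8dd64503750218de024bcf64c7250a # v5
        with:
          enable-cache: true
```

Pinning to the full SHA means every upgrade is an explicit, reviewable diff, while the `# v5` comment records which tag the SHA is meant to track.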
@@ -11,6 +11,11 @@ services:
ports:
- 5432:5432
restart: always
redis:
image: docker.io/library/redis:7
ports:
- 6379:6379
restart: always
s3:
container_name: s3
image: docker.io/zenko/cloudserver

@@ -22,7 +27,7 @@ services:
- 8020:8000
volumes:
- s3-data:/usr/src/app/localData
- s3-metadata:/usr/src/app/localMetadata
- s3-metadata:/usr/scr/app/localMetadata
restart: always

volumes:
.github/actions/test-results/action.yml (vendored, 4 changed lines)

@@ -12,11 +12,11 @@ runs:
with:
flags: ${{ inputs.flags }}
use_oidc: true
- uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5
- uses: codecov/test-results-action@47f89e9acb64b76debcd5ea40642d25a4adced9f # v1
with:
flags: ${{ inputs.flags }}
file: unittest.xml
use_oidc: true
report_type: test_results
- name: PostgreSQL Logs
shell: bash
run: |
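The test-results composite action uploads coverage to Codecov with OIDC, and the changed line above swaps between `codecov/codecov-action` and `codecov/test-results-action` for the `unittest.xml` report. A minimal sketch of the coverage-upload half follows; only the action ref, `flags`, and `use_oidc` come from the hunk, and the surrounding action skeleton (name, description, input wiring) is an assumption.

```yaml
# Illustrative composite-action skeleton around the Codecov upload shown above.
# Everything except the pinned `uses` ref, `flags`, and `use_oidc` is assumed.
name: test-results
description: Upload coverage to Codecov
inputs:
  flags:
    description: Codecov flags for this upload
    required: true
runs:
  using: composite
  steps:
    - uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5
      with:
        flags: ${{ inputs.flags }}
        use_oidc: true
```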
@@ -42,9 +42,9 @@ jobs:
# Needed for checkout
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev

@@ -56,18 +56,24 @@ jobs:
release: ${{ inputs.release }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
- name: make empty clients
if: ${{ inputs.release }}
run: |
mkdir -p ./gen-ts-api
mkdir -p ./gen-go-api
- name: Setup node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v5
with:
node-version-file: web/package.json
cache: "npm"

@@ -84,7 +90,6 @@ jobs:
id: push
with:
context: .
file: lifecycle/container/Dockerfile
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
secrets: |
GEOIPUPDATE_ACCOUNT_ID=${{ secrets.GEOIPUPDATE_ACCOUNT_ID }}

@@ -95,7 +100,7 @@ jobs:
platforms: linux/${{ inputs.image_arch }}
cache-from: type=registry,ref=${{ steps.ev.outputs.attestImageNames }}:buildcache-${{ inputs.image_arch }}
cache-to: ${{ steps.ev.outputs.cacheTo }}
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
- uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:
.github/workflows/_reusable-docker-build.yml (vendored, 12 changed lines)

@@ -49,7 +49,7 @@ jobs:
tags: ${{ steps.ev.outputs.imageTagsJSON }}
shouldPush: ${{ steps.ev.outputs.shouldPush }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev

@@ -69,7 +69,7 @@ jobs:
matrix:
tag: ${{ fromJson(needs.get-tags.outputs.tags) }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev

@@ -79,25 +79,25 @@ jobs:
image-name: ${{ inputs.image_name }}
- name: Login to Docker Hub
if: ${{ inputs.registry_dockerhub }}
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
if: ${{ inputs.registry_ghcr }}
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: int128/docker-manifest-create-action@1a059c021f1d5e9f2bd39de745d5dd3a0ef6df90 # v2
- uses: int128/docker-manifest-create-action@b60433fd4312d7a64a56d769b76ebe3f45cf36b4 # v2
id: build
with:
tags: ${{ matrix.tag }}
sources: |
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-amd64.outputs.image-digest }}
${{ steps.ev.outputs.attestImageNames }}@${{ needs.build-server-arm64.outputs.image-digest }}
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
- uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}
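The reusable build workflow builds per-architecture images, stitches their digests into one multi-arch manifest, and then attests the result. Below is a hedged sketch of that merge step; the job name, `needs` list, image name, and digest values are placeholders, while the two actions and their input names (`tags`, `sources`, `subject-name`) follow the hunk above.

```yaml
# Illustrative sketch of the manifest-merge plus provenance pattern shown above.
# Job and image names are hypothetical; the action refs and inputs mirror the diff.
jobs:
  merge-manifests:
    runs-on: ubuntu-latest
    needs: [build-server-amd64, build-server-arm64]
    permissions:
      packages: write
      id-token: write
      attestations: write
    steps:
      - uses: int128/docker-manifest-create-action@b60433fd4312d7a64a56d769b76ebe3f45cf36b4 # v2
        id: build
        with:
          tags: ghcr.io/example/image:latest
          sources: |
            ghcr.io/example/image@sha256:aaaa0000000000000000000000000000000000000000000000000000000000aa
            ghcr.io/example/image@sha256:bbbb0000000000000000000000000000000000000000000000000000000000bb
      - uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3
        with:
          subject-name: ghcr.io/example/image
          subject-digest: ${{ steps.build.outputs.digest }}
          push-to-registry: true
```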
.github/workflows/api-ts-publish.yml (vendored, 6 changed lines)

@@ -22,10 +22,10 @@ jobs:
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
with:
token: ${{ steps.generate_token.outputs.token }}
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v5
with:
node-version-file: web/package.json
registry-url: "https://registry.npmjs.org"

@@ -46,7 +46,7 @@ jobs:
run: |
export VERSION=`node -e 'console.log(require("../gen-ts-api/package.json").version)'`
npm i @goauthentik/api@$VERSION
- uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
- uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v7
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}
.github/workflows/ci-api-docs.yml (vendored, 16 changed lines)

@@ -21,7 +21,7 @@ jobs:
command:
- prettier-check
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- name: Install Dependencies
working-directory: website/
run: npm ci

@@ -32,8 +32,8 @@ jobs:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v5
with:
node-version-file: website/package.json
cache: "npm"

@@ -41,7 +41,7 @@ jobs:
- working-directory: website/
name: Install Dependencies
run: npm ci
- uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v4
- uses: actions/cache@a7833574556fa59680c1b7cb190c1735db73ebf0 # v4
with:
path: |
${{ github.workspace }}/website/api/.docusaurus

@@ -55,7 +55,7 @@ jobs:
env:
NODE_ENV: production
run: npm run build -w api
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v4
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
with:
name: api-docs
path: website/api/build

@@ -66,12 +66,12 @@ jobs:
- lint
- build
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v5
with:
name: api-docs
path: website/api/build
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v5
with:
node-version-file: website/package.json
cache: "npm"
.github/workflows/ci-aws-cfn.yml (vendored, 4 changed lines)

@@ -21,10 +21,10 @@ jobs:
check-changes-applied:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- name: Setup authentik env
uses: ./.github/actions/setup
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v5
with:
node-version-file: lifecycle/aws/package.json
cache: "npm"
.github/workflows/ci-docs-source.yml (vendored, 2 changed lines)

@@ -16,7 +16,7 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- name: Setup authentik env
uses: ./.github/actions/setup
- name: generate docs
.github/workflows/ci-docs.yml (vendored, 26 changed lines)

@@ -15,15 +15,13 @@ on:
jobs:
lint:
runs-on: ubuntu-latest
env:
NODE_ENV: production
strategy:
fail-fast: false
matrix:
command:
- prettier-check
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- name: Install dependencies
working-directory: website/
run: npm ci

@@ -32,11 +30,10 @@ jobs:
run: npm run ${{ matrix.command }}
build-docs:
runs-on: ubuntu-latest
env:
NODE_ENV: production

steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v5
with:
node-version-file: website/package.json
cache: "npm"

@@ -49,11 +46,10 @@ jobs:
run: npm run build
build-integrations:
runs-on: ubuntu-latest
env:
NODE_ENV: production

steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v5
with:
node-version-file: website/package.json
cache: "npm"

@@ -73,13 +69,13 @@ jobs:
id-token: write
attestations: write
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev

@@ -89,7 +85,7 @@ jobs:
image-name: ghcr.io/goauthentik/dev-docs
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

@@ -105,7 +101,7 @@ jobs:
context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }}
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
- uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:
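The docs image build above reads a registry-backed Buildx cache on every run, but only writes the cache back when the run is allowed to push. A hedged sketch of that conditional `cache-from`/`cache-to` step follows; the `dev-docs` image name, cache expressions, and action refs come from the hunks in this compare, while the step name and the surrounding job are assumptions.

```yaml
# Sketch of the conditional registry build-cache step used above; only the
# action refs and cache expressions are taken from the diff, the rest is assumed.
- name: Build docs image
  uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
  with:
    context: .
    push: ${{ steps.ev.outputs.shouldPush == 'true' }}
    cache-from: type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache
    cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && 'type=registry,ref=ghcr.io/goauthentik/dev-docs:buildcache,mode=max' || '' }}
```

The ternary expression keeps forks and pull requests from writing to the shared cache while still letting them read it.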
.github/workflows/ci-main-daily.yml (vendored, 4 changed lines)

@@ -18,11 +18,11 @@ jobs:
- version-2025-4
- version-2025-2
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- run: |
current="$(pwd)"
dir="/tmp/authentik/${{ matrix.version }}"
mkdir -p $dir
cd $dir
wget https://${{ matrix.version }}.goauthentik.io/compose.yml
wget https://${{ matrix.version }}.goauthentik.io/docker-compose.yml
${current}/scripts/test_docker.sh
.github/workflows/ci-main.yml (vendored, 70 changed lines)

@@ -37,7 +37,7 @@ jobs:
- mypy
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run job

@@ -45,7 +45,7 @@ jobs:
test-migrations:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- name: Setup authentik env
uses: ./.github/actions/setup
- name: run migrations

@@ -71,7 +71,7 @@ jobs:
- 18-alpine
run_id: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
with:
fetch-depth: 0
- name: checkout stable

@@ -136,7 +136,7 @@ jobs:
- 18-alpine
run_id: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- name: Setup authentik env
uses: ./.github/actions/setup
with:

@@ -156,7 +156,7 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 30
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Create k8s Kind Cluster

@@ -187,25 +187,21 @@ jobs:
glob: tests/e2e/test_provider_saml* tests/e2e/test_source_saml*
- name: ldap
glob: tests/e2e/test_provider_ldap* tests/e2e/test_source_ldap*
- name: ws-fed
glob: tests/e2e/test_provider_ws_fed*
- name: radius
glob: tests/e2e/test_provider_radius*
- name: scim
glob: tests/e2e/test_source_scim*
- name: flows
glob: tests/e2e/test_flows*
- name: endpoints
glob: tests/e2e/test_endpoints_*
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc)
run: |
docker compose -f tests/e2e/compose.yml up -d --quiet-pull
docker compose -f tests/e2e/docker-compose.yml up -d --quiet-pull
- id: cache-web
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v4
uses: actions/cache@a7833574556fa59680c1b7cb190c1735db73ebf0 # v4
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b

@@ -225,54 +221,6 @@ jobs:
if: ${{ always() }}
with:
flags: e2e
test-openid-conformance:
name: test-openid-conformance (${{ matrix.job.name }})
runs-on: ubuntu-latest
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
job:
- name: basic
glob: tests/openid_conformance/test_basic.py
- name: implicit
glob: tests/openid_conformance/test_implicit.py
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Setup e2e env (chrome, etc)
run: |
docker compose -f tests/e2e/compose.yml up -d --quiet-pull
- name: Setup conformance suite
run: |
docker compose -f tests/openid_conformance/compose.yml up -d --quiet-pull
- id: cache-web
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v4
with:
path: web/dist
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
- name: prepare web ui
if: steps.cache-web.outputs.cache-hit != 'true'
working-directory: web
run: |
npm ci
make -C .. gen-client-ts
npm run build
npm run build:sfe
- name: run conformance
run: |
uv run coverage run manage.py test ${{ matrix.job.glob }}
uv run coverage xml
- uses: ./.github/actions/test-results
if: ${{ always() }}
with:
flags: conformance
- if: ${{ !cancelled() }}
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
with:
name: conformance-certification-${{ matrix.job.name }}
path: tests/openid_conformance/exports/
ci-core-mark:
if: always()
needs:

@@ -312,7 +260,7 @@ jobs:
pull-requests: write
timeout-minutes: 120
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: prepare variables
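The e2e and conformance jobs in ci-main fan out over named test globs through a matrix and run each slice under coverage. A hedged sketch of that matrix shape follows; the job name and the reduced set of matrix entries are illustrative, while the glob values and the `coverage run manage.py test` invocation are taken from the hunks above.

```yaml
# Sketch of the glob-based test matrix visible in the ci-main hunks; the job
# name is an assumption and only two of the matrix entries are shown.
test-e2e:
  runs-on: ubuntu-latest
  strategy:
    fail-fast: false
    matrix:
      job:
        - name: radius
          glob: tests/e2e/test_provider_radius*
        - name: flows
          glob: tests/e2e/test_flows*
  steps:
    - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
    - name: run e2e (${{ matrix.job.name }})
      run: uv run coverage run manage.py test ${{ matrix.job.glob }}
```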
.github/workflows/ci-outpost.yml (vendored, 24 changed lines)

@@ -21,8 +21,8 @@ jobs:
lint-golint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
go-version-file: "go.mod"
- name: Prepare and generate API

@@ -42,8 +42,8 @@ jobs:
test-unittest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
go-version-file: "go.mod"
- name: Setup authentik env

@@ -86,13 +86,13 @@ jobs:
id-token: write
attestations: write
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up QEMU
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev

@@ -102,7 +102,7 @@ jobs:
image-name: ghcr.io/goauthentik/dev-${{ matrix.type }}
- name: Login to Container Registry
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

@@ -114,7 +114,7 @@ jobs:
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
with:
tags: ${{ steps.ev.outputs.imageTags }}
file: lifecycle/container/${{ matrix.type }}.Dockerfile
file: ${{ matrix.type }}.Dockerfile
push: ${{ steps.ev.outputs.shouldPush == 'true' }}
build-args: |
GIT_BUILD_HASH=${{ steps.ev.outputs.sha }}

@@ -122,7 +122,7 @@ jobs:
context: .
cache-from: type=registry,ref=ghcr.io/goauthentik/dev-${{ matrix.type }}:buildcache
cache-to: ${{ steps.ev.outputs.shouldPush == 'true' && format('type=registry,ref=ghcr.io/goauthentik/dev-{0}:buildcache,mode=max', matrix.type) || '' }}
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
- uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3
id: attest
if: ${{ steps.ev.outputs.shouldPush == 'true' }}
with:

@@ -145,13 +145,13 @@ jobs:
goos: [linux]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
with:
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v5
with:
node-version-file: web/package.json
cache: "npm"
.github/workflows/ci-web.yml (vendored, 12 changed lines)

@@ -31,8 +31,8 @@ jobs:
- command: lit-analyse
project: web
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v5
with:
node-version-file: ${{ matrix.project }}/package.json
cache: "npm"

@@ -48,8 +48,8 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v5
with:
node-version-file: web/package.json
cache: "npm"

@@ -76,8 +76,8 @@ jobs:
- ci-web-mark
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v5
with:
node-version-file: web/package.json
cache: "npm"
.github/workflows/gen-image-compress.yml (vendored, 6 changed lines)

@@ -33,16 +33,16 @@ jobs:
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Compress images
id: compress
uses: calibreapp/image-actions@d9c8ee5c3dc52ae4622c82ead88d658f4b16b65f # main
uses: calibreapp/image-actions@420075c115b26f8785e293c5bd5bef0911c506e5 # main
with:
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
compressOnly: ${{ github.event_name != 'pull_request' }}
- uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
- uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v7
if: "${{ github.event_name != 'pull_request' && steps.compress.outputs.markdown != '' }}"
id: cpr
with:
@@ -20,13 +20,13 @@ jobs:
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
with:
token: ${{ steps.generate_token.outputs.token }}
- name: Setup authentik env
uses: ./.github/actions/setup
- run: uv run ak update_webauthn_mds
- uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
- uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v7
id: cpr
with:
token: ${{ steps.generate_token.outputs.token }}
.github/workflows/gh-cherry-pick.yml (vendored, 2 changed lines)

@@ -17,7 +17,7 @@ jobs:
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
env:
GH_APP_ID: ${{ secrets.GH_APP_ID }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
if: ${{ steps.app-token.outcome != 'skipped' }}
with:
fetch-depth: 0
.github/workflows/gh-gha-cache-cleanup.yml (vendored, 2 changed lines)

@@ -16,7 +16,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out code
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5

- name: Cleanup
run: |
.github/workflows/packages-npm-publish.yml (vendored, 6 changed lines)

@@ -31,16 +31,16 @@ jobs:
- packages/docusaurus-config
- packages/esbuild-plugin-live-reload
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
with:
fetch-depth: 2
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v5
with:
node-version-file: ${{ matrix.package }}/package.json
registry-url: "https://registry.npmjs.org"
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@8cba46e29c11878d930bca7870bb54394d3e8b21 # 24d32ffd492484c1d75e0c0b894501ddb9d30d62
uses: tj-actions/changed-files@e0021407031f5be11a464abee9a0776171c79891 # 24d32ffd492484c1d75e0c0b894501ddb9d30d62
with:
files: |
${{ matrix.package }}/package.json
.github/workflows/qa-codeql.yml (vendored, 2 changed lines)

@@ -24,7 +24,7 @@ jobs:
language: ["go", "javascript", "python"]
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- name: Setup authentik env
uses: ./.github/actions/setup
- name: Initialize CodeQL
.github/workflows/qa-semgrep.yml (vendored, 2 changed lines)

@@ -26,5 +26,5 @@ jobs:
image: semgrep/semgrep
if: (github.actor != 'dependabot[bot]')
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- run: semgrep ci
.github/workflows/release-branch-off.yml (vendored, 6 changed lines)

@@ -34,7 +34,7 @@ jobs:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Checkout main
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
with:
ref: main
token: "${{ steps.app-token.outputs.token }}"

@@ -62,7 +62,7 @@ jobs:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Checkout main
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
with:
ref: main
token: ${{ steps.generate_token.outputs.token }}

@@ -73,7 +73,7 @@ jobs:
- name: Bump version
run: "make bump version=${{ inputs.next_version }}.0-rc1"
- name: Create pull request
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v7
with:
token: ${{ steps.generate_token.outputs.token }}
branch: release-bump-${{ inputs.next_version }}
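Several of the maintenance and release workflows in this compare share one shape: mint a GitHub App installation token, check out with it, make a scripted change, and open a pull request with `peter-evans/create-pull-request`. A hedged sketch of that shape follows; the token step's action ref is an assumption (the hunks only show its `app-id`/`private-key` inputs), and the step names and branch value are illustrative, while the pinned checkout and create-pull-request refs come from the hunks.

```yaml
# Sketch of the app-token -> checkout -> change -> PR pattern used above.
# The create-github-app-token ref is assumed; the other refs mirror the diff.
steps:
  - name: Generate app token
    id: generate_token
    uses: actions/create-github-app-token@v2 # assumed ref, not shown in the hunks
    with:
      app-id: ${{ secrets.GH_APP_ID }}
      private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
  - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
    with:
      token: ${{ steps.generate_token.outputs.token }}
  - name: Bump version
    run: "make bump version=${{ inputs.next_version }}.0-rc1"
  - name: Create pull request
    uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v7
    with:
      token: ${{ steps.generate_token.outputs.token }}
      branch: release-bump-${{ inputs.next_version }}
```

Using a short-lived app token instead of the default `GITHUB_TOKEN` lets the resulting pull request trigger downstream CI runs.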
.github/workflows/release-next-branch.yml (vendored, 2 changed lines)

@@ -15,7 +15,7 @@ jobs:
runs-on: ubuntu-latest
environment: internal-production
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
with:
ref: main
- run: |
.github/workflows/release-publish.yml (vendored, 62 changed lines)

@@ -31,11 +31,11 @@ jobs:
id-token: write
attestations: write
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- name: Set up QEMU
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev

@@ -44,7 +44,7 @@ jobs:
with:
image-name: ghcr.io/goauthentik/docs
- name: Login to GitHub Container Registry
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

@@ -58,7 +58,7 @@ jobs:
push: true
platforms: linux/amd64,linux/arm64
context: .
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
- uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3
id: attest
if: true
with:

@@ -83,8 +83,8 @@ jobs:
- radius
- rac
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5

@@ -95,7 +95,7 @@ jobs:
- name: Set up QEMU
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev

@@ -108,12 +108,12 @@ jobs:
make gen-client-ts
make gen-client-go
- name: Docker Login Registry
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3
with:
username: ${{ secrets.DOCKER_CORP_USERNAME }}
password: ${{ secrets.DOCKER_CORP_PASSWORD }}
- name: Login to GitHub Container Registry
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

@@ -126,10 +126,10 @@ jobs:
build-args: |
VERSION=${{ github.ref }}
tags: ${{ steps.ev.outputs.imageTags }}
file: lifecycle/container/${{ matrix.type }}.Dockerfile
file: ${{ matrix.type }}.Dockerfile
platforms: linux/amd64,linux/arm64
context: .
- uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3
- uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3
id: attest
with:
subject-name: ${{ steps.ev.outputs.attestImageNames }}

@@ -151,27 +151,23 @@ jobs:
goos: [linux, darwin]
goarch: [amd64, arm64]
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
go-version-file: "go.mod"
- uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v5
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v5
with:
node-version-file: web/package.json
cache: "npm"
cache-dependency-path: web/package-lock.json
- name: Install web dependencies
working-directory: web/
run: |
npm ci
- name: Generate API Clients
run: |
make gen-client-ts
make gen-client-go
- name: Build web
working-directory: web/
run: |
npm ci
npm run build-proxy
- name: Build API client
run: |
make gen-client-go
- name: Build outpost
run: |
set -x

@@ -198,8 +194,8 @@ jobs:
AWS_REGION: eu-central-1
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708 # v5
with:
role-to-assume: "arn:aws:iam::016170277896:role/github_goauthentik_authentik"
aws-region: ${{ env.AWS_REGION }}

@@ -214,15 +210,15 @@ jobs:
- build-outpost-binary
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- name: Run test suite in final docker images
run: |
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> lifecycle/container/.env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> lifecycle/container/.env
docker compose -f lifecycle/container/compose.yml pull -q
docker compose -f lifecycle/container/compose.yml up --no-start
docker compose -f lifecycle/container/compose.yml start postgresql
docker compose -f lifecycle/container/compose.yml run -u root server test-all
echo "PG_PASS=$(openssl rand 32 | base64 -w 0)" >> .env
echo "AUTHENTIK_SECRET_KEY=$(openssl rand 32 | base64 -w 0)" >> .env
docker compose pull -q
docker compose up --no-start
docker compose start postgresql
docker compose run -u root server test-all
sentry-release:
needs:
- build-server

@@ -230,7 +226,7 @@ jobs:
- build-outpost-binary
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
- name: prepare variables
uses: ./.github/actions/docker-push-variables
id: ev

@@ -244,7 +240,7 @@ jobs:
container=$(docker container create ${{ steps.ev.outputs.imageMainName }})
docker cp ${container}:web/ .
- name: Create a Sentry.io release
uses: getsentry/action-release@dab6548b3c03c4717878099e43782cf5be654289 # v3
uses: getsentry/action-release@128c5058bbbe93c8e02147fe0a9c713f166259a6 # v3
continue-on-error: true
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
.github/workflows/release-tag.yml (vendored, 13 changed lines)

@@ -52,7 +52,7 @@ jobs:
needs:
- check-inputs
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
with:
ref: "version-${{ needs.check-inputs.outputs.major_version }}"
- name: Setup authentik env

@@ -76,7 +76,7 @@ jobs:
run: echo "user-id=$(gh api "/users/${{ steps.app-token.outputs.app-slug }}[bot]" --jq .id)" >> "$GITHUB_OUTPUT"
env:
GH_TOKEN: "${{ steps.app-token.outputs.token }}"
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
with:
ref: "version-${{ needs.check-inputs.outputs.major_version }}"
token: "${{ steps.app-token.outputs.token }}"

@@ -91,7 +91,6 @@ jobs:
# ID from https://api.github.com/users/authentik-automation[bot]
git config --global user.name '${{ steps.app-token.outputs.app-slug }}[bot]'
git config --global user.email '${{ steps.get-user-id.outputs.user-id }}+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com'
git pull
git commit -a -m "release: ${{ inputs.version }}" --allow-empty
git tag "version/${{ inputs.version }}" HEAD -m "version/${{ inputs.version }}"
git push --follow-tags

@@ -125,7 +124,7 @@ jobs:
run: echo "user-id=$(gh api "/users/${{ steps.app-token.outputs.app-slug }}[bot]" --jq .id)" >> "$GITHUB_OUTPUT"
env:
GH_TOKEN: "${{ steps.app-token.outputs.token }}"
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
with:
repository: "${{ github.repository_owner }}/helm"
token: "${{ steps.app-token.outputs.token }}"

@@ -137,7 +136,7 @@ jobs:
sed -E -i 's/[0-9]{4}\.[0-9]{1,2}\.[0-9]+$/${{ inputs.version }}/' charts/authentik/Chart.yaml
./scripts/helm-docs.sh
- name: Create pull request
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v7
with:
token: "${{ steps.app-token.outputs.token }}"
branch: bump-${{ inputs.version }}

@@ -167,7 +166,7 @@ jobs:
run: echo "user-id=$(gh api "/users/${{ steps.app-token.outputs.app-slug }}[bot]" --jq .id)" >> "$GITHUB_OUTPUT"
env:
GH_TOKEN: "${{ steps.app-token.outputs.token }}"
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
with:
repository: "${{ github.repository_owner }}/version"
token: "${{ steps.app-token.outputs.token }}"

@@ -192,7 +191,7 @@ jobs:
'.stable.version = $version | .stable.changelog = $changelog | .stable.changelog_url = $changelog_url' version.json > version.new.json
mv version.new.json version.json
- name: Create pull request
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v7
with:
token: "${{ steps.app-token.outputs.token }}"
branch: bump-${{ inputs.version }}
@@ -25,11 +25,11 @@ jobs:
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
if: ${{ github.event_name != 'pull_request' }}
with:
token: ${{ steps.generate_token.outputs.token }}
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v5
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v5
if: ${{ github.event_name == 'pull_request' }}
- name: Setup authentik env
uses: ./.github/actions/setup

@@ -44,7 +44,7 @@ jobs:
make web-check-compile
- name: Create Pull Request
if: ${{ github.event_name != 'pull_request' }}
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v7
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v7
with:
token: ${{ steps.generate_token.outputs.token }}
branch: extract-compile-backend-translation
.gitignore (vendored, 3 changed lines)

@@ -211,5 +211,4 @@ source_docs/
/vendor/

### Docker ###
tests/openid_conformance/exports/*.zip
compose.override.yml
docker-compose.override.yml
@@ -16,8 +16,10 @@ go.sum @goauthentik/backend
# Infrastructure
.github/ @goauthentik/infrastructure
lifecycle/aws/ @goauthentik/infrastructure
lifecycle/container/ @goauthentik/infrastructure
Dockerfile @goauthentik/infrastructure
*Dockerfile @goauthentik/infrastructure
.dockerignore @goauthentik/infrastructure
docker-compose.yml @goauthentik/infrastructure
Makefile @goauthentik/infrastructure
.editorconfig @goauthentik/infrastructure
CODEOWNERS @goauthentik/infrastructure
@@ -26,7 +26,7 @@ RUN npm run build && \
npm run build:sfe

# Stage 2: Build go proxy
FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.25.5-trixie@sha256:8e8f9c84609b6005af0a4a8227cee53d6226aab1c6dcb22daf5aeeb8b05480e1 AS go-builder
FROM --platform=${BUILDPLATFORM} docker.io/library/golang:1.25.5-trixie@sha256:5d35fb8d28b9095d123b7d96095bbf3750ff18be0a87e5a21c9cffc4351fbf96 AS go-builder

ARG TARGETOS
ARG TARGETARCH

@@ -78,9 +78,9 @@ RUN --mount=type=secret,id=GEOIPUPDATE_ACCOUNT_ID \
/bin/sh -c "GEOIPUPDATE_LICENSE_KEY_FILE=/run/secrets/GEOIPUPDATE_LICENSE_KEY /usr/bin/entry.sh || echo 'Failed to get GeoIP database, disabling'; exit 0"

# Stage 4: Download uv
FROM ghcr.io/astral-sh/uv:0.9.18@sha256:5713fa8217f92b80223bc83aac7db36ec80a84437dbc0d04bbc659cae030d8c9 AS uv
FROM ghcr.io/astral-sh/uv:0.9.17@sha256:5cb6b54d2bc3fe2eb9a8483db958a0b9eebf9edff68adedb369df8e7b98711a2 AS uv
# Stage 5: Base python image
FROM ghcr.io/goauthentik/fips-python:3.14.2-slim-trixie-fips@sha256:46c0658052e43ad303da39e461ad106c499a03fabd3512d05ff586e506507242 AS python-base
FROM ghcr.io/goauthentik/fips-python:3.13.9-slim-trixie-fips@sha256:700fc8c1e290bd14e5eaca50b1d8e8c748c820010559cbfb4c4f8dfbe2c4c9ff AS python-base

ENV VENV_PATH="/ak-root/.venv" \
PATH="/lifecycle:/ak-root/.venv/bin:$PATH" \
141
Makefile
141
Makefile
@@ -5,56 +5,32 @@ SHELL := /usr/bin/env bash
PWD = $(shell pwd)
UID = $(shell id -u)
GID = $(shell id -g)
NPM_VERSION = $(shell python -m scripts.generate_semver)
PY_SOURCES = authentik packages tests scripts lifecycle .github
DOCKER_IMAGE ?= "authentik:test"

UNAME_S := $(shell uname -s)
ifeq ($(UNAME_S),Darwin)
SED_INPLACE = sed -i ''
else
SED_INPLACE = sed -i
endif

GEN_API_TS = gen-ts-api
GEN_API_PY = gen-py-api
GEN_API_GO = gen-go-api

BREW_LDFLAGS :=
BREW_CPPFLAGS :=
BREW_PKG_CONFIG_PATH :=

UV := uv
pg_user := $(shell uv run python -m authentik.lib.config postgresql.user 2>/dev/null)
pg_host := $(shell uv run python -m authentik.lib.config postgresql.host 2>/dev/null)
pg_name := $(shell uv run python -m authentik.lib.config postgresql.name 2>/dev/null)

# For macOS users, add the libxml2 installed from brew libxmlsec1 to the build path
# to prevent SAML-related tests from failing and ensure correct pip dependency compilation
ifeq ($(UNAME_S),Darwin)
# Only add for brew users who installed libxmlsec1
BREW_EXISTS := $(shell command -v brew 2> /dev/null)
ifdef BREW_EXISTS
LIBXML2_EXISTS := $(shell brew list libxml2 2> /dev/null)
ifdef LIBXML2_EXISTS
_xml_pref := $(shell brew --prefix libxml2)
BREW_LDFLAGS += -L${_xml_pref}/lib
BREW_CPPFLAGS += -I${_xml_pref}/include
BREW_PKG_CONFIG_PATH = ${_xml_pref}/lib/pkgconfig:$(PKG_CONFIG_PATH)
endif
KRB5_EXISTS := $(shell brew list krb5 2> /dev/null)
ifdef KRB5_EXISTS
_krb5_pref := $(shell brew --prefix krb5)
BREW_LDFLAGS += -L${_krb5_pref}/lib
BREW_CPPFLAGS += -I${_krb5_pref}/include
BREW_PKG_CONFIG_PATH = ${_krb5_pref}/lib/pkgconfig:$(PKG_CONFIG_PATH)
endif
UV := LDFLAGS="$(BREW_LDFLAGS)" CPPFLAGS="$(BREW_CPPFLAGS)" PKG_CONFIG_PATH="$(BREW_PKG_CONFIG_PATH)" uv
endif
endif
# These functions are only evaluated when called in specific targets
LIBXML2_EXISTS = $(shell brew list libxml2 2> /dev/null)
KRB5_EXISTS = $(shell brew list krb5 2> /dev/null)

NPM_VERSION :=
UV_EXISTS := $(shell command -v uv 2> /dev/null)
ifdef UV_EXISTS
NPM_VERSION := $(shell $(UV) run python -m scripts.generate_semver)
else
NPM_VERSION = $(shell python -m scripts.generate_semver)
LIBXML2_LDFLAGS = -L$(shell brew --prefix libxml2)/lib $(LDFLAGS)
LIBXML2_CPPFLAGS = -I$(shell brew --prefix libxml2)/include $(CPPFLAGS)
LIBXML2_PKG_CONFIG = $(shell brew --prefix libxml2)/lib/pkgconfig:$(PKG_CONFIG_PATH)

KRB_PATH =

ifneq ($(KRB5_EXISTS),)
KRB_PATH = PATH="$(shell brew --prefix krb5)/sbin:$(shell brew --prefix krb5)/bin:$$PATH"
endif

all: lint-fix lint gen web test ## Lint, build, and test everything
@@ -73,46 +49,47 @@ go-test:
go test -timeout 0 -v -race -cover ./...

test: ## Run the server tests and produce a coverage report (locally)
$(UV) run coverage run manage.py test --keepdb $(or $(filter-out $@,$(MAKECMDGOALS)),authentik)
$(UV) run coverage html
$(UV) run coverage report
$(KRB_PATH) uv run coverage run manage.py test --keepdb $(or $(filter-out $@,$(MAKECMDGOALS)),authentik)
uv run coverage html
uv run coverage report

lint-fix: lint-codespell ## Lint and automatically fix errors in the python source code. Reports spelling errors.
$(UV) run black $(PY_SOURCES)
$(UV) run ruff check --fix $(PY_SOURCES)
uv run black $(PY_SOURCES)
uv run ruff check --fix $(PY_SOURCES)

lint-codespell: ## Reports spelling errors.
$(UV) run codespell -w
uv run codespell -w

lint: ci-bandit ci-mypy ## Lint the python and golang sources
lint: ## Lint the python and golang sources
uv run bandit -c pyproject.toml -r $(PY_SOURCES)
golangci-lint run -v

core-install:
ifdef ($(BREW_EXISTS))
ifneq ($(LIBXML2_EXISTS),)
# Clear cache to ensure fresh compilation
$(UV) cache clean
uv cache clean
# Force compilation from source for lxml and xmlsec with correct environment
$(UV) sync --frozen --reinstall-package lxml --reinstall-package xmlsec --no-binary-package lxml --no-binary-package xmlsec
LDFLAGS="$(LIBXML2_LDFLAGS)" CPPFLAGS="$(LIBXML2_CPPFLAGS)" PKG_CONFIG_PATH="$(LIBXML2_PKG_CONFIG)" uv sync --frozen --reinstall-package lxml --reinstall-package xmlsec --no-binary-package lxml --no-binary-package xmlsec
else
$(UV) sync --frozen
uv sync --frozen
endif

migrate: ## Run the Authentik Django server's migrations
$(UV) run python -m lifecycle.migrate
uv run python -m lifecycle.migrate

i18n-extract: core-i18n-extract web-i18n-extract ## Extract strings that require translation into files to send to a translation service

aws-cfn:
cd lifecycle/aws && npm i && $(UV) run npm run aws-cfn
cd lifecycle/aws && npm i && uv run npm run aws-cfn

run-server: ## Run the main authentik server process
$(UV) run ak server
uv run ak server

run-worker: ## Run the main authentik worker process
$(UV) run ak worker
uv run ak worker

core-i18n-extract:
$(UV) run ak makemessages \
uv run ak makemessages \
--add-location file \
--no-obsolete \
--ignore web \
@@ -125,17 +102,11 @@ core-i18n-extract:
install: node-install docs-install core-install ## Install all requires dependencies for `node`, `docs` and `core`

dev-drop-db:
$(eval pg_user := $(shell $(UV) run python -m authentik.lib.config postgresql.user 2>/dev/null))
$(eval pg_host := $(shell $(UV) run python -m authentik.lib.config postgresql.host 2>/dev/null))
$(eval pg_name := $(shell $(UV) run python -m authentik.lib.config postgresql.name 2>/dev/null))
dropdb -U ${pg_user} -h ${pg_host} ${pg_name} || true
# Also remove the test-db if it exists
dropdb -U ${pg_user} -h ${pg_host} test_${pg_name} || true

dev-create-db:
$(eval pg_user := $(shell $(UV) run python -m authentik.lib.config postgresql.user 2>/dev/null))
$(eval pg_host := $(shell $(UV) run python -m authentik.lib.config postgresql.host 2>/dev/null))
$(eval pg_name := $(shell $(UV) run python -m authentik.lib.config postgresql.name 2>/dev/null))
createdb -U ${pg_user} -h ${pg_host} ${pg_name}

dev-reset: dev-drop-db dev-create-db migrate ## Drop and restore the Authentik PostgreSQL instance to a "fresh install" state.
@@ -148,11 +119,11 @@ bump: ## Bump authentik version. Usage: make bump version=20xx.xx.xx
ifndef version
$(error Usage: make bump version=20xx.xx.xx )
endif
$(eval current_version := $(shell cat ${PWD}/internal/constants/VERSION))
$(SED_INPLACE) 's/^version = ".*"/version = "$(version)"/' ${PWD}/pyproject.toml
$(SED_INPLACE) 's/^VERSION = ".*"/VERSION = "$(version)"/' ${PWD}/authentik/__init__.py
sed -i 's/^version = ".*"/version = "$(version)"/' pyproject.toml
sed -i 's/^VERSION = ".*"/VERSION = "$(version)"/' authentik/__init__.py
$(MAKE) gen-build gen-compose aws-cfn
$(SED_INPLACE) "s/\"${current_version}\"/\"$(version)\"/" ${PWD}/package.json ${PWD}/package-lock.json ${PWD}/web/package.json ${PWD}/web/package-lock.json
npm version --no-git-tag-version --allow-same-version $(version)
cd ${PWD}/web && npm version --no-git-tag-version --allow-same-version $(version)
echo -n $(version) > ${PWD}/internal/constants/VERSION

#########################
@@ -163,10 +134,14 @@ gen-build: ## Extract the schema from the database
AUTHENTIK_DEBUG=true \
AUTHENTIK_TENANTS__ENABLED=true \
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
$(UV) run ak build_schema
uv run ak make_blueprint_schema --file blueprints/schema.json
AUTHENTIK_DEBUG=true \
AUTHENTIK_TENANTS__ENABLED=true \
AUTHENTIK_OUTPOSTS__DISABLE_EMBEDDED_OUTPOST=true \
uv run ak spectacular --file schema.yml

gen-compose:
$(UV) run scripts/generate_compose.py
uv run scripts/generate_docker_compose.py

gen-changelog: ## (Release) generate the changelog based from the commits since the last tag
git log --pretty=format:" - %s" $(shell git describe --tags $(shell git rev-list --tags --max-count=1))...$(shell git branch --show-current) | sort > changelog.md
@@ -174,14 +149,14 @@ gen-changelog: ## (Release) generate the changelog based from the commits since

gen-diff: ## (Release) generate the changelog diff between the current schema and the last tag
git show $(shell git describe --tags $(shell git rev-list --tags --max-count=1)):schema.yml > schema-old.yml
docker compose -f scripts/api/compose.yml run --rm --user "${UID}:${GID}" diff \
docker compose -f scripts/api/docker-compose.yml run --rm --user "${UID}:${GID}" diff \
--markdown \
/local/diff.md \
/local/schema-old.yml \
/local/schema.yml
rm schema-old.yml
$(SED_INPLACE) 's/{/&#123;/g' diff.md
$(SED_INPLACE) 's/}/&#125;/g' diff.md
sed -i 's/{/&#123;/g' diff.md
sed -i 's/}/&#125;/g' diff.md
npx prettier --write diff.md

gen-clean-ts: ## Remove generated API client for TypeScript
@@ -197,7 +172,7 @@ gen-clean-go: ## Remove generated API client for Go
gen-clean: gen-clean-ts gen-clean-go gen-clean-py ## Remove generated API clients

gen-client-ts: gen-clean-ts ## Build and install the authentik API for Typescript into the authentik UI Application
docker compose -f scripts/api/compose.yml run --rm --user "${UID}:${GID}" gen \
docker compose -f scripts/api/docker-compose.yml run --rm --user "${UID}:${GID}" gen \
generate \
-i /local/schema.yml \
-g typescript-fetch \
@@ -225,7 +200,7 @@ gen-client-go: gen-clean-go ## Build and install the authentik API for Golang
go mod edit -replace goauthentik.io/api/v3=./${GEN_API_GO}

gen-dev-config: ## Generate a local development config file
$(UV) run scripts/generate_config.py
uv run scripts/generate_config.py

gen: gen-build gen-client-ts

@@ -309,7 +284,7 @@ docs-api-clean: ## Clean generated API documentation

docker: ## Build a docker image of the current source tree
mkdir -p ${GEN_API_TS}
DOCKER_BUILDKIT=1 docker build . -f lifecycle/container/Dockerfile --progress plain --tag ${DOCKER_IMAGE}
DOCKER_BUILDKIT=1 docker build . --progress plain --tag ${DOCKER_IMAGE}

test-docker:
BUILD=true ${PWD}/scripts/test_docker.sh
@@ -321,28 +296,28 @@ test-docker:
# which makes the YAML File a lot smaller

ci--meta-debug:
$(UV) run python -V
python -V
node --version

ci-mypy: ci--meta-debug
$(UV) run mypy --strict $(PY_SOURCES)
uv run mypy --strict $(PY_SOURCES)

ci-black: ci--meta-debug
$(UV) run black --check $(PY_SOURCES)
uv run black --check $(PY_SOURCES)

ci-ruff: ci--meta-debug
$(UV) run ruff check $(PY_SOURCES)
uv run ruff check $(PY_SOURCES)

ci-codespell: ci--meta-debug
$(UV) run codespell -s
uv run codespell -s

ci-bandit: ci--meta-debug
$(UV) run bandit -c pyproject.toml -r $(PY_SOURCES) -iii
uv run bandit -r $(PY_SOURCES)

ci-pending-migrations: ci--meta-debug
$(UV) run ak makemigrations --check
uv run ak makemigrations --check

ci-test: ci--meta-debug
$(UV) run coverage run manage.py test --keepdb authentik
$(UV) run coverage report
$(UV) run coverage xml
uv run coverage run manage.py test --keepdb authentik
uv run coverage report
uv run coverage xml
@@ -3,7 +3,7 @@
from functools import lru_cache
from os import environ

VERSION = "2026.2.0"
VERSION = "2025.12.3"
ENV_GIT_HASH_KEY = "GIT_BUILD_HASH"
@@ -18,6 +18,7 @@ from rest_framework.views import APIView

from authentik import authentik_full_version
from authentik.core.api.utils import PassiveSerializer
from authentik.enterprise.license import LicenseKey
from authentik.lib.config import CONFIG
from authentik.lib.utils.reflection import get_env
from authentik.outposts.apps import MANAGED_OUTPOST
@@ -25,15 +26,6 @@ from authentik.outposts.models import Outpost
from authentik.rbac.permissions import HasPermission

def fips_enabled():
try:
from authentik.enterprise.license import LicenseKey

return backend._fips_enabled if LicenseKey.get_total().status().is_valid else None
except ModuleNotFoundError:
return None

class RuntimeDict(TypedDict):
"""Runtime information"""

@@ -88,7 +80,9 @@ class SystemInfoSerializer(PassiveSerializer):
"architecture": platform.machine(),
"authentik_version": authentik_full_version(),
"environment": get_env(),
"openssl_fips_enabled": fips_enabled(),
"openssl_fips_enabled": (
backend._fips_enabled if LicenseKey.get_total().status().is_valid else None
),
"openssl_version": OPENSSL_VERSION,
"platform": platform.platform(),
"python_version": python_version,
@@ -13,10 +13,10 @@ from rest_framework.exceptions import AuthenticationFailed
from rest_framework.request import Request
from structlog.stdlib import get_logger

from authentik.common.oauth.constants import SCOPE_AUTHENTIK_API
from authentik.core.middleware import CTX_AUTH_VIA
from authentik.core.models import Token, TokenIntents, User, UserTypes
from authentik.outposts.models import Outpost
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API

LOGGER = get_logger()
_tmp = Path(gettempdir())

@@ -1,45 +0,0 @@
from json import dumps

from django.core.management.base import BaseCommand, no_translations
from drf_spectacular.drainage import GENERATOR_STATS
from drf_spectacular.generators import SchemaGenerator
from drf_spectacular.renderers import OpenApiYamlRenderer
from drf_spectacular.validation import validate_schema
from structlog.stdlib import get_logger

from authentik.blueprints.v1.schema import SchemaBuilder

class Command(BaseCommand):

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.logger = get_logger()

def add_arguments(self, parser):
parser.add_argument("--blueprint-file", type=str, default="blueprints/schema.json")
parser.add_argument("--api-file", type=str, default="schema.yml")

@no_translations
def handle(self, *args, blueprint_file: str, api_file: str, **options):
self.build_blueprint(blueprint_file)
self.build_api(api_file)

def build_blueprint(self, file: str):
self.logger.debug("Building blueprint schema...", file=file)
blueprint_builder = SchemaBuilder()
blueprint_builder.build()
with open(file, "w") as _schema:
_schema.write(
dumps(blueprint_builder.schema, indent=4, default=SchemaBuilder.json_default)
)

def build_api(self, file: str):
self.logger.debug("Building API schema...", file=file)
generator = SchemaGenerator()
schema = generator.get_schema(request=None, public=True)
GENERATOR_STATS.emit_summary()
validate_schema(schema)
output = OpenApiYamlRenderer().render(schema, renderer_context={})
with open(file, "wb") as f:
f.write(output)
@@ -11,12 +11,12 @@ from rest_framework.exceptions import AuthenticationFailed

from authentik.api.authentication import IPCUser, TokenAuthentication
from authentik.blueprints.tests import reconcile_app
from authentik.common.oauth.constants import SCOPE_AUTHENTIK_API
from authentik.core.models import Token, TokenIntents, UserTypes
from authentik.core.tests.utils import create_test_admin_user, create_test_flow
from authentik.lib.generators import generate_id
from authentik.outposts.apps import MANAGED_OUTPOST
from authentik.outposts.models import Outpost
from authentik.providers.oauth2.constants import SCOPE_AUTHENTIK_API
from authentik.providers.oauth2.models import AccessToken, OAuth2Provider

@@ -1,16 +1,9 @@
"""Schema generation tests"""

from pathlib import Path
from tempfile import gettempdir
from uuid import uuid4

from django.core.management import call_command
from django.urls import reverse
from rest_framework.test import APITestCase
from yaml import safe_load

from authentik.lib.config import CONFIG

class TestSchemaGeneration(APITestCase):
"""Generic admin tests"""
@@ -28,17 +21,3 @@ class TestSchemaGeneration(APITestCase):
reverse("authentik_api:schema-browser"),
)
self.assertEqual(response.status_code, 200)

def test_build_schema(self):
"""Test schema build command"""
tmp = Path(gettempdir())
blueprint_file = tmp / f"{str(uuid4())}.json"
api_file = tmp / f"{str(uuid4())}.yml"
with (
CONFIG.patch("debug", True),
CONFIG.patch("tenants.enabled", True),
CONFIG.patch("outposts.disable_embedded_outpost", True),
):
call_command("build_schema", blueprint_file=blueprint_file, api_file=api_file)
self.assertTrue(blueprint_file.exists())
self.assertTrue(api_file.exists())
@@ -1,7 +1,9 @@
"""Generate JSON Schema for blueprints"""

from json import dumps
from typing import Any

from django.core.management.base import BaseCommand, no_translations
from django.db.models import Model, fields
from django.db.models.fields.related import OneToOneField
from drf_jsonschema_serializer.convert import converter, field_to_converter
@@ -38,12 +40,13 @@ class PrimaryKeyRelatedFieldConverter:
return {"type": "integer"}

class SchemaBuilder:
class Command(BaseCommand):
"""Generate JSON Schema for blueprints"""

schema: dict

def __init__(self):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.schema = {
"$schema": "http://json-schema.org/draft-07/schema",
"$id": "https://goauthentik.io/blueprints/schema.json",
@@ -90,6 +93,16 @@ class SchemaBuilder:
"$defs": {"blueprint_entry": {"oneOf": []}},
}

def add_arguments(self, parser):
parser.add_argument("--file", type=str)

@no_translations
def handle(self, *args, file: str, **options):
"""Generate JSON Schema for blueprints"""
self.build()
with open(file, "w") as _schema:
_schema.write(dumps(self.schema, indent=4, default=Command.json_default))

@staticmethod
def json_default(value: Any) -> Any:
"""Helper that handles gettext_lazy strings that JSON doesn't handle"""
@@ -111,7 +124,7 @@ class SchemaBuilder:
try:
serializer_class = model_instance.serializer
except NotImplementedError as exc:
raise ValueError(f"SerializerModel not implemented by {model}") from exc
raise NotImplementedError(model_instance) from exc
serializer = serializer_class(
context={
SERIALIZER_CONTEXT_BLUEPRINT: False,
@@ -18,7 +18,7 @@ entries:
name: foo
title: foo
permissions:
- permission: authentik_flows.view_flow
- permission: view_flow
user: !KeyOf user
- permission: authentik_flows.view_flow
- permission: view_flow
role: !KeyOf role

@@ -9,7 +9,7 @@ from functools import reduce
from json import JSONDecodeError, loads
from operator import ixor
from os import getenv
from typing import Any, Literal
from typing import Any, Literal, Union
from uuid import UUID

from deepmerge import always_merger
@@ -43,6 +45,8 @@ def get_attrs(obj: SerializerModel) -> dict[str, Any]:
continue
if _field.read_only:
data.pop(field_name, None)
if _field.get_initial() == data.get(field_name, None):
data.pop(field_name, None)
if field_name.endswith("_set"):
data.pop(field_name, None)
return data
@@ -68,17 +70,19 @@ class BlueprintEntryDesiredState(Enum):
class BlueprintEntryPermission:
"""Describe object-level permissions"""

permission: str | YAMLTag
user: int | YAMLTag | None = field(default=None)
role: str | YAMLTag | None = field(default=None)
permission: Union[str, "YAMLTag"]
user: Union[int, "YAMLTag", None] = field(default=None)
role: Union[str, "YAMLTag", None] = field(default=None)

@dataclass
class BlueprintEntry:
"""Single entry of a blueprint"""

model: str | YAMLTag
state: BlueprintEntryDesiredState | YAMLTag = field(default=BlueprintEntryDesiredState.PRESENT)
model: Union[str, "YAMLTag"]
state: Union[BlueprintEntryDesiredState, "YAMLTag"] = field(
default=BlueprintEntryDesiredState.PRESENT
)
conditions: list[Any] = field(default_factory=list)
identifiers: dict[str, Any] = field(default_factory=dict)
attrs: dict[str, Any] | None = field(default_factory=dict)
@@ -92,7 +96,7 @@ class BlueprintEntry:
self.__tag_contexts: list[YAMLTagContext] = []

@staticmethod
def from_model(model: SerializerModel, *extra_identifier_names: str) -> BlueprintEntry:
def from_model(model: SerializerModel, *extra_identifier_names: str) -> "BlueprintEntry":
"""Convert a SerializerModel instance to a blueprint Entry"""
identifiers = {
"pk": model.pk,
@@ -110,8 +114,8 @@ class BlueprintEntry:
def get_tag_context(
self,
depth: int = 0,
context_tag_type: type[YAMLTagContext] | tuple[YAMLTagContext, ...] | None = None,
) -> YAMLTagContext:
context_tag_type: type["YAMLTagContext"] | tuple["YAMLTagContext", ...] | None = None,
) -> "YAMLTagContext":
"""Get a YAMLTagContext object located at a certain depth in the tag tree"""
if depth < 0:
raise ValueError("depth must be a positive number or zero")
@@ -126,7 +130,7 @@ class BlueprintEntry:
except IndexError as exc:
raise ValueError(f"invalid depth: {depth}. Max depth: {len(contexts) - 1}") from exc

def tag_resolver(self, value: Any, blueprint: Blueprint) -> Any:
def tag_resolver(self, value: Any, blueprint: "Blueprint") -> Any:
"""Check if we have any special tags that need handling"""
val = copy(value)

@@ -148,23 +152,23 @@ class BlueprintEntry:

return val

def get_attrs(self, blueprint: Blueprint) -> dict[str, Any]:
def get_attrs(self, blueprint: "Blueprint") -> dict[str, Any]:
"""Get attributes of this entry, with all yaml tags resolved"""
return self.tag_resolver(self.attrs, blueprint)

def get_identifiers(self, blueprint: Blueprint) -> dict[str, Any]:
def get_identifiers(self, blueprint: "Blueprint") -> dict[str, Any]:
"""Get attributes of this entry, with all yaml tags resolved"""
return self.tag_resolver(self.identifiers, blueprint)

def get_state(self, blueprint: Blueprint) -> BlueprintEntryDesiredState:
def get_state(self, blueprint: "Blueprint") -> BlueprintEntryDesiredState:
"""Get the blueprint state, with yaml tags resolved if present"""
return BlueprintEntryDesiredState(self.tag_resolver(self.state, blueprint))

def get_model(self, blueprint: Blueprint) -> str:
def get_model(self, blueprint: "Blueprint") -> str:
"""Get the blueprint model, with yaml tags resolved if present"""
return str(self.tag_resolver(self.model, blueprint))

def get_permissions(self, blueprint: Blueprint) -> Generator[BlueprintEntryPermission]:
def get_permissions(self, blueprint: "Blueprint") -> Generator[BlueprintEntryPermission]:
"""Get permissions of this entry, with all yaml tags resolved"""
for perm in self.permissions:
yield BlueprintEntryPermission(
@@ -173,7 +177,7 @@ class BlueprintEntry:
role=self.tag_resolver(perm.role, blueprint),
)

def check_all_conditions_match(self, blueprint: Blueprint) -> bool:
def check_all_conditions_match(self, blueprint: "Blueprint") -> bool:
"""Check all conditions of this entry match (evaluate to True)"""
return all(self.tag_resolver(self.conditions, blueprint))
@@ -228,7 +232,7 @@ class KeyOf(YAMLTag):

id_from: str

def __init__(self, loader: BlueprintLoader, node: ScalarNode) -> None:
def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
super().__init__()
self.id_from = node.value

@@ -254,7 +258,7 @@ class Env(YAMLTag):
key: str
default: Any | None

def __init__(self, loader: BlueprintLoader, node: ScalarNode | SequenceNode) -> None:
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
super().__init__()
self.default = None
if isinstance(node, ScalarNode):
@@ -273,7 +277,7 @@ class File(YAMLTag):
path: str
default: Any | None

def __init__(self, loader: BlueprintLoader, node: ScalarNode | SequenceNode) -> None:
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
super().__init__()
self.default = None
if isinstance(node, ScalarNode):
@@ -301,7 +305,7 @@ class Context(YAMLTag):
key: str
default: Any | None

def __init__(self, loader: BlueprintLoader, node: ScalarNode | SequenceNode) -> None:
def __init__(self, loader: "BlueprintLoader", node: ScalarNode | SequenceNode) -> None:
super().__init__()
self.default = None
if isinstance(node, ScalarNode):
@@ -324,7 +328,7 @@ class ParseJSON(YAMLTag):

raw: str

def __init__(self, loader: BlueprintLoader, node: ScalarNode) -> None:
def __init__(self, loader: "BlueprintLoader", node: ScalarNode) -> None:
super().__init__()
self.raw = node.value

@@ -341,7 +345,7 @@ class Format(YAMLTag):
format_string: str
args: list[Any]

def __init__(self, loader: BlueprintLoader, node: SequenceNode) -> None:
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.format_string = loader.construct_object(node.value[0])
self.args = []
@@ -368,7 +372,7 @@ class Find(YAMLTag):
model_name: str | YAMLTag
conditions: list[list]

def __init__(self, loader: BlueprintLoader, node: SequenceNode) -> None:
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.model_name = loader.construct_object(node.value[0])
self.conditions = []
@@ -440,7 +444,7 @@ class Condition(YAMLTag):
"XNOR": lambda args: not (reduce(ixor, args) if len(args) > 1 else args[0]),
}

def __init__(self, loader: BlueprintLoader, node: SequenceNode) -> None:
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.mode = loader.construct_object(node.value[0])
self.args = []
@@ -474,7 +478,7 @@ class If(YAMLTag):
when_true: Any
when_false: Any

def __init__(self, loader: BlueprintLoader, node: SequenceNode) -> None:
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.condition = loader.construct_object(node.value[0])
if len(node.value) == 1:
@@ -514,7 +518,7 @@ class Enumerate(YAMLTag, YAMLTagContext):
),
}

def __init__(self, loader: BlueprintLoader, node: SequenceNode) -> None:
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.iterable = loader.construct_object(node.value[0])
self.output_body = loader.construct_object(node.value[1])
@@ -580,7 +584,7 @@ class EnumeratedItem(YAMLTag):

_SUPPORTED_CONTEXT_TAGS = (Enumerate,)

def __init__(self, _loader: BlueprintLoader, node: ScalarNode) -> None:
def __init__(self, _loader: "BlueprintLoader", node: ScalarNode) -> None:
super().__init__()
self.depth = int(node.value)

@@ -636,7 +640,7 @@ class AtIndex(YAMLTag):
attribute: int | str | YAMLTag
default: Any | UNSET

def __init__(self, loader: BlueprintLoader, node: SequenceNode) -> None:
def __init__(self, loader: "BlueprintLoader", node: SequenceNode) -> None:
super().__init__()
self.obj = loader.construct_object(node.value[0])
self.attribute = loader.construct_object(node.value[1])
@@ -753,7 +757,7 @@ class EntryInvalidError(SentryIgnoredException):
@staticmethod
def from_entry(
msg_or_exc: str | Exception, entry: BlueprintEntry, *args, **kwargs
) -> EntryInvalidError:
) -> "EntryInvalidError":
"""Create EntryInvalidError with the context of an entry"""
error = EntryInvalidError(msg_or_exc, *args, **kwargs)
if isinstance(msg_or_exc, ValidationError):
@@ -15,7 +15,8 @@ from django.db.models import Model
from django.db.models.query_utils import Q
from django.db.transaction import atomic
from django.db.utils import IntegrityError
from guardian.models import RoleObjectPermission
from django_channels_postgres.models import GroupChannel, Message
from guardian.models import RoleObjectPermission, UserObjectPermission
from rest_framework.exceptions import ValidationError
from rest_framework.serializers import BaseSerializer, Serializer
from structlog.stdlib import BoundLogger, get_logger
@@ -40,16 +41,55 @@ from authentik.core.models import (
User,
UserSourceConnection,
)
from authentik.endpoints.models import Connector
from authentik.endpoints.connectors.agent.models import (
AgentDeviceConnection,
AppleNonce,
DeviceAuthenticationToken,
)
from authentik.endpoints.connectors.agent.models import (
DeviceToken as EndpointDeviceToken,
)
from authentik.endpoints.models import Connector, Device, DeviceConnection, DeviceFactSnapshot
from authentik.enterprise.license import LicenseKey
from authentik.enterprise.models import LicenseUsage
from authentik.enterprise.providers.google_workspace.models import (
GoogleWorkspaceProviderGroup,
GoogleWorkspaceProviderUser,
)
from authentik.enterprise.providers.microsoft_entra.models import (
MicrosoftEntraProviderGroup,
MicrosoftEntraProviderUser,
)
from authentik.enterprise.providers.ssf.models import StreamEvent
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import (
EndpointDevice,
EndpointDeviceConnection,
)
from authentik.events.logs import LogEvent, capture_logs
from authentik.events.utils import cleanse_dict
from authentik.flows.models import Stage
from authentik.lib.models import InternallyManagedMixin, SerializerModel
from authentik.flows.models import FlowToken, Stage
from authentik.lib.models import SerializerModel
from authentik.lib.sentry import SentryIgnoredException
from authentik.lib.utils.reflection import get_apps
from authentik.outposts.models import OutpostServiceConnection
from authentik.policies.models import Policy, PolicyBindingModel
from authentik.policies.reputation.models import Reputation
from authentik.providers.oauth2.models import (
AccessToken,
AuthorizationCode,
DeviceToken,
RefreshToken,
)
from authentik.providers.proxy.models import ProxySession
from authentik.providers.rac.models import ConnectionToken
from authentik.providers.saml.models import SAMLSession
from authentik.providers.scim.models import SCIMProviderGroup, SCIMProviderUser
from authentik.rbac.models import Role
from authentik.sources.scim.models import SCIMSourceGroup, SCIMSourceUser
from authentik.stages.authenticator_webauthn.models import WebAuthnDeviceType
from authentik.stages.consent.models import UserConsent
from authentik.tasks.models import Task, TaskLog
from authentik.tenants.models import Tenant

# Context set when the serializer is created in a blueprint context
# Update website/docs/customize/blueprints/v1/models.md when used
@@ -70,6 +110,7 @@ def excluded_models() -> list[type[Model]]:
ContentType,
Permission,
RoleObjectPermission,
UserObjectPermission,
# Base classes
Provider,
Source,
@@ -84,16 +125,49 @@ def excluded_models() -> list[type[Model]]:
# Classes that have other dependencies
Session,
AuthenticatedSession,
# Classes which are only internally managed
# FIXME: these shouldn't need to be explicitly listed, but rather based off of a mixin
FlowToken,
LicenseUsage,
SCIMProviderGroup,
SCIMProviderUser,
Tenant,
Task,
TaskLog,
ConnectionToken,
AuthorizationCode,
AccessToken,
RefreshToken,
ProxySession,
Reputation,
WebAuthnDeviceType,
SCIMSourceUser,
SCIMSourceGroup,
GoogleWorkspaceProviderUser,
GoogleWorkspaceProviderGroup,
MicrosoftEntraProviderUser,
MicrosoftEntraProviderGroup,
EndpointDevice,
EndpointDeviceConnection,
EndpointDeviceToken,
Device,
DeviceConnection,
DeviceAuthenticationToken,
AppleNonce,
AgentDeviceConnection,
DeviceFactSnapshot,
DeviceToken,
StreamEvent,
UserConsent,
SAMLSession,
Message,
GroupChannel,
)

def is_model_allowed(model: type[Model]) -> bool:
"""Check if model is allowed"""
return (
model not in excluded_models()
and issubclass(model, SerializerModel | BaseMetaModel)
and not issubclass(model, InternallyManagedMixin)
)
return model not in excluded_models() and issubclass(model, SerializerModel | BaseMetaModel)

class DoRollback(SentryIgnoredException):
@@ -139,22 +213,13 @@ class Importer:

def default_context(self):
"""Default context"""
context = {
return {
"goauthentik.io/enterprise/licensed": LicenseKey.get_total().status().is_valid,
"goauthentik.io/rbac/models": rbac_models(),
"goauthentik.io/enterprise/licensed": False,
}
try:
from authentik.enterprise.license import LicenseKey

context["goauthentik.io/enterprise/licensed"] = (
LicenseKey.get_total().status().is_valid,
)
except ModuleNotFoundError:
pass
return context

@staticmethod
def from_string(yaml_input: str, context: dict | None = None) -> Importer:
def from_string(yaml_input: str, context: dict | None = None) -> "Importer":
"""Parse YAML string and create blueprint importer from it"""
import_dict = load(yaml_input, BlueprintLoader)
try:
@@ -23,7 +23,7 @@ class ApplyBlueprintMetaSerializer(PassiveSerializer):

# We cannot override `instance` as that will confuse rest_framework
# and make it attempt to update the instance
blueprint_instance: BlueprintInstance
blueprint_instance: "BlueprintInstance"

def validate(self, attrs):
from authentik.blueprints.models import BlueprintInstance

@@ -124,8 +124,10 @@ class CurrentBrandSerializer(PassiveSerializer):
@extend_schema_field(field=FlagJSONField)
def get_flags(self, _):
values = {}
for flag in Flag.available(visibility="public"):
values[flag().key] = flag.get()
for flag in Flag.available():
_flag = flag()
if _flag.visibility == "public":
values[_flag.key] = _flag.get()
return values
@@ -21,8 +21,10 @@ class TestBrands(APITestCase):
def setUp(self):
super().setUp()
self.default_flags = {}
for flag in Flag.available(visibility="public"):
self.default_flags[flag().key] = flag.get()
for flag in Flag.available():
_flag = flag()
if _flag.visibility == "public":
self.default_flags[_flag.key] = _flag.get()
Brand.objects.all().delete()

def test_current_brand(self):

@@ -3,7 +3,7 @@
from typing import Any

from django.db.models import Case, F, IntegerField, Q, Value, When
from django.db.models.functions import Concat, Length
from django.db.models.functions import Length
from django.http.request import HttpRequest
from django.utils.html import _json_script_escapes
from django.utils.safestring import mark_safe
@@ -26,8 +26,7 @@ def get_brand_for_request(request: HttpRequest) -> Brand:
domain_length=Length("domain"),
match_priority=Case(
When(
condition=Q(host_domain__iexact=F("domain"))
| Q(host_domain__iendswith=Concat(Value("."), F("domain"))),
condition=Q(host_domain__iendswith=F("domain")),
then=F("domain_length"),
),
default=Value(-1),

@@ -66,7 +66,7 @@ class ApplicationSerializer(ModelSerializer):
user = self.context["request"].user

# Cache serialized user data to avoid N+1 when formatting launch URLs
# for multiple applications. UserSerializer accesses user.groups which
# for multiple applications. UserSerializer accesses user.ak_groups which
# would otherwise trigger a query for each application.
if user is not None:
if "_cached_user_data" not in self.context:
@@ -2,31 +2,18 @@

from typing import TypedDict

from drf_spectacular.utils import (
extend_schema,
inline_serializer,
)
from rest_framework import mixins, serializers
from rest_framework.decorators import action
from rest_framework import mixins
from rest_framework.fields import SerializerMethodField
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import (
CharField,
DateTimeField,
IPAddressField,
ListField,
)
from rest_framework.serializers import CharField, DateTimeField, IPAddressField
from rest_framework.viewsets import GenericViewSet
from ua_parser import user_agent_parser

from authentik.api.validation import validate
from authentik.core.api.used_by import UsedByMixin
from authentik.core.api.utils import ModelSerializer, PassiveSerializer
from authentik.core.api.utils import ModelSerializer
from authentik.core.models import AuthenticatedSession
from authentik.events.context_processors.asn import ASN_CONTEXT_PROCESSOR, ASNDict
from authentik.events.context_processors.geoip import GEOIP_CONTEXT_PROCESSOR, GeoIPDict
from authentik.rbac.decorators import permission_required

class UserAgentDeviceDict(TypedDict):
@@ -65,14 +52,6 @@ class UserAgentDict(TypedDict):
string: str

class BulkDeleteSessionSerializer(PassiveSerializer):
"""Serializer for bulk deleting authenticated sessions by user"""

user_pks = ListField(
child=serializers.IntegerField(), help_text="List of user IDs to revoke all sessions for"
)

class AuthenticatedSessionSerializer(ModelSerializer):
"""AuthenticatedSession Serializer"""

@@ -136,22 +115,3 @@ class AuthenticatedSessionViewSet(
filterset_fields = ["user__username", "session__last_ip", "session__last_user_agent"]
ordering = ["user__username"]
owner_field = "user"

@permission_required("authentik_core.delete_authenticatedsession")
@extend_schema(
parameters=[BulkDeleteSessionSerializer],
responses={
200: inline_serializer(
"BulkDeleteSessionResponse",
{"deleted": serializers.IntegerField()},
),
},
)
@validate(BulkDeleteSessionSerializer, location="query")
@action(detail=False, methods=["DELETE"], pagination_class=None, filter_backends=[])
def bulk_delete(self, request: Request, *, query: BulkDeleteSessionSerializer) -> Response:
"""Bulk revoke all sessions for multiple users"""
user_pks = query.validated_data.get("user_pks", [])
deleted_count, _ = AuthenticatedSession.objects.filter(user_id__in=user_pks).delete()

return Response({"deleted": deleted_count}, status=200)
@@ -16,15 +16,11 @@ from rest_framework.viewsets import ViewSet
from authentik.api.validation import validate
from authentik.core.api.users import ParamUserSerializer
from authentik.core.api.utils import MetaNameSerializer
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
from authentik.stages.authenticator import device_classes, devices_for_user
from authentik.stages.authenticator.models import Device
from authentik.stages.authenticator_webauthn.models import WebAuthnDevice

try:
from authentik.enterprise.stages.authenticator_endpoint_gdtc.models import EndpointDevice
except ModuleNotFoundError:
EndpointDevice = None

class DeviceSerializer(MetaNameSerializer):
"""Serializer for authenticator devices"""
@@ -47,7 +43,7 @@ class DeviceSerializer(MetaNameSerializer):
"""Get extra description"""
if isinstance(instance, WebAuthnDevice):
return instance.device_type.description if instance.device_type else None
if EndpointDevice and isinstance(instance, EndpointDevice):
if isinstance(instance, EndpointDevice):
return instance.data.get("deviceSignals", {}).get("deviceModel")
return None

@@ -55,7 +51,7 @@ class DeviceSerializer(MetaNameSerializer):
"""Get external Device ID"""
if isinstance(instance, WebAuthnDevice):
return instance.device_type.aaguid if instance.device_type else None
if EndpointDevice and isinstance(instance, EndpointDevice):
if isinstance(instance, EndpointDevice):
return instance.data.get("deviceSignals", {}).get("deviceModel")
return None
@@ -10,6 +10,7 @@ from rest_framework.request import Request
from rest_framework.response import Response

from authentik.core.api.utils import PassiveSerializer
from authentik.enterprise.apps import EnterpriseConfig
from authentik.lib.models import DeprecatedMixin
from authentik.lib.utils.reflection import all_subclasses

@@ -60,25 +61,19 @@ class TypesMixin:
continue
instance = subclass()
try:
type_signature = {
"name": subclass._meta.verbose_name,
"description": subclass.__doc__,
"component": instance.component,
"model_name": subclass._meta.model_name,
"icon_url": getattr(instance, "icon_url", None),
"requires_enterprise": False,
"deprecated": isinstance(instance, DeprecatedMixin),
}
try:
from authentik.enterprise.apps import EnterpriseConfig

type_signature["requires_enterprise"] = isinstance(
subclass._meta.app_config, EnterpriseConfig
)
except ModuleNotFoundError:
pass

data.append(type_signature)
data.append(
{
"name": subclass._meta.verbose_name,
"description": subclass.__doc__,
"component": instance.component,
"model_name": subclass._meta.model_name,
"icon_url": getattr(instance, "icon_url", None),
"requires_enterprise": isinstance(
subclass._meta.app_config, EnterpriseConfig
),
"deprecated": isinstance(instance, DeprecatedMixin),
}
)
except NotImplementedError:
continue
if additional:
@@ -4,6 +4,7 @@ from typing import Any

from django.utils.timezone import now
from drf_spectacular.utils import OpenApiResponse, extend_schema
from guardian.shortcuts import get_anonymous_user
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.fields import CharField
@@ -145,6 +146,12 @@ class TokenViewSet(UsedByMixin, ModelViewSet):
owner_field = "user"
rbac_allow_create_without_perm = True

def get_queryset(self):
user = self.request.user if self.request else get_anonymous_user()
if user.is_superuser:
return super().get_queryset()
return super().get_queryset().filter(user=user.pk)

def perform_create(self, serializer: TokenSerializer):
if not self.request.user.is_superuser:
instance = serializer.save(
@@ -30,6 +30,7 @@ from drf_spectacular.utils import (
extend_schema_field,
inline_serializer,
)
from guardian.shortcuts import get_objects_for_user
from rest_framework.authentication import SessionAuthentication
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
@@ -41,7 +42,6 @@ from rest_framework.fields import (
IntegerField,
ListField,
SerializerMethodField,
UUIDField,
)
from rest_framework.permissions import IsAuthenticated
from rest_framework.request import Request
@@ -72,14 +72,12 @@ from authentik.core.middleware import (
from authentik.core.models import (
USER_ATTRIBUTE_TOKEN_EXPIRING,
USER_PATH_SERVICE_ACCOUNT,
USERNAME_MAX_LENGTH,
Group,
Session,
Token,
TokenIntents,
User,
UserTypes,
default_token_duration,
)
from authentik.endpoints.connectors.agent.auth import AgentAuth
from authentik.events.models import Event, EventAction
@@ -89,7 +87,6 @@ from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER, FlowPlanner
from authentik.flows.views.executor import QS_KEY_TOKEN
from authentik.lib.avatars import get_avatar
from authentik.lib.utils.reflection import ConditionalInheritance
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
from authentik.rbac.api.roles import RoleSerializer
from authentik.rbac.decorators import permission_required
from authentik.rbac.models import Role, get_permission_choices
@@ -132,6 +129,7 @@ class UserSerializer(ModelSerializer):
groups = PrimaryKeyRelatedField(
allow_empty=True,
many=True,
source="ak_groups",
queryset=Group.objects.all().order_by("name"),
default=list,
)
@@ -145,7 +143,7 @@ class UserSerializer(ModelSerializer):
roles_obj = SerializerMethodField(allow_null=True)
uid = CharField(read_only=True)
username = CharField(
max_length=USERNAME_MAX_LENGTH,
max_length=150,
validators=[UniqueValidator(queryset=User.objects.all().order_by("username"))],
)

@@ -167,7 +165,7 @@ class UserSerializer(ModelSerializer):
def get_groups_obj(self, instance: User) -> list[PartialGroupSerializer] | None:
if not self._should_include_groups:
return None
return PartialGroupSerializer(instance.groups, many=True).data
return PartialGroupSerializer(instance.ak_groups, many=True).data

@extend_schema_field(RoleSerializer(many=True))
def get_roles_obj(self, instance: User) -> list[RoleSerializer] | None:
@@ -241,14 +239,14 @@ class UserSerializer(ModelSerializer):
and self.instance.type == UserTypes.INTERNAL_SERVICE_ACCOUNT
and user_type != UserTypes.INTERNAL_SERVICE_ACCOUNT.value
):
raise ValidationError(_("Can't change internal service account to other user type."))
raise ValidationError("Can't change internal service account to other user type.")
if not self.instance and user_type == UserTypes.INTERNAL_SERVICE_ACCOUNT.value:
raise ValidationError(_("Setting a user to internal service account is not allowed."))
raise ValidationError("Setting a user to internal service account is not allowed.")
return user_type

def validate(self, attrs: dict) -> dict:
if self.instance and self.instance.type == UserTypes.INTERNAL_SERVICE_ACCOUNT:
raise ValidationError(_("Can't modify internal service account users"))
raise ValidationError("Can't modify internal service account users")
return super().validate(attrs)

class Meta:
@@ -400,18 +398,6 @@ class UserServiceAccountSerializer(PassiveSerializer):
)

class UserRecoveryLinkSerializer(PassiveSerializer):
"""Payload to create a recovery link"""

token_duration = CharField(required=False)

class UserRecoveryEmailSerializer(UserRecoveryLinkSerializer):
"""Payload to create and email a recovery link"""

email_stage = UUIDField()

class UsersFilter(FilterSet):
"""Filter for users"""
@@ -430,12 +416,7 @@ class UsersFilter(FilterSet):
last_updated = IsoDateTimeFilter(field_name="last_updated")
last_updated__gt = IsoDateTimeFilter(field_name="last_updated", lookup_expr="gt")

last_login__lt = IsoDateTimeFilter(field_name="last_login", lookup_expr="lt")
last_login = IsoDateTimeFilter(field_name="last_login")
last_login__gt = IsoDateTimeFilter(field_name="last_login", lookup_expr="gt")
last_login__isnull = BooleanFilter(field_name="last_login", lookup_expr="isnull")

is_superuser = BooleanFilter(field_name="groups", method="filter_is_superuser")
is_superuser = BooleanFilter(field_name="ak_groups", method="filter_is_superuser")
uuid = UUIDFilter(field_name="uuid")

path = CharFilter(field_name="path")
@@ -444,12 +425,12 @@ class UsersFilter(FilterSet):
type = MultipleChoiceFilter(choices=UserTypes.choices, field_name="type")

groups_by_name = ModelMultipleChoiceFilter(
field_name="groups__name",
field_name="ak_groups__name",
to_field_name="name",
queryset=Group.objects.all().order_by("name"),
)
groups_by_pk = ModelMultipleChoiceFilter(
field_name="groups",
field_name="ak_groups",
queryset=Group.objects.all().order_by("name"),
)

@@ -465,22 +446,22 @@ class UsersFilter(FilterSet):

def filter_is_superuser(self, queryset, name, value):
if value:
return queryset.filter(groups__is_superuser=True).distinct()
return queryset.exclude(groups__is_superuser=True).distinct()
return queryset.filter(ak_groups__is_superuser=True).distinct()
return queryset.exclude(ak_groups__is_superuser=True).distinct()

def filter_attributes(self, queryset, name, value):
"""Filter attributes by query args"""
try:
value = loads(value)
except ValueError:
raise ValidationError(_("filter: failed to parse JSON")) from None
raise ValidationError(detail="filter: failed to parse JSON") from None
if not isinstance(value, dict):
raise ValidationError(_("filter: value must be key:value mapping"))
raise ValidationError(detail="filter: value must be key:value mapping")
qs = {}
for key, _value in value.items():
qs[f"attributes__{key}"] = _value
try:
__ = len(queryset.filter(**qs))
_ = len(queryset.filter(**qs))
return queryset.filter(**qs)
except ValueError:
return queryset
@@ -492,7 +473,6 @@ class UsersFilter(FilterSet):
"email",
"date_joined",
"last_updated",
"last_login",
"name",
"is_active",
"is_superuser",
@@ -513,7 +493,7 @@ class UserViewSet(
"""User Viewset"""

queryset = User.objects.none()
ordering = ["username", "date_joined", "last_updated", "last_login"]
ordering = ["username", "date_joined", "last_updated"]
serializer_class = UserSerializer
filterset_class = UsersFilter
search_fields = ["email", "name", "uuid", "username"]
@@ -544,7 +524,7 @@ class UserViewSet(
def get_queryset(self):
base_qs = User.objects.all().exclude_anonymous()
if self.serializer_class(context={"request": self.request})._should_include_groups:
base_qs = base_qs.prefetch_related("groups")
base_qs = base_qs.prefetch_related("ak_groups")
if self.serializer_class(context={"request": self.request})._should_include_roles:
base_qs = base_qs.prefetch_related("roles")
return base_qs
@@ -558,16 +538,14 @@ class UserViewSet(
def list(self, request, *args, **kwargs):
return super().list(request, *args, **kwargs)

def _create_recovery_link(
self, token_duration: str | None, for_email=False
) -> tuple[str, Token]:
def _create_recovery_link(self, for_email=False) -> tuple[str, Token]:
"""Create a recovery link (when the current brand has a recovery flow set),
that can either be shown to an admin or sent to the user directly"""
brand: Brand = self.request.brand
brand: Brand = self.request._request.brand
# Check that there is a recovery flow, if not return an error
flow = brand.flow_recovery
if not flow:
raise ValidationError({"non_field_errors": _("No recovery flow set.")})
raise ValidationError({"non_field_errors": "No recovery flow set."})
user: User = self.get_object()
planner = FlowPlanner(flow)
planner.allow_empty_flows = True
@@ -581,15 +559,11 @@ class UserViewSet(
)
except FlowNonApplicableException:
raise ValidationError(
{"non_field_errors": _("Recovery flow not applicable to user")}
{"non_field_errors": "Recovery flow not applicable to user"}
) from None
_plan = FlowToken.pickle(plan)
if for_email:
_plan = pickle_flow_token_for_email(plan)
expires = default_token_duration()
if token_duration:
timedelta_string_validator(token_duration)
expires = now() + timedelta_from_string(token_duration)
token, __ = FlowToken.objects.update_or_create(
identifier=f"{user.uid}-password-reset",
defaults={
@@ -597,7 +571,6 @@ class UserViewSet(
"flow": flow,
"_plan": _plan,
"revoke_on_execution": not for_email,
"expires": expires,
},
)
querystring = urlencode({QS_KEY_TOKEN: token.key})
@@ -745,60 +718,60 @@ class UserViewSet(

@permission_required("authentik_core.reset_user_password")
@extend_schema(
request=UserRecoveryLinkSerializer,
responses={
"200": LinkSerializer(many=False),
},
request=None,
)
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
@validate(UserRecoveryLinkSerializer)
def recovery(self, request: Request, pk: int, body: UserRecoveryLinkSerializer) -> Response:
def recovery(self, request: Request, pk: int) -> Response:
"""Create a temporary link that a user can use to recover their account"""
link, _ = self._create_recovery_link(
token_duration=body.validated_data.get("token_duration")
)
link, _ = self._create_recovery_link()
return Response({"link": link})

@permission_required("authentik_core.reset_user_password")
@extend_schema(
request=UserRecoveryEmailSerializer,
parameters=[
OpenApiParameter(
name="email_stage",
location=OpenApiParameter.QUERY,
type=OpenApiTypes.STR,
required=True,
)
],
responses={
"204": OpenApiResponse(description="Successfully sent recover email"),
},
request=None,
)
@action(detail=True, pagination_class=None, filter_backends=[], methods=["POST"])
@validate(UserRecoveryEmailSerializer)
def recovery_email(
self, request: Request, pk: int, body: UserRecoveryEmailSerializer
) -> Response:
def recovery_email(self, request: Request, pk: int) -> Response:
"""Send an email with a temporary link that a user can use to recover their account"""
email_error_message = _("User does not have an email address set.")
stage_error_message = _("Email stage not found.")
user: User = self.get_object()
if not user.email:
for_user: User = self.get_object()
if for_user.email == "":
LOGGER.debug("User doesn't have an email address")
raise ValidationError({"non_field_errors": email_error_message})
if not (stage := EmailStage.objects.filter(pk=body.validated_data["email_stage"]).first()):
LOGGER.debug("Email stage does not exist")
raise ValidationError({"non_field_errors": stage_error_message})
if not request.user.has_perm("authentik_stages_email.view_emailstage", stage):
LOGGER.debug("User has no view access to email stage")
raise ValidationError({"non_field_errors": stage_error_message})
link, token = self._create_recovery_link(
token_duration=body.validated_data.get("token_duration"), for_email=True
)
raise ValidationError({"non_field_errors": "User does not have an email address set."})
link, token = self._create_recovery_link(for_email=True)
# Lookup the email stage to assure the current user can access it
stages = get_objects_for_user(
request.user, "authentik_stages_email.view_emailstage"
).filter(pk=request.query_params.get("email_stage"))
if not stages.exists():
LOGGER.debug("Email stage does not exist/user has no permissions")
raise ValidationError({"non_field_errors": "Email stage does not exist."})
email_stage: EmailStage = stages.first()
message = TemplateEmailMessage(
subject=_(stage.subject),
to=[(user.name, user.email)],
template_name=stage.template,
language=user.locale(request),
subject=_(email_stage.subject),
to=[(for_user.name, for_user.email)],
template_name=email_stage.template,
language=for_user.locale(request),
template_context={
"url": link,
"user": user,
"user": for_user,
"expires": token.expires,
},
)
send_mails(stage, message)
send_mails(email_stage, message)
return Response(status=204)

@permission_required("authentik_core.impersonate")
@@ -16,7 +16,7 @@ def backport_is_backchannel(apps: Apps, schema_editor: BaseDatabaseSchemaEditor)
for obj in model.objects.using(db_alias).only("is_backchannel"):
obj.is_backchannel = True
obj.save()
except (DatabaseError, InternalError, ProgrammingError):
# The model might not have been migrated yet/doesn't exist yet
# so we don't need to worry about backporting the data
pass
|
||||
@@ -1,9 +1,101 @@
# Generated by Django 5.0.11 on 2025-01-27 12:58

import uuid
import pickle # nosec
from django.core import signing
from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, SESSION_KEY
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
from authentik.lib.migrations import progress_bar
from authentik.root.middleware import ClientIPMiddleware


class PickleSerializer:
|
||||
"""
|
||||
Simple wrapper around pickle to be used in signing.dumps()/loads() and
|
||||
cache backends.
|
||||
"""
|
||||
|
||||
def __init__(self, protocol=None):
|
||||
self.protocol = pickle.HIGHEST_PROTOCOL if protocol is None else protocol
|
||||
|
||||
def dumps(self, obj):
|
||||
"""Pickle data to be stored in redis"""
|
||||
return pickle.dumps(obj, self.protocol)
|
||||
|
||||
def loads(self, data):
|
||||
"""Unpickle data to be loaded from redis"""
|
||||
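# Sessions whose legacy payload cannot be unpickled are treated as empty rather than aborting the migration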
try:
|
||||
return pickle.loads(data) # nosec
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
|
||||
def _migrate_session(
|
||||
apps,
|
||||
db_alias,
|
||||
session_key,
|
||||
session_data,
|
||||
expires,
|
||||
):
|
||||
Session = apps.get_model("authentik_core", "Session")
|
||||
OldAuthenticatedSession = apps.get_model("authentik_core", "OldAuthenticatedSession")
|
||||
AuthenticatedSession = apps.get_model("authentik_core", "AuthenticatedSession")
|
||||
|
||||
old_auth_session = (
|
||||
OldAuthenticatedSession.objects.using(db_alias).filter(session_key=session_key).first()
|
||||
)
|
||||
|
||||
args = {
|
||||
"session_key": session_key,
|
||||
"expires": expires,
|
||||
"last_ip": ClientIPMiddleware.default_ip,
|
||||
"last_user_agent": "",
|
||||
"session_data": {},
|
||||
}
|
||||
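# Map legacy session-data keys onto the new model: the login stage's last IP plus
# last_user_agent/last_used become fields, Django auth keys are dropped, and
# everything else is kept inside session_data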
for k, v in session_data.items():
if k == "authentik/stages/user_login/last_ip":
args["last_ip"] = v
elif k in ["last_user_agent", "last_used"]:
args[k] = v
elif k in [SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY]:
pass
else:
args["session_data"][k] = v
if old_auth_session:
|
||||
args["last_user_agent"] = old_auth_session.last_user_agent
|
||||
args["last_used"] = old_auth_session.last_used
|
||||
|
||||
args["session_data"] = pickle.dumps(args["session_data"])
|
||||
session = Session.objects.using(db_alias).create(**args)
|
||||
|
||||
if old_auth_session:
|
||||
AuthenticatedSession.objects.using(db_alias).create(
|
||||
session=session,
|
||||
user=old_auth_session.user,
|
||||
uuid=old_auth_session.uuid,
|
||||
)
|
||||
|
||||
|
||||
def migrate_database_sessions(apps, schema_editor):
|
||||
DjangoSession = apps.get_model("sessions", "Session")
|
||||
db_alias = schema_editor.connection.alias
|
||||
|
||||
print("\nMigration database sessions, this might take a couple of minutes...")
|
||||
for django_session in progress_bar(DjangoSession.objects.using(db_alias).all()):
|
||||
session_data = signing.loads(
|
||||
django_session.session_data,
|
||||
salt="django.contrib.sessions.SessionStore",
|
||||
serializer=PickleSerializer,
|
||||
)
|
||||
_migrate_session(
|
||||
apps=apps,
|
||||
db_alias=db_alias,
|
||||
session_key=django_session.session_key,
|
||||
session_data=session_data,
|
||||
expires=django_session.expire_date,
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
@@ -113,4 +205,8 @@ class Migration(migrations.Migration):
|
||||
"verbose_name_plural": "Authenticated Sessions",
|
||||
},
|
||||
),
|
||||
migrations.RunPython(
|
||||
code=migrate_database_sessions,
|
||||
reverse_code=migrations.RunPython.noop,
|
||||
),
|
||||
]
|
||||
|
||||
@@ -1,47 +0,0 @@
|
||||
# Generated by Django 5.2.10 on 2026-01-19 21:46
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_core", "0056_user_roles"),
|
||||
("authentik_rbac", "0010_remove_role_group_alter_role_name"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="user",
|
||||
name="user_permissions",
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="group",
|
||||
name="roles",
|
||||
field=models.ManyToManyField(
|
||||
blank=True, related_name="groups", to="authentik_rbac.role"
|
||||
),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="user",
|
||||
name="groups",
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name="user",
|
||||
old_name="ak_groups",
|
||||
new_name="groups",
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="user",
|
||||
options={
|
||||
"permissions": [
|
||||
("reset_user_password", "Reset Password"),
|
||||
("impersonate", "Can impersonate other users"),
|
||||
("preview_user", "Can preview user data sent to providers"),
|
||||
("view_user_applications", "View applications the user has access to"),
|
||||
],
|
||||
"verbose_name": "User",
|
||||
"verbose_name_plural": "Users",
|
||||
},
|
||||
),
|
||||
]
|
||||
@@ -1,11 +1,9 @@
|
||||
"""authentik core models"""
|
||||
|
||||
import re
|
||||
import traceback
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import datetime
|
||||
from enum import StrEnum
|
||||
from hashlib import sha256
|
||||
from typing import Any, Self
|
||||
from typing import Any, Optional, Self
|
||||
from uuid import uuid4
|
||||
|
||||
import pgtrigger
|
||||
@@ -52,7 +50,6 @@ from authentik.tenants.models import DEFAULT_TOKEN_DURATION, DEFAULT_TOKEN_LENGT
|
||||
from authentik.tenants.utils import get_current_tenant, get_unique_identifier
|
||||
|
||||
LOGGER = get_logger()
|
||||
USERNAME_MAX_LENGTH = 150
|
||||
USER_PATH_SYSTEM_PREFIX = "goauthentik.io"
|
||||
_USER_ATTR_PREFIX = f"{USER_PATH_SYSTEM_PREFIX}/user"
|
||||
USER_ATTRIBUTE_DEBUG = f"{_USER_ATTR_PREFIX}/debug"
|
||||
@@ -186,7 +183,7 @@ class Group(SerializerModel, AttributesMixin):
|
||||
default=False, help_text=_("Users added to this group will be superusers.")
|
||||
)
|
||||
|
||||
roles = models.ManyToManyField("authentik_rbac.Role", related_name="groups", blank=True)
|
||||
roles = models.ManyToManyField("authentik_rbac.Role", related_name="ak_groups", blank=True)
|
||||
|
||||
parents = models.ManyToManyField(
|
||||
"Group",
|
||||
@@ -228,14 +225,14 @@ class Group(SerializerModel, AttributesMixin):
|
||||
# in the LDAP Outpost we use the last 5 chars so match here
|
||||
return int(str(self.pk.int)[:5])
|
||||
|
||||
def is_member(self, user: User) -> bool:
|
||||
def is_member(self, user: "User") -> bool:
|
||||
"""Recursively check if `user` is member of us, or any parent."""
|
||||
return user.all_groups().filter(group_uuid=self.group_uuid).exists()
|
||||
|
||||
def all_roles(self) -> QuerySet[Role]:
|
||||
"""Get all roles of this group and all of its ancestors."""
|
||||
return Role.objects.filter(
|
||||
groups__in=Group.objects.filter(pk=self.pk).with_ancestors()
|
||||
ak_groups__in=Group.objects.filter(pk=self.pk).with_ancestors()
|
||||
).distinct()
|
||||
|
||||
def get_managed_role(self, create=False):
|
||||
@@ -243,7 +240,7 @@ class Group(SerializerModel, AttributesMixin):
|
||||
name = managed_role_name(self)
|
||||
role, created = Role.objects.get_or_create(name=name, managed=name)
|
||||
if created:
|
||||
role.groups.add(self)
|
||||
role.ak_groups.add(self)
|
||||
return role
|
||||
else:
|
||||
return Role.objects.filter(name=managed_role_name(self)).first()
|
||||
@@ -358,17 +355,13 @@ class UserManager(DjangoUserManager):
|
||||
class User(SerializerModel, AttributesMixin, AbstractUser):
|
||||
"""authentik User model, based on django's contrib auth user model."""
|
||||
|
||||
# Overwriting PermissionsMixin: permissions are handled by roles.
|
||||
# (This knowingly violates the Liskov substitution principle. It is better to fail loudly.)
|
||||
user_permissions = None
|
||||
|
||||
uuid = models.UUIDField(default=uuid4, editable=False, unique=True)
|
||||
name = models.TextField(help_text=_("User's display name."))
|
||||
path = models.TextField(default="users")
|
||||
type = models.TextField(choices=UserTypes.choices, default=UserTypes.INTERNAL)
|
||||
|
||||
sources = models.ManyToManyField("Source", through="UserSourceConnection")
|
||||
groups = models.ManyToManyField("Group", related_name="users")
|
||||
ak_groups = models.ManyToManyField("Group", related_name="users")
|
||||
roles = models.ManyToManyField("authentik_rbac.Role", related_name="users", blank=True)
|
||||
password_change_date = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
@@ -382,6 +375,8 @@ class User(SerializerModel, AttributesMixin, AbstractUser):
|
||||
permissions = [
|
||||
("reset_user_password", _("Reset Password")),
|
||||
("impersonate", _("Can impersonate other users")),
|
||||
("assign_user_permissions", _("Can assign permissions to users")),
|
||||
("unassign_user_permissions", _("Can unassign permissions from users")),
|
||||
("preview_user", _("Can preview user data sent to providers")),
|
||||
("view_user_applications", _("View applications the user has access to")),
|
||||
]
|
||||
@@ -405,11 +400,11 @@ class User(SerializerModel, AttributesMixin, AbstractUser):
|
||||
|
||||
def all_groups(self) -> QuerySet[Group]:
|
||||
"""Recursively get all groups this user is a member of."""
|
||||
return self.groups.all().with_ancestors()
|
||||
return self.ak_groups.all().with_ancestors()
|
||||
|
||||
def all_roles(self) -> QuerySet[Role]:
|
||||
"""Get all roles of this user and all of its groups (recursively)."""
|
||||
return Role.objects.filter(Q(users=self) | Q(groups__in=self.all_groups())).distinct()
|
||||
return Role.objects.filter(Q(users=self) | Q(ak_groups__in=self.all_groups())).distinct()
|
||||
|
||||
def get_managed_role(self, create=False):
|
||||
if create:
|
||||
@@ -471,7 +466,7 @@ class User(SerializerModel, AttributesMixin, AbstractUser):
|
||||
always_merger.merge(final_attributes, self.attributes)
|
||||
return final_attributes
|
||||
|
||||
def app_entitlements(self, app: Application | None) -> QuerySet[ApplicationEntitlement]:
|
||||
def app_entitlements(self, app: "Application | None") -> QuerySet["ApplicationEntitlement"]:
|
||||
"""Get all entitlements this user has for `app`."""
|
||||
if not app:
|
||||
return []
|
||||
@@ -490,7 +485,7 @@ class User(SerializerModel, AttributesMixin, AbstractUser):
|
||||
).order_by("name")
|
||||
return qs
|
||||
|
||||
def app_entitlements_attributes(self, app: Application | None) -> dict:
|
||||
def app_entitlements_attributes(self, app: "Application | None") -> dict:
|
||||
"""Get a dictionary containing all merged attributes from app entitlements for `app`."""
|
||||
final_attributes = {}
|
||||
for attrs in self.app_entitlements(app).values_list("attributes", flat=True):
|
||||
@@ -513,54 +508,6 @@ class User(SerializerModel, AttributesMixin, AbstractUser):
|
||||
"""superuser == staff user"""
|
||||
return self.is_superuser # type: ignore
|
||||
|
||||
# TODO: remove this after 2026.
|
||||
@property
|
||||
def ak_groups(self):
|
||||
"""This is a proxy for a renamed, deprecated field."""
|
||||
from authentik.events.models import Event, EventAction
|
||||
|
||||
deprecation = "authentik.core.models.User.ak_groups"
|
||||
replacement = "authentik.core.models.User.groups"
|
||||
message_logger = (
|
||||
f"{deprecation} is deprecated and will be removed in a future version of "
|
||||
f"authentik. Please use {replacement} instead."
|
||||
)
|
||||
message_event = (
f"{message_logger} This event will not be repeated until it expires (by "
"default: in 30 days). See authentik logs for every invocation of this "
"deprecation."
)
stacktrace = traceback.format_stack()
|
||||
# The last line is this function, the next-to-last line is its caller
|
||||
cause = stacktrace[-2] if len(stacktrace) > 1 else "Unknown, see stacktrace in logs"
|
||||
if search := re.search(r'"(.*?)"', cause):
|
||||
cause = f"Property mapping or Expression policy named {search.group(1)}"
|
||||
|
||||
LOGGER.warning(
|
||||
"deprecation used",
|
||||
message=message_logger,
|
||||
deprecation=deprecation,
|
||||
replacement=replacement,
|
||||
cause=cause,
|
||||
stacktrace=stacktrace,
|
||||
)
|
||||
if not Event.filter_not_expired(
|
||||
action=EventAction.CONFIGURATION_WARNING,
|
||||
context__deprecation=deprecation,
|
||||
context__cause=cause,
|
||||
).exists():
|
||||
event = Event.new(
|
||||
EventAction.CONFIGURATION_WARNING,
|
||||
deprecation=deprecation,
|
||||
replacement=replacement,
|
||||
message=message_event,
|
||||
cause=cause,
|
||||
)
|
||||
event.expires = datetime.now() + timedelta(days=30)
|
||||
event.save()
|
||||
|
||||
return self.groups
|
||||
|
||||
def set_password(self, raw_password, signal=True, sender=None, request=None):
|
||||
if self.pk and signal:
|
||||
from authentik.core.signals import password_changed
|
||||
@@ -707,7 +654,7 @@ class BackchannelProvider(Provider):
|
||||
|
||||
|
||||
class ApplicationQuerySet(QuerySet):
|
||||
def with_provider(self) -> QuerySet[Application]:
|
||||
def with_provider(self) -> "QuerySet[Application]":
|
||||
qs = self.select_related("provider")
|
||||
for subclass in Provider.objects.get_queryset()._get_subclasses_recurse(Provider):
|
||||
qs = qs.select_related(f"provider__{subclass}")
|
||||
@@ -1015,7 +962,7 @@ class Source(ManagedModel, SerializerModel, PolicyBindingModel):
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
def property_mapping_type(self) -> type[PropertyMapping]:
|
||||
def property_mapping_type(self) -> "type[PropertyMapping]":
|
||||
"""Return property mapping type used by this object"""
|
||||
if self.managed == self.MANAGED_INBUILT:
|
||||
from authentik.core.models import PropertyMapping
|
||||
@@ -1136,7 +1083,7 @@ class ExpiringModel(models.Model):
|
||||
return self.delete(*args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def filter_not_expired(cls, **kwargs) -> QuerySet[Self]:
|
||||
def filter_not_expired(cls, **kwargs) -> QuerySet["Self"]:
|
||||
"""Filer for tokens which are not expired yet or are not expiring,
|
||||
and match filters in `kwargs`"""
|
||||
for obj in cls.objects.filter(**kwargs).filter(Q(expires__lt=now(), expiring=True)):
|
||||
@@ -1332,7 +1279,7 @@ class AuthenticatedSession(SerializerModel):
|
||||
return f"Authenticated Session {str(self.pk)[:10]}"
|
||||
|
||||
@staticmethod
|
||||
def from_request(request: HttpRequest, user: User) -> AuthenticatedSession | None:
|
||||
def from_request(request: HttpRequest, user: User) -> Optional["AuthenticatedSession"]:
|
||||
"""Create a new session from a http request"""
|
||||
if not hasattr(request, "session") or not request.session.exists(
|
||||
request.session.session_key
|
||||
|
||||
@@ -66,7 +66,7 @@ class SessionStore(SessionBase):
def decode(self, session_data):
try:
return pickle.loads(session_data) # nosec
except (pickle.PickleError, AttributeError, TypeError):
# PickleError, ValueError - unpickling exceptions
# AttributeError - can happen when Django model fields (e.g., FileField) are unpickled
# and their descriptors fail to initialize (e.g., missing storage)
|
||||
@@ -51,7 +51,7 @@ def user_logged_in_session(sender, request: HttpRequest, user: User, **_):
|
||||
if session:
|
||||
session.save()
|
||||
|
||||
if not RefreshOtherFlowsAfterAuthentication.get():
|
||||
if not RefreshOtherFlowsAfterAuthentication().get():
|
||||
return
|
||||
layer = get_channel_layer()
|
||||
device_cookie = request.COOKIES.get("authentik_device")
|
||||
@@ -63,7 +63,7 @@ def user_logged_in_session(sender, request: HttpRequest, user: User, **_):
|
||||
|
||||
|
||||
@receiver(post_delete, sender=AuthenticatedSession)
|
||||
def authenticated_session_delete(sender: type[Model], instance: AuthenticatedSession, **_):
|
||||
def authenticated_session_delete(sender: type[Model], instance: "AuthenticatedSession", **_):
|
||||
"""Delete session when authenticated session is deleted"""
|
||||
Session.objects.filter(session_key=instance.pk).delete()
|
||||
|
||||
|
||||
@@ -392,10 +392,10 @@ class GroupUpdateStage(StageView):
|
||||
groups.append(group)
|
||||
|
||||
with transaction.atomic():
|
||||
self.user.groups.remove(
|
||||
*self.user.groups.filter(groupsourceconnection__source=self.source)
|
||||
self.user.ak_groups.remove(
|
||||
*self.user.ak_groups.filter(groupsourceconnection__source=self.source)
|
||||
)
|
||||
self.user.groups.add(*groups)
|
||||
self.user.ak_groups.add(*groups)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -49,7 +49,7 @@ class SourceMapper:
|
||||
def build_object_properties(
|
||||
self,
|
||||
object_type: type[User | Group],
|
||||
manager: PropertyMappingManager | None = None,
|
||||
manager: "PropertyMappingManager | None" = None,
|
||||
user: User | None = None,
|
||||
request: HttpRequest | None = None,
|
||||
**kwargs,
|
||||
|
||||
@@ -44,24 +44,19 @@
|
||||
{% endblock %}
|
||||
</div>
|
||||
</main>
|
||||
<footer
|
||||
name="site-footer"
|
||||
aria-label="{% trans 'Site footer' %}"
|
||||
class="pf-c-login__footer pf-m-dark">
|
||||
<div name="flow-links" aria-label="{% trans 'Flow links' %}">
|
||||
<ul class="pf-c-list pf-m-inline" part="list">
|
||||
{% for link in footer_links %}
|
||||
<li part="list-item">
|
||||
<a part="list-item-link" href="{{ link.href }}">{{ link.name }}</a>
|
||||
</li>
|
||||
{% endfor %}
|
||||
<li part="list-item">
|
||||
<span>
|
||||
{% trans 'Powered by authentik' %}
|
||||
</span>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
<footer aria-label="Site footer" class="pf-c-login__footer pf-m-dark">
|
||||
<ul class="pf-c-list pf-m-inline">
|
||||
{% for link in footer_links %}
|
||||
<li>
|
||||
<a href="{{ link.href }}">{{ link.name }}</a>
|
||||
</li>
|
||||
{% endfor %}
|
||||
<li>
|
||||
<span>
|
||||
{% trans 'Powered by authentik' %}
|
||||
</span>
|
||||
</li>
|
||||
</ul>
|
||||
</footer>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -38,7 +38,7 @@ class TestApplicationEntitlements(APITestCase):
|
||||
def test_group(self):
|
||||
"""Test direct group"""
|
||||
group = Group.objects.create(name=generate_id())
|
||||
self.user.groups.add(group)
|
||||
self.user.ak_groups.add(group)
|
||||
ent = ApplicationEntitlement.objects.create(app=self.app, name=generate_id())
|
||||
PolicyBinding.objects.create(target=ent, group=group, order=0)
|
||||
ents = self.user.app_entitlements(self.app)
|
||||
@@ -50,7 +50,7 @@ class TestApplicationEntitlements(APITestCase):
|
||||
parent = Group.objects.create(name=generate_id())
|
||||
group = Group.objects.create(name=generate_id())
|
||||
group.parents.add(parent)
|
||||
self.user.groups.add(group)
|
||||
self.user.ak_groups.add(group)
|
||||
ent = ApplicationEntitlement.objects.create(app=self.app, name=generate_id())
|
||||
PolicyBinding.objects.create(target=ent, group=parent, order=0)
|
||||
ents = self.user.app_entitlements(self.app)
|
||||
|
||||
@@ -122,8 +122,8 @@ class TestGroupsAPI(APITestCase):
|
||||
def test_superuser_update_no_perm(self):
|
||||
"""Test updating a superuser group without permission"""
|
||||
group = Group.objects.create(name=generate_id(), is_superuser=True)
|
||||
self.login_user.assign_perms_to_managed_role("authentik_core.view_group", group)
|
||||
self.login_user.assign_perms_to_managed_role("authentik_core.change_group", group)
|
||||
self.login_user.assign_perms_to_managed_role("view_group", group)
|
||||
self.login_user.assign_perms_to_managed_role("change_group", group)
|
||||
self.client.force_login(self.login_user)
|
||||
res = self.client.patch(
|
||||
reverse("authentik_api:group-detail", kwargs={"pk": group.pk}),
|
||||
@@ -139,8 +139,8 @@ class TestGroupsAPI(APITestCase):
|
||||
"""Test updating a superuser group without permission
|
||||
and without changing the superuser status"""
|
||||
group = Group.objects.create(name=generate_id(), is_superuser=True)
|
||||
self.login_user.assign_perms_to_managed_role("authentik_core.view_group", group)
|
||||
self.login_user.assign_perms_to_managed_role("authentik_core.change_group", group)
|
||||
self.login_user.assign_perms_to_managed_role("view_group", group)
|
||||
self.login_user.assign_perms_to_managed_role("change_group", group)
|
||||
self.client.force_login(self.login_user)
|
||||
res = self.client.patch(
|
||||
reverse("authentik_api:group-detail", kwargs={"pk": group.pk}),
|
||||
|
||||
@@ -54,7 +54,7 @@ class TestSourceFlowManager(FlowTestCase):
|
||||
)
|
||||
self.assertTrue(stage.handle_groups())
|
||||
self.assertTrue(Group.objects.filter(name="group 1").exists())
|
||||
self.assertTrue(self.user.groups.filter(name="group 1").exists())
|
||||
self.assertTrue(self.user.ak_groups.filter(name="group 1").exists())
|
||||
self.assertTrue(
|
||||
GroupOAuthSourceConnection.objects.filter(
|
||||
group=Group.objects.get(name="group 1"), source=self.source
|
||||
@@ -88,7 +88,7 @@ class TestSourceFlowManager(FlowTestCase):
|
||||
)
|
||||
self.assertTrue(stage.handle_groups())
|
||||
self.assertTrue(Group.objects.filter(name="group 1").exists())
|
||||
self.assertTrue(self.user.groups.filter(name="group 1").exists())
|
||||
self.assertTrue(self.user.ak_groups.filter(name="group 1").exists())
|
||||
self.assertTrue(
|
||||
GroupOAuthSourceConnection.objects.filter(
|
||||
group=Group.objects.get(name="group 1"), source=self.source
|
||||
@@ -123,7 +123,7 @@ class TestSourceFlowManager(FlowTestCase):
|
||||
)
|
||||
self.assertTrue(stage.handle_groups())
|
||||
self.assertTrue(Group.objects.filter(name="group 1").exists())
|
||||
self.assertTrue(self.user.groups.filter(name="group 1").exists())
|
||||
self.assertTrue(self.user.ak_groups.filter(name="group 1").exists())
|
||||
self.assertTrue(
|
||||
GroupOAuthSourceConnection.objects.filter(group=group, source=self.source).exists()
|
||||
)
|
||||
@@ -155,7 +155,7 @@ class TestSourceFlowManager(FlowTestCase):
|
||||
)
|
||||
self.assertTrue(stage.handle_groups())
|
||||
self.assertTrue(Group.objects.filter(name="group 1").exists())
|
||||
self.assertTrue(self.user.groups.filter(name="group 1").exists())
|
||||
self.assertTrue(self.user.ak_groups.filter(name="group 1").exists())
|
||||
self.assertTrue(
|
||||
GroupOAuthSourceConnection.objects.filter(
|
||||
group=Group.objects.get(name="group 1"), source=self.source
|
||||
@@ -189,7 +189,7 @@ class TestSourceFlowManager(FlowTestCase):
|
||||
request=request,
|
||||
)
|
||||
self.assertFalse(stage.handle_groups())
|
||||
self.assertFalse(self.user.groups.filter(name="group 1").exists())
|
||||
self.assertFalse(self.user.ak_groups.filter(name="group 1").exists())
|
||||
self.assertFalse(
|
||||
GroupOAuthSourceConnection.objects.filter(group=group, source=self.source).exists()
|
||||
)
|
||||
@@ -201,7 +201,7 @@ class TestSourceFlowManager(FlowTestCase):
|
||||
other_group = Group.objects.create(name="other group")
|
||||
old_group = Group.objects.create(name="old group")
|
||||
new_group = Group.objects.create(name="new group")
|
||||
self.user.groups.set([other_group, old_group])
|
||||
self.user.ak_groups.set([other_group, old_group])
|
||||
GroupOAuthSourceConnection.objects.create(
|
||||
group=old_group, source=self.source, identifier=old_group.name
|
||||
)
|
||||
@@ -231,7 +231,7 @@ class TestSourceFlowManager(FlowTestCase):
|
||||
request=request,
|
||||
)
|
||||
self.assertTrue(stage.handle_groups())
|
||||
self.assertFalse(self.user.groups.filter(name="old group").exists())
|
||||
self.assertTrue(self.user.groups.filter(name="other group").exists())
|
||||
self.assertTrue(self.user.groups.filter(name="new group").exists())
|
||||
self.assertEqual(self.user.groups.count(), 2)
|
||||
self.assertFalse(self.user.ak_groups.filter(name="old group").exists())
|
||||
self.assertTrue(self.user.ak_groups.filter(name="other group").exists())
|
||||
self.assertTrue(self.user.ak_groups.filter(name="new group").exists())
|
||||
self.assertEqual(self.user.ak_groups.count(), 2)
|
||||
|
||||
@@ -5,10 +5,9 @@ from django.test import TestCase
|
||||
from authentik.core.models import Group, PropertyMapping, Source, User
|
||||
from authentik.core.sources.mapper import SourceMapper
|
||||
from authentik.lib.generators import generate_id
|
||||
from authentik.lib.models import InternallyManagedMixin
|
||||
|
||||
|
||||
class ProxySource(InternallyManagedMixin, Source):
|
||||
class ProxySource(Source):
|
||||
@property
|
||||
def property_mapping_type(self):
|
||||
return PropertyMapping
|
||||
|
||||
@@ -183,16 +183,16 @@ class TestTokenAPI(APITestCase):
|
||||
self.assertEqual(len(body["results"]), 1)
|
||||
self.assertEqual(body["results"][0]["identifier"], token_should.identifier)
|
||||
|
||||
def test_list_with_permission(self):
|
||||
"""Test Token List (Test with `view_token` permission)"""
|
||||
def test_list_admin(self):
|
||||
"""Test Token List (Test with admin auth)"""
|
||||
Token.objects.all().delete()
|
||||
self.client.force_login(self.admin)
|
||||
token_should: Token = Token.objects.create(
|
||||
identifier="test", expiring=False, user=self.user
|
||||
)
|
||||
token_should_not: Token = Token.objects.create(
|
||||
identifier="test-2", expiring=False, user=get_anonymous_user()
|
||||
)
|
||||
self.user.assign_perms_to_managed_role("authentik_core.view_token")
|
||||
response = self.client.get(reverse("authentik_api:token-list"))
|
||||
body = loads(response.content)
|
||||
self.assertEqual(len(body["results"]), 2)
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
from django.test.testcases import TestCase
|
||||
|
||||
from authentik.core.models import User
|
||||
from authentik.events.models import Event
|
||||
from authentik.lib.generators import generate_id
|
||||
|
||||
|
||||
@@ -19,17 +18,3 @@ class TestUsers(TestCase):
|
||||
self.assertTrue(user.has_perm(perm))
|
||||
user.remove_perms_from_managed_role(perm)
|
||||
self.assertFalse(user.has_perm(perm))
|
||||
|
||||
def test_user_ak_groups(self):
|
||||
"""Test user.ak_groups is a proxy for user.groups"""
|
||||
user = User.objects.create(username=generate_id())
|
||||
self.assertEqual(user.ak_groups, user.groups)
|
||||
|
||||
def test_user_ak_groups_event(self):
|
||||
"""Test user.ak_groups creates exactly one event"""
|
||||
user = User.objects.create(username=generate_id())
|
||||
self.assertEqual(Event.objects.count(), 0)
|
||||
user.ak_groups.all()
|
||||
self.assertEqual(Event.objects.count(), 1)
|
||||
user.ak_groups.all()
|
||||
self.assertEqual(Event.objects.count(), 1)
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
"""Test Users API"""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import datetime
|
||||
from json import loads
|
||||
|
||||
from django.urls.base import reverse
|
||||
from django.utils.timezone import now
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from authentik.brands.models import Brand
|
||||
@@ -128,62 +127,13 @@ class TestUsersAPI(APITestCase):
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
def test_recovery_duration(self):
|
||||
"""Test user recovery token duration"""
|
||||
Token.objects.all().delete()
|
||||
flow = create_test_flow(
|
||||
FlowDesignation.RECOVERY,
|
||||
authentication=FlowAuthenticationRequirement.REQUIRE_UNAUTHENTICATED,
|
||||
)
|
||||
brand: Brand = create_test_brand()
|
||||
brand.flow_recovery = flow
|
||||
brand.save()
|
||||
self.client.force_login(self.admin)
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:user-recovery", kwargs={"pk": self.user.pk}),
|
||||
data={"token_duration": "days=33"},
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
expires = Token.objects.first().expires
|
||||
expected_expires = now() + timedelta(days=33)
|
||||
self.assertTrue(timedelta(minutes=-1) < expected_expires - expires < timedelta(minutes=1))
|
||||
|
||||
def test_recovery_duration_update(self):
|
||||
"""Test user recovery token duration update"""
|
||||
Token.objects.all().delete()
|
||||
flow = create_test_flow(
|
||||
FlowDesignation.RECOVERY,
|
||||
authentication=FlowAuthenticationRequirement.REQUIRE_UNAUTHENTICATED,
|
||||
)
|
||||
brand: Brand = create_test_brand()
|
||||
brand.flow_recovery = flow
|
||||
brand.save()
|
||||
self.client.force_login(self.admin)
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:user-recovery", kwargs={"pk": self.user.pk}),
|
||||
data={"token_duration": "days=33"},
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
expires = Token.objects.first().expires
|
||||
expected_expires = now() + timedelta(days=33)
|
||||
self.assertTrue(timedelta(minutes=-1) < expected_expires - expires < timedelta(minutes=1))
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:user-recovery", kwargs={"pk": self.user.pk}),
|
||||
data={"token_duration": "days=66"},
|
||||
)
|
||||
expires = Token.objects.first().expires
|
||||
expected_expires = now() + timedelta(days=66)
|
||||
self.assertTrue(timedelta(minutes=-1) < expected_expires - expires < timedelta(minutes=1))
|
||||
|
||||
def test_recovery_email_no_flow(self):
|
||||
"""Test user recovery link (no recovery flow set)"""
|
||||
self.client.force_login(self.admin)
|
||||
self.user.email = ""
|
||||
self.user.save()
|
||||
stage = EmailStage.objects.create(name="email")
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk}),
|
||||
data={"email_stage": stage.pk},
|
||||
reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk})
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertJSONEqual(
|
||||
@@ -192,8 +142,7 @@ class TestUsersAPI(APITestCase):
|
||||
self.user.email = "foo@bar.baz"
|
||||
self.user.save()
|
||||
response = self.client.post(
|
||||
reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk}),
|
||||
data={"email_stage": stage.pk},
|
||||
reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk})
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertJSONEqual(response.content, {"non_field_errors": "No recovery flow set."})
|
||||
@@ -211,7 +160,7 @@ class TestUsersAPI(APITestCase):
|
||||
reverse("authentik_api:user-recovery-email", kwargs={"pk": self.user.pk})
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
self.assertJSONEqual(response.content, {"email_stage": ["This field is required."]})
|
||||
self.assertJSONEqual(response.content, {"non_field_errors": "Email stage does not exist."})
|
||||
|
||||
def test_recovery_email(self):
|
||||
"""Test user recovery link"""
|
||||
@@ -229,8 +178,8 @@ class TestUsersAPI(APITestCase):
|
||||
reverse(
|
||||
"authentik_api:user-recovery-email",
|
||||
kwargs={"pk": self.user.pk},
|
||||
),
|
||||
data={"email_stage": stage.pk},
|
||||
)
|
||||
+ f"?email_stage={stage.pk}"
|
||||
)
|
||||
self.assertEqual(response.status_code, 204)
|
||||
|
||||
@@ -791,90 +740,3 @@ class TestUsersAPI(APITestCase):
|
||||
response.content,
|
||||
{"name": ["This field must be unique."]},
|
||||
)
|
||||
|
||||
def test_filter_last_login(self):
|
||||
"""Test API filtering by last_login"""
|
||||
from datetime import timedelta
|
||||
|
||||
from django.utils import timezone
|
||||
|
||||
User.objects.all().delete()
|
||||
admin = create_test_admin_user()
|
||||
self.client.force_login(admin)
|
||||
|
||||
# Create users with different last_login values
|
||||
user_recent = create_test_user()
|
||||
user_recent.last_login = timezone.now()
|
||||
user_recent.save()
|
||||
|
||||
user_old = create_test_user()
|
||||
user_old.last_login = timezone.now() - timedelta(days=400) # Over 1 year ago
|
||||
user_old.save()
|
||||
|
||||
user_never = create_test_user()
|
||||
user_never.last_login = None # Never logged in
|
||||
user_never.save()
|
||||
|
||||
# Filter users who logged in before 1 year ago
|
||||
one_year_ago = (timezone.now() - timedelta(days=365)).isoformat()
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:user-list"),
|
||||
data={"last_login__lt": one_year_ago},
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
body = loads(response.content)
|
||||
self.assertEqual(len(body["results"]), 1)
|
||||
self.assertEqual(body["results"][0]["pk"], user_old.pk)
|
||||
|
||||
# Filter users who have never logged in
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:user-list"),
|
||||
data={"last_login__isnull": True},
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
body = loads(response.content)
|
||||
# Should include user_never and admin (who hasn't logged in via the app)
|
||||
pks = [r["pk"] for r in body["results"]]
|
||||
self.assertIn(user_never.pk, pks)
|
||||
|
||||
def test_sort_by_last_login(self):
|
||||
"""Test API sorting by last_login"""
|
||||
from datetime import timedelta
|
||||
|
||||
from django.utils import timezone
|
||||
|
||||
User.objects.all().delete()
|
||||
admin = create_test_admin_user()
|
||||
self.client.force_login(admin)
|
||||
|
||||
user1 = create_test_user()
|
||||
user1.last_login = timezone.now() - timedelta(days=10)
|
||||
user1.save()
|
||||
|
||||
user2 = create_test_user()
|
||||
user2.last_login = timezone.now() - timedelta(days=5)
|
||||
user2.save()
|
||||
|
||||
# Ascending order (oldest first)
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:user-list"),
|
||||
data={"ordering": "last_login"},
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
body = loads(response.content)
|
||||
# Users with null last_login come first, then user1 (older), then user2 (newer)
|
||||
self.assertEqual(len(body["results"]), 3)
|
||||
|
||||
# Descending order (newest first)
|
||||
response = self.client.get(
|
||||
reverse("authentik_api:user-list"),
|
||||
data={"ordering": "-last_login"},
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
body = loads(response.content)
|
||||
# user2 should come before user1 (more recent login)
|
||||
pks = [r["pk"] for r in body["results"]]
|
||||
self.assertIn(user1.pk, pks)
|
||||
self.assertIn(user2.pk, pks)
|
||||
# Verify user2 comes before user1 in descending order
|
||||
self.assertLess(pks.index(user2.pk), pks.index(user1.pk))
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
"""Crypto API Views"""
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives.serialization import load_pem_private_key
|
||||
from cryptography.x509 import load_pem_x509_certificate
|
||||
@@ -13,12 +15,14 @@ from drf_spectacular.utils import (
|
||||
OpenApiParameter,
|
||||
OpenApiResponse,
|
||||
extend_schema,
|
||||
extend_schema_field,
|
||||
)
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.fields import (
|
||||
CharField,
|
||||
ChoiceField,
|
||||
DateTimeField,
|
||||
IntegerField,
|
||||
SerializerMethodField,
|
||||
)
|
||||
@@ -47,15 +51,59 @@ LOGGER = get_logger()
|
||||
class CertificateKeyPairSerializer(ModelSerializer):
|
||||
"""CertificateKeyPair Serializer"""
|
||||
|
||||
fingerprint_sha256 = SerializerMethodField()
|
||||
fingerprint_sha1 = SerializerMethodField()
|
||||
|
||||
cert_expiry = SerializerMethodField()
|
||||
cert_subject = SerializerMethodField()
|
||||
private_key_available = SerializerMethodField()
|
||||
key_type = SerializerMethodField()
|
||||
|
||||
certificate_download_url = SerializerMethodField()
|
||||
private_key_download_url = SerializerMethodField()
|
||||
|
||||
@property
|
||||
def _should_include_details(self) -> bool:
|
||||
request: Request = self.context.get("request", None)
|
||||
if not request:
|
||||
return True
|
||||
return str(request.query_params.get("include_details", "true")).lower() == "true"
|
||||
|
||||
def get_fingerprint_sha256(self, instance: CertificateKeyPair) -> str | None:
|
||||
"Get certificate Hash (SHA256)"
|
||||
if not self._should_include_details:
|
||||
return None
|
||||
return instance.fingerprint_sha256
|
||||
|
||||
def get_fingerprint_sha1(self, instance: CertificateKeyPair) -> str | None:
|
||||
"Get certificate Hash (SHA1)"
|
||||
if not self._should_include_details:
|
||||
return None
|
||||
return instance.fingerprint_sha1
|
||||
|
||||
def get_cert_expiry(self, instance: CertificateKeyPair) -> datetime | None:
|
||||
"Get certificate expiry"
|
||||
if not self._should_include_details:
|
||||
return None
|
||||
return DateTimeField().to_representation(instance.certificate.not_valid_after_utc)
|
||||
|
||||
def get_cert_subject(self, instance: CertificateKeyPair) -> str | None:
|
||||
"""Get certificate subject as full rfc4514"""
|
||||
if not self._should_include_details:
|
||||
return None
|
||||
return instance.certificate.subject.rfc4514_string()
|
||||
|
||||
def get_private_key_available(self, instance: CertificateKeyPair) -> bool:
|
||||
"""Show if this keypair has a private key configured or not"""
|
||||
return instance.key_data != "" and instance.key_data is not None
|
||||
|
||||
@extend_schema_field(ChoiceField(choices=KeyType.choices, allow_null=True))
|
||||
def get_key_type(self, instance: CertificateKeyPair) -> str | None:
|
||||
"""Get the key algorithm type from the certificate's public key"""
|
||||
if not self._should_include_details:
|
||||
return None
|
||||
return instance.key_type
|
||||
|
||||
def get_certificate_download_url(self, instance: CertificateKeyPair) -> str:
|
||||
"""Get URL to download certificate"""
|
||||
return (
|
||||
@@ -127,11 +175,6 @@ class CertificateKeyPairSerializer(ModelSerializer):
|
||||
"managed": {"read_only": True},
|
||||
"key_data": {"write_only": True},
|
||||
"certificate_data": {"write_only": True},
|
||||
"fingerprint_sha256": {"read_only": True},
|
||||
"fingerprint_sha1": {"read_only": True},
|
||||
"cert_expiry": {"read_only": True},
|
||||
"cert_subject": {"read_only": True},
|
||||
"key_type": {"read_only": True},
|
||||
}
|
||||
|
||||
|
||||
@@ -173,12 +216,17 @@ class CertificateKeyPairFilter(FilterSet):
|
||||
return queryset.exclude(key_data__exact="")
|
||||
|
||||
def filter_key_type(self, queryset, name, value): # pragma: no cover
|
||||
"""Filter certificates by key type using the stored database field"""
|
||||
"""Filter certificates by key type using the public key from the certificate"""
|
||||
if not value:
|
||||
return queryset
|
||||
|
||||
# value is a list of KeyType enum values from MultipleChoiceFilter
|
||||
return queryset.filter(key_type__in=value)
|
||||
filtered_pks = []
|
||||
for cert in queryset:
|
||||
if cert.key_type in value:
|
||||
filtered_pks.append(cert.pk)
|
||||
|
||||
return queryset.filter(pk__in=filtered_pks)
|
||||
|
||||
class Meta:
|
||||
model = CertificateKeyPair
|
||||
@@ -215,6 +263,7 @@ class CertificateKeyPairViewSet(UsedByMixin, ModelViewSet):
|
||||
"Can be specified multiple times (e.g. '?key_type=rsa&key_type=ec')"
|
||||
),
|
||||
),
|
||||
OpenApiParameter("include_details", bool, default=True),
|
||||
]
|
||||
)
|
||||
def list(self, request, *args, **kwargs):
|
||||
|
||||
@@ -7,8 +7,6 @@ from cryptography import x509
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import ec, rsa
|
||||
from cryptography.hazmat.primitives.asymmetric.ed448 import Ed448PrivateKey
|
||||
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey
|
||||
from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
|
||||
from cryptography.x509.oid import NameOID
|
||||
from django.db import models
|
||||
@@ -23,8 +21,6 @@ class PrivateKeyAlg(models.TextChoices):
|
||||
|
||||
RSA = "rsa", _("rsa")
|
||||
ECDSA = "ecdsa", _("ecdsa")
|
||||
ED25519 = "ed25519", _("Ed25519")
|
||||
ED448 = "ed448", _("Ed448")
|
||||
|
||||
|
||||
class CertificateBuilder:
|
||||
@@ -60,10 +56,6 @@ class CertificateBuilder:
|
||||
return rsa.generate_private_key(
|
||||
public_exponent=65537, key_size=4096, backend=default_backend()
|
||||
)
|
||||
if self.alg == PrivateKeyAlg.ED25519:
|
||||
return Ed25519PrivateKey.generate()
|
||||
if self.alg == PrivateKeyAlg.ED448:
|
||||
return Ed448PrivateKey.generate()
|
||||
raise ValueError(f"Invalid alg: {self.alg}")
|
||||
|
||||
def build(
|
||||
@@ -106,25 +98,18 @@ class CertificateBuilder:
|
||||
self.__builder = self.__builder.add_extension(
|
||||
x509.SubjectAlternativeName(alt_names), critical=True
|
||||
)
|
||||
algo = hashes.SHA256()
|
||||
# EdDSA doesn't take a hash algorithm
|
||||
if isinstance(self.__private_key, (Ed25519PrivateKey | Ed448PrivateKey)):
|
||||
algo = None
|
||||
self.__certificate = self.__builder.sign(
|
||||
private_key=self.__private_key,
|
||||
algorithm=algo,
|
||||
algorithm=hashes.SHA256(),
|
||||
backend=default_backend(),
|
||||
)
|
||||
|
||||
@property
|
||||
def private_key(self):
|
||||
"""Return private key in PEM format"""
|
||||
format = serialization.PrivateFormat.TraditionalOpenSSL
|
||||
if isinstance(self.__private_key, (Ed25519PrivateKey | Ed448PrivateKey)):
|
||||
format = serialization.PrivateFormat.PKCS8
|
||||
return self.__private_key.private_bytes(
|
||||
encoding=serialization.Encoding.PEM,
|
||||
format=format,
|
||||
format=serialization.PrivateFormat.TraditionalOpenSSL,
|
||||
encryption_algorithm=serialization.NoEncryption(),
|
||||
).decode("utf-8")
|
||||
|
||||
|
||||
@@ -1,120 +0,0 @@
|
||||
# Generated by Django 5.2.9 on 2025-12-09 06:22
|
||||
|
||||
from hashlib import md5
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.x509 import load_pem_x509_certificate
|
||||
from django.db import migrations, models
|
||||
|
||||
from authentik.crypto.signals import extract_certificate_metadata
|
||||
from authentik.lib.migrations import progress_bar
|
||||
|
||||
|
||||
def backfill_certificate_metadata(apps, schema_editor): # noqa: ARG001
|
||||
"""Backfill certificate metadata and kid for existing records."""
|
||||
|
||||
db_alias = schema_editor.connection.alias
|
||||
CertificateKeyPair = apps.get_model("authentik_crypto", "CertificateKeyPair")
|
||||
|
||||
print("\nStoring extra data about certificates, this might take a couple of minutes...")
|
||||
for cert in progress_bar(CertificateKeyPair.objects.using(db_alias).all()):
|
||||
updated_fields = []
|
||||
|
||||
if cert.certificate_data:
|
||||
try:
|
||||
certificate = load_pem_x509_certificate(
|
||||
cert.certificate_data.encode("utf-8"), default_backend()
|
||||
)
|
||||
metadata = extract_certificate_metadata(certificate)
|
||||
|
||||
cert.key_type = metadata["key_type"]
|
||||
cert.cert_expiry = metadata["cert_expiry"]
|
||||
cert.cert_subject = metadata["cert_subject"]
|
||||
cert.fingerprint_sha256 = metadata["fingerprint_sha256"]
|
||||
cert.fingerprint_sha1 = metadata["fingerprint_sha1"]
|
||||
updated_fields.extend(
|
||||
[
|
||||
"key_type",
|
||||
"cert_expiry",
|
||||
"cert_subject",
|
||||
"fingerprint_sha256",
|
||||
"fingerprint_sha1",
|
||||
]
|
||||
)
|
||||
except (ValueError, TypeError, AttributeError):
pass
|
||||
|
||||
# Backfill kid with MD5 for backwards compatibility
|
||||
if cert.key_data:
|
||||
cert.kid = md5(cert.key_data.encode("utf-8"), usedforsecurity=False).hexdigest()
|
||||
updated_fields.append("kid")
|
||||
|
||||
if updated_fields:
|
||||
cert.save(update_fields=updated_fields, using=db_alias)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("authentik_crypto", "0005_alter_certificatekeypair_options"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="certificatekeypair",
|
||||
name="cert_expiry",
|
||||
field=models.DateTimeField(blank=True, help_text="Certificate expiry date", null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="certificatekeypair",
|
||||
name="cert_subject",
|
||||
field=models.TextField(
|
||||
blank=True, help_text="Certificate subject as RFC4514 string", null=True
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="certificatekeypair",
|
||||
name="fingerprint_sha1",
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
help_text="SHA1 fingerprint of the certificate",
|
||||
max_length=59,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="certificatekeypair",
|
||||
name="fingerprint_sha256",
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
help_text="SHA256 fingerprint of the certificate",
|
||||
max_length=95,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="certificatekeypair",
|
||||
name="key_type",
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
choices=[
|
||||
("rsa", "RSA"),
|
||||
("ec", "Elliptic Curve"),
|
||||
("dsa", "DSA"),
|
||||
("ed25519", "Ed25519"),
|
||||
("ed448", "Ed448"),
|
||||
],
|
||||
help_text="Key algorithm type detected from the certificate's public key",
|
||||
max_length=16,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="certificatekeypair",
|
||||
name="kid",
|
||||
field=models.CharField(
|
||||
blank=True, help_text="Key ID generated from private key", max_length=128, null=True
|
||||
),
|
||||
),
|
||||
migrations.RunPython(backfill_certificate_metadata, migrations.RunPython.noop),
|
||||
]
|
||||
@@ -1,8 +1,7 @@
|
||||
"""authentik crypto models"""
|
||||
|
||||
from base64 import urlsafe_b64encode
|
||||
from binascii import hexlify
|
||||
from hashlib import md5, sha512
|
||||
from hashlib import md5
|
||||
from ssl import PEM_FOOTER, PEM_HEADER
|
||||
from textwrap import wrap
|
||||
from uuid import uuid4
|
||||
@@ -48,39 +47,6 @@ def fingerprint_sha256(cert: Certificate) -> str:
|
||||
return hexlify(cert.fingerprint(hashes.SHA256()), ":").decode("utf-8")
|
||||
|
||||
|
||||
def detect_key_type(certificate: Certificate) -> str | None:
|
||||
"""Detect the key algorithm type by parsing the certificate's public key"""
|
||||
try:
|
||||
public_key = certificate.public_key()
|
||||
if isinstance(public_key, RSAPublicKey):
|
||||
return KeyType.RSA
|
||||
if isinstance(public_key, EllipticCurvePublicKey):
|
||||
return KeyType.EC
|
||||
if isinstance(public_key, DSAPublicKey):
|
||||
return KeyType.DSA
|
||||
if isinstance(public_key, Ed25519PublicKey):
|
||||
return KeyType.ED25519
|
||||
if isinstance(public_key, Ed448PublicKey):
|
||||
return KeyType.ED448
|
||||
except (ValueError, TypeError, AttributeError) as exc:
|
||||
LOGGER.warning("Failed to detect key type", exc=exc)
|
||||
return None
|
||||
|
||||
|
||||
def generate_key_id(key_data: str) -> str:
"""Generate Key ID using SHA512 + urlsafe_b64encode."""
if not key_data:
return ""
return urlsafe_b64encode(sha512(key_data.encode("utf-8")).digest()).decode("utf-8").rstrip("=")


def generate_key_id_legacy(key_data: str) -> str:
"""Generate Key ID using MD5 (legacy format for backwards compatibility)."""
if not key_data:
return ""
return md5(key_data.encode("utf-8")).hexdigest() # nosec
|
||||
|
||||
class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel):
|
||||
"""CertificateKeyPair that can be used for signing or encrypting if `key_data`
|
||||
is set, otherwise it can be used to verify remote data."""
|
||||
@@ -96,41 +62,6 @@ class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel):
|
||||
blank=True,
|
||||
default="",
|
||||
)
|
||||
key_type = models.CharField(
|
||||
max_length=16,
|
||||
choices=KeyType.choices,
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text=_("Key algorithm type detected from the certificate's public key"),
|
||||
)
|
||||
cert_expiry = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text=_("Certificate expiry date"),
|
||||
)
|
||||
cert_subject = models.TextField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text=_("Certificate subject as RFC4514 string"),
|
||||
)
|
||||
fingerprint_sha256 = models.CharField(
|
||||
max_length=95,
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text=_("SHA256 fingerprint of the certificate"),
|
||||
)
|
||||
fingerprint_sha1 = models.CharField(
|
||||
max_length=59,
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text=_("SHA1 fingerprint of the certificate"),
|
||||
)
|
||||
kid = models.CharField(
|
||||
max_length=128,
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text=_("Key ID generated from private key"),
|
||||
)
|
||||
|
||||
_cert: Certificate | None = None
|
||||
_private_key: PrivateKeyTypes | None = None
|
||||
@@ -175,6 +106,41 @@ class CertificateKeyPair(SerializerModel, ManagedModel, CreatedUpdatedModel):
|
||||
return None
|
||||
return self._private_key
|
||||
|
||||
@property
|
||||
def fingerprint_sha256(self) -> str:
|
||||
"""Get SHA256 Fingerprint of certificate_data"""
|
||||
return fingerprint_sha256(self.certificate)
|
||||
|
||||
@property
|
||||
def fingerprint_sha1(self) -> str:
|
||||
"""Get SHA1 Fingerprint of certificate_data"""
|
||||
return hexlify(self.certificate.fingerprint(hashes.SHA1()), ":").decode("utf-8") # nosec
|
||||
|
||||
@property
|
||||
def kid(self):
|
||||
"""Get Key ID used for JWKS"""
|
||||
return (
|
||||
md5(self.key_data.encode("utf-8"), usedforsecurity=False).hexdigest()
|
||||
if self.key_data
|
||||
else ""
|
||||
) # nosec
|
||||
|
||||
@property
|
||||
def key_type(self) -> str | None:
|
||||
"""Get the key algorithm type from the certificate's public key"""
|
||||
public_key = self.certificate.public_key()
|
||||
if isinstance(public_key, RSAPublicKey):
|
||||
return KeyType.RSA
|
||||
if isinstance(public_key, EllipticCurvePublicKey):
|
||||
return KeyType.EC
|
||||
if isinstance(public_key, DSAPublicKey):
|
||||
return KeyType.DSA
|
||||
if isinstance(public_key, Ed25519PublicKey):
|
||||
return KeyType.ED25519
|
||||
if isinstance(public_key, Ed448PublicKey):
|
||||
return KeyType.ED448
|
||||
return None
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"Certificate-Key Pair {self.name}"
|
||||
|
||||
|
||||
@@ -1,70 +0,0 @@
|
||||
"""authentik crypto signals"""
|
||||
|
||||
from binascii import hexlify
|
||||
from datetime import datetime
|
||||
from ssl import CertificateError
|
||||
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.x509 import Certificate
|
||||
from django.db.models.signals import pre_save
|
||||
from django.dispatch import receiver
|
||||
from structlog.stdlib import get_logger
|
||||
|
||||
from authentik.crypto.models import (
|
||||
CertificateKeyPair,
|
||||
detect_key_type,
|
||||
fingerprint_sha256,
|
||||
generate_key_id,
|
||||
generate_key_id_legacy,
|
||||
)
|
||||
|
||||
LOGGER = get_logger()
|
||||
|
||||
|
||||
def extract_certificate_metadata(certificate: Certificate) -> dict[str, str | datetime]:
|
||||
"""Extract all metadata fields from a certificate."""
|
||||
metadata = {}
|
||||
|
||||
try:
|
||||
metadata["key_type"] = detect_key_type(certificate)
|
||||
metadata["cert_expiry"] = certificate.not_valid_after_utc
|
||||
metadata["cert_subject"] = certificate.subject.rfc4514_string()
|
||||
metadata["fingerprint_sha256"] = fingerprint_sha256(certificate)
|
||||
metadata["fingerprint_sha1"] = hexlify(
|
||||
certificate.fingerprint(hashes.SHA1()), ":" # nosec
|
||||
).decode("utf-8")
|
||||
except (ValueError, TypeError, AttributeError) as exc:
|
||||
raise CertificateError(f"Invalid certificate metadata: {exc}") from exc
|
||||
|
||||
return metadata
|
||||
|
||||
|
||||
@receiver(pre_save, sender="authentik_crypto.CertificateKeyPair")
|
||||
def certificate_key_pair_pre_save(
|
||||
sender: type[CertificateKeyPair], instance: CertificateKeyPair, **_
|
||||
):
|
||||
"""Automatically populate certificate metadata fields before saving"""
|
||||
|
||||
# Only extract metadata if certificate_data is present
|
||||
if not instance.certificate_data:
|
||||
return
|
||||
|
||||
try:
|
||||
metadata = extract_certificate_metadata(instance.certificate)
|
||||
except (CertificateError, ValueError, TypeError, AttributeError) as exc:
|
||||
LOGGER.warning("Failed to extract certificate metadata", exc=exc)
|
||||
return
|
||||
|
||||
instance.key_type = metadata["key_type"]
|
||||
instance.cert_expiry = metadata["cert_expiry"]
|
||||
instance.cert_subject = metadata["cert_subject"]
|
||||
instance.fingerprint_sha256 = metadata["fingerprint_sha256"]
|
||||
instance.fingerprint_sha1 = metadata["fingerprint_sha1"]
|
||||
|
||||
# Generate kid if not set, or regenerate if key_data has changed
|
||||
# Preserve existing kid (MD5 or SHA512) if it matches the current key_data
|
||||
if instance.key_data:
|
||||
new_kid = generate_key_id(instance.key_data)
|
||||
legacy_kid = generate_key_id_legacy(instance.key_data)
|
||||
if instance.kid not in (new_kid, legacy_kid):
|
||||
instance.kid = new_kid
|
||||
@@ -20,7 +20,7 @@ from authentik.core.tests.utils import (
|
||||
)
|
||||
from authentik.crypto.api import CertificateKeyPairSerializer
|
||||
from authentik.crypto.builder import CertificateBuilder
|
||||
from authentik.crypto.models import CertificateKeyPair, generate_key_id, generate_key_id_legacy
|
||||
from authentik.crypto.models import CertificateKeyPair
|
||||
from authentik.crypto.tasks import MANAGED_DISCOVERED, certificate_discovery
|
||||
from authentik.lib.config import CONFIG
|
||||
from authentik.lib.generators import generate_id, generate_key
|
||||
@@ -173,33 +173,28 @@ class TestCrypto(APITestCase):
        self.assertEqual(api_cert["fingerprint_sha1"], cert.fingerprint_sha1)
        self.assertEqual(api_cert["fingerprint_sha256"], cert.fingerprint_sha256)

    def test_list_always_includes_details(self):
        """Test API List always includes certificate details"""
    def test_list_without_details(self):
        """Test API List (no details)"""
        cert = create_test_cert()
        self.client.force_login(create_test_admin_user())
        response = self.client.get(
            reverse(
                "authentik_api:certificatekeypair-list",
            ),
            data={"name": cert.name},
            data={"name": cert.name, "include_details": False},
        )
        self.assertEqual(response.status_code, 200)
        body = loads(response.content.decode())
        api_cert = [x for x in body["results"] if x["name"] == cert.name][0]
        # All details should now always be included
        self.assertEqual(api_cert["fingerprint_sha1"], cert.fingerprint_sha1)
        self.assertEqual(api_cert["fingerprint_sha256"], cert.fingerprint_sha256)
        self.assertIsNotNone(api_cert["cert_expiry"])
        self.assertIsNotNone(api_cert["cert_subject"])
        self.assertEqual(api_cert["fingerprint_sha1"], None)
        self.assertEqual(api_cert["fingerprint_sha256"], None)

    def test_certificate_download(self):
        """Test certificate export (download)"""
        keypair = create_test_cert()
        user = create_test_user()
        user.assign_perms_to_managed_role("authentik_crypto.view_certificatekeypair", keypair)
        user.assign_perms_to_managed_role(
            "authentik_crypto.view_certificatekeypair_certificate", keypair
        )
        user.assign_perms_to_managed_role("view_certificatekeypair", keypair)
        user.assign_perms_to_managed_role("view_certificatekeypair_certificate", keypair)
        self.client.force_login(user)
        response = self.client.get(
            reverse(
@@ -222,8 +217,8 @@ class TestCrypto(APITestCase):
        """Test private_key export (download)"""
        keypair = create_test_cert()
        user = create_test_user()
        user.assign_perms_to_managed_role("authentik_crypto.view_certificatekeypair", keypair)
        user.assign_perms_to_managed_role("authentik_crypto.view_certificatekeypair_key", keypair)
        user.assign_perms_to_managed_role("view_certificatekeypair", keypair)
        user.assign_perms_to_managed_role("view_certificatekeypair_key", keypair)
        self.client.force_login(user)
        response = self.client.get(
            reverse(
@@ -431,114 +426,3 @@ class TestCrypto(APITestCase):
        self.assertEqual(
            1, final_count, "Should not create duplicate cert for same private key"
        )

    def test_metadata_extraction_with_cert_and_key(self):
        """Test that metadata is extracted when creating keypair with certificate and key"""
        cert = create_test_cert()

        # Verify all metadata fields are populated
        self.assertIsNotNone(cert.key_type)
        self.assertIsNotNone(cert.cert_expiry)
        self.assertIsNotNone(cert.cert_subject)
        self.assertIsNotNone(cert.fingerprint_sha256)
        self.assertIsNotNone(cert.fingerprint_sha1)

        # Verify kid is generated using SHA512 for new records
        self.assertIsNotNone(cert.kid)
        self.assertEqual(cert.kid, generate_key_id(cert.key_data))

    def test_metadata_extraction_without_key(self):
        """Test that metadata is extracted when creating keypair without private key"""
        builder = CertificateBuilder(generate_id())
        builder.build(subject_alt_names=[], validity_days=3)

        # Create keypair with only certificate, no key
        cert = CertificateKeyPair.objects.create(
            name=generate_id(),
            certificate_data=builder.certificate,
            key_data="",
        )

        # Verify certificate metadata fields are populated
        self.assertIsNotNone(cert.key_type)
        self.assertIsNotNone(cert.cert_expiry)
        self.assertIsNotNone(cert.cert_subject)
        self.assertIsNotNone(cert.fingerprint_sha256)
        self.assertIsNotNone(cert.fingerprint_sha1)

        # Verify kid is empty when no key_data
        self.assertEqual(cert.kid, None)

    def test_metadata_extraction_invalid_cert(self):
        """Test that invalid certificate data doesn't crash, just skips metadata"""
        cert = CertificateKeyPair.objects.create(
            name=generate_id(),
            certificate_data="invalid certificate data",
            key_data="",
        )

        # Verify metadata fields are None for invalid cert
        self.assertIsNone(cert.key_type)
        self.assertIsNone(cert.cert_expiry)
        self.assertIsNone(cert.cert_subject)
        self.assertIsNone(cert.fingerprint_sha256)
        self.assertIsNone(cert.fingerprint_sha1)
        self.assertIsNone(cert.kid)

    def test_kid_legacy_preservation(self):
        """Test that legacy MD5 kid is preserved when key_data hasn't changed"""
        cert = create_test_cert()

        # Simulate a legacy MD5 kid (as if backfilled from old system)
        legacy_kid = generate_key_id_legacy(cert.key_data)
        CertificateKeyPair.objects.filter(pk=cert.pk).update(kid=legacy_kid)
        cert.refresh_from_db()
        self.assertEqual(cert.kid, legacy_kid)

        # Save the cert again (e.g., name change) - kid should be preserved
        cert.name = generate_id()
        cert.save()
        cert.refresh_from_db()

        self.assertEqual(cert.kid, legacy_kid)

    def test_kid_regenerated_on_key_change(self):
        """Test that kid is regenerated when key_data changes"""
        cert = create_test_cert()
        original_kid = cert.kid

        # Generate a new key and update the keypair
        builder = CertificateBuilder(generate_id())
        builder.build(subject_alt_names=[], validity_days=3)

        cert.key_data = builder.private_key
        cert.certificate_data = builder.certificate
        cert.save()
        cert.refresh_from_db()

        # Kid should be regenerated for the new key
        self.assertNotEqual(cert.kid, original_kid)
        self.assertEqual(cert.kid, generate_key_id(cert.key_data))

    def test_kid_regenerated_on_key_change_from_legacy(self):
        """Test that kid is regenerated from legacy MD5 when key_data changes"""
        cert = create_test_cert()

        # Simulate a legacy MD5 kid
        legacy_kid = generate_key_id_legacy(cert.key_data)
        CertificateKeyPair.objects.filter(pk=cert.pk).update(kid=legacy_kid)
        cert.refresh_from_db()
        self.assertEqual(cert.kid, legacy_kid)

        # Generate a new key and update the keypair
        builder = CertificateBuilder(generate_id())
        builder.build(subject_alt_names=[], validity_days=3)

        cert.key_data = builder.private_key
        cert.certificate_data = builder.certificate
        cert.save()
        cert.refresh_from_db()

        # Kid should now be SHA512 for the new key
        self.assertNotEqual(cert.kid, legacy_kid)
        self.assertEqual(cert.kid, generate_key_id(cert.key_data))
@@ -12,7 +12,6 @@ class DeviceAccessGroupSerializer(ModelSerializer):
        fields = [
            "pbm_uuid",
            "name",
            "attributes",
        ]

@@ -3,7 +3,7 @@ from rest_framework.fields import SerializerMethodField
from authentik.core.api.utils import ModelSerializer
from authentik.endpoints.api.connectors import ConnectorSerializer
from authentik.endpoints.api.device_fact_snapshots import DeviceFactSnapshotSerializer
from authentik.endpoints.models import Connector, DeviceConnection, DeviceFactSnapshot
from authentik.endpoints.models import DeviceConnection


class DeviceConnectionSerializer(ModelSerializer):
@@ -12,19 +12,10 @@ class DeviceConnectionSerializer(ModelSerializer):
    latest_snapshot = SerializerMethodField(allow_null=True)

    def get_latest_snapshot(self, instance: DeviceConnection) -> DeviceFactSnapshotSerializer:
        snapshot: DeviceFactSnapshot | None = instance.devicefactsnapshot_set.order_by(
            "-created"
        ).first()
        snapshot = instance.devicefactsnapshot_set.order_by("-created").first()
        if not snapshot:
            return None
        connector: Connector = Connector.objects.get_subclass(pk=snapshot.connection.connector_id)
        vendor = connector.controller.vendor_identifier()
        return DeviceFactSnapshotSerializer(
            snapshot,
            context={
                "vendor": vendor,
            },
        ).data
        return DeviceFactSnapshotSerializer(snapshot).data

    class Meta:
        model = DeviceConnection

@@ -1,32 +1,11 @@
from enum import StrEnum

from rest_framework.fields import SerializerMethodField

from authentik.core.api.utils import ModelSerializer
from authentik.endpoints.controller import MERGED_VENDOR
from authentik.endpoints.facts import DeviceFacts
from authentik.endpoints.models import Connector, DeviceFactSnapshot
from authentik.lib.utils.reflection import all_subclasses


def get_vendor_choices():
    choices = [(MERGED_VENDOR, MERGED_VENDOR)]
    for connector_type in all_subclasses(Connector):
        ident = connector_type().controller.vendor_identifier()
        choices.append((ident, ident))
    return choices


vendors = StrEnum("DeviceConnectorVendors", get_vendor_choices())
from authentik.endpoints.models import DeviceFactSnapshot


class DeviceFactSnapshotSerializer(ModelSerializer):

    data = DeviceFacts()
    vendor = SerializerMethodField()

    def get_vendor(self, instance: DeviceFactSnapshot) -> vendors:
        return self.context.get("vendor", MERGED_VENDOR)

    class Meta:
        model = DeviceFactSnapshot
@@ -35,7 +14,6 @@ class DeviceFactSnapshotSerializer(ModelSerializer):
            "connection",
            "created",
            "expires",
            "vendor",
        ]
        extra_kwargs = {
            "created": {"read_only": True},

@@ -62,7 +62,7 @@ class AgentConfigSerializer(PassiveSerializer):
    def get_system_config(self, instance: AgentConnector) -> ConfigSerializer:
        return ConfigView.get_config(self.context["request"]).data

    def get_license_status(self, instance: AgentConnector) -> LicenseUsageStatus:
    def get_license_status(self, instance: AgentConnector) -> "LicenseUsageStatus":
        try:
            from authentik.enterprise.license import LicenseKey

@@ -44,10 +44,6 @@ class MDMConfigResponseSerializer(PassiveSerializer):

class AgentConnectorController(BaseController[AgentConnector]):

    @staticmethod
    def vendor_identifier() -> str:
        return "goauthentik.io/platform"

    def supported_enrollment_methods(self):
        return []

@@ -2,7 +2,6 @@ from typing import TYPE_CHECKING
from uuid import uuid4

from django.db import models
from django.templatetags.static import static
from django.utils.translation import gettext_lazy as _
from rest_framework.serializers import Serializer

@@ -17,7 +16,7 @@ from authentik.endpoints.models import (
)
from authentik.flows.stage import StageView
from authentik.lib.generators import generate_key
from authentik.lib.models import InternallyManagedMixin, SerializerModel
from authentik.lib.models import SerializerModel
from authentik.lib.utils.time import timedelta_string_validator

if TYPE_CHECKING:
@@ -52,10 +51,6 @@ class AgentConnector(Connector):
    )
    challenge_trigger_check_in = models.BooleanField(default=False)

    @property
    def icon_url(self):
        return static("dist/assets/icons/icon.svg")

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.endpoints.connectors.agent.api.connectors import (
@@ -73,7 +68,7 @@ class AgentConnector(Connector):
        return AuthenticatorEndpointStageView

    @property
    def controller(self) -> type[AgentConnectorController]:
    def controller(self) -> type["AgentConnectorController"]:
        from authentik.endpoints.connectors.agent.controller import AgentConnectorController

        return AgentConnectorController
@@ -102,7 +97,7 @@ class AgentDeviceUserBinding(DeviceUserBinding):
    apple_enclave_key_id = models.TextField()


class DeviceToken(InternallyManagedMixin, ExpiringModel):
class DeviceToken(ExpiringModel):
    """Per-device token used for authentication."""

    token_uuid = models.UUIDField(primary_key=True, default=uuid4)
@@ -148,7 +143,7 @@ class EnrollmentToken(ExpiringModel, SerializerModel):
    ]


class DeviceAuthenticationToken(InternallyManagedMixin, ExpiringModel):
class DeviceAuthenticationToken(ExpiringModel):

    identifier = models.UUIDField(default=uuid4, primary_key=True)
    device = models.ForeignKey(Device, on_delete=models.CASCADE)
@@ -165,7 +160,7 @@ class DeviceAuthenticationToken(InternallyManagedMixin, ExpiringModel):
        verbose_name_plural = _("Device authentication tokens")


class AppleNonce(InternallyManagedMixin, ExpiringModel):
class AppleNonce(ExpiringModel):
    nonce = models.TextField()
    device_token = models.ForeignKey(DeviceToken, on_delete=models.CASCADE)

@@ -1,6 +1,5 @@
from hashlib import sha256
from json import loads
from unittest.mock import PropertyMock, patch

from django.urls import reverse
from jwt import encode
@@ -233,43 +232,3 @@ class TestEndpointStage(FlowTestCase):
        plan = plan()
        self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
        self.assertEqual(plan.context[PLAN_CONTEXT_DEVICE], self.device)

    def test_endpoint_stage_connector_no_stage_optional(self):
        flow = create_test_flow()
        stage = EndpointStage.objects.create(connector=self.connector, mode=StageMode.OPTIONAL)
        FlowStageBinding.objects.create(stage=stage, target=flow, order=0)

        with patch(
            "authentik.endpoints.connectors.agent.models.AgentConnector.stage",
            PropertyMock(return_value=None),
        ):
            with self.assertFlowFinishes() as plan:
                res = self.client.get(
                    reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
                )
                self.assertStageRedirects(res, reverse("authentik_core:root-redirect"))
            plan = plan()
            self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
            self.assertNotIn(PLAN_CONTEXT_DEVICE, plan.context)

    def test_endpoint_stage_connector_no_stage_required(self):
        flow = create_test_flow()
        stage = EndpointStage.objects.create(connector=self.connector, mode=StageMode.REQUIRED)
        FlowStageBinding.objects.create(stage=stage, target=flow, order=0)

        with patch(
            "authentik.endpoints.connectors.agent.models.AgentConnector.stage",
            PropertyMock(return_value=None),
        ):
            with self.assertFlowFinishes() as plan:
                res = self.client.get(
                    reverse("authentik_api:flow-executor", kwargs={"flow_slug": flow.slug}),
                )
                self.assertStageResponse(
                    res,
                    component="ak-stage-access-denied",
                    error_message="Invalid stage configuration",
                )
            plan = plan()
            self.assertNotIn(PLAN_CONTEXT_AGENT_ENDPOINT_CHALLENGE, plan.context)
            self.assertNotIn(PLAN_CONTEXT_DEVICE, plan.context)

@@ -5,8 +5,6 @@ from authentik.endpoints.models import Connector
from authentik.flows.stage import StageView
from authentik.lib.sentry import SentryIgnoredException

MERGED_VENDOR = "goauthentik.io/@merged"


class EnrollmentMethods(models.TextChoices):
    # Automatically enrolled through user action
@@ -30,10 +28,6 @@ class BaseController[T: "Connector"]:
        self.connector = connector
        self.logger = get_logger().bind(connector=connector.name)

    @staticmethod
    def vendor_identifier() -> str:
        raise NotImplementedError

    def supported_enrollment_methods(self) -> list[EnrollmentMethods]:
        return []

@@ -1,5 +1,4 @@
from django.db.models import TextChoices
from django.utils.translation import gettext_lazy as _
from drf_spectacular.extensions import OpenApiSerializerFieldExtension
from drf_spectacular.plumbing import build_basic_type
from drf_spectacular.types import OpenApiTypes
@@ -16,6 +15,7 @@ from authentik.core.api.utils import JSONDictField


class BigIntegerFieldFix(OpenApiSerializerFieldExtension):

    target_class = "authentik.endpoints.facts.BigIntegerField"

    def map_serializer_field(self, auto_schema, direction):
@@ -46,23 +46,9 @@ class DiskSerializer(Serializer):


class OperatingSystemSerializer(Serializer):
    """For example:
    {"family":"linux","name":"Ubuntu","version":"24.04.3 LTS (Noble Numbat)","arch":"amd64"}
    {"family": "windows","name":"Server 2022 Datacenter","version":"10.0.20348.4405","arch":"amd64"}
    {"family": "mac_os", "name": "", "version": "26.2", "arch": "arm64"}
    """

    family = ChoiceField(OSFamily.choices, required=True)
    name = CharField(
        required=False, help_text=_("Operating System name, such as 'Server 2022' or 'Ubuntu'")
    )
    version = CharField(
        required=False,
        help_text=_(
            "Operating System version, must always be the version number but may contain build name"
        ),
    )
    name = CharField(required=False)
    version = CharField(required=False)
    arch = CharField(required=True)

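
A plain-Python sanity check mirroring the OperatingSystemSerializer examples above. The set of allowed family values is an assumption pieced together from the docstring and the Fleet controller further down in this diff; the authoritative validation is the DRF serializer itself, not this helper:

```python
# Hypothetical helper; the real validation lives in OperatingSystemSerializer.
ALLOWED_FAMILIES = {"linux", "windows", "mac_os", "android", "ios", "other"}  # assumed values


def looks_like_valid_os(facts: dict) -> bool:
    """family and arch are required, name and version are optional."""
    return facts.get("family") in ALLOWED_FAMILIES and bool(facts.get("arch"))


print(looks_like_valid_os({"family": "linux", "name": "Ubuntu", "version": "24.04.3 LTS (Noble Numbat)", "arch": "amd64"}))  # True
print(looks_like_valid_os({"family": "mac_os", "name": "", "version": "26.2", "arch": "arm64"}))  # True
```
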
@@ -1,18 +0,0 @@
# Generated by Django 5.2.9 on 2025-12-08 23:54

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("authentik_endpoints", "0003_alter_endpointstage_options_endpointstage_mode"),
    ]

    operations = [
        migrations.AddField(
            model_name="deviceaccessgroup",
            name="attributes",
            field=models.JSONField(blank=True, default=dict),
        ),
    ]
@@ -15,7 +15,7 @@ from authentik.core.models import AttributesMixin, ExpiringModel
from authentik.flows.models import Stage
from authentik.flows.stage import StageView
from authentik.lib.merge import MERGE_LIST_UNIQUE
from authentik.lib.models import InheritanceForeignKey, InternallyManagedMixin, SerializerModel
from authentik.lib.models import InheritanceForeignKey, SerializerModel
from authentik.lib.utils.time import timedelta_from_string, timedelta_string_validator
from authentik.policies.models import PolicyBinding, PolicyBindingModel
from authentik.tasks.schedules.common import ScheduleSpec
@@ -28,7 +28,7 @@ LOGGER = get_logger()
DEVICE_FACTS_CACHE_TIMEOUT = 3600


class Device(InternallyManagedMixin, ExpiringModel, AttributesMixin, PolicyBindingModel):
class Device(ExpiringModel, AttributesMixin, PolicyBindingModel):
    device_uuid = models.UUIDField(default=uuid4, primary_key=True)

    name = models.TextField(unique=True)
@@ -43,7 +43,7 @@ class Device(InternallyManagedMixin, ExpiringModel, AttributesMixin, PolicyBindi
        return f"goauthentik.io/endpoints/devices/{self.device_uuid}/facts"

    @property
    def cached_facts(self) -> DeviceFactSnapshot:
    def cached_facts(self) -> "DeviceFactSnapshot":
        if cached := cache.get(self.cache_key_facts):
            return cached
        facts = self.facts
@@ -51,7 +51,7 @@ class Device(InternallyManagedMixin, ExpiringModel, AttributesMixin, PolicyBindi
        return facts

    @property
    def facts(self) -> DeviceFactSnapshot:
    def facts(self) -> "DeviceFactSnapshot":
        data = {}
        last_updated = datetime.fromtimestamp(0, UTC)
        for snapshot_data, snapshort_created in DeviceFactSnapshot.filter_not_expired(
@@ -86,7 +86,7 @@ class DeviceUserBinding(PolicyBinding):
        verbose_name_plural = _("Device User bindings")


class DeviceConnection(InternallyManagedMixin, SerializerModel):
class DeviceConnection(SerializerModel):
    device_connection_uuid = models.UUIDField(default=uuid4, primary_key=True)
    device = models.ForeignKey("Device", on_delete=models.CASCADE)
    connector = models.ForeignKey("Connector", on_delete=models.CASCADE)
@@ -115,7 +115,7 @@ class DeviceConnection(InternallyManagedMixin, SerializerModel):
        verbose_name_plural = _("Device connections")


class DeviceFactSnapshot(InternallyManagedMixin, ExpiringModel, SerializerModel):
class DeviceFactSnapshot(ExpiringModel, SerializerModel):
    snapshot_id = models.UUIDField(primary_key=True, default=uuid4)
    connection = models.ForeignKey(DeviceConnection, on_delete=models.CASCADE)
    data = models.JSONField(default=dict)
@@ -157,7 +157,7 @@ class Connector(ScheduledModel, SerializerModel):
        raise NotImplementedError

    @property
    def controller(self) -> type[BaseController[Connector]]:
    def controller(self) -> type["BaseController[Connector]"]:
        raise NotImplementedError

    @property
@@ -175,7 +175,7 @@ class Connector(ScheduledModel, SerializerModel):
    ]


class DeviceAccessGroup(AttributesMixin, SerializerModel, PolicyBindingModel):
class DeviceAccessGroup(SerializerModel, PolicyBindingModel):

    name = models.TextField(unique=True)

@@ -205,7 +205,7 @@ class EndpointStage(Stage):
    mode = models.TextField(choices=StageMode.choices, default=StageMode.OPTIONAL)

    @property
    def view(self) -> type[StageView]:
    def view(self) -> type["StageView"]:
        from authentik.endpoints.stage import EndpointStageView

        return EndpointStageView

@@ -1,4 +1,4 @@
from authentik.endpoints.models import EndpointStage, StageMode
from authentik.endpoints.models import EndpointStage
from authentik.flows.stage import StageView

PLAN_CONTEXT_ENDPOINT_CONNECTOR = "endpoint_connector"
@@ -6,24 +6,15 @@ PLAN_CONTEXT_ENDPOINT_CONNECTOR = "endpoint_connector"

class EndpointStageView(StageView):

    def _get_inner(self) -> StageView | None:
    def _get_inner(self):
        stage: EndpointStage = self.executor.current_stage
        inner_stage: type[StageView] | None = stage.connector.stage
        if not inner_stage:
            return None
            return self.executor.stage_ok()
        return inner_stage(self.executor, request=self.request)

    def dispatch(self, request, *args, **kwargs):
        inner = self._get_inner()
        if inner is None:
            stage: EndpointStage = self.executor.current_stage
            if stage.mode == StageMode.OPTIONAL:
                return self.executor.stage_ok()
            else:
                return self.executor.stage_invalid("Invalid stage configuration")
        return inner.dispatch(request, *args, **kwargs)
        return self._get_inner().dispatch(request, *args, **kwargs)

    def cleanup(self):
        inner = self._get_inner()
        if inner is not None:
            return inner.cleanup()
        return self._get_inner().cleanup()

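
The two EndpointStageView variants above differ in what happens when a connector provides no inner stage: one silently succeeds, the other consults the stage's mode. A dependency-free sketch of that fallback rule, with hypothetical names standing in for the flow executor:

```python
from enum import StrEnum


class Mode(StrEnum):
    OPTIONAL = "optional"
    REQUIRED = "required"


def dispatch(inner_stage, mode: Mode) -> str:
    # No inner stage configured: optional stages pass, required stages fail the flow
    if inner_stage is None:
        return "stage_ok" if mode == Mode.OPTIONAL else "stage_invalid: Invalid stage configuration"
    # Otherwise hand the request to the connector-provided stage
    return inner_stage()


print(dispatch(None, Mode.OPTIONAL))                     # stage_ok
print(dispatch(None, Mode.REQUIRED))                     # stage_invalid: Invalid stage configuration
print(dispatch(lambda: "inner stage ran", Mode.REQUIRED))
```

That mirrors the TestEndpointStage cases earlier in this diff, where an OPTIONAL stage redirects to the root URL while a REQUIRED one renders ak-stage-access-denied.
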
@@ -3,12 +3,6 @@
from django.conf import settings

from authentik.enterprise.apps import EnterpriseConfig
from authentik.tenants.flags import Flag


class AuditIncludeExpandedDiff(Flag[bool], key="enterprise_audit_include_expanded_diff"):
    default = False
    visibility = "none"


class AuthentikEnterpriseAuditConfig(EnterpriseConfig):

@@ -12,7 +12,6 @@ from django.db.models.expressions import BaseExpression, Combinable
from django.db.models.signals import post_init
from django.http import HttpRequest

from authentik.enterprise.audit.apps import AuditIncludeExpandedDiff
from authentik.events.middleware import AuditMiddleware, should_log_model
from authentik.events.utils import cleanse_dict, sanitize_item

@@ -144,9 +143,5 @@ class EnterpriseAuditMiddleware(AuditMiddleware):
        # If we're clearing we just set the "flag" to True
        if action_direction == "clear":
            pk_set = True
        elif AuditIncludeExpandedDiff.get():
            related_model: type[Model] = m2m_field.related_model
            instances = related_model.objects.filter(pk__in=pk_set)
            pk_set = [self.serialize_simple(instance) for instance in instances]
        thread_kwargs["diff"] = {m2m_field.related_name: {action_direction: pk_set}}
        return super().m2m_changed_handler(request, sender, instance, action, thread_kwargs)

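
The middleware change above gates the expanded m2m diff behind the AuditIncludeExpandedDiff flag. A minimal, ORM-free sketch of the same idea; build_m2m_diff and the lookup callable are hypothetical stand-ins for the Django queryset and serialize_simple:

```python
def build_m2m_diff(related_name: str, action: str, pk_set, expand: bool, lookup) -> dict:
    """Build the diff payload for an m2m change."""
    if action == "clear":
        payload = True  # clearing carries no per-object detail, just a flag
    elif expand:
        payload = [lookup(pk) for pk in pk_set]  # full serialized objects
    else:
        payload = sorted(pk_set)  # plain primary keys
    return {related_name: {action: payload}}


users = {1: {"id": 1, "username": "akadmin"}}
print(build_m2m_diff("users", "add", {1}, expand=False, lookup=users.get))  # {'users': {'add': [1]}}
print(build_m2m_diff("users", "add", {1}, expand=True, lookup=users.get))
```

The expanded form is what the test_m2m_add_expanded case below asserts against, down to the sanitized user fields.
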
@@ -7,12 +7,10 @@ from rest_framework.test import APITestCase

from authentik.core.models import Group, User
from authentik.core.tests.utils import create_test_admin_user
from authentik.enterprise.audit.apps import AuditIncludeExpandedDiff
from authentik.enterprise.audit.middleware import EnterpriseAuditMiddleware
from authentik.events.models import Event, EventAction
from authentik.events.utils import sanitize_item
from authentik.lib.generators import generate_id
from authentik.tenants.flags import patch_flag


class TestEnterpriseAudit(APITestCase):
@@ -183,61 +181,6 @@ class TestEnterpriseAudit(APITestCase):
            {"users": {"add": [user.pk]}},
        )

    @patch(
        "authentik.enterprise.audit.middleware.EnterpriseAuditMiddleware.enabled",
        PropertyMock(return_value=True),
    )
    @patch_flag(AuditIncludeExpandedDiff, True)
    def test_m2m_add_expanded(self):
        """Test m2m add audit log"""
        user = create_test_admin_user()
        group = Group.objects.create(name=generate_id())
        self.client.force_login(self.user)
        response = self.client.post(
            reverse("authentik_api:group-add-user", kwargs={"pk": group.group_uuid}),
            data={
                "pk": user.pk,
            },
        )
        self.assertEqual(response.status_code, 204)
        events = Event.objects.filter(
            action=EventAction.MODEL_UPDATED,
            context__model__model_name="group",
            context__model__app="authentik_core",
            context__model__pk=group.pk.hex,
        )
        event = events.first()
        self.assertIsNotNone(event)
        self.assertIsNotNone(event.context["diff"])
        diff = event.context["diff"]
        self.assertEqual(
            diff,
            {
                "users": {
                    "add": [
                        {
                            "attributes": {},
                            "date_joined": sanitize_item(user.date_joined),
                            "email": user.email,
                            "first_name": "",
                            "id": user.pk,
                            "is_active": True,
                            "last_login": None,
                            "last_name": "",
                            "last_updated": sanitize_item(user.last_updated),
                            "name": user.name,
                            "password": "********************",
                            "password_change_date": sanitize_item(user.password_change_date),
                            "path": "users",
                            "type": "internal",
                            "username": user.username,
                            "uuid": user.uuid.hex,
                        }
                    ]
                }
            },
        )

    @patch(
        "authentik.enterprise.audit.middleware.EnterpriseAuditMiddleware.enabled",
        PropertyMock(return_value=True),
@@ -10,7 +10,6 @@ from jwt import PyJWTError, decode, encode, get_unverified_header
from rest_framework.exceptions import ValidationError
from structlog.stdlib import get_logger

from authentik.common.oauth.constants import TOKEN_TYPE
from authentik.core.models import AuthenticatedSession, Session, User
from authentik.core.sessions import SessionStore
from authentik.crypto.apps import MANAGED_KEY
@@ -27,6 +26,7 @@ from authentik.events.models import Event, EventAction
from authentik.events.signals import SESSION_LOGIN_EVENT
from authentik.flows.planner import PLAN_CONTEXT_DEVICE
from authentik.lib.utils.time import timedelta_from_string
from authentik.providers.oauth2.constants import TOKEN_TYPE
from authentik.providers.oauth2.id_token import IDToken
from authentik.providers.oauth2.models import JWTAlgorithms
from authentik.root.middleware import SessionMiddleware

@@ -1,37 +0,0 @@
"""FleetConnector API Views"""

from rest_framework.viewsets import ModelViewSet

from authentik.core.api.used_by import UsedByMixin
from authentik.endpoints.api.connectors import ConnectorSerializer
from authentik.enterprise.api import EnterpriseRequiredMixin
from authentik.enterprise.endpoints.connectors.fleet.models import FleetConnector


class FleetConnectorSerializer(EnterpriseRequiredMixin, ConnectorSerializer):
    """FleetConnector Serializer"""

    class Meta(ConnectorSerializer.Meta):
        model = FleetConnector
        fields = ConnectorSerializer.Meta.fields + [
            "url",
            "token",
            "headers_mapping",
            "map_users",
            "map_teams_access_group",
        ]
        extra_kwargs = {
            "token": {"write_only": True},
        }


class FleetConnectorViewSet(UsedByMixin, ModelViewSet):
    """FleetConnector Viewset"""

    queryset = FleetConnector.objects.all()
    serializer_class = FleetConnectorSerializer
    filterset_fields = [
        "name",
    ]
    search_fields = ["name"]
    ordering = ["name"]
@@ -1,12 +0,0 @@
"""authentik endpoints app config"""

from authentik.enterprise.apps import EnterpriseConfig


class AuthentikEnterpriseEndpointsConnectorFleetAppConfig(EnterpriseConfig):
    """authentik endpoints app config"""

    name = "authentik.enterprise.endpoints.connectors.fleet"
    label = "authentik_endpoints_connectors_fleet"
    verbose_name = "authentik Enterprise.Endpoints.Connectors.Fleet"
    default = True
@@ -1,206 +0,0 @@
import re
from typing import Any

from django.db import transaction
from requests import RequestException
from rest_framework.exceptions import ValidationError

from authentik.core.models import User
from authentik.endpoints.controller import BaseController, ConnectorSyncException, EnrollmentMethods
from authentik.endpoints.facts import (
    DeviceFacts,
    OSFamily,
)
from authentik.endpoints.models import (
    Device,
    DeviceAccessGroup,
    DeviceConnection,
    DeviceUserBinding,
)
from authentik.enterprise.endpoints.connectors.fleet.models import FleetConnector as DBC
from authentik.events.utils import sanitize_item
from authentik.lib.utils.http import get_http_session
from authentik.policies.utils import delete_none_values


class FleetController(BaseController[DBC]):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._session = get_http_session()
        self._session.headers["Authorization"] = f"Bearer {self.connector.token}"
        if self.connector.headers_mapping:
            self._session.headers.update(
                sanitize_item(
                    self.connector.headers_mapping.evaluate(
                        user=None,
                        request=None,
                        connector=self.connector,
                    )
                )
            )

    @staticmethod
    def vendor_identifier() -> str:
        return "fleetdm.com"

    def supported_enrollment_methods(self) -> list[EnrollmentMethods]:
        return [EnrollmentMethods.AUTOMATIC_API]

    def _url(self, path: str) -> str:
        return f"{self.connector.url}{path}"

    def _paginate_hosts(self):
        try:
            page = 0
            while True:
                self.logger.info("Fetching page of hosts...", page=page)
                res = self._session.get(
                    self._url("/api/v1/fleet/hosts"),
                    params={
                        "order_key": "hardware_serial",
                        "page": page,
                        "per_page": 50,
                        "device_mapping": "true",
                        "populate_software": "true",
                        "populate_users": "true",
                    },
                )
                res.raise_for_status()
                hosts: list[dict[str, Any]] = res.json()["hosts"]
                if len(hosts) < 1:
                    self.logger.info("No more hosts, finished")
                    break
                self.logger.info("Got hosts", count=len(hosts))
                yield from hosts
                page += 1
        except RequestException as exc:
            raise ConnectorSyncException(exc) from exc

    @transaction.atomic
    def sync_endpoints(self) -> None:
        for host in self._paginate_hosts():
            serial = host["hardware_serial"]
            device, _ = Device.objects.get_or_create(
                identifier=serial, defaults={"name": host["hostname"], "expiring": False}
            )
            connection, _ = DeviceConnection.objects.update_or_create(
                device=device,
                connector=self.connector,
            )
            if self.connector.map_users:
                self.map_users(host, device)
            if self.connector.map_teams_access_group:
                self.map_access_group(host, device)
            try:
                connection.create_snapshot(self.convert_host_data(host))
            except ValidationError as exc:
                self.logger.warning(
                    "failed to create snapshot for host", host=host["hostname"], exc=exc
                )

    def map_users(self, host: dict[str, Any], device: Device):
        for raw_user in host.get("device_mapping", []) or []:
            user = User.objects.filter(email=raw_user["email"]).first()
            if not user:
                continue
            DeviceUserBinding.objects.update_or_create(
                target=device,
                user=user,
                create_defaults={
                    "is_primary": True,
                    "order": 0,
                },
            )

    def map_access_group(self, host: dict[str, Any], device: Device):
        team_name = host.get("team_name")
        if not team_name:
            return
        group, _ = DeviceAccessGroup.objects.get_or_create(name=team_name)
        group.attributes["io.goauthentik.endpoints.connectors.fleet.team_id"] = host["team_id"]
        if device.access_group:
            return
        device.access_group = group
        device.save()

    @staticmethod
    def os_family(host: dict[str, Any]) -> OSFamily:
        if host["platform_like"] in ["debian", "rhel"]:
            return OSFamily.linux
        if host["platform_like"] == "windows":
            return OSFamily.windows
        if host["platform_like"] == "darwin":
            return OSFamily.macOS
        if host["platform"] == "android":
            return OSFamily.android
        if host["platform"] in ["ipados", "ios"]:
            return OSFamily.iOS
        return OSFamily.other

    def map_os(self, host: dict[str, Any]) -> dict[str, str]:
        family = FleetController.os_family(host)
        os = {
            "arch": self.or_none(host["cpu_type"]),
            "family": family,
            "name": self.or_none(host["platform_like"]),
            "version": self.or_none(host["os_version"]),
        }
        if not host["os_version"]:
            return delete_none_values(os)
        version = re.search(r"(\d+\.(?:\d+\.?)+)", host["os_version"])
        if not version:
            return delete_none_values(os)
        os["version"] = host["os_version"][version.start() :].strip()
        os["name"] = host["os_version"][0 : version.start()].strip()
        return delete_none_values(os)

    def or_none(self, value) -> Any | None:
        if value == "":
            return None
        return value

    def convert_host_data(self, host: dict[str, Any]) -> dict[str, Any]:
        """Convert host data from fleet to authentik"""
        fleet_version = ""
        for pkg in host.get("software") or []:
            if pkg["name"] in ["fleet-osquery", "fleet-desktop"]:
                fleet_version = pkg["version"]
        data = {
            "os": self.map_os(host),
            "disks": [],
            "network": delete_none_values(
                {"hostname": self.or_none(host["hostname"]), "interfaces": []}
            ),
            "hardware": delete_none_values(
                {
                    "model": self.or_none(host["hardware_model"]),
                    "manufacturer": self.or_none(host["hardware_vendor"]),
                    "serial": self.or_none(host["hardware_serial"]),
                    "cpu_name": self.or_none(host["cpu_brand"]),
                    "cpu_count": self.or_none(host["cpu_logical_cores"]),
                    "memory_bytes": self.or_none(host["memory"]),
                }
            ),
            "software": [
                delete_none_values(
                    {
                        "name": x["name"],
                        "version": x["version"],
                        "source": x["source"],
                    }
                )
                for x in (host.get("software") or [])
            ],
            "vendor": {
                "fleetdm.com": {
                    "policies": [
                        delete_none_values({"name": policy["name"], "status": policy["response"]})
                        for policy in host.get("policies", [])
                    ],
                    "agent_version": fleet_version,
                },
            },
        }
        facts = DeviceFacts(data=data)
        facts.is_valid(raise_exception=True)
        return facts.validated_data
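
The map_os method above splits Fleet's free-form os_version string into a name and a version at the first dotted number group. A standalone illustration of that split; the helper name and the sample strings are made up for the example:

```python
import re

VERSION_RE = re.compile(r"(\d+\.(?:\d+\.?)+)")  # same pattern as map_os above


def split_os_version(os_version: str) -> tuple[str, str]:
    """Return (name, version) the way map_os slices the string."""
    match = VERSION_RE.search(os_version)
    if not match:
        return os_version.strip(), ""
    return os_version[: match.start()].strip(), os_version[match.start():].strip()


print(split_os_version("Ubuntu 24.04.3 LTS (Noble Numbat)"))
# ('Ubuntu', '24.04.3 LTS (Noble Numbat)')
print(split_os_version("Windows Server 2022 Datacenter 10.0.20348.4405"))
# ('Windows Server 2022 Datacenter', '10.0.20348.4405')
```
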
@@ -1,53 +0,0 @@
# Generated by Django 5.2.10 on 2026-01-15 13:27

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ("authentik_endpoints", "0004_deviceaccessgroup_attributes"),
        ("authentik_events", "0014_notification_hyperlink_notification_hyperlink_label_and_more"),
    ]

    operations = [
        migrations.CreateModel(
            name="FleetConnector",
            fields=[
                (
                    "connector_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="authentik_endpoints.connector",
                    ),
                ),
                ("url", models.URLField()),
                ("token", models.TextField()),
                ("map_users", models.BooleanField(default=True)),
                ("map_teams_access_group", models.BooleanField(default=False)),
                (
                    "headers_mapping",
                    models.ForeignKey(
                        default=None,
                        help_text="Configure additional headers to be sent. Mapping should return a dictionary of key-value pairs",
                        null=True,
                        on_delete=django.db.models.deletion.SET_DEFAULT,
                        related_name="+",
                        to="authentik_events.notificationwebhookmapping",
                    ),
                ),
            ],
            options={
                "verbose_name": "Fleet Connector",
                "verbose_name_plural": "Fleet Connectors",
            },
            bases=("authentik_endpoints.connector",),
        ),
    ]
@@ -1,56 +0,0 @@
from typing import TYPE_CHECKING

from django.db import models
from django.templatetags.static import static
from django.utils.translation import gettext_lazy as _
from rest_framework.serializers import Serializer

from authentik.endpoints.models import Connector

if TYPE_CHECKING:
    from authentik.enterprise.endpoints.connectors.fleet.controller import FleetController


class FleetConnector(Connector):
    """Ingest device data and policy compliance from a Fleet instance."""

    url = models.URLField()
    token = models.TextField()
    headers_mapping = models.ForeignKey(
        "authentik_events.NotificationWebhookMapping",
        on_delete=models.SET_DEFAULT,
        null=True,
        default=None,
        related_name="+",
        help_text=_(
            "Configure additional headers to be sent. "
            "Mapping should return a dictionary of key-value pairs"
        ),
    )

    map_users = models.BooleanField(default=True)
    map_teams_access_group = models.BooleanField(default=False)

    @property
    def icon_url(self):
        return static("authentik/connectors/fleet.svg")

    @property
    def serializer(self) -> type[Serializer]:
        from authentik.enterprise.endpoints.connectors.fleet.api import FleetConnectorSerializer

        return FleetConnectorSerializer

    @property
    def controller(self) -> type[FleetController]:
        from authentik.enterprise.endpoints.connectors.fleet.controller import FleetController

        return FleetController

    @property
    def component(self) -> str:
        return "ak-endpoints-connector-fleet-form"

    class Meta:
        verbose_name = _("Fleet Connector")
        verbose_name_plural = _("Fleet Connectors")
Some files were not shown because too many files have changed in this diff.