Mirror of https://github.com/suitenumerique/docs.git (synced 2026-04-26 01:25:05 +02:00)

Compare commits: refacto/bl...config/inc (211 commits)
Commits (abbreviated SHA1):

2cbd43caae, 525d8c8417, c886cbb41d, 98f3ca2763, fb92a43755, 03fd1fe50e, fc803226ac, fb725edda3,
6838b387a2, 87f570582f, 37f56fcc22, 19aa3a36bc, 0d09f761dc, ce5f9a1417, 83a24c3796, 4a269e6b0e,
d9d7b70b71, a4326366c2, 1d7b57e03d, c4c6c22e42, 10a8eccc71, 728332f8f7, 487b95c207, d23b38e478,
d6333c9b81, 03b6c6a206, aadabf8d3c, 2a708d6e46, b47c730e19, cef83067e6, 4cabfcc921, b8d4b0a044,
71c4d2921b, d1636dee13, bf93640af8, da79c310ae, 99c486571d, cdf3161869, ef108227b3, 9991820cb1,
2801ece358, 0b37996899, 0867ccef1a, b3ae6e1a30, 1df6242927, 35fba02085, 0e5c9ed834, 4e54a53072,
4f8aea7b80, 1172fbe0b5, 7cf144e0de, 54c15c541e, 8472e661f5, 1d819d8fa2, 5020bc1c1a, 4cd72ffa4f,
c1998a9b24, 0fca6db79c, ad36210e45, 73a7c250b5, 0c17d76f60, 04c9dc3294, 32b2641fd8, 07966c5461,
bcb50a5fce, ba93bcf20b, 2e05aec303, 51e8332b95, eb2ee1bb7f, d34f279455, 3eed542800, 5f2c472726,
9e313e30a7, 6c493c24d5, c3acfe45d2, a9d2517c7b, a2ae41296d, 1016b1c25d, 0c649a65b0, 11d899437a,
27c5e0ce5a, 9337c4b1d5, 679b29e2e0, 3cad1b8a39, 2eb2641d2c, e36366b293, 6d73fb69b0, b708c8b352,
36c6762026, 4637d6f1fe, 167375231b, c17fb3e6cc, 1be89180fe, 6a3b33ec32, 29f2c2ebdf, 9d320092df,
77535b0292, 770c22b1a6, 3c980512be, 76cb6d66a4, 6cef5ff2a0, d816234839, 5dd66f0cdc, 0a4052d023,
189594c839, ca286b6de7, 6062d0e9c4, a51b34a04e, f294a8e5a3, b4591cda10, 301bf43cb7, f155e9217e,
09fb9671e4, 4c0c1f423e, 83fe903587, 200b975c6d, 9536227c52, fb4c502c75, 77aee5652a, 7cceffff13,
a028df54ce, 25cf11c90f, d1a3519646, 03ea6b29df, ea0a1aef10, bb7d1353f6, 1944f6177e, 6ce847d6e1,
e48080b27e, 73621c91e5, ee2462310f, 2d6e34c555, 3f638b22c4, c9f42e7924, a30384573e, 54dc72209c,
9cf30a0d5f, f24b047a7c, 3411df09ae, 2718321fbe, 217af2e2a8, 53985f77f3, a51ceeb409, 1070b91d2f,
24ec1fa70e, 0ba6f02d1a, 8ce216f6e8, 050b106a8f, 5011db9bd7, e1e0e5ebd8, 5c8fff01a5, 1a022450c6,
09438a8941, 6f0dac4f48, 9d6fe5da8f, 1ee313efb1, 1ac6b42ae3, ffae927c93, 0d335105a1, dc23883a9c,
a8ce9eabf8, 21217be587, a8212753aa, c37dc8dd34, e323af2cdb, 9f9f26974c, c80e7d05bb, 5d5ac0c1c8,
d0b756550b, 010ed4618a, c0994d7d1f, fa0c3847e4, 49871c45b1, 2cc0d71b89, 33785440c6, 75c7811755,
f4cb66d6b6, 57dc56f83e, de1a0e4a73, 17cb213ecd, 3ab0a47c3a, 685464f2d7, 9af540de35, 6c43ecc324,
607bae0022, 1d8b730715, d02c6250c9, b8c1504e7a, 18edcf8537, 5d8741a70a, 48df68195a, 7cf42e6404,
9903bd73e2, 44b38347c4, 709076067b, db014cfc6f, 52cd76eb93, 505b144968, 009de5299f, 0fddabb354,
cd25c3a63b, adb216fbdf, 235c1828e6, 4588c71e8a, 6b7fc915dd, c3e83c6612, 586089c8e4, 1b5ce3ed10,
989c70ed57, c6ded3f267, 781f0815a8
@@ -34,4 +34,4 @@ db.sqlite3
 # Frontend
 node_modules
-.next
+**/.next
.github/.trivyignore (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
+CVE-2026-26996
+CVE-2026-27903
+CVE-2026-27904
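These CVE identifiers are the ones Trivy is expected to skip when scanning the project's images. A rough local reproduction of such a scan, assuming the lasuite/impress-backend image name used later in this diff and a locally built tag; the invocation is a sketch, not taken from the workflows:

```bash
# Sketch: build the backend image locally, then scan it while honouring the ignore list.
# The ":local" tag is an assumption for illustration.
docker build --target backend-production -f Dockerfile -t lasuite/impress-backend:local .
trivy image --ignorefile .github/.trivyignore lasuite/impress-backend:local
```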
.github/workflows/crowdin_download.yml (vendored, 4 lines changed)
@@ -20,7 +20,7 @@ jobs:
       pull-requests: write
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
       - name: Create empty source files
         run: |
           touch src/backend/locale/django.pot
@@ -48,7 +48,7 @@ jobs:
           CROWDIN_BASE_PATH: "../src/"
       # frontend i18n
       - name: Restore the frontend cache
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         with:
           path: "src/frontend/**/node_modules"
           key: front-node_modules-${{ hashFiles('src/frontend/**/yarn.lock') }}
.github/workflows/crowdin_upload.yml (vendored, 8 lines changed)
@@ -20,10 +20,10 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
       # Backend i18n
       - name: Install Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
         with:
           python-version: "3.13.3"
           cache: "pip"
@@ -33,7 +33,7 @@ jobs:
         run: pip install --user .
         working-directory: src/backend
       - name: Restore the mail templates
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         id: mail-templates
         with:
           path: "src/backend/core/templates/mail"
@@ -49,7 +49,7 @@ jobs:
           DJANGO_CONFIGURATION=Build python manage.py makemessages -a --keep-pot
       # frontend i18n
       - name: Restore the frontend cache
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         with:
           path: "src/frontend/**/node_modules"
           key: front-node_modules-${{ hashFiles('src/frontend/**/yarn.lock') }}
.github/workflows/dependencies.yml (vendored, 16 lines changed)
@@ -20,16 +20,16 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
       - name: Restore the frontend cache
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         id: front-node_modules
         with:
           path: "src/frontend/**/node_modules"
           key: front-node_modules-${{ hashFiles('src/frontend/**/yarn.lock') }}
       - name: Setup Node.js
         if: steps.front-node_modules.outputs.cache-hit != 'true'
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v6
         with:
           node-version: ${{ inputs.node_version }}
       - name: Install dependencies
@@ -37,7 +37,7 @@ jobs:
         run: cd src/frontend/ && yarn install --frozen-lockfile
       - name: Cache install frontend
         if: steps.front-node_modules.outputs.cache-hit != 'true'
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         with:
           path: "src/frontend/**/node_modules"
           key: front-node_modules-${{ hashFiles('src/frontend/**/yarn.lock') }}
@@ -50,10 +50,10 @@ jobs:
         working-directory: src/mail
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Restore the mail templates
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         id: mail-templates
         with:
           path: "src/backend/core/templates/mail"
@@ -61,7 +61,7 @@ jobs:

       - name: Setup Node.js
         if: steps.mail-templates.outputs.cache-hit != 'true'
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v6
         with:
           node-version: ${{ inputs.node_version }}

@@ -79,7 +79,7 @@ jobs:

       - name: Cache mail templates
         if: steps.mail-templates.outputs.cache-hit != 'true'
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         with:
           path: "src/backend/core/templates/mail"
           key: mail-templates-${{ hashFiles('src/mail/mjml') }}
.github/workflows/docker-hub.yml (vendored, 169 lines changed)
@@ -5,149 +5,66 @@ on:
   workflow_dispatch:
   push:
     branches:
-      - 'main'
+      - "main"
     tags:
-      - 'v*'
+      - "v*"
   pull_request:
     branches:
-      - 'main'
-      - 'ci/trivy-fails'
+      - "main"

 env:
   DOCKER_USER: 1001:127
+  SHOULD_PUSH: ${{ github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview') }}
+
+permissions:
+  contents: read

 jobs:
   build-and-push-backend:
-    runs-on: ubuntu-latest
-    steps:
-      -
-        name: Checkout repository
-        uses: actions/checkout@v4
-      -
-        name: Docker meta
-        id: meta
-        uses: docker/metadata-action@v5
-        with:
-          images: lasuite/impress-backend
-      -
-        name: Login to DockerHub
-        if: github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview')
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKER_HUB_USER }}
-          password: ${{ secrets.DOCKER_HUB_PASSWORD }}
-      -
-        name: Run trivy scan
-        uses: numerique-gouv/action-trivy-cache@main
-        with:
-          docker-build-args: '--target backend-production -f Dockerfile'
-          docker-image-name: 'docker.io/lasuite/impress-backend:${{ github.sha }}'
-      -
-        name: Build and push
-        uses: docker/build-push-action@v6
-        with:
-          context: .
-          target: backend-production
-          build-args: DOCKER_USER=${{ env.DOCKER_USER }}:-1000
-          push: ${{ github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview') }}
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
-      -
-        name: Cleanup Docker after build
-        if: always()
-        run: |
-          docker system prune -af
-          docker volume prune -f
+    uses: ./.github/workflows/docker-publish.yml
+    permissions:
+      contents: read
+    secrets: inherit
+    with:
+      image_name: lasuite/impress-backend
+      context: .
+      file: Dockerfile
+      target: backend-production
+      should_push: ${{ github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview') }}
+      docker_user: 1001:127

   build-and-push-frontend:
-    runs-on: ubuntu-latest
-    steps:
-      -
-        name: Checkout repository
-        uses: actions/checkout@v4
-      -
-        name: Docker meta
-        id: meta
-        uses: docker/metadata-action@v5
-        with:
-          images: lasuite/impress-frontend
-      -
-        name: Login to DockerHub
-        if: github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview')
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKER_HUB_USER }}
-          password: ${{ secrets.DOCKER_HUB_PASSWORD }}
-      -
-        name: Run trivy scan
-        uses: numerique-gouv/action-trivy-cache@main
-        with:
-          docker-build-args: '-f src/frontend/Dockerfile --target frontend-production'
-          docker-image-name: 'docker.io/lasuite/impress-frontend:${{ github.sha }}'
-      -
-        name: Build and push
-        uses: docker/build-push-action@v6
-        with:
-          context: .
-          file: ./src/frontend/Dockerfile
-          target: frontend-production
-          build-args: |
-            DOCKER_USER=${{ env.DOCKER_USER }}:-1000
-            PUBLISH_AS_MIT=false
-          push: ${{ github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview') }}
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
-      -
-        name: Cleanup Docker after build
-        if: always()
-        run: |
-          docker system prune -af
-          docker volume prune -f
+    uses: ./.github/workflows/docker-publish.yml
+    permissions:
+      contents: read
+    secrets: inherit
+    with:
+      image_name: lasuite/impress-frontend
+      context: .
+      file: src/frontend/Dockerfile
+      target: frontend-production
+      arm64_reuse_amd64_build_arg: "FRONTEND_IMAGE"
+      should_push: ${{ github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview') }}
+      docker_user: 1001:127

   build-and-push-y-provider:
-    runs-on: ubuntu-latest
-    steps:
-      -
-        name: Checkout repository
-        uses: actions/checkout@v4
-      -
-        name: Docker meta
-        id: meta
-        uses: docker/metadata-action@v5
-        with:
-          images: lasuite/impress-y-provider
-      -
-        name: Login to DockerHub
-        if: github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview')
-        run: echo "${{ secrets.DOCKER_HUB_PASSWORD }}" | docker login -u "${{ secrets.DOCKER_HUB_USER }}" --password-stdin
-      -
-        name: Run trivy scan
-        uses: numerique-gouv/action-trivy-cache@main
-        with:
-          docker-build-args: '-f src/frontend/servers/y-provider/Dockerfile --target y-provider'
-          docker-image-name: 'docker.io/lasuite/impress-y-provider:${{ github.sha }}'
-      -
-        name: Build and push
-        uses: docker/build-push-action@v6
-        with:
-          context: .
-          file: ./src/frontend/servers/y-provider/Dockerfile
-          target: y-provider
-          build-args: DOCKER_USER=${{ env.DOCKER_USER }}:-1000
-          push: ${{ github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview') }}
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
-      -
-        name: Cleanup Docker after build
-        if: always()
-        run: |
-          docker system prune -af
-          docker volume prune -f
+    uses: ./.github/workflows/docker-publish.yml
+    permissions:
+      contents: read
+    secrets: inherit
+    with:
+      image_name: lasuite/impress-y-provider
+      context: .
+      file: src/frontend/servers/y-provider/Dockerfile
+      target: y-provider
+      should_push: ${{ github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview') }}
+      docker_user: 1001:127

   notify-argocd:
     needs:
-      - build-and-push-frontend
       - build-and-push-backend
+      - build-and-push-frontend
+      - build-and-push-y-provider
     runs-on: ubuntu-latest
     if: github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview')
     steps:
.github/workflows/docker-publish.yml (vendored, new file, 142 lines)
@@ -0,0 +1,142 @@
name: Build and Push Container Image
description: Build and push a container image based on the input arguments provided

"on":
  workflow_call:
    inputs:
      image_name:
        type: string
        required: true
        description: The suffix for the image name, without the registry and without the repository path.
      context:
        type: string
        required: true
        description: The path to the context to start `docker build` into.
      file:
        type: string
        required: true
        description: The path to the Dockerfile
      target:
        type: string
        required: false
        default: ""
        description: The Dockerfile target stage to build the image for.
      should_push:
        type: boolean
        required: false
        default: false
        description: if the image should be pushed on the docker registry
      docker_user:
        type: string
        required: false
        default: ""
        description: The docker_user ARGUMENT to pass to the build step
      arm64_reuse_amd64_build_arg:
        type: string
        required: false
        default: ""
        description: "Build arg name to pass first amd64 tag to arm64 build (skips arch-independent build steps)"

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    permissions:
      contents: read
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to DockerHub
        if: ${{ inputs.should_push }}
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_HUB_USER }}
          password: ${{ secrets.DOCKER_HUB_PASSWORD }}
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ inputs.image_name }}
      - name: Generate platform-specific tags
        id: platform-tags
        run: |
          AMD64_TAGS=$(echo "${{ steps.meta.outputs.tags }}" | sed 's/$/-amd64/')
          ARM64_TAGS=$(echo "${{ steps.meta.outputs.tags }}" | sed 's/$/-arm64/')
          FIRST_AMD64_TAG=$(echo "${{ steps.meta.outputs.tags }}" | head -1)-amd64
          {
            echo "amd64<<EOF"
            echo "$AMD64_TAGS"
            echo "EOF"
            echo "arm64<<EOF"
            echo "$ARM64_TAGS"
            echo "EOF"
            echo "amd64_first=$FIRST_AMD64_TAG"
          } >> "$GITHUB_OUTPUT"
      # - name: Run trivy scan
      #   if: ${{ vars.TRIVY_SCAN_ENABLED }} == 'true'
      #   uses: numerique-gouv/action-trivy-cache@main
      #   with:
      #     docker-build-args: "--target ${{ inputs.target }} -f ${{ inputs.file }}"
      #     docker-image-name: "docker.io/${{ inputs.image_name }}:${{ github.sha }}"
      #     trivyignores: ./.github/.trivyignore
      - name: Build and push (amd64)
        if: ${{ inputs.should_push }}||${{ vars.TRIVY_SCAN_ENABLED }} != 'true'
        uses: docker/build-push-action@v6
        with:
          context: ${{ inputs.context }}
          file: ${{ inputs.file }}
          target: ${{ inputs.target }}
          platforms: linux/amd64
          build-args: |
            DOCKER_USER=${{ inputs.docker_user }}
            PUBLISH_AS_MIT=false
          push: ${{ inputs.should_push }}
          provenance: false
          tags: ${{ steps.platform-tags.outputs.amd64 }}
          labels: ${{ steps.meta.outputs.labels }}
      - name: Build and push (arm64)
        if: ${{ inputs.should_push }}
        uses: docker/build-push-action@v6
        with:
          context: ${{ inputs.context }}
          file: ${{ inputs.file }}
          target: ${{ inputs.target }}
          platforms: linux/arm64
          build-args: |
            DOCKER_USER=${{ inputs.docker_user }}
            PUBLISH_AS_MIT=false
            ${{ inputs.arm64_reuse_amd64_build_arg && format('{0}={1}', inputs.arm64_reuse_amd64_build_arg, steps.platform-tags.outputs.amd64_first) || '' }}
          push: ${{ inputs.should_push }}
          provenance: false
          tags: ${{ steps.platform-tags.outputs.arm64 }}
          labels: ${{ steps.meta.outputs.labels }}
      - name: Create multi-arch manifests
        if: ${{ inputs.should_push }}
        id: create-manifest
        run: |
          IMAGE="${{ inputs.image_name }}"
          readarray -t TAGS <<< "${{ steps.meta.outputs.tags }}"
          FIRST_TAG=""
          for tag in "${TAGS[@]}"; do
            [ -z "$tag" ] && continue
            docker buildx imagetools create -t "$tag" \
              "${tag}-amd64" "${tag}-arm64"
            if [ -z "$FIRST_TAG" ]; then
              FIRST_TAG="$tag"
            fi
          done
          # Get the digest of the multi-arch manifest for attestation
          # Note: --format '{{.Manifest.Digest}}' is broken (docker/buildx#1175),
          # so we compute it from the raw manifest JSON instead.
          if [ -n "$FIRST_TAG" ]; then
            DIGEST="sha256:$(docker buildx imagetools inspect "$FIRST_TAG" --raw | sha256sum | awk '{print $1}')"
            echo "digest=$DIGEST" >> "$GITHUB_OUTPUT"
          fi
      - name: Cleanup Docker after build
        if: always()
        run: |
          docker system prune -af
          docker volume prune -f
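The workflow above builds each architecture under a suffixed tag (-amd64, -arm64) and then stitches the two into a single multi-arch tag, recomputing the digest from the raw manifest because the --format '{{.Manifest.Digest}}' shortcut is broken (docker/buildx#1175). A minimal local sketch of that final step, with a placeholder repository and tag:

```bash
# Sketch: combine per-architecture images into one multi-arch manifest and print its digest.
# "example/app:v1" is a placeholder; the workflow derives real tags from docker/metadata-action.
TAG="example/app:v1"
docker buildx imagetools create -t "$TAG" "${TAG}-amd64" "${TAG}-arm64"
DIGEST="sha256:$(docker buildx imagetools inspect "$TAG" --raw | sha256sum | awk '{print $1}')"
echo "multi-arch manifest digest: $DIGEST"
```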
.github/workflows/ghcr.yml (vendored, new file, 157 lines)
@@ -0,0 +1,157 @@
name: Build and Push to GHCR
run-name: Build and Push to GHCR

on:
  workflow_dispatch:
  push:
    branches:
      - "main"
    tags:
      - "v*"

env:
  DOCKER_USER: 1001:127
  REGISTRY: ghcr.io

jobs:
  build-and-push-backend:
    runs-on: ubuntu-latest
    if: github.event.repository.fork == true
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ github.repository }}/backend
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=sha
      - name: Login to GHCR
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v6
        with:
          context: .
          target: backend-production
          platforms: linux/amd64,linux/arm64
          build-args: DOCKER_USER=${{ env.DOCKER_USER }}
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
      - name: Cleanup Docker after build
        if: always()
        run: |
          docker system prune -af
          docker volume prune -f

  build-and-push-frontend:
    runs-on: ubuntu-latest
    if: github.event.repository.fork == true
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ github.repository }}/frontend
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=sha
      - name: Login to GHCR
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ./src/frontend/Dockerfile
          target: frontend-production
          platforms: linux/amd64,linux/arm64
          build-args: |
            DOCKER_USER=${{ env.DOCKER_USER }}
            PUBLISH_AS_MIT=false
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
      - name: Cleanup Docker after build
        if: always()
        run: |
          docker system prune -af
          docker volume prune -f

  build-and-push-y-provider:
    runs-on: ubuntu-latest
    if: github.event.repository.fork == true
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ github.repository }}/y-provider
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=sha
      - name: Login to GHCR
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ./src/frontend/servers/y-provider/Dockerfile
          target: y-provider
          platforms: linux/amd64,linux/arm64
          build-args: DOCKER_USER=${{ env.DOCKER_USER }}:-1000
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
      - name: Cleanup Docker after build
        if: always()
        run: |
          docker system prune -af
          docker volume prune -f
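Unlike the Docker Hub pipeline, these GHCR jobs pass both platforms to a single build-push-action call, so each pushed tag should already be a manifest list. A quick check after a run might look like the following; the image path follows the ghcr.io/&lt;repository&gt;/backend pattern from the workflow, but the concrete repository and tag are assumptions:

```bash
# Sketch: confirm the pushed backend image exposes both linux/amd64 and linux/arm64.
docker buildx imagetools inspect ghcr.io/suitenumerique/docs/backend:main
```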
.github/workflows/helmfile-linter.yaml (vendored, 2 lines changed)
@@ -15,7 +15,7 @@ jobs:
     steps:
       -
         name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
       -
         name: Helmfile lint
         shell: bash
.github/workflows/impress-frontend.yml (vendored, 38 lines changed)
@@ -23,15 +23,15 @@ jobs:
       contents: read
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Setup Node.js
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v6
         with:
           node-version: "22.x"

       - name: Restore the frontend cache
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         with:
           path: "src/frontend/**/node_modules"
           key: front-node_modules-${{ hashFiles('src/frontend/**/yarn.lock') }}
@@ -47,14 +47,14 @@ jobs:
       contents: read
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Setup Node.js
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v6
         with:
           node-version: "22.x"
       - name: Restore the frontend cache
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         with:
           path: "src/frontend/**/node_modules"
           key: front-node_modules-${{ hashFiles('src/frontend/**/yarn.lock') }}
@@ -69,15 +69,15 @@ jobs:
     timeout-minutes: 20
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Setup Node.js
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v6
         with:
           node-version: "22.x"

       - name: Restore the frontend cache
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         with:
           path: "src/frontend/**/node_modules"
           key: front-node_modules-${{ hashFiles('src/frontend/**/yarn.lock') }}
@@ -111,15 +111,15 @@ jobs:
     timeout-minutes: 30
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Setup Node.js
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v6
         with:
           node-version: "22.x"

       - name: Restore the frontend cache
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         with:
           path: "src/frontend/**/node_modules"
           key: front-node_modules-${{ hashFiles('src/frontend/**/yarn.lock') }}
@@ -157,11 +157,11 @@ jobs:
       issues: write
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Detect relevant changes
         id: changes
-        uses: dorny/paths-filter@v2
+        uses: dorny/paths-filter@v3
         with:
           filters: |
             lock:
@@ -170,7 +170,7 @@ jobs:
             - 'src/frontend/apps/impress/**'

       - name: Restore the frontend cache
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         with:
           path: "src/frontend/**/node_modules"
           key: front-node_modules-${{ hashFiles('src/frontend/**/yarn.lock') }}
@@ -178,7 +178,7 @@ jobs:

       - name: Setup Node.js
         if: steps.changes.outputs.lock == 'true' || steps.changes.outputs.app == 'true'
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v6
         with:
           node-version: "22.x"

@@ -205,14 +205,14 @@ jobs:
       contents: read
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Setup Node.js
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v6
         with:
           node-version: "22.x"
       - name: Restore the frontend cache
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         with:
           path: "src/frontend/**/node_modules"
           key: front-node_modules-${{ hashFiles('src/frontend/**/yarn.lock') }}
.github/workflows/impress.yml (vendored, 22 lines changed)
@@ -19,7 +19,7 @@ jobs:
     if: github.event_name == 'pull_request' # Makes sense only for pull requests
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
         with:
           fetch-depth: 0
       - name: show
@@ -27,7 +27,7 @@ jobs:
       - name: Enforce absence of print statements in code
         if: always()
         run: |
-          ! git diff origin/${{ github.event.pull_request.base.ref }}..HEAD -- . ':(exclude)**/impress.yml' | grep "print("
+          ! git diff origin/${{ github.event.pull_request.base.ref }}..HEAD -- src/backend ':(exclude)**/impress.yml' | grep "print("
       - name: Check absence of fixup commits
         if: always()
         run: |
@@ -46,7 +46,7 @@ jobs:
       github.event_name == 'pull_request'
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
         with:
           fetch-depth: 50
       - name: Check that the CHANGELOG has been modified in the current branch
@@ -56,7 +56,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
       - name: Check CHANGELOG max line length
         run: |
           max_line_length=$(cat CHANGELOG.md | grep -Ev "^\[.*\]: https://github.com" | wc -L)
@@ -70,7 +70,7 @@ jobs:
     if: github.event_name == 'pull_request'
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
       - name: Install codespell
         run: pip install --user codespell
       - name: Check for typos
@@ -92,9 +92,9 @@ jobs:
       working-directory: src/backend
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
       - name: Install Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
         with:
           python-version: "3.13.3"
           cache: "pip"
@@ -146,7 +146,7 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Create writable /data
         run: |
@@ -154,7 +154,7 @@ jobs:
           sudo mkdir -p /data/static

       - name: Restore the mail templates
-        uses: actions/cache@v4
+        uses: actions/cache@v5
         id: mail-templates
         with:
           path: "src/backend/core/templates/mail"
@@ -190,7 +190,7 @@ jobs:
           mc version enable impress/impress-media-storage"

       - name: Install Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
         with:
           python-version: "3.13.3"
           cache: "pip"
@@ -202,7 +202,7 @@ jobs:
         run: |
           sudo apt-get update
           sudo apt-get install -y gettext pandoc shared-mime-info
-          sudo wget https://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types -O /etc/mime.types
+          sudo wget https://raw.githubusercontent.com/suitenumerique/django-lasuite/refs/heads/main/assets/conf/mime.types -O /etc/mime.types

       - name: Generate a MO file from strings extracted from the project
         run: python manage.py compilemessages
.github/workflows/release-helm-chart.yaml (vendored, 2 lines changed)
@@ -15,7 +15,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
         with:
          fetch-depth: 0

CHANGELOG.md (293 lines changed)
@@ -6,11 +6,176 @@ and this project adheres to

 ## [Unreleased]

 ### Added

+- 🚸(frontend) hint min char search users #2064
+
+### Changed
+
+- 💄(frontend) improve comments highlights #1961
+- ♿️(frontend) improve BoxButton a11y and native button semantics #2103
+- ♿️(frontend) improve language picker accessibility #2069
+
+### Fixed
+
+- 🐛(y-provider) destroy Y.Doc instances after each convert request #2129
+
+## [v4.8.3] - 2026-03-23
+
+### Changed
+
+- ♿️(frontend) improve version history list accessibility #2033
+- ♿(frontend) focus skip link on headings and skip grid dropzone #1983
+- ♿️(frontend) add sr-only format to export download button #2088
+- ♿️(frontend) announce formatting shortcuts for screen readers #2070
+- ✨(frontend) add markdown copy icon for Copy as Markdown option #2096
+- ♻️(backend) skip saving in database a document when payload is empty #2062
+- ♻️(frontend) refacto Version modal to fit with the design system #2091
+- ⚡️(frontend) add debounce WebSocket reconnect #2104
+
+### Fixed
+
+- ♿️(frontend) fix more options menu feedback for screen readers #2071
+- 💫(frontend) fix the help button to the bottom in tree #2073
+- ♿️(frontend) fix aria-labels for table of contents #2065
+- 🐛(backend) allow using search endpoint without refresh token enabled #2097
+- 🐛(frontend) fix close panel when click on subdoc #2094
+- 🐛(frontend) fix leftpanel button in doc version #9238
+- 🐛(y-provider) fix loop when no cookies #2101
+
+## [v4.8.2] - 2026-03-19
+
+### Added
+
+- ✨(backend) add resource server api #1923
+- ✨(frontend) activate Find search #1834
+- ✨ handle searching on subdocuments #1834
+- ✨(backend) add search feature flags #1897
+
+### Changed
+
+- ♿️(frontend) ensure doc title is h1 for accessibility #2006
+- ♿️(frontend) add nb accesses in share button aria-label #2017
+- ✨(backend) improve fallback logic on search endpoint #1834
+
+### Fixed
+
+- 🐛(frontend) fix image resizing when caption #2045
+- 🙈(docker) add \*\*/.next to .dockerignore #2034
+- ♿️(frontend) fix share modal heading hierarchy #2007
+- ♿️(frontend) fix Copy link toast accessibility for screen readers #2029
+- ♿️(frontend) fix modal aria-label and name #2014
+- ♿️(frontend) fix language dropdown ARIA for screen readers #2020
+- ♿️(frontend) fix waffle aria-label spacing for new-window links #2030
+- 🐛(backend) stop using add_sibling method to create sandbox document #2084
+- 🐛(backend) duplicate a document as last-sibling #2084
+
+### Removed
+
+- 🔥(api) remove `documents/<document_id>/descendants/` endpoint #1834
+- 🔥(api) remove pagination on `documents/search/` endpoint #1834
+
+## [v4.8.1] - 2026-03-17
+
+### Added
+
+- 🔧(backend) add DB_PSYCOPG_POOL_ENABLED settings #2035
+
+### Changed
+
+- ⬇️(backend) downgrade django-treebeard to version < 5.0.0 #2036
+
+## [v4.8.0] - 2026-03-13
+
+### Added
+
+- ✨(backend) add a is_first_connection flag to the User model #1938
+- ✨(frontend) add onboarding modal with help menu button #1868
+
+### Changed
+
+- ♿(frontend) localize LaGaufre label fallback in Docs #1979
+- ✨(backend) add a migration cleaning on-boarding document accesses #1971
+- ⬆️(frontend) upgrade Next.js to v16 #1980
+- ♿️(frontend) fix aria-label and landmark on document banner state #1986
+- 🌐(i18n) add "new window" translation key for waffle aria-label #1984
+
+### Fixed
+
+- 🐛(backend) create a link_trace record for on-boarding documents #1971
+- 🐛(backend) manage race condition when creating sandbox document #1971
+- 🐛(frontend) fix flickering left panel #1989
+- ♿️(frontend) improve doc tree keyboard navigation #1981
+- 🔧(helm) allow specific env var for the backend and celery deploy
+
+## [v4.7.0] - 2026-03-09
+
+### Added
+
+- ✨(helm) allow all keys in configMap as env var #1872
+
+### Changed
+
+- 📝(docs) improve README and add documentation hub #1870
+- ♿️(frontend) restore focus to triggers after closing menus and modals #1863
+- 🚸(frontend) change position elements toolbar #1957
+- ♿️(frontend) add focus on open to modals #1948
+
+### Fixed
+
+- 🐛(frontend) analytic feature flags problem #1953
+- 🐛(frontend) fix home collapsing panel #1954
+- 🐛(frontend) fix disabled color on icon Dropdown #1950
+- 🐛(frontend) fix zIndex table of content #1949
+- 🐛(frontend) fix bug when language not supported by BN #1957
+- 🐛 (backend) prevent privileged users from requesting access #1898
+
+## [v4.6.0] - 2026-03-03
+
+### Added
+
+- ✨(frontend) integrate new Blocknote AI feature #1847
+- 👷(docker) add arm64 platform support for image builds #1901
+- ✨(tracking) add UTM parameters to shared document links #1896
+- ✨(frontend) add floating bar with leftpanel collapse button #1876
+- ✨(frontend) Can print a doc #1832
+- ✨(backend) manage reconciliation requests for user accounts #1878
+- 👷(CI) add GHCR workflow for forked repo testing #1851
+- ✨(frontend) Move doc modal #1886
+- ⚡️(backend) remove content from Document serializer when asked #1910
+- ✨(backend) allow the duplication of subpages #1893
+- ✨(backend) Onboarding docs for new users #1891
+- 🩺(trivy) add trivyignore file and add minimatch CVE #1915
+- 🚩 Add feature flags for the AI feature #1922
+- 🍱(frontend) add icons ui-kit #1943
+
+### Changed
+
+- ♿️(frontend) prevent dates from being focusable #1855
+- ♿️(frontend) Focus main container after navigation #1864
+- 💄(frontend) align colors and logo with ui-kit v2 #1869
+- 🚸(backend) sort user search results by proximity with the active user #1802
+- 🚸(oidc) ignore case when fallback on email #1880
+- ⚡️(CI) optimize Docker Hub workflow #1919
+
+### Fixed
+
+- 🐛(frontend) fix broadcast store sync #1846
+- 🐛(helm) use celery resources instead of backend resources #1887
+- 🐛(helm) reverse liveness and readiness for backend deployment #1887
+- 🐛(y-provider) use CONVERSION_FILE_MAX_SIZE settings #1913
+- 🐛(frontend) fix callout block spacing for old browsers #1914
+
+## [v4.5.0] - 2026-01-28
+
+### Added
+
 - ✨(frontend) integrate configurable Waffle #1795
 - ✨ Import of documents #1609
 - 🚨(CI) gives warning if theme not updated #1811
+- ✨(frontend) Add stat for Crisp #1824
+- ✨(auth) add silent login #1690
 - 🔧(project) add DJANGO_EMAIL_URL_APP environment variable #1825

 ### Changed
@@ -26,16 +191,19 @@ and this project adheres to
 - 🐛(frontend) add fallback for unsupported Blocknote languages #1810
 - 🐛(frontend) fix emojipicker closing in tree #1808
 - 🐛(frontend) display children in favorite #1782
+- 🐛(frontend) preserve typed text after @ on escape #1833

 ### Removed

 - 🔥(project) remove all code related to template #1780
+- 🔥(api) remove `documents/<document_id>/descendants/` endpoint #1834
+- 🔥(api) remove pagination on `documents/search/` endpoint #1834

 ### Security

 - 🔒️(trivy) fix vulnerability about jaraco.context #1806

-## [4.4.0] - 2026-01-13
+## [v4.4.0] - 2026-01-13

 ### Added

@@ -63,7 +231,7 @@ and this project adheres to
 - 🔒️(backend) validate more strictly url used by cors-proxy endpoint #1768
 - 🔒️(frontend) fix props vulnerability in Interlinking #1792

-## [4.3.0] - 2026-01-05
+## [v4.3.0] - 2026-01-05

 ### Added

@@ -82,7 +250,7 @@ and this project adheres to
 - 🐛(frontend) fix tables deletion #1739
 - 🐛(frontend) fix children not display when first resize #1753

-## [4.2.0] - 2025-12-17
+## [v4.2.0] - 2025-12-17

 ### Added

@@ -106,7 +274,7 @@ and this project adheres to
 - 🐛(frontend) Select text + Go back one page crash the app #1733
 - 🐛(frontend) fix versioning conflict #1742

-## [4.1.0] - 2025-12-09
+## [v4.1.0] - 2025-12-09

 ### Added

@@ -125,7 +293,7 @@ and this project adheres to
 - 🐛(nginx) fix / location to handle new static pages #1682
 - 🐛(frontend) rerendering during resize window #1715

-## [4.0.0] - 2025-12-01
+## [v4.0.0] - 2025-12-01

 ### Added

@@ -148,7 +316,7 @@ and this project adheres to
 - 🐛(frontend) preserve left panel width on window resize #1588
 - 🐛(frontend) prevent duplicate as first character in title #1595

-## [3.10.0] - 2025-11-18
+## [v3.10.0] - 2025-11-18

 ### Added

@@ -182,7 +350,7 @@ and this project adheres to

 - 🔥(backend) remove api managing templates

-## [3.9.0] - 2025-11-10
+## [v3.9.0] - 2025-11-10

 ### Added

@@ -208,13 +376,13 @@ and this project adheres to
 - 🐛(frontend) button new doc UI fix #1557
 - 🐛(frontend) interlinking UI fix #1557

-## [3.8.2] - 2025-10-17
+## [v3.8.2] - 2025-10-17

 ### Fixed

 - 🐛(service-worker) fix sw registration and page reload logic #1500

-## [3.8.1] - 2025-10-17
+## [v3.8.1] - 2025-10-17

 ### Fixed

@@ -228,7 +396,7 @@ and this project adheres to

 - 🔥(backend) remove treebeard form for the document admin #1470

-## [3.8.0] - 2025-10-14
+## [v3.8.0] - 2025-10-14

 ### Added

@@ -240,7 +408,7 @@ and this project adheres to
 - ♻️(frontend) Refactor Auth component for improved redirection logic #1461
 - ♻️(frontend) replace Arial font-family with token font #1411
 - ♿(frontend) improve accessibility:
-  - ♿(frontend) enable enter key to open documentss #1354
+  - ♿(frontend) enable enter key to open documents #1354
   - ♿(frontend) improve modal a11y: structure, labels, title #1349
   - ♿improve NVDA navigation in DocShareModal #1396
   - ♿ improve accessibility by adding landmark roles to layout #1394
@@ -281,7 +449,7 @@ and this project adheres to

 - 🔥(frontend) remove custom DividerBlock ##1375

-## [3.7.0] - 2025-09-12
+## [v3.7.0] - 2025-09-12

 ### Added

@@ -313,7 +481,7 @@ and this project adheres to

 - 🐛(frontend) fix callout emoji list #1366

-## [3.6.0] - 2025-09-04
+## [v3.6.0] - 2025-09-04

 ### Added

@@ -349,7 +517,7 @@ and this project adheres to
 - 🐛(frontend) fix display bug on homepage #1332
 - 🐛link role update #1287

-## [3.5.0] - 2025-07-31
+## [v3.5.0] - 2025-07-31

 ### Added

@@ -377,7 +545,7 @@ and this project adheres to
 - 🐛(frontend) 401 redirection overridden #1214
 - 🐛(frontend) include root parent in search #1243

-## [3.4.2] - 2025-07-18
+## [v3.4.2] - 2025-07-18

 ### Changed

@@ -387,7 +555,7 @@ and this project adheres to

 - 🐛(backend) improve prompt to not use code blocks delimiter #1188

-## [3.4.1] - 2025-07-15
+## [v3.4.1] - 2025-07-15

 ### Fixed

@@ -398,7 +566,7 @@ and this project adheres to
 - 🐛(frontend) fix crash share modal on grid options #1174
 - 🐛(frontend) fix unfold subdocs not clickable at the bottom #1179

-## [3.4.0] - 2025-07-09
+## [v3.4.0] - 2025-07-09

 ### Added

@@ -442,16 +610,16 @@ and this project adheres to

 - 🔥(frontend) remove Beta from logo #1095

-## [3.3.0] - 2025-05-06
+## [v3.3.0] - 2025-05-06

 ### Added

 - ✨(backend) add endpoint checking media status #984
 - ✨(backend) allow setting session cookie age via env var #977
-- ✨(backend) allow theme customnization using a configuration file #948
+- ✨(backend) allow theme customization using a configuration file #948
 - ✨(frontend) Add a custom callout block to the editor #892
 - 🚩(frontend) version MIT only #911
-- ✨(backend) integrate maleware_detection from django-lasuite #936
+- ✨(backend) integrate malware_detection from django-lasuite #936
 - 🏗️(frontend) Footer configurable #959
 - 🩺(CI) add lint spell mistakes #954
 - ✨(frontend) create generic theme #792

@@ -474,14 +642,14 @@ and this project adheres to

 - 🔥(back) remove footer endpoint #948

-## [3.2.1] - 2025-05-06
+## [v3.2.1] - 2025-05-06

 ## Fixed

 - 🐛(frontend) fix list copy paste #943
 - 📝(doc) update contributing policy (commit signatures are now mandatory) #895

-## [3.2.0] - 2025-05-05
+## [v3.2.0] - 2025-05-05

 ## Added

@@ -508,7 +676,7 @@ and this project adheres to
 - 🐛(backend) race condition create doc #633
 - 🐛(frontend) fix breaklines in custom blocks #908

-## [3.1.0] - 2025-04-07
+## [v3.1.0] - 2025-04-07

 ## Added

@@ -526,7 +694,7 @@ and this project adheres to
 - 🐛(back) validate document content in serializer #822
 - 🐛(frontend) fix selection click past end of content #840

-## [3.0.0] - 2025-03-28
+## [v3.0.0] - 2025-03-28

 ## Added

@@ -542,7 +710,7 @@ and this project adheres to
 - 🐛(backend) compute ancestor_links in get_abilities if needed #725
 - 🔒️(back) restrict access to document accesses #801

-## [2.6.0] - 2025-03-21
+## [v2.6.0] - 2025-03-21

 ## Added

@@ -560,7 +728,7 @@ and this project adheres to
|
|||||||
- 🔒️(back) throttle user list endpoint #636
|
- 🔒️(back) throttle user list endpoint #636
|
||||||
- 🔒️(back) remove pagination and limit to 5 for user list endpoint #636
|
- 🔒️(back) remove pagination and limit to 5 for user list endpoint #636
|
||||||
|
|
||||||
## [2.5.0] - 2025-03-18
|
## [v2.5.0] - 2025-03-18
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
@@ -590,7 +758,7 @@ and this project adheres to
|
|||||||
- 🚨(helm) fix helmfile lint #736
|
- 🚨(helm) fix helmfile lint #736
|
||||||
- 🚚(frontend) redirect to 401 page when 401 error #759
|
- 🚚(frontend) redirect to 401 page when 401 error #759
|
||||||
|
|
||||||
## [2.4.0] - 2025-03-06
|
## [v2.4.0] - 2025-03-06
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
@@ -604,7 +772,7 @@ and this project adheres to
|
|||||||
|
|
||||||
- 🐛(frontend) fix collaboration error #684
|
- 🐛(frontend) fix collaboration error #684
|
||||||
|
|
||||||
## [2.3.0] - 2025-03-03
|
## [v2.3.0] - 2025-03-03
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
@@ -631,7 +799,7 @@ and this project adheres to
|
|||||||
- ♻️(frontend) improve table pdf rendering
|
- ♻️(frontend) improve table pdf rendering
|
||||||
- 🐛(email) invitation emails in receiver's language
|
- 🐛(email) invitation emails in receiver's language
|
||||||
|
|
||||||
## [2.2.0] - 2025-02-10
|
## [v2.2.0] - 2025-02-10
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
@@ -650,7 +818,7 @@ and this project adheres to
|
|||||||
- 🐛(frontend) fix cursor breakline #609
|
- 🐛(frontend) fix cursor breakline #609
|
||||||
- 🐛(frontend) fix style pdf export #609
|
- 🐛(frontend) fix style pdf export #609
|
||||||
|
|
||||||
## [2.1.0] - 2025-01-29
|
## [v2.1.0] - 2025-01-29
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
@@ -679,14 +847,14 @@ and this project adheres to
|
|||||||
|
|
||||||
- 🔥(backend) remove "content" field from list serializer #516
|
- 🔥(backend) remove "content" field from list serializer #516
|
||||||
|
|
||||||
## [2.0.1] - 2025-01-17
|
## [v2.0.1] - 2025-01-17
|
||||||
|
|
||||||
## Fixed
|
## Fixed
|
||||||
|
|
||||||
- 🐛(frontend) share modal is shown when you don't have the abilities #557
|
- 🐛(frontend) share modal is shown when you don't have the abilities #557
|
||||||
- 🐛(frontend) title copy break app #564
|
- 🐛(frontend) title copy break app #564
|
||||||
|
|
||||||
## [2.0.0] - 2025-01-13
|
## [v2.0.0] - 2025-01-13
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
@@ -717,7 +885,7 @@ and this project adheres to
|
|||||||
- 🐛(frontend) hide search and create doc button if not authenticated #555
|
- 🐛(frontend) hide search and create doc button if not authenticated #555
|
||||||
- 🐛(backend) race condition creation issue #556
|
- 🐛(backend) race condition creation issue #556
|
||||||
|
|
||||||
## [1.10.0] - 2024-12-17
|
## [v1.10.0] - 2024-12-17
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
@@ -738,7 +906,7 @@ and this project adheres to
|
|||||||
- 🐛(frontend) update doc editor height #481
|
- 🐛(frontend) update doc editor height #481
|
||||||
- 💄(frontend) add doc search #485
|
- 💄(frontend) add doc search #485
|
||||||
|
|
||||||
## [1.9.0] - 2024-12-11
|
## [v1.9.0] - 2024-12-11
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
@@ -759,19 +927,19 @@ and this project adheres to
|
|||||||
- 🐛(frontend) Fix hidden menu on Firefox #468
|
- 🐛(frontend) Fix hidden menu on Firefox #468
|
||||||
- 🐛(backend) fix sanitize problem IA #490
|
- 🐛(backend) fix sanitize problem IA #490
|
||||||
|
|
||||||
## [1.8.2] - 2024-11-28
|
## [v1.8.2] - 2024-11-28
|
||||||
|
|
||||||
## Changed
|
## Changed
|
||||||
|
|
||||||
- ♻️(SW) change strategy html caching #460
|
- ♻️(SW) change strategy html caching #460
|
||||||
|
|
||||||
## [1.8.1] - 2024-11-27
|
## [v1.8.1] - 2024-11-27
|
||||||
|
|
||||||
## Fixed
|
## Fixed
|
||||||
|
|
||||||
- 🐛(frontend) link not clickable and flickering firefox #457
|
- 🐛(frontend) link not clickable and flickering firefox #457
|
||||||
|
|
||||||
## [1.8.0] - 2024-11-25
|
## [v1.8.0] - 2024-11-25
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
@@ -799,7 +967,7 @@ and this project adheres to
|
|||||||
- 🐛(frontend) users have view access when revoked #387
|
- 🐛(frontend) users have view access when revoked #387
|
||||||
- 🐛(frontend) fix placeholder editable when double clicks #454
|
- 🐛(frontend) fix placeholder editable when double clicks #454
|
||||||
|
|
||||||
## [1.7.0] - 2024-10-24
|
## [v1.7.0] - 2024-10-24
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
@@ -826,7 +994,7 @@ and this project adheres to
|
|||||||
|
|
||||||
- 🔥(helm) remove infra related codes #366
|
- 🔥(helm) remove infra related codes #366
|
||||||
|
|
||||||
## [1.6.0] - 2024-10-17
|
## [v1.6.0] - 2024-10-17
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
@@ -848,13 +1016,13 @@ and this project adheres to
|
|||||||
- 🐛(backend) fix nginx docker container #340
|
- 🐛(backend) fix nginx docker container #340
|
||||||
- 🐛(frontend) fix copy paste firefox #353
|
- 🐛(frontend) fix copy paste firefox #353
|
||||||
|
|
||||||
## [1.5.1] - 2024-10-10
|
## [v1.5.1] - 2024-10-10
|
||||||
|
|
||||||
## Fixed
|
## Fixed
|
||||||
|
|
||||||
- 🐛(db) fix users duplicate #316
|
- 🐛(db) fix users duplicate #316
|
||||||
|
|
||||||
## [1.5.0] - 2024-10-09
|
## [v1.5.0] - 2024-10-09
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
@@ -882,7 +1050,7 @@ and this project adheres to
|
|||||||
- 🔧(backend) fix configuration to avoid different ssl warning #297
|
- 🔧(backend) fix configuration to avoid different ssl warning #297
|
||||||
- 🐛(frontend) fix editor break line not working #302
|
- 🐛(frontend) fix editor break line not working #302
|
||||||
|
|
||||||
## [1.4.0] - 2024-09-17
|
## [v1.4.0] - 2024-09-17
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
@@ -902,7 +1070,7 @@ and this project adheres to
|
|||||||
- 🐛(backend) Fix forcing ID when creating a document via API endpoint #234
|
- 🐛(backend) Fix forcing ID when creating a document via API endpoint #234
|
||||||
- 🐛 Rebuild frontend dev container from makefile #248
|
- 🐛 Rebuild frontend dev container from makefile #248
|
||||||
|
|
||||||
## [1.3.0] - 2024-09-05
|
## [v1.3.0] - 2024-09-05
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
@@ -926,14 +1094,14 @@ and this project adheres to
|
|||||||
|
|
||||||
- 🔥(frontend) remove saving modal #213
|
- 🔥(frontend) remove saving modal #213
|
||||||
|
|
||||||
## [1.2.1] - 2024-08-23
|
## [v1.2.1] - 2024-08-23
|
||||||
|
|
||||||
## Changed
|
## Changed
|
||||||
|
|
||||||
- ♻️ Change ordering docs datagrid #195
|
- ♻️ Change ordering docs datagrid #195
|
||||||
- 🔥(helm) use scaleway email #194
|
- 🔥(helm) use scaleway email #194
|
||||||
|
|
||||||
## [1.2.0] - 2024-08-22
|
## [v1.2.0] - 2024-08-22
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
@@ -959,7 +1127,7 @@ and this project adheres to
|
|||||||
|
|
||||||
- 🔥(helm) remove htaccess #181
|
- 🔥(helm) remove htaccess #181
|
||||||
|
|
||||||
## [1.1.0] - 2024-07-15
|
## [v1.1.0] - 2024-07-15
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
@@ -974,7 +1142,7 @@ and this project adheres to
|
|||||||
- ♻️(frontend) create a doc from a modal #132
|
- ♻️(frontend) create a doc from a modal #132
|
||||||
- ♻️(frontend) manage members from the share modal #140
|
- ♻️(frontend) manage members from the share modal #140
|
||||||
|
|
||||||
## [1.0.0] - 2024-07-02
|
## [v1.0.0] - 2024-07-02
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
@@ -1012,14 +1180,21 @@ and this project adheres to
|
|||||||
- 💚(CI) Remove trigger workflow on push tags on CI (#68)
|
- 💚(CI) Remove trigger workflow on push tags on CI (#68)
|
||||||
- 🔥(frontend) Remove coming soon page (#121)
|
- 🔥(frontend) Remove coming soon page (#121)
|
||||||
|
|
||||||
## [0.1.0] - 2024-05-24
|
## [v0.1.0] - 2024-05-24
|
||||||
|
|
||||||
## Added
|
## Added
|
||||||
|
|
||||||
- ✨(frontend) Coming Soon page (#67)
|
- ✨(frontend) Coming Soon page (#67)
|
||||||
- 🚀 Impress, project to manage your documents easily and collaboratively.
|
- 🚀 Impress, project to manage your documents easily and collaboratively.
|
||||||
|
|
||||||
[unreleased]: https://github.com/suitenumerique/docs/compare/v4.4.0...main
|
[unreleased]: https://github.com/suitenumerique/docs/compare/v4.8.3...main
|
||||||
|
[v4.8.3]: https://github.com/suitenumerique/docs/releases/v4.8.3
|
||||||
|
[v4.8.2]: https://github.com/suitenumerique/docs/releases/v4.8.2
|
||||||
|
[v4.8.1]: https://github.com/suitenumerique/docs/releases/v4.8.1
|
||||||
|
[v4.8.0]: https://github.com/suitenumerique/docs/releases/v4.8.0
|
||||||
|
[v4.7.0]: https://github.com/suitenumerique/docs/releases/v4.7.0
|
||||||
|
[v4.6.0]: https://github.com/suitenumerique/docs/releases/v4.6.0
|
||||||
|
[v4.5.0]: https://github.com/suitenumerique/docs/releases/v4.5.0
|
||||||
[v4.4.0]: https://github.com/suitenumerique/docs/releases/v4.4.0
|
[v4.4.0]: https://github.com/suitenumerique/docs/releases/v4.4.0
|
||||||
[v4.3.0]: https://github.com/suitenumerique/docs/releases/v4.3.0
|
[v4.3.0]: https://github.com/suitenumerique/docs/releases/v4.3.0
|
||||||
[v4.2.0]: https://github.com/suitenumerique/docs/releases/v4.2.0
|
[v4.2.0]: https://github.com/suitenumerique/docs/releases/v4.2.0
|
||||||
@@ -1056,12 +1231,12 @@ and this project adheres to
|
|||||||
[v1.8.0]: https://github.com/suitenumerique/docs/releases/v1.8.0
|
[v1.8.0]: https://github.com/suitenumerique/docs/releases/v1.8.0
|
||||||
[v1.7.0]: https://github.com/suitenumerique/docs/releases/v1.7.0
|
[v1.7.0]: https://github.com/suitenumerique/docs/releases/v1.7.0
|
||||||
[v1.6.0]: https://github.com/suitenumerique/docs/releases/v1.6.0
|
[v1.6.0]: https://github.com/suitenumerique/docs/releases/v1.6.0
|
||||||
[1.5.1]: https://github.com/suitenumerique/docs/releases/v1.5.1
|
[v1.5.1]: https://github.com/suitenumerique/docs/releases/v1.5.1
|
||||||
[1.5.0]: https://github.com/suitenumerique/docs/releases/v1.5.0
|
[v1.5.0]: https://github.com/suitenumerique/docs/releases/v1.5.0
|
||||||
[1.4.0]: https://github.com/suitenumerique/docs/releases/v1.4.0
|
[v1.4.0]: https://github.com/suitenumerique/docs/releases/v1.4.0
|
||||||
[1.3.0]: https://github.com/suitenumerique/docs/releases/v1.3.0
|
[v1.3.0]: https://github.com/suitenumerique/docs/releases/v1.3.0
|
||||||
[1.2.1]: https://github.com/suitenumerique/docs/releases/v1.2.1
|
[v1.2.1]: https://github.com/suitenumerique/docs/releases/v1.2.1
|
||||||
[1.2.0]: https://github.com/suitenumerique/docs/releases/v1.2.0
|
[v1.2.0]: https://github.com/suitenumerique/docs/releases/v1.2.0
|
||||||
[1.1.0]: https://github.com/suitenumerique/docs/releases/v1.1.0
|
[v1.1.0]: https://github.com/suitenumerique/docs/releases/v1.1.0
|
||||||
[1.0.0]: https://github.com/suitenumerique/docs/releases/v1.0.0
|
[v1.0.0]: https://github.com/suitenumerique/docs/releases/v1.0.0
|
||||||
[0.1.0]: https://github.com/suitenumerique/docs/releases/v0.1.0
|
[v0.1.0]: https://github.com/suitenumerique/docs/releases/v0.1.0
|
||||||
|
|||||||
@@ -95,8 +95,8 @@ Thank you for your contributions! 👍
|
|||||||
|
|
||||||
## Contribute to BlockNote
|
## Contribute to BlockNote
|
||||||
We use [BlockNote](https://www.blocknotejs.org/) for the text editing features of Docs.
|
We use [BlockNote](https://www.blocknotejs.org/) for the text editing features of Docs.
|
||||||
If you find and issue with the editor you can [report it](https://github.com/TypeCellOS/BlockNote/issues) directly on their repository.
|
If you find an issue with the editor you can [report it](https://github.com/TypeCellOS/BlockNote/issues) directly on their repository.
|
||||||
|
|
||||||
Please consider contributing to BlockNotejs; as a library, it's useful to many projects, not just Docs.
|
Please consider contributing to BlockNotejs; as a library, it's useful to many projects, not just Docs.
|
||||||
|
|
||||||
The project is licended with Mozilla Public License Version 2.0 but be aware that [XL packages](https://github.com/TypeCellOS/BlockNote/blob/main/packages/xl-docx-exporter/LICENSE) are dual licenced with GNU AFFERO GENERAL PUBLIC LICENCE Version 3 and proprietary licence if you are [sponsor](https://www.blocknotejs.org/pricing).
|
The project is licensed with Mozilla Public License Version 2.0 but be aware that [XL packages](https://github.com/TypeCellOS/BlockNote/blob/main/packages/xl-docx-exporter/LICENSE) are dual licensed with GNU AFFERO GENERAL PUBLIC LICENSE Version 3 and proprietary license if you are a [sponsor](https://www.blocknotejs.org/pricing).
|
||||||
|
|||||||
42
Dockerfile
@@ -14,13 +14,6 @@ FROM base AS back-builder
|
|||||||
|
|
||||||
WORKDIR /builder
|
WORKDIR /builder
|
||||||
|
|
||||||
# Install Rust and Cargo using Alpine's package manager
|
|
||||||
RUN apk add --no-cache \
|
|
||||||
build-base \
|
|
||||||
libffi-dev \
|
|
||||||
rust \
|
|
||||||
cargo
|
|
||||||
|
|
||||||
# Copy required python dependencies
|
# Copy required python dependencies
|
||||||
COPY ./src/backend /builder
|
COPY ./src/backend /builder
|
||||||
|
|
||||||
@@ -36,7 +29,7 @@ COPY ./src/mail /mail/app
|
|||||||
WORKDIR /mail/app
|
WORKDIR /mail/app
|
||||||
|
|
||||||
RUN yarn install --frozen-lockfile && \
|
RUN yarn install --frozen-lockfile && \
|
||||||
yarn build
|
yarn build
|
||||||
|
|
||||||
|
|
||||||
# ---- static link collector ----
|
# ---- static link collector ----
|
||||||
@@ -58,7 +51,7 @@ WORKDIR /app
|
|||||||
|
|
||||||
# collectstatic
|
# collectstatic
|
||||||
RUN DJANGO_CONFIGURATION=Build \
|
RUN DJANGO_CONFIGURATION=Build \
|
||||||
python manage.py collectstatic --noinput
|
python manage.py collectstatic --noinput
|
||||||
|
|
||||||
# Replace duplicated file by a symlink to decrease the overall size of the
|
# Replace duplicated file by a symlink to decrease the overall size of the
|
||||||
# final image
|
# final image
|
||||||
@@ -81,7 +74,7 @@ RUN apk add --no-cache \
|
|||||||
pango \
|
pango \
|
||||||
shared-mime-info
|
shared-mime-info
|
||||||
|
|
||||||
RUN wget https://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types -O /etc/mime.types
|
RUN wget https://raw.githubusercontent.com/suitenumerique/django-lasuite/refs/heads/main/assets/conf/mime.types -O /etc/mime.types
|
||||||
|
|
||||||
# Copy entrypoint
|
# Copy entrypoint
|
||||||
COPY ./docker/files/usr/local/bin/entrypoint /usr/local/bin/entrypoint
|
COPY ./docker/files/usr/local/bin/entrypoint /usr/local/bin/entrypoint
|
||||||
@@ -98,9 +91,9 @@ COPY --from=back-builder /install /usr/local
|
|||||||
# when python is upgraded and the path to the certificate changes.
|
# when python is upgraded and the path to the certificate changes.
|
||||||
# The space between print and the ( is intended, otherwise the git lint fails
|
# The space between print and the ( is intended, otherwise the git lint fails
|
||||||
RUN mkdir /cert && \
|
RUN mkdir /cert && \
|
||||||
path=`python -c 'import certifi;print (certifi.where())'` && \
|
path=`python -c 'import certifi;print (certifi.where())'` && \
|
||||||
mv $path /cert/ && \
|
mv $path /cert/ && \
|
||||||
ln -s /cert/cacert.pem $path
|
ln -s /cert/cacert.pem $path
|
||||||
|
|
||||||
# Copy impress application (see .dockerignore)
|
# Copy impress application (see .dockerignore)
|
||||||
COPY ./src/backend /app/
|
COPY ./src/backend /app/
|
||||||
@@ -109,7 +102,7 @@ WORKDIR /app
|
|||||||
|
|
||||||
# Generate compiled translation messages
|
# Generate compiled translation messages
|
||||||
RUN DJANGO_CONFIGURATION=Build \
|
RUN DJANGO_CONFIGURATION=Build \
|
||||||
python manage.py compilemessages
|
python manage.py compilemessages
|
||||||
|
|
||||||
|
|
||||||
# We wrap commands run in this container by the following entrypoint that
|
# We wrap commands run in this container by the following entrypoint that
|
||||||
@@ -138,7 +131,7 @@ USER ${DOCKER_USER}
|
|||||||
# Target database host (e.g. database engine following docker compose services
|
# Target database host (e.g. database engine following docker compose services
|
||||||
# name) & port
|
# name) & port
|
||||||
ENV DB_HOST=postgresql \
|
ENV DB_HOST=postgresql \
|
||||||
DB_PORT=5432
|
DB_PORT=5432
|
||||||
|
|
||||||
# Run django development server
|
# Run django development server
|
||||||
CMD ["python", "manage.py", "runserver", "0.0.0.0:8000"]
|
CMD ["python", "manage.py", "runserver", "0.0.0.0:8000"]
|
||||||
@@ -151,7 +144,7 @@ RUN rm -rf /var/cache/apk/*
|
|||||||
|
|
||||||
ARG IMPRESS_STATIC_ROOT=/data/static
|
ARG IMPRESS_STATIC_ROOT=/data/static
|
||||||
|
|
||||||
# Gunicorn
|
# Gunicorn - not used by default but configuration file is provided
|
||||||
RUN mkdir -p /usr/local/etc/gunicorn
|
RUN mkdir -p /usr/local/etc/gunicorn
|
||||||
COPY docker/files/usr/local/etc/gunicorn/impress.py /usr/local/etc/gunicorn/impress.py
|
COPY docker/files/usr/local/etc/gunicorn/impress.py /usr/local/etc/gunicorn/impress.py
|
||||||
|
|
||||||
@@ -165,5 +158,18 @@ COPY --from=link-collector ${IMPRESS_STATIC_ROOT} ${IMPRESS_STATIC_ROOT}
|
|||||||
# Copy impress mails
|
# Copy impress mails
|
||||||
COPY --from=mail-builder /mail/backend/core/templates/mail /app/core/templates/mail
|
COPY --from=mail-builder /mail/backend/core/templates/mail /app/core/templates/mail
|
||||||
|
|
||||||
# The default command runs gunicorn WSGI server in impress's main module
|
# The default command runs uvicorn ASGI server in impress's main module
|
||||||
CMD ["gunicorn", "-c", "/usr/local/etc/gunicorn/impress.py", "impress.wsgi:application"]
|
# WEB_CONCURRENCY: number of workers to run <=> --workers=4
|
||||||
|
ENV WEB_CONCURRENCY=4
|
||||||
|
CMD [\
|
||||||
|
"uvicorn",\
|
||||||
|
"--app-dir=/app",\
|
||||||
|
"--host=0.0.0.0",\
|
||||||
|
"--timeout-graceful-shutdown=300",\
|
||||||
|
"--limit-max-requests=20000",\
|
||||||
|
"--lifespan=off",\
|
||||||
|
"impress.asgi:application"\
|
||||||
|
]
|
||||||
|
|
||||||
|
# To run using gunicorn WSGI server use this instead:
|
||||||
|
#CMD ["gunicorn", "-c", "/usr/local/etc/gunicorn/conversations.py", "impress.wsgi:application"]
|
||||||
|
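As a side note on the new default command (not part of the diff itself): uvicorn reads `WEB_CONCURRENCY` when `--workers` is not passed, and the gunicorn configuration file is still shipped in the image, so either server can be chosen at run time. A minimal sketch, where the image name and port mapping are placeholders:

```bash
# tune the number of uvicorn workers without rebuilding the image
docker run -e WEB_CONCURRENCY=8 -p 8000:8000 <docs-backend-image>

# or fall back to the gunicorn WSGI server whose config file remains in the image
docker run -p 8000:8000 <docs-backend-image> \
  gunicorn -c /usr/local/etc/gunicorn/impress.py impress.wsgi:application
```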
|||||||
13
Makefile
@@ -79,10 +79,16 @@ create-env-local-files:
|
|||||||
@touch env.d/development/kc_postgresql.local
|
@touch env.d/development/kc_postgresql.local
|
||||||
.PHONY: create-env-local-files
|
.PHONY: create-env-local-files
|
||||||
|
|
||||||
|
generate-secret-keys:
|
||||||
|
generate-secret-keys: ## generate secret keys to be stored in common.local
|
||||||
|
@bin/generate-oidc-store-refresh-token-key.sh
|
||||||
|
.PHONY: generate-secret-keys
|
||||||
|
|
||||||
pre-bootstrap: \
|
pre-bootstrap: \
|
||||||
data/media \
|
data/media \
|
||||||
data/static \
|
data/static \
|
||||||
create-env-local-files
|
create-env-local-files \
|
||||||
|
generate-secret-keys
|
||||||
.PHONY: pre-bootstrap
|
.PHONY: pre-bootstrap
|
||||||
|
|
||||||
post-bootstrap: \
|
post-bootstrap: \
|
||||||
@@ -156,6 +162,10 @@ endif
|
|||||||
@echo ""
|
@echo ""
|
||||||
.PHONY: post-beautiful-bootstrap
|
.PHONY: post-beautiful-bootstrap
|
||||||
|
|
||||||
|
create-docker-network: ## create the docker network if it doesn't exist
|
||||||
|
@docker network create lasuite-network || true
|
||||||
|
.PHONY: create-docker-network
|
||||||
|
|
||||||
bootstrap: ## Prepare the project for local development
|
bootstrap: ## Prepare the project for local development
|
||||||
bootstrap: \
|
bootstrap: \
|
||||||
pre-beautiful-bootstrap \
|
pre-beautiful-bootstrap \
|
||||||
@@ -213,6 +223,7 @@ logs: ## display app-dev logs (follow mode)
|
|||||||
.PHONY: logs
|
.PHONY: logs
|
||||||
|
|
||||||
run-backend: ## Start only the backend application and all needed services
|
run-backend: ## Start only the backend application and all needed services
|
||||||
|
@$(MAKE) create-docker-network
|
||||||
@$(COMPOSE) up --force-recreate -d docspec
|
@$(COMPOSE) up --force-recreate -d docspec
|
||||||
@$(COMPOSE) up --force-recreate -d celery-dev
|
@$(COMPOSE) up --force-recreate -d celery-dev
|
||||||
@$(COMPOSE) up --force-recreate -d y-provider-development
|
@$(COMPOSE) up --force-recreate -d y-provider-development
|
||||||
|
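For context, a short sketch of how the new targets fit into the existing workflow; every rule below already exists in this Makefile:

```bash
make bootstrap FLUSH_ARGS='--no-input'  # pre-bootstrap now also runs generate-secret-keys
make run-backend                        # now creates the shared docker network before starting backend services
```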
|||||||
289
README.md
@@ -3,226 +3,243 @@
|
|||||||
<img alt="Docs" src="/docs/assets/banner-docs.png" width="100%" />
|
<img alt="Docs" src="/docs/assets/banner-docs.png" width="100%" />
|
||||||
</a>
|
</a>
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<a href="https://github.com/suitenumerique/docs/stargazers/">
|
<a href="https://github.com/suitenumerique/docs/stargazers/">
|
||||||
<img src="https://img.shields.io/github/stars/suitenumerique/docs" alt="">
|
<img src="https://img.shields.io/github/stars/suitenumerique/docs" alt="">
|
||||||
</a>
|
</a>
|
||||||
<a href='https://github.com/suitenumerique/docs/blob/main/CONTRIBUTING.md'><img alt='PRs Welcome' src='https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=shields'/></a>
|
<a href="https://github.com/suitenumerique/docs/blob/main/CONTRIBUTING.md">
|
||||||
<img alt="GitHub commit activity" src="https://img.shields.io/github/commit-activity/m/suitenumerique/docs"/>
|
<img alt="PRs Welcome" src="https://img.shields.io/badge/PRs-welcome-brightgreen.svg"/>
|
||||||
<img alt="GitHub closed issues" src="https://img.shields.io/github/issues-closed/suitenumerique/docs"/>
|
</a>
|
||||||
<a href="https://github.com/suitenumerique/docs/blob/main/LICENSE">
|
<a href="https://github.com/suitenumerique/docs/blob/main/LICENSE">
|
||||||
<img alt="MIT License" src="https://img.shields.io/github/license/suitenumerique/docs"/>
|
<img alt="MIT License" src="https://img.shields.io/github/license/suitenumerique/docs"/>
|
||||||
</a>
|
|
||||||
</p>
|
|
||||||
<p align="center">
|
|
||||||
<a href="https://matrix.to/#/#docs-official:matrix.org">
|
|
||||||
Chat on Matrix
|
|
||||||
</a> - <a href="/docs/">
|
|
||||||
Documentation
|
|
||||||
</a> - <a href="#getting-started-">
|
|
||||||
Getting started
|
|
||||||
</a> - <a href="mailto:docs@numerique.gouv.fr">
|
|
||||||
Reach out
|
|
||||||
</a>
|
</a>
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
# La Suite Docs : Collaborative Text Editing
|
<p align="center">
|
||||||
Docs, where your notes can become knowledge through live collaboration.
|
<a href="https://matrix.to/#/#docs-official:matrix.org">Chat on Matrix</a> •
|
||||||
|
<a href="/docs/">Documentation</a> •
|
||||||
|
<a href="#try-docs">Try Docs</a> •
|
||||||
|
<a href="mailto:docs@numerique.gouv.fr">Contact us</a>
|
||||||
|
</p>
|
||||||
|
|
||||||
<img src="/docs/assets/docs_live_collaboration_light.gif" width="100%" align="center"/>
|
# La Suite Docs: Collaborative Text Editing
|
||||||
|
|
||||||
## Why use Docs ❓
|
**Docs, where your notes can become knowledge through live collaboration.**
|
||||||
Docs is a collaborative text editor designed to address common challenges in knowledge building and sharing.
|
|
||||||
|
|
||||||
### Write
|
Docs is an open-source collaborative editor that helps teams write, organize, and share knowledge together - in real time.
|
||||||
* 😌 Get simple, accessible online editing for your team.
|
|
||||||
* 💅 Create clean documents with beautiful formatting options.
|
|
||||||
* 🖌️ Focus on your content using either the in-line editor, or [the Markdown syntax](https://www.markdownguide.org/basic-syntax/).
|
|
||||||
* 🧱 Quickly design your page thanks to the many block types, accessible from the `/` slash commands, as well as keyboard shortcuts.
|
|
||||||
* 🔌 Write offline! Your edits will be synced once you're back online.
|
|
||||||
* ✨ Save time thanks to our AI actions, such as rephrasing, summarizing, fixing typos, translating, etc. You can even turn your selected text into a prompt!
|
|
||||||
|
|
||||||
### Work together
|

|
||||||
* 🤝 Enjoy live editing! See your team collaborate in real time.
|
|
||||||
* 🔒 Keep your information secure thanks to granular access control. Only share with the right people.
|
|
||||||
* 📑 Export your content in multiple formats (`.odt`, `.docx`, `.pdf`) with customizable templates.
|
|
||||||
* 📚 Turn your team's collaborative work into organized knowledge with Subpages.
|
|
||||||
|
|
||||||
### Self-host
|
|
||||||
|
|
||||||
#### 🚀 Docs is easy to install on your own servers
|
## What is Docs?
|
||||||
We use Kubernetes for our [production instance](https://docs.numerique.gouv.fr/) but also support Docker Compose. The community contributed a couple other methods (Nix, YunoHost etc.) check out the [docs](/docs/installation/README.md) to get detailed instructions and examples.
|
|
||||||
|
|
||||||
#### 🌍 Known instances
|
Docs is an open-source alternative to tools like Notion or Google Docs, focused on:
|
||||||
We hope to see many more, here is an incomplete list of public Docs instances. Feel free to make a PR to add ones that are not listed below🙏
|
|
||||||
|
|
||||||
| Url | Org | Public |
|
- Real-time collaboration
|
||||||
| --- | --- | ------- |
|
- Clean, structured documents
|
||||||
| [docs.numerique.gouv.fr](https://docs.numerique.gouv.fr/) | DINUM | French public agents working for the central administration and the extended public sphere. ProConnect is required to login in or sign up|
|
- Knowledge organization
|
||||||
| [docs.suite.anct.gouv.fr](https://docs.suite.anct.gouv.fr/) | ANCT | French public agents working for the territorial administration and the extended public sphere. ProConnect is required to login in or sign up|
|
- Data ownership & self-hosting
|
||||||
| [notes.demo.opendesk.eu](https://notes.demo.opendesk.eu) | ZenDiS | Demo instance of OpenDesk. Request access to get credentials |
|
|
||||||
| [notes.liiib.re](https://notes.liiib.re/) | lasuite.coop | Free and open demo to all. Content and accounts are reset after one month |
|
|
||||||
| [docs.federated.nexus](https://docs.federated.nexus/) | federated.nexus | Public instance, but you have to [sign up for a Federated Nexus account](https://federated.nexus/register/). |
|
|
||||||
| [docs.demo.mosacloud.eu](https://docs.demo.mosacloud.eu/) | mosa.cloud | Demo instance of mosa.cloud, a dutch company providing services around La Suite apps. |
|
|
||||||
|
|
||||||
#### ⚠️ Advanced features
|
***Built for public organizations, companies, and open communities.***
|
||||||
For some advanced features (ex: Export as PDF) Docs relies on XL packages from BlockNote. These are licenced under GPL and are not MIT compatible. You can perfectly use Docs without these packages by setting the environment variable `PUBLISH_AS_MIT` to true. That way you'll build an image of the application without the features that are not MIT compatible. Read the [environment variables documentation](/docs/env.md) for more information.
|
|
||||||
|
|
||||||
## Getting started 🔧
|
## Why use Docs?
|
||||||
|
|
||||||
### Test it
|
### Writing
|
||||||
|
|
||||||
You can test Docs on your browser by visiting this [demo document](https://impress-preprod.beta.numerique.gouv.fr/docs/6ee5aac4-4fb9-457d-95bf-bb56c2467713/)
|
- Rich-text & Markdown editing
|
||||||
|
- Slash commands & block system
|
||||||
|
- Beautiful formatting
|
||||||
|
- Offline editing
|
||||||
|
- Optional AI writing helpers (rewrite, summarize, translate, fix typos)
|
||||||
|
|
||||||
### Run Docs locally
|
### Collaboration
|
||||||
|
|
||||||
> ⚠️ The methods described below for running Docs locally is **for testing purposes only**. It is based on building Docs using [Minio](https://min.io/) as an S3-compatible storage solution. Of course you can choose any S3-compatible storage solution.
|
- Live cursors & presence
|
||||||
|
- Comments & sharing
|
||||||
|
- Granular access control
|
||||||
|
|
||||||
**Prerequisite**
|
### Knowledge management
|
||||||
|
|
||||||
Make sure you have a recent version of Docker and [Docker Compose](https://docs.docker.com/compose/install) installed on your laptop, then type:
|
- Subpages & hierarchy
|
||||||
|
- Searchable content
|
||||||
|
|
||||||
```shellscript
|
### Export/Import & interoperability
|
||||||
$ docker -v
|
|
||||||
|
|
||||||
Docker version 20.10.2, build 2291f61
|
- Import from `.docx` and `.md`
|
||||||
|
- Export to `.docx`, `.odt`, `.pdf`
|
||||||
|
|
||||||
$ docker compose version
|
## Try Docs
|
||||||
|
|
||||||
Docker Compose version v2.32.4
|
Experience Docs instantly - no installation required.
|
||||||
|
|
||||||
|
- 🔗 [Open a live demo document][demo]
|
||||||
|
- 🌍 [Browse public instances][instances]
|
||||||
|
|
||||||
|
[demo]: https://docs.la-suite.eu/docs/9137bbb5-3e8a-4ff7-8a36-fcc4e8bd57f4/
|
||||||
|
[instances]: /docs/instances.md
|
||||||
|
|
||||||
|
## Self-hosting
|
||||||
|
|
||||||
|
Docs supports Kubernetes, Docker Compose, and community-provided methods such as Nix and YunoHost.
|
||||||
|
|
||||||
|
Get started with self-hosting: [Installation guide](/docs/installation/README.md)
|
||||||
|
|
||||||
|
> [!WARNING]
|
||||||
|
> Some advanced features (for example: `Export as PDF`) rely on XL packages from BlockNote.
|
||||||
|
> These packages are licensed under GPL and are **not MIT-compatible**.
|
||||||
|
>
|
||||||
|
> You can run Docs **without these packages** by building with:
|
||||||
|
>
|
||||||
|
> ```bash
|
||||||
|
> PUBLISH_AS_MIT=true
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> This builds an image of Docs without non-MIT features.
|
||||||
|
>
|
||||||
|
> More details can be found in [environment variables](/docs/env.md)
|
||||||
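How the flag is wired into a build depends on your setup; as a hedged sketch, assuming the variable is read when the frontend image is built (check [environment variables](/docs/env.md) and your build pipeline for the authoritative mechanism):

```bash
# sketch: build the frontend image without the GPL-licensed XL features
# (frontend-dev is the compose service used for development in this repo)
export PUBLISH_AS_MIT=true
docker compose build frontend-dev
```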
|
|
||||||
|
## Local Development (for contributors)
|
||||||
|
|
||||||
|
Run Docs locally for development and testing.
|
||||||
|
|
||||||
|
> [!WARNING]
|
||||||
|
> This setup is intended **for development and testing only**.
|
||||||
|
> It uses Minio as an S3-compatible storage backend, but any S3-compatible service can be used.
|
||||||
|
|
||||||
|
### Prerequisites
|
||||||
|
|
||||||
|
- Docker
|
||||||
|
- Docker Compose
|
||||||
|
- GNU Make
|
||||||
|
|
||||||
|
Verify installation:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker -v
|
||||||
|
docker compose version
|
||||||
```
|
```
|
||||||
|
|
||||||
> ⚠️ You may need to run the following commands with `sudo`, but this can be avoided by adding your user to the local `docker` group.
|
> If you encounter permission errors, you may need to use `sudo`, or add your user to the `docker` group.
|
||||||
|
|
||||||
**Project bootstrap**
|
### Bootstrap the project
|
||||||
|
|
||||||
The easiest way to start working on the project is to use [GNU Make](https://www.gnu.org/software/make/):
|
The easiest way to start is using GNU Make:
|
||||||
|
|
||||||
```shellscript
|
```bash
|
||||||
$ make bootstrap FLUSH_ARGS='--no-input'
|
make bootstrap FLUSH_ARGS='--no-input'
|
||||||
```
|
```
|
||||||
|
|
||||||
This command builds the `app-dev` and `frontend-dev` containers, installs dependencies, performs database migrations and compiles translations. It's a good idea to use this command each time you are pulling code from the project repository to avoid dependency-related or migration-related issues.
|
This builds the `app-dev` and `frontend-dev` containers, installs dependencies, runs database migrations, and compiles translations.
|
||||||
|
|
||||||
Your Docker services should now be up and running 🎉
|
It is recommended to run this command after pulling new code.
|
||||||
|
|
||||||
You can access the project by going to <http://localhost:3000>.
|
Start services:
|
||||||
|
|
||||||
You will be prompted to log in. The default credentials are:
|
|
||||||
|
|
||||||
|
```bash
|
||||||
|
make run
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Open <http://localhost:3000>
|
||||||
|
|
||||||
|
Default credentials (development only):
|
||||||
|
|
||||||
|
```md
|
||||||
username: impress
|
username: impress
|
||||||
password: impress
|
password: impress
|
||||||
```
|
```
|
||||||
|
|
||||||
📝 Note that if you need to run them afterwards, you can use the eponymous Make rule:
|
### Frontend development mode
|
||||||
|
|
||||||
```shellscript
|
For frontend work, running outside Docker is often more convenient:
|
||||||
$ make run
|
|
||||||
|
```bash
|
||||||
|
make frontend-development-install
|
||||||
|
make run-frontend-development
|
||||||
```
|
```
|
||||||
|
|
||||||
⚠️ For the frontend developer, it is often better to run the frontend in development mode locally.
|
### Backend only
|
||||||
|
|
||||||
To do so, install the frontend dependencies with the following command:
|
Start all services except the frontend container:
|
||||||
|
|
||||||
```shellscript
|
```bash
|
||||||
$ make frontend-development-install
|
make run-backend
|
||||||
```
|
```
|
||||||
|
|
||||||
And run the frontend locally in development mode with the following command:
|
### Tests & Linting
|
||||||
|
|
||||||
```shellscript
|
```bash
|
||||||
$ make run-frontend-development
|
make frontend-test
|
||||||
|
make frontend-lint
|
||||||
```
|
```
|
||||||
|
|
||||||
To start all the services, except the frontend container, you can use the following command:
|
Backend tests can be run without Docker. This is useful when configuring PyCharm or VSCode to run them.
|
||||||
|
Running tests outside Docker requires overriding some URL and port values that differ inside and outside of
|
||||||
|
Docker. `env.d/development/common` contains all variables; some of them must be overridden by those in
|
||||||
|
`env.d/development/common.test`.
|
||||||
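A minimal sketch of what that looks like in practice, assuming pytest is the backend test runner; the env file names come from the paragraph above:

```bash
# export every variable from both files, letting common.test override common
set -a
source env.d/development/common
source env.d/development/common.test
set +a
cd src/backend && pytest
```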
|
|
||||||
```shellscript
|
### Demo content
|
||||||
$ make run-backend
|
|
||||||
|
Create a basic demo site:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
make demo
|
||||||
```
|
```
|
||||||
|
|
||||||
To execute frontend tests & linting only
|
### More Make targets
|
||||||
```shellscript
|
|
||||||
$ make frontend-test
|
To check all available Make rules:
|
||||||
$ make frontend-lint
|
|
||||||
|
```bash
|
||||||
|
make help
|
||||||
```
|
```
|
||||||
|
|
||||||
**Adding content**
|
### Django admin
|
||||||
|
|
||||||
You can create a basic demo site by running this command:
|
Create a superuser:
|
||||||
|
|
||||||
```shellscript
|
```bash
|
||||||
$ make demo
|
make superuser
|
||||||
```
|
```
|
||||||
|
|
||||||
Finally, you can check all available Make rules using this command:
|
Admin UI: <http://localhost:8071/admin>
|
||||||
|
|
||||||
```shellscript
|
## Contributing
|
||||||
$ make help
|
|
||||||
```
|
|
||||||
|
|
||||||
**Django admin**
|
This project is community-driven and PRs are welcome.
|
||||||
|
|
||||||
You can access the Django admin site at:
|
- [Contribution guide](CONTRIBUTING.md)
|
||||||
|
- [Translations](https://crowdin.com/project/lasuite-docs)
|
||||||
|
- [Chat with us!](https://matrix.to/#/#docs-official:matrix.org)
|
||||||
|
|
||||||
<http://localhost:8071/admin>.
|
## Roadmap
|
||||||
|
|
||||||
You first need to create a superuser account:
|
Curious where Docs is headed?
|
||||||
|
|
||||||
```shellscript
|
Explore upcoming features, priorities and long-term direction on our [public roadmap](https://docs.numerique.gouv.fr/docs/d1d3788e-c619-41ff-abe8-2d079da2f084/).
|
||||||
$ make superuser
|
|
||||||
```
|
|
||||||
|
|
||||||
## Feedback 🙋♂️🙋♀️
|
|
||||||
|
|
||||||
We'd love to hear your thoughts, and hear about your experiments, so come and say hi on [Matrix](https://matrix.to/#/#docs-official:matrix.org).
|
|
||||||
|
|
||||||
## Roadmap 💡
|
|
||||||
|
|
||||||
Want to know where the project is headed? [🗺️ Checkout our roadmap](https://github.com/orgs/numerique-gouv/projects/13/views/11)
|
|
||||||
|
|
||||||
## License 📝
|
## License 📝
|
||||||
|
|
||||||
This work is released under the MIT License (see [LICENSE](https://github.com/suitenumerique/docs/blob/main/LICENSE)).
|
This work is released under the MIT License (see [LICENSE](https://github.com/suitenumerique/docs/blob/main/LICENSE)).
|
||||||
|
|
||||||
While Docs is a public-driven initiative, our license choice is an invitation for private sector actors to use, sell and contribute to the project.
|
While Docs is a public-driven initiative, our license choice is an invitation for private sector actors to use, sell and contribute to the project.
|
||||||
|
|
||||||
## Contributing 🙌
|
|
||||||
|
|
||||||
This project is intended to be community-driven, so please, do not hesitate to [get in touch](https://matrix.to/#/#docs-official:matrix.org) if you have any question related to our implementation or design decisions.
|
|
||||||
|
|
||||||
You can help us with translations on [Crowdin](https://crowdin.com/project/lasuite-docs).
|
|
||||||
|
|
||||||
If you intend to make pull requests, see [CONTRIBUTING](https://github.com/suitenumerique/docs/blob/main/CONTRIBUTING.md) for guidelines.
|
|
||||||
|
|
||||||
## Directory structure:
|
|
||||||
|
|
||||||
```markdown
|
|
||||||
docs
|
|
||||||
├── bin - executable scripts or binaries that are used for various tasks, such as setup scripts, utility scripts, or custom commands.
|
|
||||||
├── crowdin - for crowdin translations, a tool or service that helps manage translations for the project.
|
|
||||||
├── docker - Dockerfiles and related configuration files used to build Docker images for the project. These images can be used for development, testing, or production environments.
|
|
||||||
├── docs - documentation for the project, including user guides, API documentation, and other helpful resources.
|
|
||||||
├── env.d/development - environment-specific configuration files for the development environment. These files might include environment variables, configuration settings, or other setup files needed for development.
|
|
||||||
├── gitlint - configuration files for `gitlint`, a tool that enforces commit message guidelines to ensure consistency and quality in commit messages.
|
|
||||||
├── playground - experimental or temporary code, where developers can test new features or ideas without affecting the main codebase.
|
|
||||||
└── src - main source code directory, containing the core application code, libraries, and modules of the project.
|
|
||||||
```
|
|
||||||
|
|
||||||
## Credits ❤️
|
## Credits ❤️
|
||||||
|
|
||||||
### Stack
|
### Stack
|
||||||
|
|
||||||
Docs is built on top of [Django Rest Framework](https://www.django-rest-framework.org/), [Next.js](https://nextjs.org/), [BlockNote.js](https://www.blocknotejs.org/), [HocusPocus](https://tiptap.dev/docs/hocuspocus/introduction) and [Yjs](https://yjs.dev/). We thank the contributors of all these projects for their awesome work!
|
Docs is built on top of [Django Rest Framework](https://www.django-rest-framework.org/), [Next.js](https://nextjs.org/), [ProseMirror](https://prosemirror.net/), [BlockNote.js](https://www.blocknotejs.org/), [HocusPocus](https://tiptap.dev/docs/hocuspocus/introduction), and [Yjs](https://yjs.dev/). We thank the contributors of all these projects for their awesome work!
|
||||||
|
|
||||||
We are proud sponsors of [BlockNotejs](https://www.blocknotejs.org/) and [Yjs](https://yjs.dev/).
|
We are proud sponsors of [BlockNotejs](https://www.blocknotejs.org/) and [Yjs](https://yjs.dev/).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
### Gov ❤️ open source
|
### Gov ❤️ open source
|
||||||
Docs is the result of a joint effort led by the French 🇫🇷🥖 ([DINUM](https://www.numerique.gouv.fr/dinum/)) and German 🇩🇪🥨 governments ([ZenDiS](https://zendis.de/)).
|
|
||||||
|
|
||||||
We are always looking for new public partners (we are currently onboarding the Netherlands 🇳🇱🧀), feel free to [reach out](mailto:docs@numerique.gouv.fr) if you are interested in using or contributing to Docs.
|
Docs is the result of a joint initiative led by the French 🇫🇷 ([DINUM](https://www.numerique.gouv.fr/dinum/)) and German 🇩🇪 ([ZenDiS](https://zendis.de/)) governments.
|
||||||
|
|
||||||
|
We are always looking for new public partners (we are currently onboarding the Netherlands 🇳🇱); feel free to [contact us](mailto:docs@numerique.gouv.fr) if you are interested in using or contributing to Docs.
|
||||||
|
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<img src="/docs/assets/europe_opensource.png" width="50%"/>
|
<img src="/docs/assets/europe_opensource.png" width="50%"/ alt="Europe Opensource">
|
||||||
</p>
|
</p>
|
||||||
|
|||||||
@@ -16,6 +16,12 @@ the following command inside your docker container:
|
|||||||
|
|
||||||
## [Unreleased]
|
## [Unreleased]
|
||||||
|
|
||||||
|
## [4.6.0] - 2026-02-27
|
||||||
|
|
||||||
|
- ⚠️ Some settings have changed to offer more flexibility and consistency; overriding the favicon and logo is now done through the theme configuration.
|
||||||
|
https://github.com/suitenumerique/docs/blob/f24b047a7cc146411412bf759b5b5248a45c3d99/src/backend/impress/configuration/theme/default.json#L129-L161
|
||||||
|
|
||||||
|
|
||||||
## [4.0.0] - 2025-11-26
|
## [4.0.0] - 2025-11-26
|
||||||
|
|
||||||
- ⚠️ We updated `@gouvfr-lasuite/ui-kit` to `0.18.0`, so if you are customizing Docs with a css layer or with a custom template, you need to update your customization to follow the new design system structure.
|
- ⚠️ We updated `@gouvfr-lasuite/ui-kit` to `0.18.0`, so if you are customizing Docs with a css layer or with a custom template, you need to update your customization to follow the new design system structure.
|
||||||
@@ -62,5 +68,5 @@ service.
|
|||||||
|
|
||||||
- AI features are now limited to users who are authenticated. Before this release, even anonymous
|
- AI features are now limited to users who are authenticated. Before this release, even anonymous
|
||||||
users who gained editor access on a document via link reach used to get AI features.
|
users who gained editor access on a document via link reach used to get AI features.
|
||||||
IF you want anonymous users to keep access on AI features, you must now define the
|
If you want anonymous users to keep access to AI features, you must now set the
|
||||||
`AI_ALLOW_REACH_FROM` setting to "public".
|
`AI_ALLOW_REACH_FROM` setting to "public".
|
||||||
|
|||||||
@@ -1,6 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
# shellcheck source=bin/_config.sh
|
|
||||||
source "$(dirname "${BASH_SOURCE[0]}")/_config.sh"
|
|
||||||
|
|
||||||
_dc_run app-dev python -c 'from cryptography.fernet import Fernet;import sys; sys.stdout.write("\n" + Fernet.generate_key().decode() + "\n");'
|
|
||||||
13
bin/generate-oidc-store-refresh-token-key.sh
Executable file
@@ -0,0 +1,13 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
# Generate the secret OIDC_STORE_REFRESH_TOKEN_KEY and store it in common.local
|
||||||
|
|
||||||
|
set -eo pipefail
|
||||||
|
|
||||||
|
COMMON_LOCAL="env.d/development/common.local"
|
||||||
|
|
||||||
|
OIDC_STORE_REFRESH_TOKEN_KEY=$(openssl rand -base64 32)
|
||||||
|
|
||||||
|
echo "" >> "${COMMON_LOCAL}"
|
||||||
|
echo "OIDC_STORE_REFRESH_TOKEN_KEY=${OIDC_STORE_REFRESH_TOKEN_KEY}" >> "${COMMON_LOCAL}"
|
||||||
|
echo "✓ OIDC_STORE_REFRESH_TOKEN_KEY generated and stored in ${COMMON_LOCAL}"
|
||||||
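A usage sketch for the new helper; the Make target is the one added to the Makefile in this change set, and the file path comes from the script above:

```bash
make generate-secret-keys
# the script appends, so re-running it adds another entry
grep OIDC_STORE_REFRESH_TOKEN_KEY env.d/development/common.local
```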
@@ -47,6 +47,10 @@ server {
|
|||||||
try_files $uri @proxy_to_docs_backend;
|
try_files $uri @proxy_to_docs_backend;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
location /external_api {
|
||||||
|
try_files $uri @proxy_to_docs_backend;
|
||||||
|
}
|
||||||
|
|
||||||
location /static {
|
location /static {
|
||||||
try_files $uri @proxy_to_docs_backend;
|
try_files $uri @proxy_to_docs_backend;
|
||||||
}
|
}
|
||||||
|
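A hypothetical smoke test for the new route once the configuration is reloaded; `<docs-host>` is a placeholder for wherever this nginx front end is exposed:

```bash
curl -i http://<docs-host>/external_api/   # expect the request to be proxied to the Docs backend
```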
|||||||
39
docs/README.md
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
# Docs Documentation
|
||||||
|
|
||||||
|
Welcome to the official documentation for Docs.
|
||||||
|
|
||||||
|
This documentation is organized by topic and audience.
|
||||||
|
Use the sections below to quickly find what you are looking for.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table of Contents
|
||||||
|
|
||||||
|
- Getting started
|
||||||
|
- [System requirements](system-requirements.md)
|
||||||
|
- [Installation overview](installation/README.md)
|
||||||
|
- [Docker Compose deployment](installation/compose.md)
|
||||||
|
- [Docker Compose examples](examples/compose/)
|
||||||
|
- [Kubernetes deployment](installation/kubernetes.md)
|
||||||
|
- [Helm values examples](examples/helm/)
|
||||||
|
|
||||||
|
- Configuration
|
||||||
|
- [Environment variables](env.md)
|
||||||
|
- [Customization](customization.md)
|
||||||
|
- [Language configuration](languages-configuration.md)
|
||||||
|
- [Search configuration](search.md)
|
||||||
|
|
||||||
|
- Architecture & design
|
||||||
|
- [Architecture overview](architecture.md)
|
||||||
|
- [Architectural Decision Records (ADR)](adr/)
|
||||||
|
|
||||||
|
- Usage & operations
|
||||||
|
- [Public instances](instances.md)
|
||||||
|
- [Releases & upgrades](release.md)
|
||||||
|
- [Troubleshooting](troubleshoot.md)
|
||||||
|
|
||||||
|
- Project & product
|
||||||
|
- [Roadmap](roadmap.md)
|
||||||
|
|
||||||
|
- Assets
|
||||||
|
- [Branding & visuals](assets/)
|
||||||
60
docs/env.md
@@ -7,23 +7,29 @@ Here we describe all environment variables that can be set for the docs applicat
|
|||||||
These are the environment variables you can set for the `impress-backend` container.
|
These are the environment variables you can set for the `impress-backend` container.
|
||||||
|
|
||||||
| Option | Description | default |
|
| Option | Description | default |
|
||||||
|-------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------|
|
| ----------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------- |
|
||||||
| AI_ALLOW_REACH_FROM | Users must have at least this access level to use AI. Options are "public", "authenticated", "restricted" | authenticated |
|
| AI_ALLOW_REACH_FROM | Users must have at least this access level to use AI. Options are "public", "authenticated", "restricted" | authenticated |
|
||||||
| AI_API_KEY | AI key to be used for AI Base url | |
|
| AI_API_KEY | AI key to be used for AI Base url | |
|
||||||
| AI_BASE_URL | OpenAI compatible AI base url | |
|
| AI_BASE_URL | OpenAI compatible AI base url | |
|
||||||
|
| AI_BOT | Information to give to the frontend about the AI bot | { "name": "Docs AI", "color": "#8bc6ff" } |
|
||||||
| AI_FEATURE_ENABLED | Enable AI options | false |
|
| AI_FEATURE_ENABLED | Enable AI options | false |
|
||||||
|
| AI_FEATURE_BLOCKNOTE_ENABLED | Enable Blocknote AI options | false |
|
||||||
|
| AI_FEATURE_LEGACY_ENABLED | Enable legacy AI options | true |
|
||||||
| AI_MODEL | AI Model to use | |
|
| AI_MODEL | AI Model to use | |
|
||||||
|
| AI_VERCEL_SDK_VERSION | The Vercel AI SDK version used | 6 |
|
||||||
| ALLOW_LOGOUT_GET_METHOD | Allow get logout method | true |
|
| ALLOW_LOGOUT_GET_METHOD | Allow get logout method | true |
|
||||||
| API_USERS_LIST_LIMIT | Limit on API users | 5 |
|
| API_USERS_LIST_LIMIT | Limit on API users | 5 |
|
||||||
| API_USERS_LIST_THROTTLE_RATE_BURST | Throttle rate for api on burst | 30/minute |
|
| API_USERS_LIST_THROTTLE_RATE_BURST | Throttle rate for api on burst | 30/minute |
|
||||||
| API_USERS_LIST_THROTTLE_RATE_SUSTAINED | Throttle rate for api | 180/hour |
|
| API_USERS_LIST_THROTTLE_RATE_SUSTAINED | Throttle rate for api | 180/hour |
|
||||||
|
| API_USERS_SEARCH_QUERY_MIN_LENGTH | Minimum number of characters required to search for a user | 3 |
|
||||||
| AWS_S3_ACCESS_KEY_ID | Access id for s3 endpoint | |
|
| AWS_S3_ACCESS_KEY_ID | Access id for s3 endpoint | |
|
||||||
| AWS_S3_ENDPOINT_URL | S3 endpoint | |
|
| AWS_S3_ENDPOINT_URL | S3 endpoint | |
|
||||||
| AWS_S3_REGION_NAME | Region name for s3 endpoint | |
|
| AWS_S3_REGION_NAME | Region name for s3 endpoint | |
|
||||||
| AWS_S3_SECRET_ACCESS_KEY | Access key for s3 endpoint | |
|
| AWS_S3_SECRET_ACCESS_KEY | Access key for s3 endpoint | |
|
||||||
|
| AWS_S3_SIGNATURE_VERSION | S3 signature version (`s3v4` or `s3`) | s3v4 |
|
||||||
| AWS_STORAGE_BUCKET_NAME | Bucket name for s3 endpoint | impress-media-storage |
|
| AWS_STORAGE_BUCKET_NAME | Bucket name for s3 endpoint | impress-media-storage |
|
||||||
| CACHES_DEFAULT_TIMEOUT | Cache default timeout | 30 |
|
| CACHES_DEFAULT_TIMEOUT | Cache default timeout | 30 |
|
||||||
| CACHES_KEY_PREFIX | The prefix used for every cache key. | docs |
|
| CACHES_DEFAULT_KEY_PREFIX | The prefix used for every cache key. | docs |
|
||||||
| COLLABORATION_API_URL | Collaboration api host | |
|
| COLLABORATION_API_URL | Collaboration api host | |
|
||||||
| COLLABORATION_SERVER_SECRET | Collaboration api secret | |
|
| COLLABORATION_SERVER_SECRET | Collaboration api secret | |
|
||||||
| COLLABORATION_WS_NOT_CONNECTED_READY_ONLY | Users not connected to the collaboration server cannot edit | false |
|
| COLLABORATION_WS_NOT_CONNECTED_READY_ONLY | Users not connected to the collaboration server cannot edit | false |
|
||||||
@@ -33,13 +39,17 @@ These are the environment variables you can set for the `impress-backend` contai
|
|||||||
| CONVERSION_API_SECURE | Require secure conversion api | false |
|
| CONVERSION_API_SECURE | Require secure conversion api | false |
|
||||||
| CONVERSION_API_TIMEOUT | Conversion api timeout | 30 |
|
| CONVERSION_API_TIMEOUT | Conversion api timeout | 30 |
|
||||||
| CONVERSION_FILE_MAX_SIZE | The file max size allowed when uploaded to convert it | 20971520 (20MB) |
|
| CONVERSION_FILE_MAX_SIZE | The file max size allowed when uploaded to convert it | 20971520 (20MB) |
|
||||||
| CONVERSION_FILE_EXTENSIONS_ALLOWED | Extension list managed by the conversion service | [".docx", ".md"]
|
| CONVERSION_FILE_EXTENSIONS_ALLOWED | Extension list managed by the conversion service | [".docx", ".md"] |
|
||||||
| CRISP_WEBSITE_ID | Crisp website id for support | |
|
| CRISP_WEBSITE_ID | Crisp website id for support | |
|
||||||
| DB_ENGINE | Engine to use for database connections | django.db.backends.postgresql_psycopg2 |
|
| DB_ENGINE | Engine to use for database connections | django.db.backends.postgresql_psycopg2 |
|
||||||
| DB_HOST | Host of the database | localhost |
|
| DB_HOST | Host of the database | localhost |
|
||||||
| DB_NAME | Name of the database | impress |
|
| DB_NAME | Name of the database | impress |
|
||||||
| DB_PASSWORD | Password to authenticate with | pass |
|
| DB_PASSWORD | Password to authenticate with | pass |
|
||||||
| DB_PORT | Port of the database | 5432 |
|
| DB_PORT | Port of the database | 5432 |
|
||||||
|
| DB_PSYCOPG_POOL_ENABLED | Enable or not the psycopg pool configuration in the default database options | False |
|
||||||
|
| DB_PSYCOPG_POOL_MIN_SIZE | The psycopg min pool size | 4 |
|
||||||
|
| DB_PSYCOPG_POOL_MAX_SIZE | The psycopg max pool size | None |
|
||||||
|
| DB_PSYCOPG_POOL_TIMEOUT | The default maximum time in seconds that a client can wait to receive a connection from the pool | 3 |
|
||||||
| DB_USER | User to authenticate with | dinum |
|
| DB_USER | User to authenticate with | dinum |
|
||||||
| DJANGO_ALLOWED_HOSTS | Allowed hosts | [] |
|
| DJANGO_ALLOWED_HOSTS | Allowed hosts | [] |
|
||||||
| DJANGO_CELERY_BROKER_TRANSPORT_OPTIONS | Celery broker transport options | {} |
|
| DJANGO_CELERY_BROKER_TRANSPORT_OPTIONS | Celery broker transport options | {} |
|
||||||
@@ -56,22 +66,22 @@ These are the environment variables you can set for the `impress-backend` contai
|
|||||||
| DJANGO_EMAIL_HOST_USER | User to authenticate with on the email host | |
|
| DJANGO_EMAIL_HOST_USER | User to authenticate with on the email host | |
|
||||||
| DJANGO_EMAIL_LOGO_IMG | Logo for the email | |
|
| DJANGO_EMAIL_LOGO_IMG | Logo for the email | |
|
||||||
| DJANGO_EMAIL_PORT | Port used to connect to email host | |
|
| DJANGO_EMAIL_PORT | Port used to connect to email host | |
|
||||||
| DJANGO_EMAIL_URL_APP | Url used in the email to go to the app | |
|
| DJANGO_EMAIL_URL_APP | Url used in the email to go to the app | |
|
||||||
| DJANGO_EMAIL_USE_SSL | Use ssl for email host connection | false |
|
| DJANGO_EMAIL_USE_SSL | Use ssl for email host connection | false |
|
||||||
| DJANGO_EMAIL_USE_TLS | Use tls for email host connection | false |
|
| DJANGO_EMAIL_USE_TLS | Use tls for email host connection | false |
|
||||||
| DJANGO_SECRET_KEY | Secret key | |
|
| DJANGO_SECRET_KEY | Secret key | |
|
||||||
| DJANGO_SERVER_TO_SERVER_API_TOKENS | | [] |
|
| DJANGO_SERVER_TO_SERVER_API_TOKENS | | [] |
|
||||||
| DOCSPEC_API_URL | URL to endpoint of DocSpec conversion API | |
|
| DOCSPEC_API_URL | URL to endpoint of DocSpec conversion API | |
|
||||||
| DOCUMENT_IMAGE_MAX_SIZE | Maximum size of document in bytes | 10485760 |
|
| DOCUMENT_IMAGE_MAX_SIZE | Maximum size of document in bytes | 10485760 |
|
||||||
| FRONTEND_CSS_URL | To add an external CSS file to the app | |
|
| FRONTEND_CSS_URL | To add an external CSS file to the app | |
|
||||||
| FRONTEND_JS_URL | To add an external JS file to the app | |
|
| FRONTEND_JS_URL | To add an external JS file to the app | |
|
||||||
| FRONTEND_HOMEPAGE_FEATURE_ENABLED | Frontend feature flag to display the homepage | false |
|
| FRONTEND_HOMEPAGE_FEATURE_ENABLED | Frontend feature flag to display the homepage | false |
|
||||||
| FRONTEND_THEME | Frontend theme to use | |
|
| FRONTEND_THEME | Frontend theme to use | |
|
||||||
| LANGUAGE_CODE | Default language | en-us |
|
| LANGUAGE_CODE | Default language | en-us |
|
||||||
| LANGFUSE_SECRET_KEY | The Langfuse secret key used by the sdk | None |
|
| LANGFUSE_SECRET_KEY | The Langfuse secret key used by the sdk | None |
|
||||||
| LANGFUSE_PUBLIC_KEY | The Langfuse public key used by the sdk | None |
|
| LANGFUSE_PUBLIC_KEY | The Langfuse public key used by the sdk | None |
|
||||||
| LANGFUSE_BASE_URL | The Langfuse base url used by the sdk | None |
|
| LANGFUSE_BASE_URL | The Langfuse base url used by the sdk | None |
|
||||||
| LASUITE_MARKETING_BACKEND | Backend used when SIGNUP_NEW_USER_TO_MARKETING_EMAIL is True. See https://github.com/suitenumerique/django-lasuite/blob/main/documentation/how-to-use-marketing-backend.md | lasuite.marketing.backends.dummy.DummyBackend |
|
| LASUITE_MARKETING_BACKEND | Backend used when SIGNUP_NEW_USER_TO_MARKETING_EMAIL is True. See https://github.com/suitenumerique/django-lasuite/blob/main/documentation/how-to-use-marketing-backend.md | lasuite.marketing.backends.dummy.DummyBackend |
|
||||||
| LASUITE_MARKETING_PARAMETERS | The parameters to configure LASUITE_MARKETING_BACKEND. See https://github.com/suitenumerique/django-lasuite/blob/main/documentation/how-to-use-marketing-backend.md | {} |
|
| LASUITE_MARKETING_PARAMETERS | The parameters to configure LASUITE_MARKETING_BACKEND. See https://github.com/suitenumerique/django-lasuite/blob/main/documentation/how-to-use-marketing-backend.md | {} |
|
||||||
| LOGGING_LEVEL_LOGGERS_APP | Application logging level. options are "DEBUG", "INFO", "WARN", "ERROR", "CRITICAL" | INFO |
|
| LOGGING_LEVEL_LOGGERS_APP | Application logging level. options are "DEBUG", "INFO", "WARN", "ERROR", "CRITICAL" | INFO |
|
||||||
| LOGGING_LEVEL_LOGGERS_ROOT | Default logging level. options are "DEBUG", "INFO", "WARN", "ERROR", "CRITICAL" | INFO |
|
| LOGGING_LEVEL_LOGGERS_ROOT | Default logging level. options are "DEBUG", "INFO", "WARN", "ERROR", "CRITICAL" | INFO |
|
||||||
@@ -98,6 +108,9 @@ These are the environment variables you can set for the `impress-backend` contai
|
|||||||
| OIDC_RP_SCOPES | Scopes requested for OIDC | openid email |
|
| OIDC_RP_SCOPES | Scopes requested for OIDC | openid email |
|
||||||
| OIDC_RP_SIGN_ALGO | Verification algorithm used for OIDC tokens | RS256 |
|
| OIDC_RP_SIGN_ALGO | Verification algorithm used for OIDC tokens | RS256 |
|
||||||
| OIDC_STORE_ID_TOKEN | Store OIDC token | true |
|
| OIDC_STORE_ID_TOKEN | Store OIDC token | true |
|
||||||
|
| OIDC_STORE_ACCESS_TOKEN | If True stores OIDC access token in session. | false |
|
||||||
|
| OIDC_STORE_REFRESH_TOKEN | If True stores OIDC refresh token in session. | false |
|
||||||
|
| OIDC_STORE_REFRESH_TOKEN_KEY | Key to encrypt refresh token stored in session, must be a valid Fernet key | |
|
||||||
| OIDC_USERINFO_FULLNAME_FIELDS | OIDC token claims to create full name | ["first_name", "last_name"] |
|
| OIDC_USERINFO_FULLNAME_FIELDS | OIDC token claims to create full name | ["first_name", "last_name"] |
|
||||||
| OIDC_USERINFO_SHORTNAME_FIELD | OIDC token claims to create shortname | first_name |
|
| OIDC_USERINFO_SHORTNAME_FIELD | OIDC token claims to create shortname | first_name |
|
||||||
| OIDC_USE_NONCE | Use nonce for OIDC | true |
|
| OIDC_USE_NONCE | Use nonce for OIDC | true |
|
||||||
@@ -107,8 +120,9 @@ These are the environment variables you can set for the `impress-backend` contai
|
|||||||
| SEARCH_INDEXER_CLASS | Class of the backend for document indexation & search | |
|
| SEARCH_INDEXER_CLASS | Class of the backend for document indexation & search | |
|
||||||
| SEARCH_INDEXER_COUNTDOWN | Minimum debounce delay of indexation jobs (in seconds) | 1 |
|
| SEARCH_INDEXER_COUNTDOWN | Minimum debounce delay of indexation jobs (in seconds) | 1 |
|
||||||
| SEARCH_INDEXER_QUERY_LIMIT | Maximum number of results expected from search endpoint | 50 |
|
| SEARCH_INDEXER_QUERY_LIMIT | Maximum number of results expected from search endpoint | 50 |
|
||||||
| SEARCH_INDEXER_SECRET | Token for indexation queries | |
|
| SEARCH_URL | Find application endpoint for search queries | |
|
||||||
| SEARCH_INDEXER_URL | Find application endpoint for indexation | |
|
| SEARCH_INDEXER_SECRET | Token required for indexation queries | |
|
||||||
|
| INDEXING_URL | Find application endpoint for indexation | |
|
||||||
| SENTRY_DSN | Sentry host | |
|
| SENTRY_DSN | Sentry host | |
|
||||||
| SESSION_COOKIE_AGE | Duration of the session cookie, in seconds | 60*60*12 |
|
| SESSION_COOKIE_AGE | Duration of the session cookie, in seconds | 60*60*12 |
|
||||||
| SIGNUP_NEW_USER_TO_MARKETING_EMAIL | Register new user to the marketing onboarding. If True, see env LASUITE_MARKETING_* system | False |
|
| SIGNUP_NEW_USER_TO_MARKETING_EMAIL | Register new user to the marketing onboarding. If True, see env LASUITE_MARKETING_* system | False |
|
||||||
@@ -118,10 +132,12 @@ These are the environment variables you can set for the `impress-backend` contai
|
|||||||
| THEME_CUSTOMIZATION_FILE_PATH | Full path to the file customizing the theme. An example is provided in src/backend/impress/configuration/theme/default.json | BASE_DIR/impress/configuration/theme/default.json |
|
| THEME_CUSTOMIZATION_FILE_PATH | Full path to the file customizing the theme. An example is provided in src/backend/impress/configuration/theme/default.json | BASE_DIR/impress/configuration/theme/default.json |
|
||||||
| TRASHBIN_CUTOFF_DAYS | Trashbin cutoff, in days | 30 |
|
| TRASHBIN_CUTOFF_DAYS | Trashbin cutoff, in days | 30 |
|
||||||
| USER_OIDC_ESSENTIAL_CLAIMS | Essential claims in OIDC token | [] |
|
| USER_OIDC_ESSENTIAL_CLAIMS | Essential claims in OIDC token | [] |
|
||||||
|
| USER_ONBOARDING_DOCUMENTS | A list of document IDs for which a read-only access will be created for new users | [] |
|
||||||
|
| USER_ONBOARDING_SANDBOX_DOCUMENT | ID of a template sandbox document that will be duplicated for new users | |
|
||||||
|
| USER_RECONCILIATION_FORM_URL | URL of a third-party form for user reconciliation requests | |
|
||||||
| Y_PROVIDER_API_BASE_URL | Y Provider url | |
|
| Y_PROVIDER_API_BASE_URL | Y Provider url | |
|
||||||
| Y_PROVIDER_API_KEY | Y provider API key | |
|
| Y_PROVIDER_API_KEY | Y provider API key | |
|
||||||
|
|
||||||
|
|
||||||
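For the `DB_PSYCOPG_POOL_*` variables listed above, here is a minimal sketch of how they could map onto Django's psycopg connection-pool options (Django 5.1+ with the psycopg 3 driver). The variable names and defaults come from the table; the mapping itself is only an illustration, the actual settings module may wire them differently.

```python
# Sketch only: possible mapping of the DB_* and DB_PSYCOPG_POOL_* variables
# to a Django DATABASES setting. Connection pooling via the "pool" option
# requires Django 5.1+ and the psycopg 3 driver.
import os

DATABASES = {
    "default": {
        "ENGINE": os.environ.get("DB_ENGINE", "django.db.backends.postgresql_psycopg2"),
        "NAME": os.environ.get("DB_NAME", "impress"),
        "USER": os.environ.get("DB_USER", "dinum"),
        "PASSWORD": os.environ.get("DB_PASSWORD", "pass"),
        "HOST": os.environ.get("DB_HOST", "localhost"),
        "PORT": os.environ.get("DB_PORT", "5432"),
    }
}

if os.environ.get("DB_PSYCOPG_POOL_ENABLED", "False").lower() == "true":
    max_size = os.environ.get("DB_PSYCOPG_POOL_MAX_SIZE")
    DATABASES["default"]["OPTIONS"] = {
        "pool": {
            "min_size": int(os.environ.get("DB_PSYCOPG_POOL_MIN_SIZE", "4")),
            "max_size": int(max_size) if max_size else None,
            "timeout": int(os.environ.get("DB_PSYCOPG_POOL_TIMEOUT", "3")),
        }
    }
```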
## impress-frontend image
|
## impress-frontend image
|
||||||
|
|
||||||
These are the environment variables you can set to build the `impress-frontend` image.
|
These are the environment variables you can set to build the `impress-frontend` image.
|
||||||
@@ -132,31 +148,31 @@ If you want to build the Docker image, this variable is used as an argument in t
|
|||||||
|
|
||||||
Example:
|
Example:
|
||||||
|
|
||||||
```
|
```bash
|
||||||
docker build -f src/frontend/Dockerfile --target frontend-production --build-arg PUBLISH_AS_MIT=false -t docs-frontend:latest .
|
docker build -f src/frontend/Dockerfile --target frontend-production --build-arg PUBLISH_AS_MIT=false -t docs-frontend:latest .
|
||||||
```
|
```
|
||||||
|
|
||||||
If you want to build the front-end application using the yarn build command, you can edit the file `src/frontend/apps/impress/.env` and set the `NODE_ENV=production` environment variable there. Alternatively, you can use the listed environment variables with the prefix `NEXT_PUBLIC_` (for example, `NEXT_PUBLIC_PUBLISH_AS_MIT=false`).
|
If you want to build the front-end application using the yarn build command, you can edit the file `src/frontend/apps/impress/.env` and set the `NODE_ENV=production` environment variable there. Alternatively, you can use the listed environment variables with the prefix `NEXT_PUBLIC_` (for example, `NEXT_PUBLIC_PUBLISH_AS_MIT=false`).
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
|
|
||||||
```
|
```bash
|
||||||
cd src/frontend/apps/impress
|
cd src/frontend/apps/impress
|
||||||
NODE_ENV=production NEXT_PUBLIC_PUBLISH_AS_MIT=false yarn build
|
NODE_ENV=production NEXT_PUBLIC_PUBLISH_AS_MIT=false yarn build
|
||||||
```
|
```
|
||||||
|
|
||||||
| Option | Description | default |
|
| Option | Description | default |
|
||||||
| ----------------------------------------------- | --------------------------------------------------------------------------------------------- | ------------------------------------------------------- |
|
| -------------- | ---------------------------------------------------------------------------------- | ------- |
|
||||||
| API_ORIGIN | backend domain - it uses the current domain if not initialized | |
|
| API_ORIGIN | backend domain - it uses the current domain if not initialized | |
|
||||||
| SW_DEACTIVATED | To not install the service worker | |
|
| SW_DEACTIVATED | To not install the service worker | |
|
||||||
| PUBLISH_AS_MIT | Removes packages whose licences are incompatible with the MIT licence (see below) | true |
|
| PUBLISH_AS_MIT | Removes packages whose licences are incompatible with the MIT licence (see below) | true |
|
||||||
|
|
||||||
Packages with licences incompatible with the MIT licence:
|
Packages with licences incompatible with the MIT licence:
|
||||||
* `xl-docx-exporter`: [GPL](https://github.com/TypeCellOS/BlockNote/blob/main/packages/xl-docx-exporter/LICENSE),
|
|
||||||
* `xl-pdf-exporter`: [GPL](https://github.com/TypeCellOS/BlockNote/blob/main/packages/xl-pdf-exporter/LICENSE),
|
* `xl-docx-exporter`: [GPL](https://github.com/TypeCellOS/BlockNote/blob/main/packages/xl-docx-exporter/LICENSE),
|
||||||
* `xl-multi-column`: [GPL](https://github.com/TypeCellOS/BlockNote/blob/main/packages/xl-multi-column/LICENSE).
|
* `xl-pdf-exporter`: [GPL](https://github.com/TypeCellOS/BlockNote/blob/main/packages/xl-pdf-exporter/LICENSE),
|
||||||
|
* `xl-multi-column`: [GPL](https://github.com/TypeCellOS/BlockNote/blob/main/packages/xl-multi-column/LICENSE).
|
||||||
|
|
||||||
In `.env.development`, `PUBLISH_AS_MIT` is set to `false`, allowing developers to test Docs with all its features.
|
In `.env.development`, `PUBLISH_AS_MIT` is set to `false`, allowing developers to test Docs with all its features.
|
||||||
|
|
||||||
⚠️ If you run Docs in production with `PUBLISH_AS_MIT` set to `false`, make sure you fulfill your BlockNote licensing or [subscription](https://www.blocknotejs.org/about#partner-with-us) obligations.
|
⚠️ If you run Docs in production with `PUBLISH_AS_MIT` set to `false`, make sure you fulfill your BlockNote licensing or [subscription](https://www.blocknotejs.org/about#partner-with-us) obligations.
|
||||||
|
|
||||||
|
|||||||
@@ -67,6 +67,7 @@ backend:
|
|||||||
AWS_S3_SECRET_ACCESS_KEY: password
|
AWS_S3_SECRET_ACCESS_KEY: password
|
||||||
AWS_STORAGE_BUCKET_NAME: docs-media-storage
|
AWS_STORAGE_BUCKET_NAME: docs-media-storage
|
||||||
STORAGES_STATICFILES_BACKEND: django.contrib.staticfiles.storage.StaticFilesStorage
|
STORAGES_STATICFILES_BACKEND: django.contrib.staticfiles.storage.StaticFilesStorage
|
||||||
|
USER_RECONCILIATION_FORM_URL: https://docs.127.0.0.1.nip.io
|
||||||
Y_PROVIDER_API_BASE_URL: http://impress-y-provider:443/api/
|
Y_PROVIDER_API_BASE_URL: http://impress-y-provider:443/api/
|
||||||
Y_PROVIDER_API_KEY: my-secret
|
Y_PROVIDER_API_KEY: my-secret
|
||||||
CACHES_KEY_PREFIX: "{{ now | unixEpoch }}"
|
CACHES_KEY_PREFIX: "{{ now | unixEpoch }}"
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ Please follow the instructions [here](/docs/installation/compose.md).
|
|||||||
⚠️ Please keep in mind that we do not use it ourselves in production. Let us know in the issues if you run into troubles, we'll try to help.
|
⚠️ Please keep in mind that we do not use it ourselves in production. Let us know in the issues if you run into troubles, we'll try to help.
|
||||||
|
|
||||||
## Other ways to install Docs
|
## Other ways to install Docs
|
||||||
Community members have contributed several other ways to install Docs. While we owe them a big thanks 🙏, please keep in mind we (Docs maintainers) can't provide support on these installation methods as we don't use them ourselves and there are two many options out there for us to keep track of. Of course you can contact the contributors and the broader community for assistance.
|
Community members have contributed several other ways to install Docs. While we owe them a big thanks 🙏, please keep in mind we (Docs maintainers) can't provide support on these installation methods as we don't use them ourselves and there are too many options out there for us to keep track of. Of course you can contact the contributors and the broader community for assistance.
|
||||||
|
|
||||||
Here is the list of other methods in alphabetical order:
|
Here is the list of other methods in alphabetical order:
|
||||||
- Coop-Cloud: [code](https://git.coopcloud.tech/coop-cloud/lasuite-docs)
|
- Coop-Cloud: [code](https://git.coopcloud.tech/coop-cloud/lasuite-docs)
|
||||||
|
|||||||
@@ -134,10 +134,12 @@ DJANGO_EMAIL_URL_APP=<url used in email templates to go to the app> # e.g. "http
|
|||||||
|
|
||||||
Built-in AI actions let users generate, summarize, translate, and correct content.
|
Built-in AI actions let users generate, summarize, translate, and correct content.
|
||||||
|
|
||||||
AI is disabled by default. To enable it, the following environment variables must be set in in `env.d/backend`:
|
AI is disabled by default. To enable it, the following environment variables must be set in `env.d/backend`:
|
||||||
|
|
||||||
```env
|
```env
|
||||||
AI_FEATURE_ENABLED=true # is false by default
|
AI_FEATURE_ENABLED=true # is false by default
|
||||||
|
AI_FEATURE_BLOCKNOTE_ENABLED=true # is false by default
|
||||||
|
AI_FEATURE_LEGACY_ENABLED=true # is true by default, AI_FEATURE_ENABLED must be set to true to enable it
|
||||||
AI_BASE_URL=https://openaiendpoint.com
|
AI_BASE_URL=https://openaiendpoint.com
|
||||||
AI_API_KEY=<API key>
|
AI_API_KEY=<API key>
|
||||||
AI_MODEL=<model used> e.g. llama
|
AI_MODEL=<model used> e.g. llama
|
||||||
@@ -150,7 +152,7 @@ You can [customize your Docs instance](../theming.md) with your own theme and cu
|
|||||||
The following environment variables must be set in `env.d/backend`:
|
The following environment variables must be set in `env.d/backend`:
|
||||||
|
|
||||||
```env
|
```env
|
||||||
FRONTEND_THEME=default # name of your theme built with cuningham
|
FRONTEND_THEME=default # name of your theme built with Cunningham
|
||||||
FRONTEND_CSS_URL=https://storage.yourdomain.tld/themes/custom.css # custom css
|
FRONTEND_CSS_URL=https://storage.yourdomain.tld/themes/custom.css # custom css
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -204,7 +206,7 @@ Replace `<admin email>` with the email of your admin user and generate a secure
|
|||||||
|
|
||||||
Your docs instance is now available on the domain you defined, https://docs.yourdomain.tld.
|
Your docs instance is now available on the domain you defined, https://docs.yourdomain.tld.
|
||||||
|
|
||||||
THe admin interface is available on https://docs.yourdomain.tld/admin with the admin user you just created.
|
The admin interface is available on https://docs.yourdomain.tld/admin with the admin user you just created.
|
||||||
|
|
||||||
## How to upgrade your Docs application
|
## How to upgrade your Docs application
|
||||||
|
|
||||||
|
|||||||
@@ -250,4 +250,4 @@ minio-dev-backend-minio-api <none> docs-minio.127.0.0.1.nip.io
|
|||||||
minio-dev-backend-minio-console <none> docs-minio-console.127.0.0.1.nip.io localhost 80, 443 8m48s
|
minio-dev-backend-minio-console <none> docs-minio-console.127.0.0.1.nip.io localhost 80, 443 8m48s
|
||||||
```
|
```
|
||||||
|
|
||||||
You can use Docs at https://docs.127.0.0.1.nip.io. The provisionning user in keycloak is docs/docs.
|
You can use Docs at https://docs.127.0.0.1.nip.io. The provisioning user in keycloak is docs/docs.
|
||||||
|
|||||||
77
docs/instances.md
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
# 🌍 Public Docs Instances
|
||||||
|
|
||||||
|
This page lists known public instances of **Docs**.
|
||||||
|
|
||||||
|
These instances are operated by different organizations and may have different access policies.
|
||||||
|
If you run a public instance and would like it listed here, feel free to open a pull request.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 🏛️ Public Organizations
|
||||||
|
|
||||||
|
### docs.numerique.gouv.fr
|
||||||
|
|
||||||
|
**Organization:** DINUM
|
||||||
|
**Audience:** French public agents working for central administration and extended public sphere
|
||||||
|
**Access:** ProConnect account required
|
||||||
|
<https://docs.numerique.gouv.fr/>
|
||||||
|
|
||||||
|
### docs.suite.anct.gouv.fr
|
||||||
|
|
||||||
|
**Organization:** ANCT
|
||||||
|
**Audience:** French public agents working for territorial administration and extended public sphere
|
||||||
|
**Access:** ProConnect account required
|
||||||
|
<https://docs.suite.anct.gouv.fr/>
|
||||||
|
|
||||||
|
### notes.demo.opendesk.eu
|
||||||
|
|
||||||
|
**Organization:** ZenDiS
|
||||||
|
**Type:** OpenDesk demo instance
|
||||||
|
**Access:** Request credentials
|
||||||
|
<https://notes.demo.opendesk.eu/>
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 🏢 Private Sector
|
||||||
|
|
||||||
|
### docs.demo.mosacloud.eu
|
||||||
|
|
||||||
|
**Organization:** mosa.cloud
|
||||||
|
**Type:** Demo instance
|
||||||
|
<https://docs.demo.mosacloud.eu/>
|
||||||
|
|
||||||
|
### notes.liiib.re
|
||||||
|
|
||||||
|
**Organization:** lasuite.coop
|
||||||
|
**Access:** Public demo
|
||||||
|
**Notes:** Content and accounts reset monthly
|
||||||
|
<https://notes.liiib.re/>
|
||||||
|
|
||||||
|
### notes.lasuite.coop
|
||||||
|
|
||||||
|
**Organization:** lasuite.coop
|
||||||
|
**Access:** Public
|
||||||
|
<https://notes.lasuite.coop/>
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 🤝 NGOs
|
||||||
|
|
||||||
|
### docs.federated.nexus
|
||||||
|
|
||||||
|
**Organization:** federated.nexus
|
||||||
|
**Access:** Public with account registration
|
||||||
|
<https://docs.federated.nexus/>
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## ➕ Add your instance
|
||||||
|
|
||||||
|
To add your instance:
|
||||||
|
|
||||||
|
1. Fork the repository
|
||||||
|
2. Edit `docs/instances.md`
|
||||||
|
3. Add your instance following the existing format
|
||||||
|
4. Open a pull request
|
||||||
|
|
||||||
|
Thank you for helping grow the Docs ecosystem ❤️
|
||||||
106
docs/resource_server.md
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
# Use Docs as a Resource Server
|
||||||
|
|
||||||
|
Docs can act as a resource server, which means an external app can perform operations on it through a dedicated API.
|
||||||
|
|
||||||
|
> **Note:** This feature might be subject to future evolutions. The API endpoints, configuration options, and behavior may change in future versions.
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
To activate the resource server on Docs, you need to set the following environment variables:
|
||||||
|
|
||||||
|
```env
|
||||||
|
OIDC_RESOURCE_SERVER_ENABLED=True
|
||||||
|
OIDC_OP_URL=
|
||||||
|
OIDC_OP_INTROSPECTION_ENDPOINT=
|
||||||
|
OIDC_RS_CLIENT_ID=
|
||||||
|
OIDC_RS_CLIENT_SECRET=
|
||||||
|
OIDC_RS_AUDIENCE_CLAIM=
|
||||||
|
OIDC_RS_ALLOWED_AUDIENCES=
|
||||||
|
```
|
||||||
|
|
||||||
|
Docs implements the resource server using `django-lasuite`; see the [documentation](https://github.com/suitenumerique/django-lasuite/blob/main/documentation/how-to-use-oidc-resource-server-backend.md).
|
||||||
|
|
||||||
|
## Customise allowed routes
|
||||||
|
|
||||||
|
Configure the `EXTERNAL_API` setting to control which routes and actions are available in the external API. Set it via the `EXTERNAL_API` environment variable (as JSON) or in Django settings.
|
||||||
|
|
||||||
|
Default configuration:
|
||||||
|
|
||||||
|
```python
|
||||||
|
EXTERNAL_API = {
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "retrieve", "create", "children"],
|
||||||
|
},
|
||||||
|
"document_access": {
|
||||||
|
"enabled": False,
|
||||||
|
"actions": [],
|
||||||
|
},
|
||||||
|
"document_invitation": {
|
||||||
|
"enabled": False,
|
||||||
|
"actions": [],
|
||||||
|
},
|
||||||
|
"users": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["get_me"],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Endpoints:**
|
||||||
|
|
||||||
|
- `documents`: Controls `/external_api/v1.0/documents/`. Available actions: `list`, `retrieve`, `create`, `update`, `destroy`, `trashbin`, `children`, `restore`, `move`, `versions_list`, `versions_detail`, `favorite_detail`, `link_configuration`, `attachment_upload`, `media_auth`, `ai_transform`, `ai_translate`, `ai_proxy`. Always allowed actions: `favorite_list`, `duplicate`.
|
||||||
|
- `document_access`: Controls `/external_api/v1.0/documents/{id}/accesses/`. Available actions: `list`, `retrieve`, `create`, `update`, `partial_update`, `destroy`
|
||||||
|
- `document_invitation`: Controls `/external_api/v1.0/documents/{id}/invitations/`. Available actions: `list`, `retrieve`, `create`, `partial_update`, `destroy`
|
||||||
|
- `users`: Controls `/external_api/v1.0/users/`. Available actions: `get_me`.
|
||||||
|
|
||||||
|
Each endpoint has `enabled` (boolean) and `actions` (list of allowed actions). Only actions explicitly listed are accessible.
|
||||||
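For example, to also expose read-only access management, the setting could be overridden as follows. This is only a sketch: the endpoint and action names come from the default configuration above, but the chosen subset is illustrative. The same structure can be passed as JSON through the `EXTERNAL_API` environment variable.

```python
# Sketch of an EXTERNAL_API override; endpoint and action names are taken from
# the default configuration above, the chosen subset is illustrative only.
EXTERNAL_API = {
    "documents": {
        "enabled": True,
        "actions": ["list", "retrieve", "create", "children"],
    },
    "document_access": {
        "enabled": True,
        "actions": ["list", "retrieve"],  # read-only access management
    },
    "document_invitation": {
        "enabled": False,
        "actions": [],
    },
    "users": {
        "enabled": True,
        "actions": ["get_me"],
    },
}
```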
|
|
||||||
|
## Request Docs
|
||||||
|
|
||||||
|
To call Docs from an external application, you first need the basic `django-lasuite` setup described in [Using the OIDC Authentication Backend to request a resource server](https://github.com/suitenumerique/django-lasuite/blob/main/documentation/how-to-use-oidc-call-to-resource-server.md).
|
||||||
|
|
||||||
|
Then you can request the routes available at `/external_api/v1.0/*`. Here are some examples of what you can do.
|
||||||
|
|
||||||
|
### Create a document
|
||||||
|
|
||||||
|
Here is an example of a view that creates a document from a markdown file at the root level in Docs.
|
||||||
|
|
||||||
|
```python
from io import BytesIO

import requests
from django.conf import settings
from django.utils.decorators import method_decorator
from lasuite.oidc_login.decorators import refresh_oidc_access_token


@method_decorator(refresh_oidc_access_token)
def create_document_from_markdown(self, request):
    """
    Create a new document from a Markdown file at root level.
    """
    # Get the access token from the session
    access_token = request.session.get("oidc_access_token")

    # Build the Markdown file to upload
    file_content = b"# Test Document\n\nThis is a test."
    file = BytesIO(file_content)
    file.name = "readme.md"

    # Create the document by uploading the file as multipart form data,
    # authenticating against the Docs external API with the access token
    response = requests.post(
        f"{settings.DOCS_API}/documents/",
        files={"file": (file.name, file, "text/markdown")},
        headers={"Authorization": f"Bearer {access_token}"},
    )

    response.raise_for_status()
    data = response.json()
    return {"id": data["id"]}
```
|
||||||
|
|
||||||
|
### Get user information
|
||||||
|
|
||||||
|
In the same way, you can use the `/users/me/` endpoint to get information about the current user.
|
||||||
|
|
||||||
|
```python
# `access_token` is the OIDC access token obtained as in the previous example.
response = requests.get(
    f"{settings.DOCS_API}/users/me/",
    headers={"Authorization": f"Bearer {access_token}", "Content-Type": "application/json"},
)
```
|
||||||
@@ -1,8 +1,8 @@
|
|||||||
# Setup the Find search for Impress
|
# Setup Find search for Docs
|
||||||
|
|
||||||
This configuration will enable the fulltext search feature for Docs :
|
This configuration will enable Find searches:
|
||||||
- Each save on **core.Document** or **core.DocumentAccess** will trigger the indexer
|
- Each save on **core.Document** or **core.DocumentAccess** will trigger the indexing of the document into Find.
|
||||||
- The `api/v1.0/documents/search/` will work as a proxy with the Find API for fulltext search.
|
- The `api/v1.0/documents/search/` will be used as proxy for searching documents from Find indexes.
|
||||||
|
|
||||||
## Create an index service for Docs
|
## Create an index service for Docs
|
||||||
|
|
||||||
@@ -15,27 +15,38 @@ See [how-to-use-indexer.md](how-to-use-indexer.md) for details.
|
|||||||
|
|
||||||
## Configure settings of Docs
|
## Configure settings of Docs
|
||||||
|
|
||||||
Add those Django settings the Docs application to enable the feature.
|
Find uses service provider authentication for indexing and OIDC authentication for searching.
|
||||||
|
|
||||||
|
Add those Django settings to the Docs application to enable the feature.
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
SEARCH_INDEXER_CLASS="core.services.search_indexers.FindDocumentIndexer"
|
SEARCH_INDEXER_CLASS="core.services.search_indexers.FindDocumentIndexer"
|
||||||
|
|
||||||
SEARCH_INDEXER_COUNTDOWN=10 # Debounce delay in seconds for the indexer calls.
|
SEARCH_INDEXER_COUNTDOWN=10 # Debounce delay in seconds for the indexer calls.
|
||||||
|
SEARCH_INDEXER_QUERY_LIMIT=50 # Maximum number of results expected from the search endpoint
|
||||||
|
|
||||||
# The token from service "docs" of Find application (development).
|
INDEXING_URL="http://find:8000/api/v1.0/documents/index/"
|
||||||
|
SEARCH_URL="http://find:8000/api/v1.0/documents/search/"
|
||||||
|
|
||||||
|
# Service provider authentication
|
||||||
SEARCH_INDEXER_SECRET="find-api-key-for-docs-with-exactly-50-chars-length"
|
SEARCH_INDEXER_SECRET="find-api-key-for-docs-with-exactly-50-chars-length"
|
||||||
SEARCH_INDEXER_URL="http://find:8000/api/v1.0/documents/index/"
|
|
||||||
|
|
||||||
# Search endpoint. Uses the OIDC token for authentication
|
# OIDC authentication
|
||||||
SEARCH_INDEXER_QUERY_URL="http://find:8000/api/v1.0/documents/search/"
|
OIDC_STORE_ACCESS_TOKEN=True # Store the access token in the session
|
||||||
# Maximum number of results expected from the search endpoint
|
OIDC_STORE_REFRESH_TOKEN=True # Store the encrypted refresh token in the session
|
||||||
SEARCH_INDEXER_QUERY_LIMIT=50
|
OIDC_STORE_REFRESH_TOKEN_KEY="<your-32-byte-encryption-key==>"
|
||||||
```
|
```
|
||||||
|
|
||||||
We also need to enable the **OIDC Token** refresh or the authentication will fail quickly.
|
`OIDC_STORE_REFRESH_TOKEN_KEY` must be a valid Fernet key (32 url-safe base64-encoded bytes).
|
||||||
|
To create one, use the `bin/generate-oidc-store-refresh-token-key.sh` command.
|
||||||
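If the helper script is not at hand, a key in the expected format (32 url-safe base64-encoded bytes) can also be generated with the `cryptography` package; this minimal sketch should produce an equivalent value:

```python
# Minimal sketch: generate a Fernet key suitable for OIDC_STORE_REFRESH_TOKEN_KEY.
from cryptography.fernet import Fernet

print(Fernet.generate_key().decode())
```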
|
|
||||||
```shell
|
## Feature flags
|
||||||
# Store OIDC tokens in the session
|
|
||||||
OIDC_STORE_ACCESS_TOKEN = True # Store the access token in the session
|
The Find search integration is controlled by two feature flags:
|
||||||
OIDC_STORE_REFRESH_TOKEN = True # Store the encrypted refresh token in the session
|
- `flag_find_hybrid_search`
|
||||||
OIDC_STORE_REFRESH_TOKEN_KEY = "your-32-byte-encryption-key==" # Must be a valid Fernet key (32 url-safe base64-encoded bytes)
|
- `flag_find_full_text_search`
|
||||||
```
|
|
||||||
|
If a user has both flags activated, the most advanced search is used (hybrid > full text > title).
|
||||||
|
A user with no flag will default to the basic title search.
|
||||||
|
|
||||||
|
Feature flags can be activated through the admin interface.
|
||||||
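Assuming these are django-waffle flags (the backend imports `waffle`), here is a minimal sketch of how a view could pick the search type from them; the helper name is hypothetical and only mirrors the precedence described above.

```python
# Sketch: check the two Find feature flags with django-waffle and pick the
# most advanced search type available (hybrid > full text > title).
import waffle


def selected_search_type(request):
    """Return the most advanced search type enabled for the current user."""
    if waffle.flag_is_active(request, "flag_find_hybrid_search"):
        return "hybrid"
    if waffle.flag_is_active(request, "flag_find_full_text_search"):
        return "full_text"
    return "title"  # no flag: basic title search
```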
|
|||||||
30
docs/user_account_reconciliation.md
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
# User account reconciliation
|
||||||
|
|
||||||
|
It is possible to merge user accounts based on their email addresses.
|
||||||
|
|
||||||
|
Docs does not have an internal process to collect such requests, but it allows importing a CSV from an external form
|
||||||
|
(e.g. made with Grist) in the Django admin panel (in "Core" > "User reconciliation CSV imports" > "Add user reconciliation")
|
||||||
|
|
||||||
|
## CSV file format
|
||||||
|
|
||||||
|
The CSV must contain the following mandatory columns:
|
||||||
|
|
||||||
|
- `active_email`: the email of the user that will remain active after the process.
|
||||||
|
- `inactive_email`: the email of the user(s) that will be merged into the active user. It is possible to indicate several emails, so the user only has to make one request even if they have more than two accounts.
|
||||||
|
- `id`: a unique row id, so that entries already processed in a previous import are ignored.
|
||||||
|
|
||||||
|
The following columns are optional: `active_email_checked` and `inactive_email_checked` (both must contain `0` (False) or `1` (True), and both default to False).
|
||||||
|
If present, they indicate that the source form has a way to validate that the user making the request actually controls the email addresses, skipping the need to send confirmation emails (see below).
|
||||||
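For illustration, a valid import file with one row could be produced like this (a sketch: the email addresses are placeholders, and the optional columns are included explicitly):

```python
# Sketch: write a reconciliation CSV with the mandatory columns (id,
# active_email, inactive_email) plus the optional *_checked columns.
# The email addresses below are placeholders.
import csv

rows = [
    {
        "id": "1",
        "active_email": "alice@example.org",
        "inactive_email": "alice.old@example.org",
        "active_email_checked": "1",
        "inactive_email_checked": "1",
    },
]

with open("user_reconciliation.csv", "w", newline="", encoding="utf-8") as csv_file:
    writer = csv.DictWriter(csv_file, fieldnames=list(rows[0]))
    writer.writeheader()
    writer.writerows(rows)
```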
|
|
||||||
|
Once the CSV file is processed, this will create entries in "Core" > "User reconciliations" and send verification emails to validate that the user making the request actually controls the email addresses (unless `active_email_checked` and `inactive_email_checked` were set to `1` in the CSV)
|
||||||
|
|
||||||
|
In "Core" > "User reconciliations", an admin can then select all rows they wish to process and check the action "Process selected user reconciliations". Only rows that have the status `ready` and for which both emails have been validated will be processed.
|
||||||
|
|
||||||
|
## Settings
|
||||||
|
|
||||||
|
If there is a problem with the reconciliation attempt (e.g., one of the addresses given by the user does not match an existing account), the email signaling the error can include a link back to the reconciliation form. This is configured through the following environment variable:
|
||||||
|
|
||||||
|
```env
|
||||||
|
USER_RECONCILIATION_FORM_URL=<url used in the email for reconciliations with errors, to allow a new request>
|
||||||
|
# e.g. "https://yourgristinstance.tld/xxxx/UserReconciliationForm"
|
||||||
|
```
|
||||||
@@ -48,19 +48,33 @@ LOGIN_REDIRECT_URL=http://localhost:3000
|
|||||||
LOGIN_REDIRECT_URL_FAILURE=http://localhost:3000
|
LOGIN_REDIRECT_URL_FAILURE=http://localhost:3000
|
||||||
LOGOUT_REDIRECT_URL=http://localhost:3000
|
LOGOUT_REDIRECT_URL=http://localhost:3000
|
||||||
|
|
||||||
OIDC_REDIRECT_ALLOWED_HOSTS=["http://localhost:8083", "http://localhost:3000"]
|
OIDC_REDIRECT_ALLOWED_HOSTS="localhost:8083,localhost:3000"
|
||||||
OIDC_AUTH_REQUEST_EXTRA_PARAMS={"acr_values": "eidas1"}
|
OIDC_AUTH_REQUEST_EXTRA_PARAMS={"acr_values": "eidas1"}
|
||||||
|
|
||||||
|
# Resource Server Backend
|
||||||
|
OIDC_OP_URL=http://localhost:8083/realms/docs
|
||||||
|
OIDC_OP_INTROSPECTION_ENDPOINT=http://nginx:8083/realms/docs/protocol/openid-connect/token/introspect
|
||||||
|
OIDC_RESOURCE_SERVER_ENABLED=False
|
||||||
|
OIDC_RS_CLIENT_ID=docs
|
||||||
|
OIDC_RS_CLIENT_SECRET=ThisIsAnExampleKeyForDevPurposeOnly
|
||||||
|
OIDC_RS_AUDIENCE_CLAIM="client_id" # The claim used to identify the audience
|
||||||
|
OIDC_RS_ALLOWED_AUDIENCES=""
|
||||||
|
|
||||||
# Store OIDC tokens in the session. Needed by search/ endpoint.
|
# Store OIDC tokens in the session. Needed by search/ endpoint.
|
||||||
# OIDC_STORE_ACCESS_TOKEN = True
|
# OIDC_STORE_ACCESS_TOKEN=True
|
||||||
# OIDC_STORE_REFRESH_TOKEN = True # Store the encrypted refresh token in the session.
|
# OIDC_STORE_REFRESH_TOKEN=True # Store the encrypted refresh token in the session.
|
||||||
|
|
||||||
# Must be a valid Fernet key (32 url-safe base64-encoded bytes)
|
# Must be a valid Fernet key (32 url-safe base64-encoded bytes)
|
||||||
# To create one, use the bin/fernetkey command.
|
# To create one, use the bin/fernetkey command.
|
||||||
# OIDC_STORE_REFRESH_TOKEN_KEY="your-32-byte-encryption-key=="
|
# OIDC_STORE_REFRESH_TOKEN_KEY="your-32-byte-encryption-key=="
|
||||||
|
|
||||||
|
# User reconciliation
|
||||||
|
USER_RECONCILIATION_FORM_URL=http://localhost:3000
|
||||||
|
|
||||||
# AI
|
# AI
|
||||||
AI_FEATURE_ENABLED=true
|
AI_FEATURE_ENABLED=true
|
||||||
|
AI_FEATURE_BLOCKNOTE_ENABLED=true
|
||||||
|
AI_FEATURE_LEGACY_ENABLED=true
|
||||||
AI_BASE_URL=https://openaiendpoint.com
|
AI_BASE_URL=https://openaiendpoint.com
|
||||||
AI_API_KEY=password
|
AI_API_KEY=password
|
||||||
AI_MODEL=llama
|
AI_MODEL=llama
|
||||||
@@ -82,8 +96,9 @@ DOCSPEC_API_URL=http://docspec:4000/conversion
|
|||||||
# Theme customization
|
# Theme customization
|
||||||
THEME_CUSTOMIZATION_CACHE_TIMEOUT=15
|
THEME_CUSTOMIZATION_CACHE_TIMEOUT=15
|
||||||
|
|
||||||
# Indexer (disabled)
|
# Indexer (disabled by default)
|
||||||
# SEARCH_INDEXER_CLASS="core.services.search_indexers.SearchIndexer"
|
# SEARCH_INDEXER_CLASS=core.services.search_indexers.FindDocumentIndexer
|
||||||
SEARCH_INDEXER_SECRET=find-api-key-for-docs-with-exactly-50-chars-length # Key generated by create_demo in Find app.
|
SEARCH_INDEXER_SECRET=find-api-key-for-docs-with-exactly-50-chars-length # Key generated by create_demo in Find app.
|
||||||
SEARCH_INDEXER_URL="http://find:8000/api/v1.0/documents/index/"
|
INDEXING_URL=http://find:8000/api/v1.0/documents/index/
|
||||||
SEARCH_INDEXER_QUERY_URL="http://find:8000/api/v1.0/documents/search/"
|
SEARCH_URL=http://find:8000/api/v1.0/documents/search/
|
||||||
|
SEARCH_INDEXER_QUERY_LIMIT=50
|
||||||
|
|||||||
7
env.d/development/common.test
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
# Test environment configuration for running tests without docker
|
||||||
|
# Base configuration is loaded from 'common' file
|
||||||
|
|
||||||
|
DJANGO_SETTINGS_MODULE=impress.settings
|
||||||
|
DJANGO_CONFIGURATION=Test
|
||||||
|
DB_PORT=15432
|
||||||
|
AWS_S3_ENDPOINT_URL=http://localhost:9000
|
||||||
@@ -8,4 +8,4 @@ DB_HOST=postgresql
|
|||||||
DB_NAME=impress
|
DB_NAME=impress
|
||||||
DB_USER=dinum
|
DB_USER=dinum
|
||||||
DB_PASSWORD=pass
|
DB_PASSWORD=pass
|
||||||
DB_PORT=5432
|
DB_PORT=5432
|
||||||
|
|||||||
@@ -53,8 +53,13 @@ LOGOUT_REDIRECT_URL=https://${DOCS_HOST}
|
|||||||
|
|
||||||
OIDC_REDIRECT_ALLOWED_HOSTS=["https://${DOCS_HOST}"]
|
OIDC_REDIRECT_ALLOWED_HOSTS=["https://${DOCS_HOST}"]
|
||||||
|
|
||||||
|
# User reconciliation
|
||||||
|
#USER_RECONCILIATION_FORM_URL=https://${DOCS_HOST}
|
||||||
|
|
||||||
# AI
|
# AI
|
||||||
#AI_FEATURE_ENABLED=true # is false by default
|
#AI_FEATURE_ENABLED=true # is false by default
|
||||||
|
#AI_FEATURE_BLOCKNOTE_ENABLED=true # is false by default
|
||||||
|
#AI_FEATURE_LEGACY_ENABLED=true # is true by default, AI_FEATURE_ENABLED must be set to true to enable it
|
||||||
#AI_BASE_URL=https://openaiendpoint.com
|
#AI_BASE_URL=https://openaiendpoint.com
|
||||||
#AI_API_KEY=<API key>
|
#AI_API_KEY=<API key>
|
||||||
#AI_MODEL=<model used> e.g. llama
|
#AI_MODEL=<model used> e.g. llama
|
||||||
|
|||||||
@@ -32,24 +32,41 @@
|
|||||||
"allowedVersions": "<6.0.0"
|
"allowedVersions": "<6.0.0"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|
||||||
"groupName": "allowed celery versions",
|
"groupName": "allowed celery versions",
|
||||||
"matchManagers": ["pep621"],
|
"matchManagers": ["pep621"],
|
||||||
"matchPackageNames": ["celery"],
|
"matchPackageNames": ["celery"],
|
||||||
"allowedVersions": "<5.6.0"
|
"allowedVersions": "<5.6.0"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"groupName": "allowed pydantic-ai-slim versions",
|
||||||
|
"matchManagers": ["pep621"],
|
||||||
|
"matchPackageNames": ["pydantic-ai-slim"],
|
||||||
|
"allowedVersions": "<1.59.0"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"groupName": "allowed langfuse versions",
|
||||||
|
"matchManagers": ["pep621"],
|
||||||
|
"matchPackageNames": ["langfuse"],
|
||||||
|
"allowedVersions": "<3.12.0"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"groupName": "allowed django-treebeard versions",
|
||||||
|
"matchManagers": ["pep621"],
|
||||||
|
"matchPackageNames": ["django-treebeard"],
|
||||||
|
"allowedVersions": "<5.0.0"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"enabled": false,
|
"enabled": false,
|
||||||
"groupName": "ignored js dependencies",
|
"groupName": "ignored js dependencies",
|
||||||
"matchManagers": ["npm"],
|
"matchManagers": ["npm"],
|
||||||
"matchPackageNames": [
|
"matchPackageNames": [
|
||||||
"@next/eslint-plugin-next",
|
"@react-pdf/renderer",
|
||||||
"docx",
|
|
||||||
"eslint-config-next",
|
|
||||||
"fetch-mock",
|
"fetch-mock",
|
||||||
"next",
|
|
||||||
"node",
|
"node",
|
||||||
"node-fetch",
|
"node-fetch",
|
||||||
|
"react-resizable-panels",
|
||||||
|
"stylelint",
|
||||||
|
"stylelint-config-standard",
|
||||||
"workbox-webpack-plugin"
|
"workbox-webpack-plugin"
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,12 +1,14 @@
|
|||||||
"""Admin classes and registrations for core app."""
|
"""Admin classes and registrations for core app."""
|
||||||
|
|
||||||
from django.contrib import admin
|
from django.contrib import admin, messages
|
||||||
from django.contrib.auth import admin as auth_admin
|
from django.contrib.auth import admin as auth_admin
|
||||||
|
from django.shortcuts import redirect
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
from treebeard.admin import TreeAdmin
|
from treebeard.admin import TreeAdmin
|
||||||
|
|
||||||
from . import models
|
from core import models
|
||||||
|
from core.tasks.user_reconciliation import user_reconciliation_csv_import_job
|
||||||
|
|
||||||
|
|
||||||
@admin.register(models.User)
|
@admin.register(models.User)
|
||||||
@@ -95,6 +97,44 @@ class UserAdmin(auth_admin.UserAdmin):
|
|||||||
search_fields = ("id", "sub", "admin_email", "email", "full_name")
|
search_fields = ("id", "sub", "admin_email", "email", "full_name")
|
||||||
|
|
||||||
|
|
||||||
|
@admin.register(models.UserReconciliationCsvImport)
|
||||||
|
class UserReconciliationCsvImportAdmin(admin.ModelAdmin):
|
||||||
|
"""Admin class for UserReconciliationCsvImport model."""
|
||||||
|
|
||||||
|
list_display = ("id", "__str__", "created_at", "status")
|
||||||
|
|
||||||
|
def save_model(self, request, obj, form, change):
|
||||||
|
"""Override save_model to trigger the import task on creation."""
|
||||||
|
super().save_model(request, obj, form, change)
|
||||||
|
|
||||||
|
if not change:
|
||||||
|
user_reconciliation_csv_import_job.delay(obj.pk)
|
||||||
|
messages.success(request, _("Import job created and queued."))
|
||||||
|
return redirect("..")
|
||||||
|
|
||||||
|
|
||||||
|
@admin.action(description=_("Process selected user reconciliations"))
|
||||||
|
def process_reconciliation(_modeladmin, _request, queryset):
|
||||||
|
"""
|
||||||
|
Admin action to process selected user reconciliations.
|
||||||
|
The action will process only entries that are ready and have both emails checked.
|
||||||
|
"""
|
||||||
|
processable_entries = queryset.filter(
|
||||||
|
status="ready", active_email_checked=True, inactive_email_checked=True
|
||||||
|
)
|
||||||
|
|
||||||
|
for entry in processable_entries:
|
||||||
|
entry.process_reconciliation_request()
|
||||||
|
|
||||||
|
|
||||||
|
@admin.register(models.UserReconciliation)
|
||||||
|
class UserReconciliationAdmin(admin.ModelAdmin):
|
||||||
|
"""Admin class for UserReconciliation model."""
|
||||||
|
|
||||||
|
list_display = ["id", "__str__", "created_at", "status"]
|
||||||
|
actions = [process_reconciliation]
|
||||||
|
|
||||||
|
|
||||||
class DocumentAccessInline(admin.TabularInline):
|
class DocumentAccessInline(admin.TabularInline):
|
||||||
"""Inline admin class for document accesses."""
|
"""Inline admin class for document accesses."""
|
||||||
|
|
||||||
|
|||||||
@@ -2,6 +2,7 @@
|
|||||||
|
|
||||||
import unicodedata
|
import unicodedata
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
import django_filters
|
import django_filters
|
||||||
@@ -46,10 +47,13 @@ class DocumentFilter(django_filters.FilterSet):
|
|||||||
title = AccentInsensitiveCharFilter(
|
title = AccentInsensitiveCharFilter(
|
||||||
field_name="title", lookup_expr="unaccent__icontains", label=_("Title")
|
field_name="title", lookup_expr="unaccent__icontains", label=_("Title")
|
||||||
)
|
)
|
||||||
|
q = AccentInsensitiveCharFilter(
|
||||||
|
field_name="title", lookup_expr="unaccent__icontains", label=_("Search")
|
||||||
|
)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = models.Document
|
model = models.Document
|
||||||
fields = ["title"]
|
fields = ["title", "q"]
|
||||||
|
|
||||||
|
|
||||||
class ListDocumentFilter(DocumentFilter):
|
class ListDocumentFilter(DocumentFilter):
|
||||||
@@ -69,7 +73,7 @@ class ListDocumentFilter(DocumentFilter):
|
|||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = models.Document
|
model = models.Document
|
||||||
fields = ["is_creator_me", "is_favorite", "title"]
|
fields = ["is_creator_me", "is_favorite", "title", "q"]
|
||||||
|
|
||||||
# pylint: disable=unused-argument
|
# pylint: disable=unused-argument
|
||||||
def filter_is_creator_me(self, queryset, name, value):
|
def filter_is_creator_me(self, queryset, name, value):
|
||||||
@@ -135,4 +139,6 @@ class UserSearchFilter(django_filters.FilterSet):
|
|||||||
Custom filter for searching users.
|
Custom filter for searching users.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
q = django_filters.CharFilter(min_length=5, max_length=254)
|
q = django_filters.CharFilter(
|
||||||
|
min_length=settings.API_USERS_SEARCH_QUERY_MIN_LENGTH, max_length=254
|
||||||
|
)
|
||||||
|
|||||||
@@ -32,8 +32,21 @@ class UserSerializer(serializers.ModelSerializer):
|
|||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = models.User
|
model = models.User
|
||||||
fields = ["id", "email", "full_name", "short_name", "language"]
|
fields = [
|
||||||
read_only_fields = ["id", "email", "full_name", "short_name"]
|
"id",
|
||||||
|
"email",
|
||||||
|
"full_name",
|
||||||
|
"short_name",
|
||||||
|
"language",
|
||||||
|
"is_first_connection",
|
||||||
|
]
|
||||||
|
read_only_fields = [
|
||||||
|
"id",
|
||||||
|
"email",
|
||||||
|
"full_name",
|
||||||
|
"short_name",
|
||||||
|
"is_first_connection",
|
||||||
|
]
|
||||||
|
|
||||||
def get_full_name(self, instance):
|
def get_full_name(self, instance):
|
||||||
"""Return the full name of the user."""
|
"""Return the full name of the user."""
|
||||||
@@ -225,8 +238,16 @@ class DocumentSerializer(ListDocumentSerializer):
|
|||||||
fields = super().get_fields()
|
fields = super().get_fields()
|
||||||
|
|
||||||
request = self.context.get("request")
|
request = self.context.get("request")
|
||||||
if request and request.method == "POST":
|
if request:
|
||||||
fields["id"].read_only = False
|
if request.method == "POST":
|
||||||
|
fields["id"].read_only = False
|
||||||
|
if (
|
||||||
|
serializers.BooleanField().to_internal_value(
|
||||||
|
request.query_params.get("without_content", False)
|
||||||
|
)
|
||||||
|
is True
|
||||||
|
):
|
||||||
|
del fields["content"]
|
||||||
|
|
||||||
return fields
|
return fields
|
||||||
|
|
||||||
@@ -279,6 +300,15 @@ class DocumentSerializer(ListDocumentSerializer):
|
|||||||
|
|
||||||
return file
|
return file
|
||||||
|
|
||||||
|
def update(self, instance, validated_data):
|
||||||
|
"""
|
||||||
|
When no data is sent on the update, skip making the update in the database and return
|
||||||
|
directly the instance unchanged.
|
||||||
|
"""
|
||||||
|
if not validated_data:
|
||||||
|
return instance # No data provided, skip the update
|
||||||
|
return super().update(instance, validated_data)
|
||||||
|
|
||||||
def save(self, **kwargs):
|
def save(self, **kwargs):
|
||||||
"""
|
"""
|
||||||
Process the content field to extract attachment keys and update the document's
|
Process the content field to extract attachment keys and update the document's
|
||||||
@@ -591,10 +621,13 @@ class LinkDocumentSerializer(serializers.ModelSerializer):
|
|||||||
class DocumentDuplicationSerializer(serializers.Serializer):
|
class DocumentDuplicationSerializer(serializers.Serializer):
|
||||||
"""
|
"""
|
||||||
Serializer for duplicating a document.
|
Serializer for duplicating a document.
|
||||||
Allows specifying whether to keep access permissions.
|
Allows specifying whether to keep access permissions,
|
||||||
|
and whether to duplicate descendant documents as well
|
||||||
|
(deep copy) or not (shallow copy).
|
||||||
"""
|
"""
|
||||||
|
|
||||||
with_accesses = serializers.BooleanField(default=False)
|
with_accesses = serializers.BooleanField(default=False)
|
||||||
|
with_descendants = serializers.BooleanField(default=False)
|
||||||
|
|
||||||
def create(self, validated_data):
|
def create(self, validated_data):
|
||||||
"""
|
"""
|
||||||
@@ -980,8 +1013,5 @@ class ThreadSerializer(serializers.ModelSerializer):
|
|||||||
class SearchDocumentSerializer(serializers.Serializer):
|
class SearchDocumentSerializer(serializers.Serializer):
|
||||||
"""Serializer for fulltext search requests through Find application"""
|
"""Serializer for fulltext search requests through Find application"""
|
||||||
|
|
||||||
q = serializers.CharField(required=True, allow_blank=False, trim_whitespace=True)
|
q = serializers.CharField(required=True, allow_blank=True, trim_whitespace=True)
|
||||||
page_size = serializers.IntegerField(
|
path = serializers.CharField(required=False, allow_blank=False)
|
||||||
required=False, min_value=1, max_value=50, default=20
|
|
||||||
)
|
|
||||||
page = serializers.IntegerField(required=False, min_value=1, default=1)
|
|
||||||
|
|||||||
@@ -6,8 +6,10 @@ from abc import ABC, abstractmethod
|
|||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.core.cache import cache
|
from django.core.cache import cache
|
||||||
from django.core.files.storage import default_storage
|
from django.core.files.storage import default_storage
|
||||||
|
from django.utils.decorators import method_decorator
|
||||||
|
|
||||||
import botocore
|
import botocore
|
||||||
|
from lasuite.oidc_login.decorators import refresh_oidc_access_token
|
||||||
from rest_framework.throttling import BaseThrottle
|
from rest_framework.throttling import BaseThrottle
|
||||||
|
|
||||||
|
|
||||||
@@ -91,6 +93,19 @@ def generate_s3_authorization_headers(key):
|
|||||||
return request
|
return request
|
||||||
|
|
||||||
|
|
||||||
|
def conditional_refresh_oidc_token(func):
|
||||||
|
"""
|
||||||
|
Conditionally apply refresh_oidc_access_token decorator.
|
||||||
|
|
||||||
|
The decorator is only applied if OIDC_STORE_REFRESH_TOKEN is True, meaning
|
||||||
|
we can actually refresh something. Broader settings checks are done in settings.py.
|
||||||
|
"""
|
||||||
|
if settings.OIDC_STORE_REFRESH_TOKEN:
|
||||||
|
return method_decorator(refresh_oidc_access_token)(func)
|
||||||
|
|
||||||
|
return func
|
||||||
|
|
||||||
|
|
||||||
class AIBaseRateThrottle(BaseThrottle, ABC):
|
class AIBaseRateThrottle(BaseThrottle, ABC):
|
||||||
"""Base throttle class for AI-related rate limiting with backoff."""
|
"""Base throttle class for AI-related rate limiting with backoff."""
|
||||||
|
|
||||||
|
|||||||
@@ -25,21 +25,24 @@ from django.db.models.functions import Greatest, Left, Length
|
|||||||
from django.http import Http404, StreamingHttpResponse
|
from django.http import Http404, StreamingHttpResponse
|
||||||
from django.urls import reverse
|
from django.urls import reverse
|
||||||
from django.utils import timezone
|
from django.utils import timezone
|
||||||
from django.utils.decorators import method_decorator
|
|
||||||
from django.utils.functional import cached_property
|
from django.utils.functional import cached_property
|
||||||
|
from django.utils.http import content_disposition_header
|
||||||
from django.utils.text import capfirst, slugify
|
from django.utils.text import capfirst, slugify
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
import rest_framework as drf
|
import rest_framework as drf
|
||||||
|
import waffle
|
||||||
from botocore.exceptions import ClientError
|
from botocore.exceptions import ClientError
|
||||||
from csp.constants import NONE
|
from csp.constants import NONE
|
||||||
from csp.decorators import csp_update
|
from csp.decorators import csp_update
|
||||||
from lasuite.malware_detection import malware_detection
|
from lasuite.malware_detection import malware_detection
|
||||||
from lasuite.oidc_login.decorators import refresh_oidc_access_token
|
from lasuite.tools.email import get_domain_from_email
|
||||||
|
from pydantic import ValidationError as PydanticValidationError
|
||||||
from rest_framework import filters, status, viewsets
|
from rest_framework import filters, status, viewsets
|
||||||
from rest_framework import response as drf_response
|
from rest_framework import response as drf_response
|
||||||
from rest_framework.permissions import AllowAny
|
from rest_framework.permissions import AllowAny
|
||||||
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
from core import authentication, choices, enums, models
|
from core import authentication, choices, enums, models
|
||||||
from core.api.filters import remove_accents
|
from core.api.filters import remove_accents
|
||||||
@@ -61,10 +64,19 @@ from core.services.search_indexers import (
|
|||||||
get_visited_document_ids_of,
|
get_visited_document_ids_of,
|
||||||
)
|
)
|
||||||
from core.tasks.mail import send_ask_for_access_mail
|
from core.tasks.mail import send_ask_for_access_mail
|
||||||
from core.utils import extract_attachments, filter_descendants
|
from core.utils import (
|
||||||
|
extract_attachments,
|
||||||
|
filter_descendants,
|
||||||
|
users_sharing_documents_with,
|
||||||
|
)
|
||||||
|
|
||||||
|
from ..enums import FeatureFlag, SearchType
|
||||||
from . import permissions, serializers, utils
|
from . import permissions, serializers, utils
|
||||||
from .filters import DocumentFilter, ListDocumentFilter, UserSearchFilter
|
from .filters import (
|
||||||
|
DocumentFilter,
|
||||||
|
ListDocumentFilter,
|
||||||
|
UserSearchFilter,
|
||||||
|
)
|
||||||
from .throttling import (
|
from .throttling import (
|
||||||
DocumentThrottle,
|
DocumentThrottle,
|
||||||
UserListThrottleBurst,
|
UserListThrottleBurst,
|
||||||
@@ -220,18 +232,80 @@ class UserViewSet(
|
|||||||
|
|
||||||
# Use trigram similarity for non-email-like queries
|
# Use trigram similarity for non-email-like queries
|
||||||
# For performance reasons we filter first by similarity, which relies on an
|
# For performance reasons we filter first by similarity, which relies on an
|
||||||
# index, then only calculate precise similarity scores for sorting purposes
|
# index, then only calculate precise similarity scores for sorting purposes.
|
||||||
|
#
|
||||||
|
# Additionally results are reordered to prefer users "closer" to the current
|
||||||
|
# user: users they recently shared documents with, then same email domain.
|
||||||
|
# To achieve that without complex SQL, we build a proximity score in Python
|
||||||
|
# and return the top N results.
|
||||||
|
# For security results, users that match neither of these proximity criteria
|
||||||
|
# are not returned at all, to prevent email enumeration.
|
||||||
|
current_user = self.request.user
|
||||||
|
shared_map = users_sharing_documents_with(current_user)
|
||||||
|
|
||||||
return (
|
user_email_domain = get_domain_from_email(current_user.email) or ""
|
||||||
|
|
||||||
|
candidates = list(
|
||||||
queryset.annotate(
|
queryset.annotate(
|
||||||
sim_email=TrigramSimilarity("email", query),
|
sim_email=TrigramSimilarity("email", query),
|
||||||
sim_name=TrigramSimilarity("full_name", query),
|
sim_name=TrigramSimilarity("full_name", query),
|
||||||
)
|
)
|
||||||
.annotate(similarity=Greatest("sim_email", "sim_name"))
|
.annotate(similarity=Greatest("sim_email", "sim_name"))
|
||||||
.filter(similarity__gt=0.2)
|
.filter(similarity__gt=0.2)
|
||||||
.order_by("-similarity")[: settings.API_USERS_LIST_LIMIT]
|
.order_by("-similarity")
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Keep only users that either share documents with the current user
|
||||||
|
# or have an email with the same domain as the current user.
|
||||||
|
filtered_candidates = []
|
||||||
|
for u in candidates:
|
||||||
|
candidate_domain = get_domain_from_email(u.email) or ""
|
||||||
|
if shared_map.get(u.id) or (
|
||||||
|
user_email_domain and candidate_domain == user_email_domain
|
||||||
|
):
|
||||||
|
filtered_candidates.append(u)
|
||||||
|
|
||||||
|
candidates = filtered_candidates
|
||||||
|
|
||||||
|
# Build ordering key for each candidate
|
||||||
|
def _sort_key(u):
|
||||||
|
# shared priority: most recent first
|
||||||
|
# Use shared_last_at timestamp numeric for secondary ordering when shared.
|
||||||
|
shared_last_at = shared_map.get(u.id)
|
||||||
|
if shared_last_at:
|
||||||
|
is_shared = 1
|
||||||
|
shared_score = int(shared_last_at.timestamp())
|
||||||
|
else:
|
||||||
|
is_shared = 0
|
||||||
|
shared_score = 0
|
||||||
|
|
||||||
|
# domain proximity
|
||||||
|
candidate_email_domain = get_domain_from_email(u.email) or ""
|
||||||
|
|
||||||
|
same_full_domain = (
|
||||||
|
1
|
||||||
|
if candidate_email_domain
|
||||||
|
and candidate_email_domain == user_email_domain
|
||||||
|
else 0
|
||||||
|
)
|
||||||
|
|
||||||
|
# similarity fallback
|
||||||
|
sim = getattr(u, "similarity", 0) or 0
|
||||||
|
|
||||||
|
return (
|
||||||
|
is_shared,
|
||||||
|
shared_score,
|
||||||
|
same_full_domain,
|
||||||
|
sim,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Sort candidates by the key descending and return top N as a queryset-like
|
||||||
|
# list. Keep return type consistent with previous behavior (QuerySet slice
|
||||||
|
# was returned) by returning a list of model instances.
|
||||||
|
candidates.sort(key=_sort_key, reverse=True)
|
||||||
|
|
||||||
|
return candidates[: settings.API_USERS_LIST_LIMIT]
|
||||||
|
|
||||||
@drf.decorators.action(
|
@drf.decorators.action(
|
||||||
detail=False,
|
detail=False,
|
||||||
methods=["get"],
|
methods=["get"],
|
||||||
@@ -248,6 +322,78 @@ class UserViewSet(
            self.serializer_class(request.user, context=context).data
        )

+    @drf.decorators.action(
+        detail=False,
+        methods=["post"],
+        url_path="onboarding-done",
+        permission_classes=[permissions.IsAuthenticated],
+    )
+    def onboarding_done(self, request):
+        """
+        Allows the frontend to mark the first connection as done for the current user,
+        e.g. after showing an onboarding message.
+        """
+        if request.user.is_first_connection:
+            request.user.is_first_connection = False
+            request.user.save(update_fields=["is_first_connection", "updated_at"])
+
+        return drf.response.Response(
+            {"detail": "Onboarding marked as done."}, status=status.HTTP_200_OK
+        )
+
+
+class ReconciliationConfirmView(APIView):
+    """API endpoint to confirm user reconciliation emails.
+
+    GET /user-reconciliations/{user_type}/{confirmation_id}/
+    Marks `active_email_checked` or `inactive_email_checked` to True.
+    """
+
+    permission_classes = [AllowAny]
+
+    def get(self, request, user_type, confirmation_id):
+        """
+        Check the confirmation ID and mark the corresponding email as checked.
+        """
+        try:
+            # validate UUID
+            uuid_obj = uuid.UUID(str(confirmation_id))
+        except ValueError:
+            return drf_response.Response(
+                {"detail": "Badly formatted confirmation id"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        if user_type not in ("active", "inactive"):
+            return drf_response.Response(
+                {"detail": "Invalid user_type"}, status=status.HTTP_400_BAD_REQUEST
+            )
+
+        lookup = (
+            {"active_email_confirmation_id": uuid_obj}
+            if user_type == "active"
+            else {"inactive_email_confirmation_id": uuid_obj}
+        )
+
+        try:
+            rec = models.UserReconciliation.objects.get(**lookup)
+        except models.UserReconciliation.DoesNotExist:
+            return drf_response.Response(
+                {"detail": "Reconciliation entry not found"},
+                status=status.HTTP_404_NOT_FOUND,
+            )
+
+        field_name = (
+            "active_email_checked"
+            if user_type == "active"
+            else "inactive_email_checked"
+        )
+        if not getattr(rec, field_name):
+            setattr(rec, field_name, True)
+            rec.save()
+
+        return drf_response.Response({"detail": "Confirmation received"})
+
+
class ResourceAccessViewsetMixin:
    """Mixin with methods common to all access viewsets."""
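The two endpoints added above are simple state toggles, so a client only needs a bare request for each. A minimal usage sketch with the `requests` library, assuming the viewsets are mounted under `/api/v1.0/` on a hypothetical host and that authentication is handled by the token shown (the base URL, prefix and confirmation id below are placeholders, not values from the diff):

    import requests

    API = "https://docs.example.com/api/v1.0"  # hypothetical base URL
    headers = {"Authorization": "Bearer <token>"}  # assumption: token auth is accepted

    # Mark onboarding as done for the current user (no payload required).
    resp = requests.post(f"{API}/users/onboarding-done/", headers=headers, timeout=10)
    print(resp.status_code, resp.json())  # expected: 200 {"detail": "Onboarding marked as done."}

    # Confirm a reconciliation email; user_type is "active" or "inactive" and the
    # confirmation id comes from the link sent by email.
    confirmation_id = "00000000-0000-0000-0000-000000000000"  # placeholder value
    resp = requests.get(
        f"https://docs.example.com/user-reconciliations/active/{confirmation_id}/",
        timeout=10,
    )
    print(resp.status_code, resp.json())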
@@ -309,36 +455,45 @@ class DocumentViewSet(

    ### Additional Actions:
    1. **Trashbin**: List soft deleted documents for a document owner
-       Example: GET /documents/{id}/trashbin/
+       Example: GET /documents/trashbin/

-   2. **Children**: List or create child documents.
+   2. **Restore**: Restore a soft deleted document.
+       Example: POST /documents/{id}/restore/
+
+   3. **Move**: Move a document to another parent document.
+       Example: POST /documents/{id}/move/
+
+   4. **Duplicate**: Duplicate a document.
+       Example: POST /documents/{id}/duplicate/
+
+   5. **Children**: List or create child documents.
        Example: GET, POST /documents/{id}/children/

-   3. **Versions List**: Retrieve version history of a document.
+   6. **Versions List**: Retrieve version history of a document.
        Example: GET /documents/{id}/versions/

-   4. **Version Detail**: Get or delete a specific document version.
+   7. **Version Detail**: Get or delete a specific document version.
        Example: GET, DELETE /documents/{id}/versions/{version_id}/

-   5. **Favorite**: Get list of favorite documents for a user. Mark or unmark
+   8. **Favorite**: Get list of favorite documents for a user. Mark or unmark
        a document as favorite.
        Examples:
-       - GET /documents/favorite/
+       - GET /documents/favorite_list/
        - POST, DELETE /documents/{id}/favorite/

-   6. **Create for Owner**: Create a document via server-to-server on behalf of a user.
+   9. **Create for Owner**: Create a document via server-to-server on behalf of a user.
        Example: POST /documents/create-for-owner/

-   7. **Link Configuration**: Update document link configuration.
+   10. **Link Configuration**: Update document link configuration.
        Example: PUT /documents/{id}/link-configuration/

-   8. **Attachment Upload**: Upload a file attachment for the document.
+   11. **Attachment Upload**: Upload a file attachment for the document.
        Example: POST /documents/{id}/attachment-upload/

-   9. **Media Auth**: Authorize access to document media.
+   12. **Media Auth**: Authorize access to document media.
        Example: GET /documents/media-auth/

-   10. **AI Transform**: Apply a transformation action on a piece of text with AI.
+   13. **AI Transform**: Apply a transformation action on a piece of text with AI.
        Example: POST /documents/{id}/ai-transform/
        Expected data:
        - text (str): The input text.
@@ -346,7 +501,7 @@ class DocumentViewSet(
        Returns: JSON response with the processed text.
        Throttled by: AIDocumentRateThrottle, AIUserRateThrottle.

-   11. **AI Translate**: Translate a piece of text with AI.
+   14. **AI Translate**: Translate a piece of text with AI.
        Example: POST /documents/{id}/ai-translate/
        Expected data:
        - text (str): The input text.
@@ -354,6 +509,9 @@ class DocumentViewSet(
        Returns: JSON response with the translated text.
        Throttled by: AIDocumentRateThrottle, AIUserRateThrottle.

+   15. **AI Proxy**: Proxy an AI request to an external AI service.
+       Example: POST /api/v1.0/documents/<resource_id>/ai-proxy
+
    ### Ordering: created_at, updated_at, is_favorite, title

    Example:
@@ -459,20 +617,18 @@ class DocumentViewSet(
        It performs early filtering on model fields, annotates user roles, and removes
        descendant documents to keep only the highest ancestors readable by the current user.
        """
-       user = self.request.user
+       user = request.user

        # Not calling filter_queryset. We do our own cooking.
        queryset = self.get_queryset()

-       filterset = ListDocumentFilter(
-           self.request.GET, queryset=queryset, request=self.request
-       )
+       filterset = ListDocumentFilter(request.GET, queryset=queryset, request=request)
        if not filterset.is_valid():
            raise drf.exceptions.ValidationError(filterset.errors)
        filter_data = filterset.form.cleaned_data

        # Filter as early as possible on fields that are available on the model
-       for field in ["is_creator_me", "title"]:
+       for field in ["is_creator_me", "title", "q"]:
            queryset = filterset.filters[field].filter(queryset, filter_data[field])

        queryset = queryset.annotate_user_roles(user)
@@ -939,7 +1095,7 @@ class DocumentViewSet(
        filter_data = filterset.form.cleaned_data

        # Filter as early as possible on fields that are available on the model
-       for field in ["is_creator_me", "title"]:
+       for field in ["is_creator_me", "title", "q"]:
            queryset = filterset.filters[field].filter(queryset, filter_data[field])

        queryset = queryset.annotate_user_roles(user)
@@ -962,7 +1118,11 @@ class DocumentViewSet(
        ordering=["path"],
    )
    def descendants(self, request, *args, **kwargs):
-       """Handle listing descendants of a document"""
+       """Deprecated endpoint to list descendants of a document."""
+       logger.warning(
+           "The 'descendants' endpoint is deprecated and will be removed in a future release. "
+           "The search endpoint should be used for all document retrieval use cases."
+       )
        document = self.get_object()

        queryset = document.get_descendants().filter(ancestors_deleted_at__isnull=True)
@@ -1087,11 +1247,7 @@ class DocumentViewSet(
    @transaction.atomic
    def duplicate(self, request, *args, **kwargs):
        """
-       Duplicate a document and store the links to attached files in the duplicated
-       document to allow cross-access.
-
-       Optionally duplicates accesses if `with_accesses` is set to true
-       in the payload.
+       Duplicate a document, alongside its descendants if requested.
        """
        # Get document while checking permissions
        document_to_duplicate = self.get_object()
@@ -1100,8 +1256,43 @@ class DocumentViewSet(
            data=request.data, partial=True
        )
        serializer.is_valid(raise_exception=True)
+       user = request.user
+
+       duplicated_document = self._duplicate_document(
+           document_to_duplicate=document_to_duplicate,
+           serializer=serializer,
+           user=user,
+       )
+
+       return drf_response.Response(
+           {"id": str(duplicated_document.id)}, status=status.HTTP_201_CREATED
+       )
+
+   def _duplicate_document(
+       self,
+       document_to_duplicate,
+       serializer,
+       user,
+       new_parent=None,
+   ):
+       """
+       Duplicate a document and store the links to attached files in the duplicated
+       document to allow cross-access.
+
+       Optionally duplicates accesses if `with_accesses` is set to true
+       in the payload.
+
+       Optionally duplicates sub-documents if `with_descendants` is set to true in
+       the payload. In this case, the whole subtree of the document will be duplicated,
+       and the links to attached files will be stored in all duplicated documents.
+
+       The `with_accesses` option will also be applied to all duplicated documents
+       if `with_descendants` is set to true.
+       """
        with_accesses = serializer.validated_data.get("with_accesses", False)
-       user_role = document_to_duplicate.get_role(request.user)
+       with_descendants = serializer.validated_data.get("with_descendants", False)
+
+       user_role = document_to_duplicate.get_role(user)
        is_owner_or_admin = user_role in models.PRIVILEGED_ROLES

        base64_yjs_content = document_to_duplicate.content
@@ -1120,11 +1311,41 @@ class DocumentViewSet(
            extracted_attachments & set(document_to_duplicate.attachments)
        )
        title = capfirst(_("copy of {title}").format(title=document_to_duplicate.title))
-       if not document_to_duplicate.is_root() and choices.RoleChoices.get_priority(
+       # If parent_duplicate is provided we must add the duplicated document as a child
+       if new_parent is not None:
+           duplicated_document = new_parent.add_child(
+               title=title,
+               content=base64_yjs_content,
+               attachments=attachments,
+               duplicated_from=document_to_duplicate,
+               creator=user,
+               **link_kwargs,
+           )
+
+           # Handle access duplication for this child
+           if with_accesses and is_owner_or_admin:
+               original_accesses = models.DocumentAccess.objects.filter(
+                   document=document_to_duplicate
+               ).exclude(user=user)
+
+               accesses_to_create = [
+                   models.DocumentAccess(
+                       document=duplicated_document,
+                       user_id=access.user_id,
+                       team=access.team,
+                       role=access.role,
+                   )
+                   for access in original_accesses
+               ]
+
+               if accesses_to_create:
+                   models.DocumentAccess.objects.bulk_create(accesses_to_create)
+
+       elif not document_to_duplicate.is_root() and choices.RoleChoices.get_priority(
            user_role
        ) < choices.RoleChoices.get_priority(models.RoleChoices.EDITOR):
            duplicated_document = models.Document.add_root(
-               creator=self.request.user,
+               creator=user,
                title=title,
                content=base64_yjs_content,
                attachments=attachments,
@@ -1133,132 +1354,180 @@ class DocumentViewSet(
            )
            models.DocumentAccess.objects.create(
                document=duplicated_document,
-               user=self.request.user,
+               user=user,
                role=models.RoleChoices.OWNER,
            )
-           return drf_response.Response(
-               {"id": str(duplicated_document.id)}, status=status.HTTP_201_CREATED
-           )
-
-       duplicated_document = document_to_duplicate.add_sibling(
-           "right",
-           title=title,
-           content=base64_yjs_content,
-           attachments=attachments,
-           duplicated_from=document_to_duplicate,
-           creator=request.user,
-           **link_kwargs,
-       )
-
-       # Always add the logged-in user as OWNER for root documents
-       if document_to_duplicate.is_root():
-           accesses_to_create = [
-               models.DocumentAccess(
-                   document=duplicated_document,
-                   user=request.user,
-                   role=models.RoleChoices.OWNER,
-               )
-           ]
-
-           # If accesses should be duplicated, add other users' accesses as per original document
-           if with_accesses and is_owner_or_admin:
-               original_accesses = models.DocumentAccess.objects.filter(
-                   document=document_to_duplicate
-               ).exclude(user=request.user)
-
-               accesses_to_create.extend(
-                   models.DocumentAccess(
-                       document=duplicated_document,
-                       user_id=access.user_id,
-                       team=access.team,
-                       role=access.role,
-                   )
-                   for access in original_accesses
-               )
-
-           # Bulk create all the duplicated accesses
-           models.DocumentAccess.objects.bulk_create(accesses_to_create)
-
-       return drf_response.Response(
-           {"id": str(duplicated_document.id)}, status=status.HTTP_201_CREATED
-       )
-
-   def _search_simple(self, request, text):
-       """
-       Returns a queryset filtered by the content of the document title
-       """
-       # As the 'list' view we get a prefiltered queryset (deleted docs are excluded)
-       queryset = self.get_queryset()
-       filterset = DocumentFilter({"title": text}, queryset=queryset)
-
-       if not filterset.is_valid():
-           raise drf.exceptions.ValidationError(filterset.errors)
-
-       queryset = filterset.filter_queryset(queryset)
-
-       return self.get_response_for_queryset(
-           queryset.order_by("-updated_at"),
-           context={
-               "request": request,
-           },
-       )
-
-   def _search_fulltext(self, indexer, request, params):
-       """
-       Returns a queryset from the results the fulltext search of Find
-       """
-       access_token = request.session.get("oidc_access_token")
-       user = request.user
-       text = params.validated_data["q"]
-       queryset = models.Document.objects.all()
-
-       # Retrieve the documents ids from Find.
-       results = indexer.search(
-           text=text,
-           token=access_token,
-           visited=get_visited_document_ids_of(queryset, user),
-       )
-
-       docs_by_uuid = {str(d.pk): d for d in queryset.filter(pk__in=results)}
-       ordered_docs = [docs_by_uuid[id] for id in results]
-
-       page = self.paginate_queryset(ordered_docs)
-
-       serializer = self.get_serializer(
-           page if page else ordered_docs,
-           many=True,
-           context={
-               "request": request,
-           },
-       )
-
-       return self.get_paginated_response(serializer.data)
+       else:
+           duplicated_document = document_to_duplicate.add_sibling(
+               "last-sibling",
+               title=title,
+               content=base64_yjs_content,
+               attachments=attachments,
+               duplicated_from=document_to_duplicate,
+               creator=user,
+               **link_kwargs,
+           )
+
+           # Always add the logged-in user as OWNER for root documents
+           if document_to_duplicate.is_root():
+               accesses_to_create = [
+                   models.DocumentAccess(
+                       document=duplicated_document,
+                       user=user,
+                       role=models.RoleChoices.OWNER,
+                   )
+               ]
+
+               # If accesses should be duplicated,
+               # add other users' accesses as per original document
+               if with_accesses and is_owner_or_admin:
+                   original_accesses = models.DocumentAccess.objects.filter(
+                       document=document_to_duplicate
+                   ).exclude(user=user)
+
+                   accesses_to_create.extend(
+                       models.DocumentAccess(
+                           document=duplicated_document,
+                           user_id=access.user_id,
+                           team=access.team,
+                           role=access.role,
+                       )
+                       for access in original_accesses
+                   )
+
+               # Bulk create all the duplicated accesses
+               models.DocumentAccess.objects.bulk_create(accesses_to_create)
+
+       if with_descendants:
+           for child in document_to_duplicate.get_children().filter(
+               ancestors_deleted_at__isnull=True
+           ):
+               # When duplicating descendants, attach duplicates under the duplicated_document
+               self._duplicate_document(
+                   document_to_duplicate=child,
+                   serializer=serializer,
+                   user=user,
+                   new_parent=duplicated_document,
+               )
+
+       return duplicated_document

    @drf.decorators.action(detail=False, methods=["get"], url_path="search")
-   @method_decorator(refresh_oidc_access_token)
+   @utils.conditional_refresh_oidc_token
    def search(self, request, *args, **kwargs):
        """
-       Returns a DRF response containing the filtered, annotated and ordered document list.
-
-       Applies filtering based on request parameter 'q' from `SearchDocumentSerializer`.
-       Depending of the configuration it can be:
-       - A fulltext search through the opensearch indexation app "find" if the backend is
-         enabled (see SEARCH_INDEXER_CLASS)
-       - A filtering by the model field 'title'.
-
-       The ordering is always by the most recent first.
+       Returns an ordered list of documents best matching the search query parameter 'q'.
+
+       It depends on a search configurable Search Indexer. If no Search Indexer is configured
+       or if it is not reachable, the function falls back to a basic title search.
        """
        params = serializers.SearchDocumentSerializer(data=request.query_params)
        params.is_valid(raise_exception=True)
+       search_type = self._get_search_type()
+       if search_type == SearchType.TITLE:
+           return self._title_search(request, params.validated_data, *args, **kwargs)
+
        indexer = get_document_indexer()
+       if indexer is None:
+           # fallback on title search if the indexer is not configured
+           return self._title_search(request, params.validated_data, *args, **kwargs)

-       if indexer:
-           return self._search_fulltext(indexer, request, params=params)
-
-       # The indexer is not configured, we fallback on a simple icontains filter by the
-       # model field 'title'.
-       return self._search_simple(request, text=params.validated_data["q"])
+       try:
+           return self._search_with_indexer(
+               indexer, request, params=params, search_type=search_type
+           )
+       except requests.exceptions.RequestException as e:
+           logger.error("Error while searching documents with indexer: %s", e)
+           # fallback on title search if the indexer is not reached
+           return self._title_search(request, params.validated_data, *args, **kwargs)
+
+   def _get_search_type(self) -> SearchType:
+       """
+       Returns the search type to use for the search endpoint based on feature flags.
+       If a user has both flags activated the most advanced search is used
+       (HYBRID > FULL_TEXT > TITLE).
+       A user with no flag will default to the basic title search.
+       """
+       if waffle.flag_is_active(self.request, FeatureFlag.FLAG_FIND_HYBRID_SEARCH):
+           return SearchType.HYBRID
+       if waffle.flag_is_active(self.request, FeatureFlag.FLAG_FIND_FULL_TEXT_SEARCH):
+           return SearchType.FULL_TEXT
+       return SearchType.TITLE
+
+   @staticmethod
+   def _search_with_indexer(indexer, request, params, search_type):
+       """
+       Returns a list of documents matching the query (q) according to the configured indexer.
+       """
+       queryset = models.Document.objects.all()
+
+       results = indexer.search(
+           q=params.validated_data["q"],
+           search_type=search_type,
+           token=request.session.get("oidc_access_token"),
+           path=(
+               params.validated_data["path"]
+               if "path" in params.validated_data
+               else None
+           ),
+           visited=get_visited_document_ids_of(queryset, request.user),
+       )
+
+       return drf_response.Response(
+           {
+               "count": len(results),
+               "next": None,
+               "previous": None,
+               "results": results,
+           }
+       )
+
+   def _title_search(self, request, validated_data, *args, **kwargs):
+       """
+       Fallback search method when no indexer is configured.
+       Only searches in the title field of documents.
+       """
+       if not validated_data.get("path"):
+           return self.list(request, *args, **kwargs)
+
+       return self._list_descendants(request, validated_data)
+
+   def _list_descendants(self, request, validated_data):
+       """
+       List all documents whose path starts with the provided path parameter.
+       Includes the parent document itself.
+       Used internally by the search endpoint when path filtering is requested.
+       """
+       # Get parent document without access filtering
+       parent_path = validated_data["path"]
+       try:
+           parent = models.Document.objects.annotate_user_roles(request.user).get(
+               path=parent_path
+           )
+       except models.Document.DoesNotExist as exc:
+           raise drf.exceptions.NotFound("Document not found from path.") from exc
+
+       abilities = parent.get_abilities(request.user)
+       if not abilities.get("search"):
+           raise drf.exceptions.PermissionDenied(
+               "You do not have permission to search within this document."
+           )
+
+       # Get descendants and include the parent, ordered by path
+       queryset = (
+           parent.get_descendants(include_self=True)
+           .filter(ancestors_deleted_at__isnull=True)
+           .order_by("path")
+       )
+       queryset = self.filter_queryset(queryset)
+
+       # filter by title
+       filterset = DocumentFilter(request.GET, queryset=queryset)
+       if not filterset.is_valid():
+           raise drf.exceptions.ValidationError(filterset.errors)
+
+       queryset = filterset.qs
+       return self.get_response_for_queryset(queryset)

    @drf.decorators.action(detail=True, methods=["get"], url_path="versions")
    def versions_list(self, request, *args, **kwargs):
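Note that the reworked search action keeps the shape of a paginated list even when the indexer is used (`count` / `next` / `previous` / `results`), so existing clients keep working across the fallback paths. A minimal client-side sketch, assuming the viewset is routed under `/api/v1.0/documents/` on a hypothetical host and that the result items expose `id` and `title` fields (the fields ultimately depend on what the configured indexer returns):

    import requests

    resp = requests.get(
        "https://docs.example.com/api/v1.0/documents/search/",  # hypothetical base URL
        params={"q": "meeting notes"},  # a "path" parameter can be added to restrict to a subtree
        headers={"Authorization": "Bearer <token>"},  # assumption: token auth is accepted
        timeout=10,
    )
    data = resp.json()
    for doc in data["results"]:
        print(doc["id"], doc["title"])  # assumed result fields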
@@ -1467,11 +1736,19 @@ class DocumentViewSet(
            or serializer.validated_data["is_unsafe"]
        ):
            extra_args.update(
-               {"ContentDisposition": f'attachment; filename="{file_name:s}"'}
+               {
+                   "ContentDisposition": content_disposition_header(
+                       as_attachment=True, filename=file_name
+                   )
+               }
            )
        else:
            extra_args.update(
-               {"ContentDisposition": f'inline; filename="{file_name:s}"'}
+               {
+                   "ContentDisposition": content_disposition_header(
+                       as_attachment=False, filename=file_name
+                   )
+               }
            )

        file = serializer.validated_data["file"]
@@ -1642,6 +1919,45 @@ class DocumentViewSet(

        return drf.response.Response(body, status=drf.status.HTTP_200_OK)

+   @drf.decorators.action(
+       detail=True,
+       methods=["post"],
+       name="Proxy AI requests to the AI provider",
+       url_path="ai-proxy",
+       throttle_classes=[utils.AIDocumentRateThrottle, utils.AIUserRateThrottle],
+   )
+   def ai_proxy(self, request, *args, **kwargs):
+       """
+       POST /api/v1.0/documents/<resource_id>/ai-proxy
+       Proxy AI requests to the configured AI provider.
+       This endpoint forwards requests to the AI provider and returns the complete response.
+       """
+       # Check permissions first
+       self.get_object()
+
+       if not settings.AI_FEATURE_ENABLED or not settings.AI_FEATURE_BLOCKNOTE_ENABLED:
+           raise ValidationError("AI feature is not enabled.")
+
+       ai_service = AIService()
+
+       try:
+           stream = ai_service.stream(request)
+       except PydanticValidationError as err:
+           logger.info("pydantic validation error: %s", err)
+           return drf.response.Response(
+               {"detail": "Invalid submitted payload"},
+               status=drf.status.HTTP_400_BAD_REQUEST,
+           )
+
+       return StreamingHttpResponse(
+           stream,
+           content_type="text/event-stream",
+           headers={
+               "x-vercel-ai-data-stream": "v1",  # This header is used for Vercel AI streaming,
+               "X-Accel-Buffering": "no",  # Prevent nginx buffering
+           },
+       )
+
    @drf.decorators.action(
        detail=True,
        methods=["post"],
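Since `ai_proxy` returns a `StreamingHttpResponse` with content type `text/event-stream`, a caller has to consume the body incrementally rather than waiting for a single JSON document. A rough client sketch, assuming the same routing prefix as the other document actions; the request payload shape and host are illustrative assumptions, not taken from the serializer:

    import requests

    doc_id = "11111111-1111-1111-1111-111111111111"  # placeholder document id
    with requests.post(
        f"https://docs.example.com/api/v1.0/documents/{doc_id}/ai-proxy/",
        json={"messages": [{"role": "user", "content": "Summarize this page"}]},  # assumed payload shape
        headers={"Authorization": "Bearer <token>"},  # assumption: token auth is accepted
        stream=True,
        timeout=60,
    ) as resp:
        for line in resp.iter_lines(decode_unicode=True):
            if line:
                print(line)  # raw server-sent-event lines forwarded from the AI provider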
@@ -1982,6 +2298,7 @@ class DocumentAccessViewSet(
        "user__full_name",
        "user__email",
        "user__language",
+       "user__is_first_connection",
        "document__id",
        "document__path",
        "document__depth",
@@ -2281,6 +2598,12 @@ class DocumentAskForAccessViewSet(
        """Create a document ask for access resource."""
        document = self.get_document_or_404()
+
+       if document.get_role(request.user) in models.PRIVILEGED_ROLES:
+           return drf.response.Response(
+               {"detail": "You already have privileged access to this document."},
+               status=drf.status.HTTP_400_BAD_REQUEST,
+           )
+
        serializer = serializers.DocumentAskForAccessCreateSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

@@ -2337,7 +2660,11 @@ class ConfigView(drf.views.APIView):
        Return a dictionary of public settings.
        """
        array_settings = [
+           "AI_BOT",
            "AI_FEATURE_ENABLED",
+           "AI_FEATURE_BLOCKNOTE_ENABLED",
+           "AI_FEATURE_LEGACY_ENABLED",
+           "API_USERS_SEARCH_QUERY_MIN_LENGTH",
            "COLLABORATION_WS_URL",
            "COLLABORATION_WS_NOT_CONNECTED_READY_ONLY",
            "CONVERSION_FILE_EXTENSIONS_ALLOWED",
@@ -2347,6 +2674,7 @@ class ConfigView(drf.views.APIView):
            "FRONTEND_CSS_URL",
            "FRONTEND_HOMEPAGE_FEATURE_ENABLED",
            "FRONTEND_JS_URL",
+           "FRONTEND_SILENT_LOGIN_ENABLED",
            "FRONTEND_THEME",
            "MEDIA_BASE_URL",
            "POSTHOG_KEY",
@@ -3,7 +3,7 @@ Core application enums declaration
"""

import re
-from enum import StrEnum
+from enum import Enum, StrEnum

from django.conf import global_settings, settings
from django.db import models
@@ -46,3 +46,24 @@ class DocumentAttachmentStatus(StrEnum):

    PROCESSING = "processing"
    READY = "ready"
+
+
+class SearchType(str, Enum):
+    """
+    Defines the possible search types for a document search query.
+    - TITLE: DRF based search in the title of the documents only.
+    - HYBRID and FULL_TEXT: more advanced search based on Find indexer.
+    """
+
+    TITLE = "title"
+    HYBRID = "hybrid"
+    FULL_TEXT = "full-text"
+
+
+class FeatureFlag(str, Enum):
+    """
+    Defines the possible feature flags for the application.
+    """
+
+    FLAG_FIND_HYBRID_SEARCH = "flag_find_hybrid_search"
+    FLAG_FIND_FULL_TEXT_SEARCH = "flag_find_full_text_search"
src/backend/core/external_api/permissions.py (new file, 41 lines)
@@ -0,0 +1,41 @@
"""Resource Server Permissions for the Docs app."""

from django.conf import settings

from lasuite.oidc_resource_server.authentication import ResourceServerAuthentication
from rest_framework import permissions


class ResourceServerClientPermission(permissions.BasePermission):
    """
    Permission class for resource server views.
    This provides a way to open the resource server views to a limited set of
    Service Providers.
    Note: we might add a more complex permission system in the future, based on
    the Service Provider ID and the requested scopes.
    """

    def has_permission(self, request, view):
        """
        Check if the user is authenticated and the token introspection
        provides an authorized Service Provider.
        """
        if not isinstance(
            request.successful_authenticator, ResourceServerAuthentication
        ):
            # Not a resource server request
            return False

        # Check if the user is authenticated
        if not request.user.is_authenticated:
            return False
        if (
            hasattr(view, "resource_server_actions")
            and view.action not in view.resource_server_actions
        ):
            return False

        # When used as a resource server, the request has a token audience
        return (
            request.resource_server_token_audience in settings.OIDC_RS_ALLOWED_AUDIENCES
        )
src/backend/core/external_api/viewsets.py (new file, 91 lines)
@@ -0,0 +1,91 @@
"""Resource Server Viewsets for the Docs app."""

from django.conf import settings

from lasuite.oidc_resource_server.authentication import ResourceServerAuthentication

from core.api.permissions import (
    CanCreateInvitationPermission,
    DocumentPermission,
    IsSelf,
    ResourceAccessPermission,
)
from core.api.viewsets import (
    DocumentAccessViewSet,
    DocumentViewSet,
    InvitationViewset,
    UserViewSet,
)
from core.external_api.permissions import ResourceServerClientPermission

# pylint: disable=too-many-ancestors


class ResourceServerRestrictionMixin:
    """
    Mixin for Resource Server Viewsets to provide shortcut to get
    configured actions for a given resource.
    """

    def _get_resource_server_actions(self, resource_name):
        """Get resource_server_actions from settings."""
        external_api_config = settings.EXTERNAL_API.get(resource_name, {})
        return list(external_api_config.get("actions", []))


class ResourceServerDocumentViewSet(ResourceServerRestrictionMixin, DocumentViewSet):
    """Resource Server Viewset for Documents."""

    authentication_classes = [ResourceServerAuthentication]

    permission_classes = [ResourceServerClientPermission & DocumentPermission]  # type: ignore

    @property
    def resource_server_actions(self):
        """Build resource_server_actions from settings."""
        return self._get_resource_server_actions("documents")


class ResourceServerDocumentAccessViewSet(
    ResourceServerRestrictionMixin, DocumentAccessViewSet
):
    """Resource Server Viewset for DocumentAccess."""

    authentication_classes = [ResourceServerAuthentication]

    permission_classes = [ResourceServerClientPermission & ResourceAccessPermission]  # type: ignore

    @property
    def resource_server_actions(self):
        """Get resource_server_actions from settings."""
        return self._get_resource_server_actions("document_access")


class ResourceServerInvitationViewSet(
    ResourceServerRestrictionMixin, InvitationViewset
):
    """Resource Server Viewset for Invitations."""

    authentication_classes = [ResourceServerAuthentication]

    permission_classes = [
        ResourceServerClientPermission & CanCreateInvitationPermission
    ]

    @property
    def resource_server_actions(self):
        """Get resource_server_actions from settings."""
        return self._get_resource_server_actions("document_invitation")


class ResourceServerUserViewSet(ResourceServerRestrictionMixin, UserViewSet):
    """Resource Server Viewset for User."""

    authentication_classes = [ResourceServerAuthentication]

    permission_classes = [ResourceServerClientPermission & IsSelf]  # type: ignore

    @property
    def resource_server_actions(self):
        """Get resource_server_actions from settings."""
        return self._get_resource_server_actions("users")
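The `resource_server_actions` properties above are driven entirely by `settings.EXTERNAL_API`, a mapping from resource name to the viewset actions a Service Provider is allowed to call. A plausible configuration sketch; the resource keys match the ones used by the viewsets, but the action names are illustrative, not values taken from the diff:

    # settings.py (sketch)
    EXTERNAL_API = {
        "documents": {"actions": ["retrieve", "list", "create"]},
        "document_access": {"actions": ["list"]},
        "document_invitation": {"actions": ["create"]},
        "users": {"actions": ["retrieve"]},
    }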
@@ -19,3 +19,21 @@ class ForceSessionMiddleware:

        response = self.get_response(request)
        return response
+
+
+class SaveRawBodyMiddleware:
+    """
+    Save the raw request body to use it later.
+    """
+
+    def __init__(self, get_response):
+        """Initialize the middleware."""
+        self.get_response = get_response
+
+    def __call__(self, request):
+        """Save the raw request body in the request to use it later."""
+        if request.path.endswith(("/ai-proxy/", "/ai-proxy")):
+            request.raw_body = request.body
+
+        response = self.get_response(request)
+        return response
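SaveRawBodyMiddleware only has an effect if it runs before the view, so it must be listed in the MIDDLEWARE setting. A sketch of the registration; the dotted path is hypothetical since the module path of this file is not shown in the diff, and the only real requirement is that the middleware is reached for /ai-proxy requests:

    # settings.py (sketch)
    MIDDLEWARE = [
        # ... existing middleware ...
        "core.api.middleware.SaveRawBodyMiddleware",  # hypothetical dotted path
    ]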
@@ -0,0 +1,178 @@
# Generated by Django 5.2.11 on 2026-02-10 15:47

import uuid

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("core", "0028_remove_templateaccess_template_and_more"),
    ]

    operations = [
        migrations.CreateModel(
            name="UserReconciliationCsvImport",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False,
                        help_text="primary key for the record as UUID", verbose_name="id",
                    ),
                ),
                (
                    "created_at",
                    models.DateTimeField(
                        auto_now_add=True,
                        help_text="date and time at which a record was created",
                        verbose_name="created on",
                    ),
                ),
                (
                    "updated_at",
                    models.DateTimeField(
                        auto_now=True,
                        help_text="date and time at which a record was last updated",
                        verbose_name="updated on",
                    ),
                ),
                ("file", models.FileField(upload_to="imports/", verbose_name="CSV file")),
                (
                    "status",
                    models.CharField(
                        choices=[
                            ("pending", "Pending"),
                            ("running", "Running"),
                            ("done", "Done"),
                            ("error", "Error"),
                        ],
                        default="pending",
                        max_length=20,
                    ),
                ),
                ("logs", models.TextField(blank=True)),
            ],
            options={
                "verbose_name": "user reconciliation CSV import",
                "verbose_name_plural": "user reconciliation CSV imports",
                "db_table": "impress_user_reconciliation_csv_import",
            },
        ),
        migrations.CreateModel(
            name="UserReconciliation",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4, editable=False, primary_key=True, serialize=False,
                        help_text="primary key for the record as UUID", verbose_name="id",
                    ),
                ),
                (
                    "created_at",
                    models.DateTimeField(
                        auto_now_add=True,
                        help_text="date and time at which a record was created",
                        verbose_name="created on",
                    ),
                ),
                (
                    "updated_at",
                    models.DateTimeField(
                        auto_now=True,
                        help_text="date and time at which a record was last updated",
                        verbose_name="updated on",
                    ),
                ),
                (
                    "active_email",
                    models.EmailField(max_length=254, verbose_name="Active email address"),
                ),
                (
                    "inactive_email",
                    models.EmailField(max_length=254, verbose_name="Email address to deactivate"),
                ),
                ("active_email_checked", models.BooleanField(default=False)),
                ("inactive_email_checked", models.BooleanField(default=False)),
                (
                    "active_email_confirmation_id",
                    models.UUIDField(default=uuid.uuid4, editable=False, null=True, unique=True),
                ),
                (
                    "inactive_email_confirmation_id",
                    models.UUIDField(default=uuid.uuid4, editable=False, null=True, unique=True),
                ),
                (
                    "source_unique_id",
                    models.CharField(
                        blank=True, max_length=100, null=True,
                        verbose_name="Unique ID in the source file",
                    ),
                ),
                (
                    "status",
                    models.CharField(
                        choices=[
                            ("pending", "Pending"),
                            ("ready", "Ready"),
                            ("done", "Done"),
                            ("error", "Error"),
                        ],
                        default="pending",
                        max_length=20,
                    ),
                ),
                ("logs", models.TextField(blank=True)),
                (
                    "active_user",
                    models.ForeignKey(
                        blank=True, null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="active_user",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "inactive_user",
                    models.ForeignKey(
                        blank=True, null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="inactive_user",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "user reconciliation",
                "verbose_name_plural": "user reconciliations",
                "db_table": "impress_user_reconciliation",
                "ordering": ["-created_at"],
            },
        ),
    ]
src/backend/core/migrations/0030_user_is_first_connection.py (new file, 32 lines)
@@ -0,0 +1,32 @@
# Generated by Django 5.2.11 on 2026-03-04 14:49

from django.db import migrations, models


def set_is_first_connection_false(apps, schema_editor):
    """Update all existing user.is_first_connection to False."""
    user = apps.get_model("core", "User")

    user.objects.update(is_first_connection=False)


class Migration(migrations.Migration):
    dependencies = [
        ("core", "0029_userreconciliationcsvimport_userreconciliation"),
    ]

    operations = [
        migrations.AddField(
            model_name="user",
            name="is_first_connection",
            field=models.BooleanField(
                default=True,
                help_text="Whether the user has completed the first connection process.",
                verbose_name="first connection status",
            ),
        ),
        migrations.RunPython(
            set_is_first_connection_false,
            reverse_code=migrations.RunPython.noop,
        ),
    ]
@@ -0,0 +1,34 @@
# Generated by Django 5.2.12 on 2026-03-11 17:16

from django.conf import settings
from django.db import migrations

from core.models import PRIVILEGED_ROLES


def clean_onboarding_accesses(apps, schema_editor):
    """clean accesses on on-boarding documents."""
    onboarding_document_ids = settings.USER_ONBOARDING_DOCUMENTS
    if not onboarding_document_ids:
        return

    onboarding_document_ids = set(settings.USER_ONBOARDING_DOCUMENTS)

    DocumentAccess = apps.get_model("core", "DocumentAccess")

    DocumentAccess.objects.filter(document_id__in=onboarding_document_ids).exclude(
        role__in=PRIVILEGED_ROLES
    ).delete()


class Migration(migrations.Migration):
    dependencies = [
        ("core", "0030_user_is_first_connection"),
    ]

    operations = [
        migrations.RunPython(
            clean_onboarding_accesses,
            reverse_code=migrations.RunPython.noop,
        ),
    ]
@@ -15,12 +15,11 @@ from django.contrib.auth import models as auth_models
from django.contrib.auth.base_user import AbstractBaseUser
from django.contrib.postgres.fields import ArrayField
from django.contrib.sites.models import Site
-from django.core import mail
from django.core.cache import cache
from django.core.files.base import ContentFile
from django.core.files.storage import default_storage
from django.core.mail import send_mail
-from django.db import models, transaction
+from django.db import connection, models, transaction
from django.db.models.functions import Left, Length
from django.template.loader import render_to_string
from django.utils import timezone
@@ -33,14 +32,14 @@ from rest_framework.exceptions import ValidationError
from timezone_field import TimeZoneField
from treebeard.mp_tree import MP_Node, MP_NodeManager, MP_NodeQuerySet

-from .choices import (
+from core.choices import (
    PRIVILEGED_ROLES,
    LinkReachChoices,
    LinkRoleChoices,
    RoleChoices,
    get_equivalent_link_definition,
)
-from .validators import sub_validator
+from core.validators import sub_validator

logger = getLogger(__name__)

@@ -119,11 +118,11 @@ class UserManager(auth_models.UserManager):

        if settings.OIDC_FALLBACK_TO_EMAIL_FOR_IDENTIFICATION:
            try:
-               return self.get(email=email)
+               return self.get(email__iexact=email)
            except self.model.DoesNotExist:
                pass
        elif (
-           self.filter(email=email).exists()
+           self.filter(email__iexact=email).exists()
            and not settings.OIDC_ALLOW_DUPLICATE_EMAILS
        ):
            raise DuplicateEmailError(
@@ -194,6 +193,11 @@ class User(AbstractBaseUser, BaseModel, auth_models.PermissionsMixin):
            "Unselect this instead of deleting accounts."
        ),
    )
+   is_first_connection = models.BooleanField(
+       _("first connection status"),
+       default=True,
+       help_text=_("Whether the user has completed the first connection process."),
+   )

    objects = UserManager()

@@ -210,14 +214,89 @@ class User(AbstractBaseUser, BaseModel, auth_models.PermissionsMixin):

    def save(self, *args, **kwargs):
        """
-       If it's a new user, give its user access to the documents to which s.he was invited.
+       If it's a new user, give its user access to the documents they were invited to.
        """
        is_adding = self._state.adding
        super().save(*args, **kwargs)

        if is_adding:
+           self._handle_onboarding_documents_access()
+           self._duplicate_onboarding_sandbox_document()
            self._convert_valid_invitations()

+   def _handle_onboarding_documents_access(self):
+       """
+       If the user is new and there are documents configured to be given to new users,
+       create link traces to these documents and pin them as favorites for the user.
+       """
+       if settings.USER_ONBOARDING_DOCUMENTS:
+           onboarding_document_ids = set(settings.USER_ONBOARDING_DOCUMENTS)
+           onboarding_link_traces = []
+           favorite_documents = []
+           for document_id in onboarding_document_ids:
+               try:
+                   document = Document.objects.get(id=document_id)
+               except Document.DoesNotExist:
+                   logger.warning(
+                       "Onboarding document with id %s does not exist. Skipping.",
+                       document_id,
+                   )
+                   continue
+
+               if document.link_reach == LinkReachChoices.RESTRICTED:
+                   logger.warning(
+                       "Onboarding on a restricted document is not allowed. Must be public or "
+                       "connected. Restricted document: %s",
+                       document_id,
+                   )
+                   continue
+
+               onboarding_link_traces.append(LinkTrace(user=self, document=document))
+               favorite_documents.append(
+                   DocumentFavorite(user=self, document_id=document_id)
+               )
+
+           LinkTrace.objects.bulk_create(onboarding_link_traces)
+           DocumentFavorite.objects.bulk_create(favorite_documents)
+
+   def _duplicate_onboarding_sandbox_document(self):
+       """
+       If the user is new and there is a sandbox document configured,
+       duplicate the sandbox document for the user
+       """
+       if settings.USER_ONBOARDING_SANDBOX_DOCUMENT:
+           # transaction.atomic is used in a context manager to avoid a transaction if
+           # the settings USER_ONBOARDING_SANDBOX_DOCUMENT is unused
+           with transaction.atomic():
+               # locks the table to ensure safe concurrent access
+               with connection.cursor() as cursor:
+                   cursor.execute(
+                       f'LOCK TABLE "{Document._meta.db_table}" '  # noqa: SLF001
+                       "IN SHARE ROW EXCLUSIVE MODE;"
+                   )
+
+               sandbox_id = settings.USER_ONBOARDING_SANDBOX_DOCUMENT
+               try:
+                   template_document = Document.objects.get(id=sandbox_id)
+               except Document.DoesNotExist:
+                   logger.warning(
+                       "Onboarding sandbox document with id %s does not exist. Skipping.",
+                       sandbox_id,
+                   )
+                   return
+
+               sandbox_document = Document.add_root(
+                   title=template_document.title,
+                   content=template_document.content,
+                   attachments=template_document.attachments,
+                   duplicated_from=template_document,
+                   creator=self,
+               )
+
+               DocumentAccess.objects.create(
+                   user=self, document=sandbox_document, role=RoleChoices.OWNER
+               )
+
    def _convert_valid_invitations(self):
        """
        Convert valid invitations to document accesses.
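Both onboarding helpers above are no-ops unless the corresponding settings are filled in. A sketch of what that configuration could look like; the UUIDs are placeholders, and the referenced documents must exist and must not be link-restricted for the onboarding access to be granted:

    # settings.py (sketch)
    USER_ONBOARDING_DOCUMENTS = [
        "4f0c1e2a-0000-0000-0000-000000000001",  # placeholder id, e.g. a "Welcome" document
    ]
    USER_ONBOARDING_SANDBOX_DOCUMENT = "4f0c1e2a-0000-0000-0000-000000000002"  # placeholder id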
@@ -251,11 +330,37 @@ class User(AbstractBaseUser, BaseModel, auth_models.PermissionsMixin):

        valid_invitations.delete()

-   def email_user(self, subject, message, from_email=None, **kwargs):
-       """Email this user."""
-       if not self.email:
-           raise ValueError("User has no email address.")
-       mail.send_mail(subject, message, from_email, [self.email], **kwargs)
+   def send_email(self, subject, context=None, language=None):
+       """Generate and send email to the user from a template."""
+       emails = [self.email]
+       context = context or {}
+       domain = settings.EMAIL_URL_APP or Site.objects.get_current().domain
+
+       language = language or get_language()
+       context.update(
+           {
+               "brandname": settings.EMAIL_BRAND_NAME,
+               "domain": domain,
+               "logo_img": settings.EMAIL_LOGO_IMG,
+           }
+       )
+
+       with override(language):
+           msg_html = render_to_string("mail/html/template.html", context)
+           msg_plain = render_to_string("mail/text/template.txt", context)
+           subject = str(subject)  # Force translation
+
+           try:
+               send_mail(
+                   subject.capitalize(),
+                   msg_plain,
+                   settings.EMAIL_FROM,
+                   emails,
+                   html_message=msg_html,
+                   fail_silently=False,
+               )
+           except smtplib.SMTPException as exception:
+               logger.error("invitation to %s was not sent: %s", emails, exception)

    @cached_property
    def teams(self):
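The new send_email helper replaces the thin email_user wrapper with template-based rendering, so callers only pass a subject and a template context. A hedged usage sketch; the context keys beyond the ones injected by the method are assumptions about what mail/html/template.html expects, and the lookup is a placeholder:

    from django.utils.translation import gettext_lazy as _

    from core.models import User

    user = User.objects.get(email="someone@example.com")  # placeholder lookup
    user.send_email(
        _("please confirm your email address"),
        context={"title": _("Email confirmation"), "message": "..."},  # assumed template variables
        language="fr",
    )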
@@ -266,6 +371,417 @@ class User(AbstractBaseUser, BaseModel, auth_models.PermissionsMixin):
         return []
 
+
+class UserReconciliation(BaseModel):
+    """Model to run batch jobs to replace an active user by another one"""
+
+    active_email = models.EmailField(_("Active email address"))
+    inactive_email = models.EmailField(_("Email address to deactivate"))
+    active_email_checked = models.BooleanField(default=False)
+    inactive_email_checked = models.BooleanField(default=False)
+    active_user = models.ForeignKey(
+        User,
+        on_delete=models.CASCADE,
+        null=True,
+        blank=True,
+        related_name="active_user",
+    )
+    inactive_user = models.ForeignKey(
+        User,
+        on_delete=models.CASCADE,
+        null=True,
+        blank=True,
+        related_name="inactive_user",
+    )
+    active_email_confirmation_id = models.UUIDField(
+        default=uuid.uuid4, unique=True, editable=False, null=True
+    )
+    inactive_email_confirmation_id = models.UUIDField(
+        default=uuid.uuid4, unique=True, editable=False, null=True
+    )
+    source_unique_id = models.CharField(
+        max_length=100,
+        blank=True,
+        null=True,
+        verbose_name=_("Unique ID in the source file"),
+    )
+
+    status = models.CharField(
+        max_length=20,
+        choices=[
+            ("pending", _("Pending")),
+            ("ready", _("Ready")),
+            ("done", _("Done")),
+            ("error", _("Error")),
+        ],
+        default="pending",
+    )
+    logs = models.TextField(blank=True)
+
+    class Meta:
+        db_table = "impress_user_reconciliation"
+        verbose_name = _("user reconciliation")
+        verbose_name_plural = _("user reconciliations")
+        ordering = ["-created_at"]
+
+    def __str__(self):
+        return f"Reconciliation from {self.inactive_email} to {self.active_email}"
+
+    def save(self, *args, **kwargs):
+        """
+        For pending queries, identify the actual users and send validation emails
+        """
+        if self.status == "pending":
+            self.active_user = User.objects.filter(email=self.active_email).first()
+            self.inactive_user = User.objects.filter(email=self.inactive_email).first()
+
+            if self.active_user and self.inactive_user:
+                if not self.active_email_checked:
+                    self.send_reconciliation_confirm_email(
+                        self.active_user, "active", self.active_email_confirmation_id
+                    )
+                if not self.inactive_email_checked:
+                    self.send_reconciliation_confirm_email(
+                        self.inactive_user,
+                        "inactive",
+                        self.inactive_email_confirmation_id,
+                    )
+                self.status = "ready"
+            else:
+                self.status = "error"
+                self.logs = "Error: Both active and inactive users need to exist."
+
+        super().save(*args, **kwargs)
+
+    @transaction.atomic
+    def process_reconciliation_request(self):
+        """
+        Process the reconciliation request as a transaction.
+
+        - Transfer document accesses from inactive to active user, updating roles as needed.
+        - Transfer document favorites from inactive to active user.
+        - Transfer link traces from inactive to active user.
+        - Transfer comment-related content from inactive to active user
+          (threads, comments and reactions)
+        - Activate the active user and deactivate the inactive user.
+        - Update the reconciliation entry itself.
+        """
+
+        # Prepare the data to perform the reconciliation on
+        updated_accesses, removed_accesses = (
+            self.prepare_documentaccess_reconciliation()
+        )
+        updated_linktraces, removed_linktraces = self.prepare_linktrace_reconciliation()
+        update_favorites, removed_favorites = (
+            self.prepare_document_favorite_reconciliation()
+        )
+        updated_threads = self.prepare_thread_reconciliation()
+        updated_comments = self.prepare_comment_reconciliation()
+        updated_reactions, removed_reactions = self.prepare_reaction_reconciliation()
+
+        self.active_user.is_active = True
+        self.inactive_user.is_active = False
+
+        # Actually perform the bulk operations
+        DocumentAccess.objects.bulk_update(updated_accesses, ["user", "role"])
+
+        if removed_accesses:
+            ids_to_delete = [entry.id for entry in removed_accesses]
+            DocumentAccess.objects.filter(id__in=ids_to_delete).delete()
+
+        DocumentFavorite.objects.bulk_update(update_favorites, ["user"])
+        if removed_favorites:
+            ids_to_delete = [entry.id for entry in removed_favorites]
+            DocumentFavorite.objects.filter(id__in=ids_to_delete).delete()
+
+        LinkTrace.objects.bulk_update(updated_linktraces, ["user"])
+        if removed_linktraces:
+            ids_to_delete = [entry.id for entry in removed_linktraces]
+            LinkTrace.objects.filter(id__in=ids_to_delete).delete()
+
+        Thread.objects.bulk_update(updated_threads, ["creator"])
+        Comment.objects.bulk_update(updated_comments, ["user"])
+
+        # pylint: disable=C0103
+        ReactionThroughModel = Reaction.users.through
+        reactions_to_create = []
+        for updated_reaction in updated_reactions:
+            reactions_to_create.append(
+                ReactionThroughModel(
+                    user_id=self.active_user.pk, reaction_id=updated_reaction.pk
+                )
+            )
+
+        if reactions_to_create:
+            ReactionThroughModel.objects.bulk_create(reactions_to_create)
+
+        if removed_reactions:
+            ids_to_delete = [entry.id for entry in removed_reactions]
+            ReactionThroughModel.objects.filter(
+                reaction_id__in=ids_to_delete, user_id=self.inactive_user.pk
+            ).delete()
+
+        User.objects.bulk_update([self.active_user, self.inactive_user], ["is_active"])
+
+        # Wrap up the reconciliation entry
+        self.logs += f"""Requested update for {len(updated_accesses)} DocumentAccess items
+and deletion for {len(removed_accesses)} DocumentAccess items.\n"""
+        self.status = "done"
+        self.save()
+
+        self.send_reconciliation_done_email()
+
+    def prepare_documentaccess_reconciliation(self):
+        """
+        Prepare the reconciliation by transferring document accesses from the inactive user
+        to the active user.
+        """
+        updated_accesses = []
+        removed_accesses = []
+        inactive_accesses = DocumentAccess.objects.filter(user=self.inactive_user)
+
+        # Check documents where the active user already has access
+        inactive_accesses_documents = inactive_accesses.values_list(
+            "document", flat=True
+        )
+        existing_accesses = DocumentAccess.objects.filter(user=self.active_user).filter(
+            document__in=inactive_accesses_documents
+        )
+        existing_roles_per_doc = dict(existing_accesses.values_list("document", "role"))
+
+        for entry in inactive_accesses:
+            if entry.document_id in existing_roles_per_doc:
+                # Update role if needed
+                existing_role = existing_roles_per_doc[entry.document_id]
+                max_role = RoleChoices.max(entry.role, existing_role)
+                if existing_role != max_role:
+                    existing_access = existing_accesses.get(document=entry.document)
+                    existing_access.role = max_role
+                    updated_accesses.append(existing_access)
+                removed_accesses.append(entry)
+            else:
+                entry.user = self.active_user
+                updated_accesses.append(entry)
+
+        return updated_accesses, removed_accesses
+
+    def prepare_document_favorite_reconciliation(self):
+        """
+        Prepare the reconciliation by transferring document favorites from the inactive user
+        to the active user.
+        """
+        updated_favorites = []
+        removed_favorites = []
+
+        existing_favorites = DocumentFavorite.objects.filter(user=self.active_user)
+        existing_favorite_doc_ids = set(
+            existing_favorites.values_list("document_id", flat=True)
+        )
+
+        inactive_favorites = DocumentFavorite.objects.filter(user=self.inactive_user)
+
+        for entry in inactive_favorites:
+            if entry.document_id in existing_favorite_doc_ids:
+                removed_favorites.append(entry)
+            else:
+                entry.user = self.active_user
+                updated_favorites.append(entry)
+
+        return updated_favorites, removed_favorites
+
+    def prepare_linktrace_reconciliation(self):
+        """
+        Prepare the reconciliation by transferring link traces from the inactive user
+        to the active user.
+        """
+        updated_linktraces = []
+        removed_linktraces = []
+
+        existing_linktraces = LinkTrace.objects.filter(user=self.active_user)
+        inactive_linktraces = LinkTrace.objects.filter(user=self.inactive_user)
+
+        for entry in inactive_linktraces:
+            if existing_linktraces.filter(document=entry.document).exists():
+                removed_linktraces.append(entry)
+            else:
+                entry.user = self.active_user
+                updated_linktraces.append(entry)
+
+        return updated_linktraces, removed_linktraces
+
+    def prepare_thread_reconciliation(self):
+        """
+        Prepare the reconciliation by transferring threads from the inactive user
+        to the active user.
+        """
+        updated_threads = []
+
+        inactive_threads = Thread.objects.filter(creator=self.inactive_user)
+
+        for entry in inactive_threads:
+            entry.creator = self.active_user
+            updated_threads.append(entry)
+
+        return updated_threads
+
+    def prepare_comment_reconciliation(self):
+        """
+        Prepare the reconciliation by transferring comments from the inactive user
+        to the active user.
+        """
+        updated_comments = []
+
+        inactive_comments = Comment.objects.filter(user=self.inactive_user)
+
+        for entry in inactive_comments:
+            entry.user = self.active_user
+            updated_comments.append(entry)
+
+        return updated_comments
+
+    def prepare_reaction_reconciliation(self):
+        """
+        Prepare the reconciliation by creating missing reactions for the active user
+        (ie, the ones that exist for the inactive user but not the active user)
+        and then deleting all reactions of the inactive user.
+        """
+
+        inactive_reactions = Reaction.objects.filter(users=self.inactive_user)
+        updated_reactions = inactive_reactions.exclude(users=self.active_user)
+
+        return updated_reactions, inactive_reactions
+
+    def send_reconciliation_confirm_email(
+        self, user, user_type, confirmation_id, language=None
+    ):
+        """Method allowing to send confirmation email for reconciliation requests."""
+        language = language or get_language()
+        domain = settings.EMAIL_URL_APP or Site.objects.get_current().domain
+
+        message = _(
+            """You have requested a reconciliation of your user accounts on Docs.
+To confirm that you are the one who initiated the request
+and that this email belongs to you:"""
+        )
+
+        with override(language):
+            subject = _("Confirm by clicking the link to start the reconciliation")
+            context = {
+                "title": subject,
+                "message": message,
+                "link": f"{domain}/user-reconciliations/{user_type}/{confirmation_id}/",
+                "link_label": str(_("Click here")),
+                "button_label": str(_("Confirm")),
+            }
+
+        user.send_email(subject, context, language)
+
+    def send_reconciliation_done_email(self, language=None):
+        """Method allowing to send done email for reconciliation requests."""
+        language = language or get_language()
+        domain = settings.EMAIL_URL_APP or Site.objects.get_current().domain
+
+        message = _(
+            """Your reconciliation request has been processed.
+New documents are likely associated with your account:"""
+        )
+
+        with override(language):
+            subject = _("Your accounts have been merged")
+            context = {
+                "title": subject,
+                "message": message,
+                "link": f"{domain}/",
+                "link_label": str(_("Click here to see")),
+                "button_label": str(_("See my documents")),
+            }
+
+        self.active_user.send_email(subject, context, language)
+
+
+class UserReconciliationCsvImport(BaseModel):
+    """Model to import reconciliations requests from an external source
+    (eg, )"""
+
+    file = models.FileField(upload_to="imports/", verbose_name=_("CSV file"))
+    status = models.CharField(
+        max_length=20,
+        choices=[
+            ("pending", _("Pending")),
+            ("running", _("Running")),
+            ("done", _("Done")),
+            ("error", _("Error")),
+        ],
+        default="pending",
+    )
+    logs = models.TextField(blank=True)
+
+    class Meta:
+        db_table = "impress_user_reconciliation_csv_import"
+        verbose_name = _("user reconciliation CSV import")
+        verbose_name_plural = _("user reconciliation CSV imports")
+
+    def __str__(self):
+        return f"User reconciliation CSV import {self.id}"
+
+    def send_email(self, subject, emails, context=None, language=None):
+        """Generate and send email to the user from a template."""
+        context = context or {}
+        domain = settings.EMAIL_URL_APP or Site.objects.get_current().domain
+        language = language or get_language()
+        context.update(
+            {
+                "brandname": settings.EMAIL_BRAND_NAME,
+                "domain": domain,
+                "logo_img": settings.EMAIL_LOGO_IMG,
+            }
+        )
+
+        with override(language):
+            msg_html = render_to_string("mail/html/template.html", context)
+            msg_plain = render_to_string("mail/text/template.txt", context)
+            subject = str(subject)  # Force translation
+
+            try:
+                send_mail(
+                    subject.capitalize(),
+                    msg_plain,
+                    settings.EMAIL_FROM,
+                    emails,
+                    html_message=msg_html,
+                    fail_silently=False,
+                )
+            except smtplib.SMTPException as exception:
+                logger.error("invitation to %s was not sent: %s", emails, exception)
+
+    def send_reconciliation_error_email(
+        self, recipient_email, other_email, language=None
+    ):
+        """Method allowing to send email for reconciliation requests with errors."""
+        language = language or get_language()
+
+        emails = [recipient_email]
+
+        message = _(
+            """Your request for reconciliation was unsuccessful.
+Reconciliation failed for the following email addresses:
+{recipient_email}, {other_email}.
+Please check for typos.
+You can submit another request with the valid email addresses."""
+        ).format(recipient_email=recipient_email, other_email=other_email)
+
+        with override(language):
+            subject = _("Reconciliation of your Docs accounts not completed")
+            context = {
+                "title": subject,
+                "message": message,
+                "link": settings.USER_RECONCILIATION_FORM_URL,
+                "link_label": str(_("Click here")),
+                "button_label": str(_("Make a new request")),
+            }
+
+        self.send_email(subject, emails, context, language)
+
+
 class BaseAccess(BaseModel):
     """Base model for accesses to handle resources."""
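Note: a minimal sketch of how the reconciliation models added above are expected to be driven, assuming some task or admin action creates the entry and later triggers processing; the driver itself is not part of this changeset and the emails below are illustrative only.

    # Hypothetical driver code, not included in this diff
    from core.models import UserReconciliation

    reconciliation = UserReconciliation.objects.create(
        active_email="active@example.com",
        inactive_email="old@example.com",
    )  # save() resolves both users and sends the confirmation emails

    # Once both confirmation links have been visited (by a view not shown here),
    # the request can be processed:
    reconciliation.refresh_from_db()
    if reconciliation.status == "ready":
        reconciliation.process_reconciliation_request()  # atomic transfer + "done" email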
@@ -783,6 +1299,7 @@ class Document(MP_Node, BaseModel):
         return {
             "accesses_manage": is_owner_or_admin,
             "accesses_view": has_access_role,
+            "ai_proxy": ai_access,
             "ai_transform": ai_access,
             "ai_translate": ai_access,
             "attachment_upload": can_update,
@@ -812,6 +1329,7 @@ class Document(MP_Node, BaseModel):
             "versions_destroy": is_owner_or_admin,
             "versions_list": has_access_role,
             "versions_retrieve": has_access_role,
+            "search": can_get,
         }
 
     def send_email(self, subject, emails, context=None, language=None):
@@ -824,7 +1342,7 @@ class Document(MP_Node, BaseModel):
                 "brandname": settings.EMAIL_BRAND_NAME,
                 "document": self,
                 "domain": domain,
-                "link": f"{domain}/docs/{self.id}/",
+                "link": f"{domain}/docs/{self.id}/?utm_source=docssharelink&utm_campaign={self.id}",
                 "link_label": self.title or str(_("Untitled Document")),
                 "button_label": _("Open"),
                 "logo_img": settings.EMAIL_LOGO_IMG,
@@ -1469,7 +1987,7 @@ class Invitation(BaseModel):
 
         # Check if an identity already exists for the provided email
         if (
-            User.objects.filter(email=self.email).exists()
+            User.objects.filter(email__iexact=self.email).exists()
             and not settings.OIDC_ALLOW_DUPLICATE_EMAILS
         ):
             raise ValidationError(
@@ -1,15 +1,68 @@
 """AI services."""
 
+import asyncio
+import json
+import logging
+import os
+import queue
+import threading
+from collections.abc import AsyncIterator, Iterator
+from typing import Any, Dict, Union
+
 from django.conf import settings
 from django.core.exceptions import ImproperlyConfigured
 
+from langfuse import get_client
+from langfuse.openai import OpenAI as OpenAI_Langfuse
+from pydantic_ai import Agent, DeferredToolRequests
+from pydantic_ai.models.openai import OpenAIChatModel
+from pydantic_ai.providers.openai import OpenAIProvider
+from pydantic_ai.tools import ToolDefinition
+from pydantic_ai.toolsets.external import ExternalToolset
+from pydantic_ai.ui import SSE_CONTENT_TYPE
+from pydantic_ai.ui.vercel_ai import VercelAIAdapter
+from pydantic_ai.ui.vercel_ai.request_types import RequestData, TextUIPart, UIMessage
+from rest_framework.request import Request
+
 from core import enums
 
 if settings.LANGFUSE_PUBLIC_KEY:
-    from langfuse.openai import OpenAI
+    OpenAI = OpenAI_Langfuse
 else:
     from openai import OpenAI
+
+log = logging.getLogger(__name__)
+
+BLOCKNOTE_TOOL_STRICT_PROMPT = """
+You are editing a BlockNote document via the tool applyDocumentOperations.
+
+You MUST respond ONLY by calling applyDocumentOperations.
+The tool input MUST be valid JSON:
+{ "operations": [ ... ] }
+
+Each operation MUST include "type" and it MUST be one of:
+- "update" (requires: id, block)
+- "add" (requires: referenceId, position, blocks)
+- "delete" (requires: id)
+
+VALID SHAPES (FOLLOW EXACTLY):
+
+Update:
+{ "type":"update", "id":"<id$>", "block":"<p>...</p>" }
+IMPORTANT: "block" MUST be a STRING containing a SINGLE valid HTML element.
+
+Add:
+{ "type":"add", "referenceId":"<id$>", "position":"before|after", "blocks":["<p>...</p>"] }
+IMPORTANT: "blocks" MUST be an ARRAY OF STRINGS.
+Each item MUST be a STRING containing a SINGLE valid HTML element.
+
+Delete:
+{ "type":"delete", "id":"<id$>" }
+
+IDs ALWAYS end with "$". Use ids EXACTLY as provided.
+
+Return ONLY the JSON tool input. No prose, no markdown.
+"""
+
 AI_ACTIONS = {
     "prompt": (
@@ -56,6 +109,40 @@ AI_TRANSLATE = (
 )
 
+
+def convert_async_generator_to_sync(async_gen: AsyncIterator[str]) -> Iterator[str]:
+    """Convert an async generator to a sync generator."""
+    q: queue.Queue[str | object] = queue.Queue()
+    sentinel = object()
+    exc_sentinel = object()
+
+    async def run_async_gen():
+        try:
+            async for async_item in async_gen:
+                q.put(async_item)
+        except Exception as exc:  # pylint: disable=broad-except #noqa: BLE001
+            q.put((exc_sentinel, exc))
+        finally:
+            q.put(sentinel)
+
+    def start_async_loop():
+        asyncio.run(run_async_gen())
+
+    thread = threading.Thread(target=start_async_loop, daemon=True)
+    thread.start()
+
+    try:
+        while True:
+            item = q.get()
+            if item is sentinel:
+                break
+            if isinstance(item, tuple) and item[0] is exc_sentinel:
+                # re-raise the exception in the sync context
+                raise item[1]
+            yield item
+    finally:
+        thread.join()
+
+
 class AIService:
     """Service class for AI-related operations."""
 
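Note: a small sketch of how the helper above can be consumed from synchronous (WSGI) code; the async generator here is a stand-in, not part of this changeset.

    # Hypothetical usage of convert_async_generator_to_sync
    import asyncio

    async def fake_stream():
        for chunk in ("data: 1\n", "data: 2\n"):
            await asyncio.sleep(0)  # simulate awaiting the provider
            yield chunk

    for chunk in convert_async_generator_to_sync(fake_stream()):
        print(chunk, end="")  # produced on a background thread, consumed in order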
@@ -96,3 +183,198 @@ class AIService:
         language_display = enums.ALL_LANGUAGES.get(language, language)
         system_content = AI_TRANSLATE.format(language=language_display)
         return self.call_ai_api(system_content, text)
+
+    @staticmethod
+    def inject_document_state_messages(
+        messages: list[UIMessage],
+    ) -> list[UIMessage]:
+        """Inject document state context before user messages.
+
+        Port of BlockNote's injectDocumentStateMessages.
+        For each user message carrying documentState metadata, an assistant
+        message describing the current document/selection state is prepended
+        so the LLM sees it as context.
+        """
+        result: list[UIMessage] = []
+        for message in messages:
+            if (
+                message.role == "user"
+                and isinstance(message.metadata, dict)
+                and "documentState" in message.metadata
+            ):
+                doc_state = message.metadata["documentState"]
+                selection = doc_state.get("selection")
+                blocks = doc_state.get("blocks")
+
+                if selection:
+                    parts = [
+                        TextUIPart(
+                            text=(
+                                "This is the latest state of the selection "
+                                "(ignore previous selections, you MUST issue "
+                                "operations against this latest version of "
+                                "the selection):"
+                            ),
+                        ),
+                        TextUIPart(
+                            text=json.dumps(doc_state.get("selectedBlocks")),
+                        ),
+                        TextUIPart(
+                            text=(
+                                "This is the latest state of the entire "
+                                "document (INCLUDING the selected text), you "
+                                "can use this to find the selected text to "
+                                "understand the context (but you MUST NOT "
+                                "issue operations against this document, you "
+                                "MUST issue operations against the selection):"
+                            ),
+                        ),
+                        TextUIPart(text=json.dumps(blocks)),
+                    ]
+                else:
+                    text = (
+                        "There is no active selection. This is the latest "
+                        "state of the document (ignore previous documents, "
+                        "you MUST issue operations against this latest "
+                        "version of the document). The cursor is BETWEEN "
+                        "two blocks as indicated by cursor: true."
+                    )
+                    if doc_state.get("isEmptyDocument"):
+                        text += (
+                            "Because the document is empty, YOU MUST first "
+                            "update the empty block before adding new blocks."
+                        )
+                    else:
+                        text += (
+                            "Prefer updating existing blocks over removing "
+                            "and adding (but this also depends on the "
+                            "user's question)."
+                        )
+                    parts = [
+                        TextUIPart(text=text),
+                        TextUIPart(text=json.dumps(blocks)),
+                    ]
+
+                result.append(
+                    UIMessage(
+                        role="assistant",
+                        id=f"assistant-document-state-{message.id}",
+                        parts=parts,
+                    )
+                )
+
+            result.append(message)
+        return result
+
+    @staticmethod
+    def tool_definitions_to_toolset(
+        tool_definitions: Dict[str, Any],
+    ) -> ExternalToolset:
+        """Convert serialized tool definitions to a pydantic-ai ExternalToolset.
+
+        Port of BlockNote's toolDefinitionsToToolSet.
+        Builds ToolDefinition objects from the JSON-Schema-based definitions
+        sent by the frontend and wraps them in an ExternalToolset so that
+        pydantic-ai advertises them to the LLM without trying to execute them
+        server-side (execution is deferred to the frontend).
+        """
+        tool_defs = [
+            ToolDefinition(
+                name=name,
+                description=defn.get("description", ""),
+                parameters_json_schema=defn.get("inputSchema", {}),
+                kind="external",
+                metadata={
+                    "output_schema": defn.get("outputSchema"),
+                },
+            )
+            for name, defn in tool_definitions.items()
+        ]
+        return ExternalToolset(tool_defs)
+
+    def _harden_messages(
+        self, run_input: RequestData, tool_definitions: Dict[str, Any]
+    ):
+        """
+        Harden messages if applyDocumentOperations tool is used.
+        We would like the system_prompt property in the Agent initialization
+        but for UI adapter, like vercel, the agent is ignoring it
+        see https://github.com/pydantic/pydantic-ai/issues/3315
+
+        We have to inject it in the run_input.messages if needed.
+        """
+        for name, _defn in tool_definitions.items():
+            if name == "applyDocumentOperations":
+                run_input.messages.insert(
+                    0,
+                    UIMessage(
+                        id="system-force-tool-usage",
+                        role="system",
+                        parts=[TextUIPart(text=BLOCKNOTE_TOOL_STRICT_PROMPT)],
+                    ),
+                )
+                return
+
+    def _build_async_stream(self, request: Request) -> AsyncIterator[str]:
+        """Build the async stream from the AI provider."""
+        instrument_enabled = settings.LANGFUSE_PUBLIC_KEY is not None
+
+        if instrument_enabled:
+            langfuse = get_client()
+            langfuse.auth_check()
+            Agent.instrument_all()
+
+        model = OpenAIChatModel(
+            settings.AI_MODEL,
+            provider=OpenAIProvider(
+                base_url=settings.AI_BASE_URL, api_key=settings.AI_API_KEY
+            ),
+        )
+        agent = Agent(model, instrument=instrument_enabled)
+
+        accept = request.META.get("HTTP_ACCEPT", SSE_CONTENT_TYPE)
+
+        run_input = VercelAIAdapter.build_run_input(request.raw_body)
+
+        # Inject document state context into the conversation
+        run_input.messages = self.inject_document_state_messages(run_input.messages)
+
+        # Build an ExternalToolset from frontend-supplied tool definitions
+        raw_tool_defs = (
+            run_input.model_extra.get("toolDefinitions")
+            if run_input.model_extra
+            else None
+        )
+        toolset = (
+            self.tool_definitions_to_toolset(raw_tool_defs) if raw_tool_defs else None
+        )
+
+        if raw_tool_defs:
+            self._harden_messages(run_input, raw_tool_defs)
+
+        adapter = VercelAIAdapter(
+            agent=agent,
+            run_input=run_input,
+            accept=accept,
+            sdk_version=settings.AI_VERCEL_SDK_VERSION,
+        )
+
+        event_stream = adapter.run_stream(
+            output_type=[str, DeferredToolRequests] if toolset else None,
+            toolsets=[toolset] if toolset else None,
+        )
+
+        return adapter.encode_stream(event_stream)
+
+    def stream(self, request: Request) -> Union[AsyncIterator[str], Iterator[str]]:
+        """Stream AI API requests to the configured AI provider.
+
+        Returns an async iterator when running in async mode (ASGI)
+        or a sync iterator when running in sync mode (WSGI).
+        """
+        async_stream = self._build_async_stream(request)
+
+        if os.environ.get("PYTHON_SERVER_MODE", "sync") == "async":
+            return async_stream
+
+        return convert_async_generator_to_sync(async_stream)
@@ -8,12 +8,12 @@ from functools import cache
 from django.conf import settings
 from django.contrib.auth.models import AnonymousUser
 from django.core.exceptions import ImproperlyConfigured
-from django.db.models import Subquery
 from django.utils.module_loading import import_string
 
 import requests
 
 from core import models, utils
+from core.enums import SearchType
 
 logger = logging.getLogger(__name__)
 
@@ -69,7 +69,7 @@ def get_batch_accesses_by_users_and_teams(paths):
     return dict(access_by_document_path)
 
 
-def get_visited_document_ids_of(queryset, user):
+def get_visited_document_ids_of(queryset, user) -> tuple[str, ...]:
     """
     Returns the ids of the documents that have a linktrace to the user and NOT owned.
     It will be use to limit the opensearch responses to the public documents already
@@ -78,7 +78,9 @@ def get_visited_document_ids_of(queryset, user):
     if isinstance(user, AnonymousUser):
         return []
 
-    qs = models.LinkTrace.objects.filter(user=user)
+    visited_ids = models.LinkTrace.objects.filter(user=user).values_list(
+        "document_id", flat=True
+    )
+
     docs = (
         queryset.exclude(accesses__user=user)
@@ -86,12 +88,12 @@ def get_visited_document_ids_of(queryset, user):
             deleted_at__isnull=True,
             ancestors_deleted_at__isnull=True,
         )
-        .filter(pk__in=Subquery(qs.values("document_id")))
+        .filter(pk__in=visited_ids)
         .order_by("pk")
         .distinct("pk")
     )
 
-    return [str(id) for id in docs.values_list("pk", flat=True)]
+    return tuple(str(id) for id in docs.values_list("pk", flat=True))
 
 
 class BaseDocumentIndexer(ABC):
@@ -107,15 +109,13 @@ class BaseDocumentIndexer(ABC):
         Initialize the indexer.
         """
         self.batch_size = settings.SEARCH_INDEXER_BATCH_SIZE
-        self.indexer_url = settings.SEARCH_INDEXER_URL
+        self.indexer_url = settings.INDEXING_URL
         self.indexer_secret = settings.SEARCH_INDEXER_SECRET
-        self.search_url = settings.SEARCH_INDEXER_QUERY_URL
+        self.search_url = settings.SEARCH_URL
         self.search_limit = settings.SEARCH_INDEXER_QUERY_LIMIT
 
         if not self.indexer_url:
-            raise ImproperlyConfigured(
-                "SEARCH_INDEXER_URL must be set in Django settings."
-            )
+            raise ImproperlyConfigured("INDEXING_URL must be set in Django settings.")
 
         if not self.indexer_secret:
             raise ImproperlyConfigured(
@@ -123,9 +123,7 @@ class BaseDocumentIndexer(ABC):
             )
 
         if not self.search_url:
-            raise ImproperlyConfigured(
-                "SEARCH_INDEXER_QUERY_URL must be set in Django settings."
-            )
+            raise ImproperlyConfigured("SEARCH_URL must be set in Django settings.")
 
     def index(self, queryset=None, batch_size=None):
         """
@@ -184,8 +182,16 @@ class BaseDocumentIndexer(ABC):
         Must be implemented by subclasses.
         """
 
-    # pylint: disable-next=too-many-arguments,too-many-positional-arguments
-    def search(self, text, token, visited=(), nb_results=None):
+    # pylint: disable=too-many-arguments, too-many-positional-arguments
+    def search(  # noqa : PLR0913
+        self,
+        q: str,
+        token: str,
+        visited: tuple[str, ...] = (),
+        nb_results: int = None,
+        path: str = None,
+        search_type: SearchType = None,
+    ):
         """
         Search for documents in Find app.
         Ensure the same default ordering as "Docs" list : -updated_at
@@ -193,7 +199,7 @@ class BaseDocumentIndexer(ABC):
         Returns ids of the documents
 
         Args:
-            text (str): Text search content.
+            q (str): user query.
             token (str): OIDC Authentication token.
             visited (list, optional):
                 List of ids of active public documents with LinkTrace
@@ -201,21 +207,28 @@ class BaseDocumentIndexer(ABC):
             nb_results (int, optional):
                 The number of results to return.
                 Defaults to 50 if not specified.
+            path (str, optional):
+                The parent path to search descendants of.
+            search_type (SearchType, optional):
+                Type of search to perform. Can be SearchType.HYBRID or SearchType.FULL_TEXT.
+                If None, the backend search service will use its default search behavior.
         """
         nb_results = nb_results or self.search_limit
-        response = self.search_query(
+        results = self.search_query(
             data={
-                "q": text,
+                "q": q,
                 "visited": visited,
                 "services": ["docs"],
                 "nb_results": nb_results,
                 "order_by": "updated_at",
                 "order_direction": "desc",
+                "path": path,
+                "search_type": search_type,
             },
             token=token,
         )
 
-        return [d["_id"] for d in response]
+        return results
 
     @abstractmethod
     def search_query(self, data, token) -> dict:
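Note: a sketch of a call against the new search() signature; the token and document id are placeholders, and get_document_indexer is assumed to be the factory already used elsewhere in the codebase.

    # Hypothetical call site, values are illustrative only
    from core.enums import SearchType

    indexer = get_document_indexer()
    results = indexer.search(
        q="meeting notes",
        token="<oidc-access-token>",
        visited=("3fa85f64-5717-4562-b3fc-2c963f66afa6",),
        nb_results=20,
        search_type=SearchType.HYBRID,
    )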
@@ -226,11 +239,72 @@ class BaseDocumentIndexer(ABC):
     """
 
 
-class SearchIndexer(BaseDocumentIndexer):
+class FindDocumentIndexer(BaseDocumentIndexer):
     """
-    Document indexer that pushes documents to La Suite Find app.
+    Document indexer that indexes and searches documents with La Suite Find app.
     """
 
+    # pylint: disable=too-many-arguments, too-many-positional-arguments
+    def search(  # noqa : PLR0913
+        self,
+        q: str,
+        token: str,
+        visited: tuple[()] = (),
+        nb_results: int = None,
+        path: str = None,
+        search_type: SearchType = None,
+    ):
+        """format Find search results"""
+        search_results = super().search(
+            q=q,
+            token=token,
+            visited=visited,
+            nb_results=nb_results,
+            path=path,
+            search_type=search_type,
+        )
+        return [
+            {
+                **hit["_source"],
+                "id": hit["_id"],
+                "title": self.get_title(hit["_source"]),
+            }
+            for hit in search_results
+        ]
+
+    @staticmethod
+    def get_title(source):
+        """
+        Find returns the titles with an extension depending on the language.
+        This function extracts the title in a generic way.
+
+        Handles multiple cases:
+        - Localized title fields like "title.<some_extension>"
+        - Fallback to plain "title" field if localized version not found
+        - Returns empty string if no title field exists
+
+        Args:
+            source (dict): The _source dictionary from a search hit
+
+        Returns:
+            str: The extracted title or empty string if not found
+
+        Example:
+            >>> get_title({"title.fr": "Bonjour", "id": 1})
+            "Bonjour"
+            >>> get_title({"title": "Hello", "id": 1})
+            "Hello"
+            >>> get_title({"id": 1})
+            ""
+        """
+        titles = utils.get_value_by_pattern(source, r"^title\.")
+        for title in titles:
+            if title:
+                return title
+        if "title" in source:
+            return source["title"]
+        return ""
+
     def serialize_document(self, document, accesses):
         """
         Convert a Document to the JSON format expected by La Suite Find.
@@ -4,12 +4,14 @@ Declare and configure the signals for the impress core application
 from functools import partial
 
+from django.core.cache import cache
 from django.db import transaction
 from django.db.models import signals
 from django.dispatch import receiver
 
-from . import models
-from .tasks.search import trigger_batch_document_indexer
+from core import models
+from core.tasks.search import trigger_batch_document_indexer
+from core.utils import get_users_sharing_documents_with_cache_key
 
 
 @receiver(signals.post_save, sender=models.Document)
@@ -26,8 +28,24 @@ def document_post_save(sender, instance, **kwargs): # pylint: disable=unused-ar
 def document_access_post_save(sender, instance, created, **kwargs):  # pylint: disable=unused-argument
     """
     Asynchronous call to the document indexer at the end of the transaction.
+    Clear cache for the affected user.
     """
     if not created:
         transaction.on_commit(
             partial(trigger_batch_document_indexer, instance.document)
         )
+
+    # Invalidate cache for the user
+    if instance.user:
+        cache_key = get_users_sharing_documents_with_cache_key(instance.user)
+        cache.delete(cache_key)
+
+
+@receiver(signals.post_delete, sender=models.DocumentAccess)
+def document_access_post_delete(sender, instance, **kwargs):  # pylint: disable=unused-argument
+    """
+    Clear cache for the affected user when document access is deleted.
+    """
+    if instance.user:
+        cache_key = get_users_sharing_documents_with_cache_key(instance.user)
+        cache.delete(cache_key)
@@ -63,7 +63,7 @@ def batch_document_indexer_task(timestamp):
     logger.info("Indexed %d documents", count)
 
 
-def trigger_batch_document_indexer(item):
+def trigger_batch_document_indexer(document):
     """
     Trigger indexation task with debounce a delay set by the SEARCH_INDEXER_COUNTDOWN setting.
 
@@ -82,14 +82,14 @@ def trigger_batch_document_indexer(item):
         if batch_indexer_throttle_acquire(timeout=countdown):
             logger.info(
                 "Add task for batch document indexation from updated_at=%s in %d seconds",
-                item.updated_at.isoformat(),
+                document.updated_at.isoformat(),
                 countdown,
             )
 
             batch_document_indexer_task.apply_async(
-                args=[item.updated_at], countdown=countdown
+                args=[document.updated_at], countdown=countdown
             )
         else:
-            logger.info("Skip task for batch document %s indexation", item.pk)
+            logger.info("Skip task for batch document %s indexation", document.pk)
     else:
-        document_indexer_task.apply(args=[item.pk])
+        document_indexer_task.apply(args=[document.pk])
src/backend/core/tasks/user_reconciliation.py (new file, 135 lines)
@@ -0,0 +1,135 @@
+"""Processing tasks for user reconciliation CSV imports."""
+
+import csv
+import traceback
+import uuid
+
+from django.core.exceptions import ValidationError
+from django.core.validators import validate_email
+from django.db import IntegrityError
+
+from botocore.exceptions import ClientError
+
+from core.models import UserReconciliation, UserReconciliationCsvImport
+
+from impress.celery_app import app
+
+
+def _process_row(row, job, counters):
+    """Process a single row from the CSV file."""
+
+    source_unique_id = row["id"].strip()
+
+    # Skip entries if they already exist with this source_unique_id
+    if UserReconciliation.objects.filter(source_unique_id=source_unique_id).exists():
+        counters["already_processed_source_ids"] += 1
+        return counters
+
+    active_email_checked = row.get("active_email_checked", "0") == "1"
+    inactive_email_checked = row.get("inactive_email_checked", "0") == "1"
+
+    active_email = row["active_email"]
+    inactive_emails = row["inactive_email"].split("|")
+    try:
+        validate_email(active_email)
+    except ValidationError:
+        job.send_reconciliation_error_email(
+            recipient_email=inactive_emails[0], other_email=active_email
+        )
+        job.logs += f"Invalid active email address on row {source_unique_id}."
+        counters["rows_with_errors"] += 1
+        return counters
+
+    for inactive_email in inactive_emails:
+        try:
+            validate_email(inactive_email)
+        except (ValidationError, ValueError):
+            job.send_reconciliation_error_email(
+                recipient_email=active_email, other_email=inactive_email
+            )
+            job.logs += f"Invalid inactive email address on row {source_unique_id}.\n"
+            counters["rows_with_errors"] += 1
+            continue
+
+        if inactive_email == active_email:
+            job.send_reconciliation_error_email(
+                recipient_email=active_email, other_email=inactive_email
+            )
+            job.logs += (
+                f"Error on row {source_unique_id}: "
+                f"{active_email} set as both active and inactive email.\n"
+            )
+            counters["rows_with_errors"] += 1
+            continue
+
+        _rec_entry = UserReconciliation.objects.create(
+            active_email=active_email,
+            inactive_email=inactive_email,
+            active_email_checked=active_email_checked,
+            inactive_email_checked=inactive_email_checked,
+            active_email_confirmation_id=uuid.uuid4(),
+            inactive_email_confirmation_id=uuid.uuid4(),
+            source_unique_id=source_unique_id,
+            status="pending",
+        )
+        counters["rec_entries_created"] += 1
+
+    return counters
+
+
+@app.task
+def user_reconciliation_csv_import_job(job_id):
+    """Process a UserReconciliationCsvImport job.
+    Creates UserReconciliation entries from the CSV file.
+
+    Does some sanity checks on the data:
+    - active_email and inactive_email must be valid email addresses
+    - active_email and inactive_email cannot be the same
+
+    Rows with errors are logged in the job logs and skipped, but do not cause
+    the entire job to fail or prevent the next rows from being processed.
+    """
+    # Imports the CSV file, breaks it into UserReconciliation items
+    job = UserReconciliationCsvImport.objects.get(id=job_id)
+    job.status = "running"
+    job.save()
+
+    counters = {
+        "rec_entries_created": 0,
+        "rows_with_errors": 0,
+        "already_processed_source_ids": 0,
+    }
+
+    try:
+        with job.file.open(mode="r") as f:
+            reader = csv.DictReader(f)
+
+            if not {"active_email", "inactive_email", "id"}.issubset(reader.fieldnames):
+                raise KeyError(
+                    "CSV is missing mandatory columns: active_email, inactive_email, id"
+                )
+
+            for row in reader:
+                counters = _process_row(row, job, counters)
+
+        job.status = "done"
+        job.logs += (
+            f"Import completed successfully. {reader.line_num} rows processed."
+            f" {counters['rec_entries_created']} reconciliation entries created."
+            f" {counters['already_processed_source_ids']} rows were already processed."
+            f" {counters['rows_with_errors']} rows had errors."
+        )
+    except (
+        csv.Error,
+        KeyError,
+        ValidationError,
+        ValueError,
+        IntegrityError,
+        OSError,
+        ClientError,
+    ) as e:
+        # Catch expected I/O/CSV/model errors and record traceback in logs for debugging
+        job.status = "error"
+        job.logs += f"{e!s}\n{traceback.format_exc()}"
+    finally:
+        job.save()
@@ -68,6 +68,30 @@ def test_authentication_getter_existing_user_via_email(
     assert user == db_user
 
 
+def test_authentication_getter_existing_user_via_email_case_insensitive(
+    django_assert_num_queries, monkeypatch
+):
+    """
+    If an existing user doesn't match the sub but matches the email with different case,
+    the user should be returned (case-insensitive email matching).
+    """
+
+    klass = OIDCAuthenticationBackend()
+    db_user = UserFactory(email="john.doe@example.com")
+
+    def get_userinfo_mocked(*args):
+        return {"sub": "123", "email": "JOHN.DOE@EXAMPLE.COM"}
+
+    monkeypatch.setattr(OIDCAuthenticationBackend, "get_userinfo", get_userinfo_mocked)
+
+    with django_assert_num_queries(4):  # user by sub, user by mail, update sub
+        user = klass.get_or_create_user(
+            access_token="test-token", id_token=None, payload=None
+        )
+
+    assert user == db_user
+
+
 def test_authentication_getter_email_none(monkeypatch):
     """
     If no user is found with the sub and no email is provided, a new user should be created.
@@ -157,6 +181,39 @@ def test_authentication_getter_existing_user_no_fallback_to_email_no_duplicate(
     assert models.User.objects.count() == 1
 
 
+def test_authentication_getter_existing_user_no_fallback_to_email_no_duplicate_case_insensitive(
+    settings, monkeypatch
+):
+    """
+    When the "OIDC_FALLBACK_TO_EMAIL_FOR_IDENTIFICATION" setting is set to False,
+    the system should detect duplicate emails even with different case.
+    """
+
+    klass = OIDCAuthenticationBackend()
+    _db_user = UserFactory(email="john.doe@example.com")
+
+    # Set the setting to False
+    settings.OIDC_FALLBACK_TO_EMAIL_FOR_IDENTIFICATION = False
+    settings.OIDC_ALLOW_DUPLICATE_EMAILS = False
+
+    def get_userinfo_mocked(*args):
+        return {"sub": "123", "email": "JOHN.DOE@EXAMPLE.COM"}
+
+    monkeypatch.setattr(OIDCAuthenticationBackend, "get_userinfo", get_userinfo_mocked)
+
+    with pytest.raises(
+        SuspiciousOperation,
+        match=(
+            "We couldn't find a user with this sub but the email is already associated "
+            "with a registered user."
+        ),
+    ):
+        klass.get_or_create_user(access_token="test-token", id_token=None, payload=None)
+
+    # Since the sub doesn't match, it should not create a new user
+    assert models.User.objects.count() == 1
+
+
 def test_authentication_getter_existing_user_with_email(
     django_assert_num_queries, monkeypatch
 ):
|||||||
@@ -11,7 +11,7 @@ from django.db import transaction
|
|||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from core import factories
|
from core import factories
|
||||||
from core.services.search_indexers import SearchIndexer
|
from core.services.search_indexers import FindDocumentIndexer
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
@@ -19,7 +19,7 @@ from core.services.search_indexers import SearchIndexer
 def test_index():
     """Test the command `index` that run the Find app indexer for all the available documents."""
     user = factories.UserFactory()
-    indexer = SearchIndexer()
+    indexer = FindDocumentIndexer()
 
     with transaction.atomic():
         doc = factories.DocumentFactory()
@@ -36,7 +36,7 @@ def test_index():
         str(no_title_doc.path): {"users": [user.sub]},
     }
 
-    with mock.patch.object(SearchIndexer, "push") as mock_push:
+    with mock.patch.object(FindDocumentIndexer, "push") as mock_push:
         call_command("index")
 
     push_call_args = [call.args[0] for call in mock_push.call_args_list]
|||||||
@@ -1,10 +1,15 @@
|
|||||||
"""Fixtures for tests in the impress core application"""
|
"""Fixtures for tests in the impress core application"""
|
||||||
|
|
||||||
|
import base64
|
||||||
from unittest import mock
|
from unittest import mock
|
||||||
|
|
||||||
from django.core.cache import cache
|
from django.core.cache import cache
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
import responses
|
||||||
|
|
||||||
|
from core import factories
|
||||||
|
from core.tests.utils.urls import reload_urls
|
||||||
|
|
||||||
USER = "user"
|
USER = "user"
|
||||||
TEAM = "team"
|
TEAM = "team"
|
||||||
@@ -39,15 +44,102 @@ def indexer_settings_fixture(settings):
 
     get_document_indexer.cache_clear()
 
-    settings.SEARCH_INDEXER_CLASS = "core.services.search_indexers.SearchIndexer"
+    settings.SEARCH_INDEXER_CLASS = "core.services.search_indexers.FindDocumentIndexer"
     settings.SEARCH_INDEXER_SECRET = "ThisIsAKeyForTest"
-    settings.SEARCH_INDEXER_URL = "http://localhost:8081/api/v1.0/documents/index/"
-    settings.SEARCH_INDEXER_QUERY_URL = (
-        "http://localhost:8081/api/v1.0/documents/search/"
-    )
+    settings.INDEXING_URL = "http://localhost:8081/api/v1.0/documents/index/"
+    settings.SEARCH_URL = "http://localhost:8081/api/v1.0/documents/search/"
     settings.SEARCH_INDEXER_COUNTDOWN = 1
 
     yield settings
 
     # clear cache to prevent issues with other tests
     get_document_indexer.cache_clear()
+
+
+def resource_server_backend_setup(settings):
+    """
+    A fixture to create a user token for testing.
+    """
+    assert (
+        settings.OIDC_RS_BACKEND_CLASS
+        == "lasuite.oidc_resource_server.backend.ResourceServerBackend"
+    )
+
+    settings.OIDC_RESOURCE_SERVER_ENABLED = True
+    settings.OIDC_RS_CLIENT_ID = "some_client_id"
+    settings.OIDC_RS_CLIENT_SECRET = "some_client_secret"
+
+    settings.OIDC_OP_URL = "https://oidc.example.com"
+    settings.OIDC_VERIFY_SSL = False
+    settings.OIDC_TIMEOUT = 5
+    settings.OIDC_PROXY = None
+    settings.OIDC_OP_JWKS_ENDPOINT = "https://oidc.example.com/jwks"
+    settings.OIDC_OP_INTROSPECTION_ENDPOINT = "https://oidc.example.com/introspect"
+    settings.OIDC_RS_SCOPES = ["openid", "groups"]
+    settings.OIDC_RS_ALLOWED_AUDIENCES = ["some_service_provider"]
+
+
+@pytest.fixture
+def resource_server_backend_conf(settings):
+    """
+    A fixture to create a user token for testing.
+    """
+    resource_server_backend_setup(settings)
+    reload_urls()
+
+
+@pytest.fixture
+def resource_server_backend(settings):
+    """
+    A fixture to create a user token for testing.
+    Including a mocked introspection endpoint.
+    """
+    resource_server_backend_setup(settings)
+    reload_urls()
+
+    with responses.RequestsMock() as rsps:
+        rsps.add(
+            responses.POST,
+            "https://oidc.example.com/introspect",
+            json={
+                "iss": "https://oidc.example.com",
+                "aud": "some_client_id",  # settings.OIDC_RS_CLIENT_ID
+                "sub": "very-specific-sub",
+                "client_id": "some_service_provider",
+                "scope": "openid groups",
+                "active": True,
+            },
+        )
+
+        yield rsps
+
+
+@pytest.fixture
+def user_specific_sub():
+    """
+    A fixture to create a user token for testing.
+    """
+    user = factories.UserFactory(sub="very-specific-sub", full_name="External User")
+
+    yield user
+
+
+def build_authorization_bearer(token):
+    """
+    Build an Authorization Bearer header value from a token.
+
+    This can be used like this:
+    client.post(
+        ...
+        HTTP_AUTHORIZATION=f"Bearer {build_authorization_bearer('some_token')}",
+    )
+    """
+    return base64.b64encode(token.encode("utf-8")).decode("utf-8")
+
+
+@pytest.fixture
+def user_token():
+    """
+    A fixture to create a user token for testing.
+    """
+    return build_authorization_bearer("some_token")
|||||||
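For orientation, here is a minimal sketch (not part of the diff) of how the new resource-server fixtures and the bearer-token helper above could be combined in a test. The fixture and helper names are the ones defined above; the endpoint path is illustrative only.

from rest_framework.test import APIClient


def test_resource_server_request_sketch(resource_server_backend, user_specific_sub, user_token):
    """Sketch: call the API as a service provider, authenticated via the mocked introspection endpoint."""
    client = APIClient()
    response = client.get(
        "/api/v1.0/documents/",  # illustrative endpoint, not taken from the diff
        HTTP_AUTHORIZATION=f"Bearer {user_token}",
    )
    # The mocked introspection response maps the token to sub "very-specific-sub",
    # i.e. the user created by the user_specific_sub fixture.
    assert response.status_code != 401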
@@ -0,0 +1,6 @@
active_email,inactive_email,active_email_checked,inactive_email_checked,status,id
"user.test40@example.com","user.test41@example.com",0,0,pending,1
"user.test42@example.com","user.test43@example.com",0,1,pending,2
"user.test44@example.com","user.test45@example.com",1,0,pending,3
"user.test46@example.com","user.test47@example.com",1,1,pending,4
"user.test48@example.com","user.test49@example.com",1,1,pending,5
@@ -0,0 +1,2 @@
active_email,inactive_email,active_email_checked,inactive_email_checked,status,id
"user.test40@example.com",,0,0,pending,40
@@ -0,0 +1,5 @@
merge_accept,active_email,inactive_email,status,id
true,user.test10@example.com,user.test11@example.com|user.test12@example.com,pending,10
true,user.test30@example.com,user.test31@example.com|user.test32@example.com|user.test33@example.com|user.test34@example.com|user.test35@example.com,pending,11
true,user.test20@example.com,user.test21@example.com,pending,12
true,user.test22@example.com,user.test23@example.com,pending,13
@@ -0,0 +1,2 @@
merge_accept,active_email,inactive_email,status,id
true,user.test20@example.com,user.test20@example.com,pending,20
@@ -0,0 +1,6 @@
active_email,inactive_email,active_email_checked,inactive_email_checked,status
"user.test40@example.com","user.test41@example.com",0,0,pending
"user.test42@example.com","user.test43@example.com",0,1,pending
"user.test44@example.com","user.test45@example.com",1,0,pending
"user.test46@example.com","user.test47@example.com",1,1,pending
"user.test48@example.com","user.test49@example.com",1,1,pending
@@ -245,15 +245,18 @@ def test_api_document_accesses_list_authenticated_related_privileged(
                "path": access.document.path,
                "depth": access.document.depth,
            },
-           "user": {
-               "id": str(access.user.id),
-               "email": access.user.email,
-               "language": access.user.language,
-               "full_name": access.user.full_name,
-               "short_name": access.user.short_name,
-           }
-           if access.user
-           else None,
+           "user": (
+               {
+                   "id": str(access.user.id),
+                   "email": access.user.email,
+                   "language": access.user.language,
+                   "full_name": access.user.full_name,
+                   "short_name": access.user.short_name,
+                   "is_first_connection": access.user.is_first_connection,
+               }
+               if access.user
+               else None
+           ),
            "max_ancestors_role": None,
            "max_role": access.role,
            "team": access.team,
@@ -596,6 +596,38 @@ def test_api_document_invitations_create_cannot_invite_existing_users():
    }


+def test_api_item_invitations_create_cannot_invite_existing_users_case_insensitive():
+    """
+    It should not be possible to invite already existing users, even with different email case.
+    """
+    user = factories.UserFactory()
+    document = factories.DocumentFactory(users=[(user, "owner")])
+    existing_user = factories.UserFactory()
+
+    # Build an invitation to the email of an existing identity with different case
+    invitation_values = {
+        "email": existing_user.email.upper(),
+        "role": random.choice(models.RoleChoices.values),
+    }
+
+    client = APIClient()
+    client.force_login(user)
+
+    client = APIClient()
+    client.force_login(user)
+
+    response = client.post(
+        f"/api/v1.0/documents/{document.id!s}/invitations/",
+        invitation_values,
+        format="json",
+    )
+
+    assert response.status_code == 400
+    assert response.json() == {
+        "email": ["This email is already associated to a registered user."]
+    }
+
+
 def test_api_document_invitations_create_lower_email():
    """
    No matter the case, the email should be converted to lowercase.
src/backend/core/tests/documents/test_api_documents_ai_proxy.py (new file, 387 lines)
@@ -0,0 +1,387 @@
"""
Test AI proxy API endpoint for users in impress's core app.
"""

import random
from unittest.mock import patch

from django.test import override_settings

import pytest
from rest_framework.test import APIClient

from core import factories
from core.tests.conftest import TEAM, USER, VIA

pytestmark = pytest.mark.django_db


@pytest.fixture(autouse=True)
def ai_settings(settings):
    """Fixture to set AI settings."""
    settings.AI_MODEL = "llama"
    settings.AI_BASE_URL = "http://localhost-ai:12345/"
    settings.AI_API_KEY = "test-key"
    settings.AI_FEATURE_ENABLED = True
    settings.AI_FEATURE_BLOCKNOTE_ENABLED = True
    settings.AI_FEATURE_LEGACY_ENABLED = True
    settings.LANGFUSE_PUBLIC_KEY = None
    settings.AI_VERCEL_SDK_VERSION = 6


@override_settings(
    AI_ALLOW_REACH_FROM=random.choice(["public", "authenticated", "restricted"])
)
@pytest.mark.parametrize(
    "reach, role",
    [
        ("restricted", "reader"),
        ("restricted", "editor"),
        ("authenticated", "reader"),
        ("authenticated", "editor"),
        ("public", "reader"),
    ],
)
def test_api_documents_ai_proxy_anonymous_forbidden(reach, role):
    """
    Anonymous users should not be able to request AI proxy if the link reach
    and role don't allow it.
    """
    document = factories.DocumentFactory(link_reach=reach, link_role=role)

    url = f"/api/v1.0/documents/{document.id!s}/ai-proxy/"
    response = APIClient().post(
        url,
        {
            "messages": [{"role": "user", "content": "Hello"}],
        },
        format="json",
    )

    assert response.status_code == 401
    assert response.json() == {
        "detail": "Authentication credentials were not provided."
    }


@override_settings(AI_ALLOW_REACH_FROM="public")
@patch("core.services.ai_services.AIService.stream")
def test_api_documents_ai_proxy_anonymous_success(mock_stream):
    """
    Anonymous users should be able to request AI proxy to a document
    if the link reach and role permit it.
    """
    document = factories.DocumentFactory(link_reach="public", link_role="editor")

    mock_stream.return_value = iter(["data: chunk1\n", "data: chunk2\n"])

    url = f"/api/v1.0/documents/{document.id!s}/ai-proxy/"
    response = APIClient().post(
        url,
        b"{}",
        content_type="application/json",
    )

    assert response.status_code == 200
    assert response["Content-Type"] == "text/event-stream"
    assert response["x-vercel-ai-data-stream"] == "v1"
    assert response["X-Accel-Buffering"] == "no"

    content = b"".join(response.streaming_content).decode()
    assert "chunk1" in content
    assert "chunk2" in content
    mock_stream.assert_called_once()


@override_settings(AI_ALLOW_REACH_FROM=random.choice(["authenticated", "restricted"]))
def test_api_documents_ai_proxy_anonymous_limited_by_setting():
    """
    Anonymous users should not be able to request AI proxy to a document
    if AI_ALLOW_REACH_FROM setting restricts it.
    """
    document = factories.DocumentFactory(link_reach="public", link_role="editor")

    url = f"/api/v1.0/documents/{document.id!s}/ai-proxy/"
    response = APIClient().post(
        url,
        b"{}",
        content_type="application/json",
    )

    assert response.status_code == 401


@pytest.mark.parametrize(
    "reach, role",
    [
        ("restricted", "reader"),
        ("restricted", "editor"),
        ("authenticated", "reader"),
        ("public", "reader"),
    ],
)
def test_api_documents_ai_proxy_authenticated_forbidden(reach, role):
    """
    Users who are not related to a document can't request AI proxy if the
    link reach and role don't allow it.
    """
    user = factories.UserFactory()

    client = APIClient()
    client.force_login(user)

    document = factories.DocumentFactory(link_reach=reach, link_role=role)

    url = f"/api/v1.0/documents/{document.id!s}/ai-proxy/"
    response = client.post(
        url,
        b"{}",
        content_type="application/json",
    )

    assert response.status_code == 403


@pytest.mark.parametrize(
    "reach, role",
    [
        ("authenticated", "editor"),
        ("public", "editor"),
    ],
)
@patch("core.services.ai_services.AIService.stream")
def test_api_documents_ai_proxy_authenticated_success(mock_stream, reach, role):
    """
    Authenticated users should be able to request AI proxy to a document
    if the link reach and role permit it.
    """
    user = factories.UserFactory()

    client = APIClient()
    client.force_login(user)

    document = factories.DocumentFactory(link_reach=reach, link_role=role)

    mock_stream.return_value = iter(["data: response\n"])

    url = f"/api/v1.0/documents/{document.id!s}/ai-proxy/"
    response = client.post(
        url,
        b"{}",
        content_type="application/json",
    )

    assert response.status_code == 200
    assert response["Content-Type"] == "text/event-stream"
    mock_stream.assert_called_once()


@pytest.mark.parametrize("via", VIA)
def test_api_documents_ai_proxy_reader(via, mock_user_teams):
    """Users with reader access should not be able to request AI proxy."""
    user = factories.UserFactory()

    client = APIClient()
    client.force_login(user)

    document = factories.DocumentFactory(link_reach="restricted")
    if via == USER:
        factories.UserDocumentAccessFactory(document=document, user=user, role="reader")
    elif via == TEAM:
        mock_user_teams.return_value = ["lasuite", "unknown"]
        factories.TeamDocumentAccessFactory(
            document=document, team="lasuite", role="reader"
        )

    url = f"/api/v1.0/documents/{document.id!s}/ai-proxy/"
    response = client.post(
        url,
        b"{}",
        content_type="application/json",
    )

    assert response.status_code == 403


@pytest.mark.parametrize("role", ["editor", "administrator", "owner"])
@pytest.mark.parametrize("via", VIA)
@patch("core.services.ai_services.AIService.stream")
def test_api_documents_ai_proxy_success(mock_stream, via, role, mock_user_teams):
    """Users with sufficient permissions should be able to request AI proxy."""
    user = factories.UserFactory()

    client = APIClient()
    client.force_login(user)

    document = factories.DocumentFactory(link_reach="restricted")
    if via == USER:
        factories.UserDocumentAccessFactory(document=document, user=user, role=role)
    elif via == TEAM:
        mock_user_teams.return_value = ["lasuite", "unknown"]
        factories.TeamDocumentAccessFactory(
            document=document, team="lasuite", role=role
        )

    mock_stream.return_value = iter(["data: success\n"])

    url = f"/api/v1.0/documents/{document.id!s}/ai-proxy/"
    response = client.post(
        url,
        b"{}",
        content_type="application/json",
    )

    assert response.status_code == 200
    assert response["Content-Type"] == "text/event-stream"
    assert response["x-vercel-ai-data-stream"] == "v1"
    assert response["X-Accel-Buffering"] == "no"

    content = b"".join(response.streaming_content).decode()
    assert "success" in content
    mock_stream.assert_called_once()


@pytest.mark.parametrize(
    "setting_to_disable", ["AI_FEATURE_ENABLED", "AI_FEATURE_BLOCKNOTE_ENABLED"]
)
def test_api_documents_ai_proxy_ai_feature_disabled(settings, setting_to_disable):
    """When AI_FEATURE_ENABLED is False, the endpoint returns 400."""
    setattr(settings, setting_to_disable, False)

    user = factories.UserFactory()

    client = APIClient()
    client.force_login(user)

    document = factories.DocumentFactory(link_reach="public", link_role="editor")

    response = client.post(
        f"/api/v1.0/documents/{document.id!s}/ai-proxy/",
        b"{}",
        content_type="application/json",
    )

    assert response.status_code == 400
    assert response.json() == ["AI feature is not enabled."]


@override_settings(AI_DOCUMENT_RATE_THROTTLE_RATES={"minute": 3, "hour": 6, "day": 10})
@patch("core.services.ai_services.AIService.stream")
def test_api_documents_ai_proxy_throttling_document(mock_stream):
    """
    Throttling per document should be triggered on the AI proxy endpoint.
    For full throttle class test see: `test_api_utils_ai_document_rate_throttles`
    """
    client = APIClient()
    document = factories.DocumentFactory(link_reach="public", link_role="editor")

    mock_stream.return_value = iter(["data: ok\n"])

    url = f"/api/v1.0/documents/{document.id!s}/ai-proxy/"
    for _ in range(3):
        mock_stream.return_value = iter(["data: ok\n"])
        user = factories.UserFactory()
        client.force_login(user)
        response = client.post(
            url,
            b"{}",
            content_type="application/json",
        )
        assert response.status_code == 200

    user = factories.UserFactory()
    client.force_login(user)
    response = client.post(
        url,
        b"{}",
        content_type="application/json",
    )

    assert response.status_code == 429
    assert response.json() == {
        "detail": "Request was throttled. Expected available in 60 seconds."
    }


@override_settings(AI_USER_RATE_THROTTLE_RATES={"minute": 3, "hour": 6, "day": 10})
@patch("core.services.ai_services.AIService.stream")
def test_api_documents_ai_proxy_throttling_user(mock_stream):
    """
    Throttling per user should be triggered on the AI proxy endpoint.
    For full throttle class test see: `test_api_utils_ai_user_rate_throttles`
    """
    user = factories.UserFactory()
    client = APIClient()
    client.force_login(user)

    for _ in range(3):
        mock_stream.return_value = iter(["data: ok\n"])
        document = factories.DocumentFactory(link_reach="public", link_role="editor")
        url = f"/api/v1.0/documents/{document.id!s}/ai-proxy/"
        response = client.post(
            url,
            b"{}",
            content_type="application/json",
        )
        assert response.status_code == 200

    document = factories.DocumentFactory(link_reach="public", link_role="editor")
    url = f"/api/v1.0/documents/{document.id!s}/ai-proxy/"
    response = client.post(
        url,
        b"{}",
        content_type="application/json",
    )

    assert response.status_code == 429
    assert response.json() == {
        "detail": "Request was throttled. Expected available in 60 seconds."
    }


@patch("core.services.ai_services.AIService.stream")
def test_api_documents_ai_proxy_returns_streaming_response(mock_stream):
    """AI proxy should return a StreamingHttpResponse with correct headers."""
    user = factories.UserFactory()

    client = APIClient()
    client.force_login(user)

    document = factories.DocumentFactory(link_reach="public", link_role="editor")

    mock_stream.return_value = iter(["data: part1\n", "data: part2\n", "data: part3\n"])

    url = f"/api/v1.0/documents/{document.id!s}/ai-proxy/"
    response = client.post(
        url,
        b"{}",
        content_type="application/json",
    )

    assert response.status_code == 200
    assert response["Content-Type"] == "text/event-stream"
    assert response["x-vercel-ai-data-stream"] == "v1"
    assert response["X-Accel-Buffering"] == "no"

    chunks = list(response.streaming_content)
    assert len(chunks) == 3


def test_api_documents_ai_proxy_invalid_payload():
    """AI Proxy should return a 400 if the payload is invalid."""

    user = factories.UserFactory()

    document = factories.DocumentFactory(users=[(user, "owner")])

    client = APIClient()
    client.force_login(user)

    response = client.post(
        f"/api/v1.0/documents/{document.id!s}/ai-proxy/",
        b'{"foo": "bar", "trigger": "submit-message"}',
        content_type="application/json",
    )

    assert response.status_code == 400
    assert response.json() == {"detail": "Invalid submitted payload"}
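As a condensed reading aid, the round trip these new tests exercise looks like the sketch below. It reuses the names from the tests above and assumes the AI_ALLOW_REACH_FROM setting permits the caller; it is a sketch, not part of the diff.

from unittest.mock import patch

from rest_framework.test import APIClient

from core import factories


@patch("core.services.ai_services.AIService.stream")
def sketch_ai_proxy_round_trip(mock_stream):
    """Sketch: post to the ai-proxy action and read the streamed SSE body."""
    document = factories.DocumentFactory(link_reach="public", link_role="editor")
    mock_stream.return_value = iter(["data: hello\n"])

    response = APIClient().post(
        f"/api/v1.0/documents/{document.id!s}/ai-proxy/",
        b"{}",
        content_type="application/json",
    )

    assert response["Content-Type"] == "text/event-stream"
    assert "hello" in b"".join(response.streaming_content).decode()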
@@ -9,6 +9,7 @@ import pytest
 from rest_framework.test import APIClient

 from core.api.serializers import UserSerializer
+from core.choices import PRIVILEGED_ROLES
 from core.factories import (
     DocumentAskForAccessFactory,
     DocumentFactory,
@@ -199,6 +200,27 @@ def test_api_documents_ask_for_access_create_authenticated_already_has_ask_for_a
    assert response.json() == {"detail": "You already ask to access to this document."}


+@pytest.mark.parametrize("role", PRIVILEGED_ROLES)
+def test_api_documents_ask_for_access_create_authenticated_already_has_privileged_access(
+    role,
+):
+    """
+    Authenticated users with privileged access (owner or admin) should not be able to
+    create a document ask for access.
+    """
+    user = UserFactory()
+    document = DocumentFactory(users=[(user, role)])
+
+    client = APIClient()
+    client.force_login(user)
+
+    response = client.post(f"/api/v1.0/documents/{document.id}/ask-for-access/")
+    assert response.status_code == 400
+    assert response.json() == {
+        "detail": "You already have privileged access to this document."
+    }
+
+
 ## List
@@ -123,7 +123,7 @@ def test_api_documents_duplicate_success(index):
        image_refs[0][0]
    ]  # Only the first image key
    assert duplicated_document.get_parent() == document.get_parent()
-   assert duplicated_document.path == document.get_next_sibling().path
+   assert duplicated_document.path == document.get_last_sibling().path

    # Check that accesses were not duplicated.
    # The user who did the duplicate is forced as owner
@@ -180,6 +180,7 @@ def test_api_documents_duplicate_with_accesses_admin(role):
    client = APIClient()
    client.force_login(user)

+   documents_before = factories.DocumentFactory.create_batch(20)
    document = factories.DocumentFactory(
        users=[(user, role)],
        title="document with accesses",
@@ -187,6 +188,12 @@ def test_api_documents_duplicate_with_accesses_admin(role):
    user_access = factories.UserDocumentAccessFactory(document=document)
    team_access = factories.TeamDocumentAccessFactory(document=document)

+   documents_after = factories.DocumentFactory.create_batch(20)
+
+   all_documents = documents_before + [document] + documents_after
+
+   paths = {document.pk: document.path for document in all_documents}
+
    # Duplicate the document via the API endpoint requesting to duplicate accesses
    response = client.post(
        f"/api/v1.0/documents/{document.id!s}/duplicate/",
@@ -212,6 +219,10 @@ def test_api_documents_duplicate_with_accesses_admin(role):
    assert duplicated_accesses.get(user=user_access.user).role == user_access.role
    assert duplicated_accesses.get(team=team_access.team).role == team_access.role

+   for document in all_documents:
+       document.refresh_from_db()
+       assert document.path == paths[document.id]
+

@pytest.mark.parametrize("role", ["editor", "reader"])
def test_api_documents_duplicate_with_accesses_non_admin(role):
@@ -318,3 +329,424 @@ def test_api_documents_duplicate_reader_non_root_document():
    assert duplicated_document.is_root()
    assert duplicated_document.accesses.count() == 1
    assert duplicated_document.accesses.get(user=user).role == "owner"


def test_api_documents_duplicate_with_descendants_simple():
    """
    Duplicating a document with descendants flag should recursively duplicate all children.
    """
    user = factories.UserFactory()
    client = APIClient()
    client.force_login(user)

    # Create document tree
    root = factories.DocumentFactory(
        users=[(user, "owner")],
        title="Root Document",
    )
    child1 = factories.DocumentFactory(
        parent=root,
        title="Child 1",
    )
    child2 = factories.DocumentFactory(
        parent=root,
        title="Child 2",
    )

    initial_count = models.Document.objects.count()
    assert initial_count == 3

    # Duplicate with descendants
    response = client.post(
        f"/api/v1.0/documents/{root.id!s}/duplicate/",
        {"with_descendants": True},
        format="json",
    )

    assert response.status_code == 201
    duplicated_root = models.Document.objects.get(id=response.json()["id"])

    # Check that all documents were duplicated (6 total: 3 original + 3 duplicated)
    assert models.Document.objects.count() == 6

    # Check root duplication
    assert duplicated_root.title == "Copy of Root Document"
    assert duplicated_root.creator == user
    assert duplicated_root.duplicated_from == root
    assert duplicated_root.get_children().count() == 2

    # Check children duplication
    duplicated_children = duplicated_root.get_children().order_by("title")
    assert duplicated_children.count() == 2

    duplicated_child1 = duplicated_children.first()
    assert duplicated_child1.title == "Copy of Child 1"
    assert duplicated_child1.creator == user
    assert duplicated_child1.duplicated_from == child1
    assert duplicated_child1.get_parent() == duplicated_root

    duplicated_child2 = duplicated_children.last()
    assert duplicated_child2.title == "Copy of Child 2"
    assert duplicated_child2.creator == user
    assert duplicated_child2.duplicated_from == child2
    assert duplicated_child2.get_parent() == duplicated_root


def test_api_documents_duplicate_with_descendants_multi_level():
    """
    Duplicating should recursively handle multiple levels of nesting.
    """
    user = factories.UserFactory()
    client = APIClient()
    client.force_login(user)

    root = factories.DocumentFactory(
        users=[(user, "owner")],
        title="Level 0",
    )
    child = factories.DocumentFactory(
        parent=root,
        title="Level 1",
    )
    grandchild = factories.DocumentFactory(
        parent=child,
        title="Level 2",
    )
    great_grandchild = factories.DocumentFactory(
        parent=grandchild,
        title="Level 3",
    )

    initial_count = models.Document.objects.count()
    assert initial_count == 4

    # Duplicate with descendants
    response = client.post(
        f"/api/v1.0/documents/{root.id!s}/duplicate/",
        {"with_descendants": True},
        format="json",
    )

    assert response.status_code == 201
    duplicated_root = models.Document.objects.get(id=response.json()["id"])

    # Check that all documents were duplicated
    assert models.Document.objects.count() == 8

    # Verify the tree structure
    assert duplicated_root.depth == root.depth
    dup_children = duplicated_root.get_children()
    assert dup_children.count() == 1

    dup_child = dup_children.first()
    assert dup_child.title == "Copy of Level 1"
    assert dup_child.duplicated_from == child
    dup_grandchildren = dup_child.get_children()
    assert dup_grandchildren.count() == 1

    dup_grandchild = dup_grandchildren.first()
    assert dup_grandchild.title == "Copy of Level 2"
    assert dup_grandchild.duplicated_from == grandchild
    dup_great_grandchildren = dup_grandchild.get_children()
    assert dup_great_grandchildren.count() == 1

    dup_great_grandchild = dup_great_grandchildren.first()
    assert dup_great_grandchild.title == "Copy of Level 3"
    assert dup_great_grandchild.duplicated_from == great_grandchild


def test_api_documents_duplicate_with_descendants_and_attachments():
    """
    Duplicating with descendants should properly handle attachments in all children.
    """
    user = factories.UserFactory()
    client = APIClient()
    client.force_login(user)

    # Create documents with attachments
    root_id = uuid.uuid4()
    child_id = uuid.uuid4()
    image_key_root, image_url_root = get_image_refs(root_id)
    image_key_child, image_url_child = get_image_refs(child_id)

    # Create root document with attachment
    ydoc = pycrdt.Doc()
    fragment = pycrdt.XmlFragment(
        [
            pycrdt.XmlElement("img", {"src": image_url_root}),
        ]
    )
    ydoc["document-store"] = fragment
    update = ydoc.get_update()
    root_content = base64.b64encode(update).decode("utf-8")

    root = factories.DocumentFactory(
        id=root_id,
        users=[(user, "owner")],
        title="Root with Image",
        content=root_content,
        attachments=[image_key_root],
    )

    # Create child with different attachment
    ydoc_child = pycrdt.Doc()
    fragment_child = pycrdt.XmlFragment(
        [
            pycrdt.XmlElement("img", {"src": image_url_child}),
        ]
    )
    ydoc_child["document-store"] = fragment_child
    update_child = ydoc_child.get_update()
    child_content = base64.b64encode(update_child).decode("utf-8")

    child = factories.DocumentFactory(
        id=child_id,
        parent=root,
        title="Child with Image",
        content=child_content,
        attachments=[image_key_child],
    )

    # Duplicate with descendants
    response = client.post(
        f"/api/v1.0/documents/{root.id!s}/duplicate/",
        {"with_descendants": True},
        format="json",
    )

    assert response.status_code == 201
    duplicated_root = models.Document.objects.get(id=response.json()["id"])

    # Check root attachments
    assert duplicated_root.attachments == [image_key_root]
    assert duplicated_root.content == root_content

    # Check child attachments
    dup_children = duplicated_root.get_children()
    assert dup_children.count() == 1
    dup_child = dup_children.first()
    assert dup_child.attachments == [image_key_child]
    assert dup_child.content == child_content


def test_api_documents_duplicate_with_descendants_and_accesses():
    """
    Duplicating with descendants and accesses should propagate accesses to all children.
    """
    user = factories.UserFactory()
    other_user = factories.UserFactory()
    client = APIClient()
    client.force_login(user)

    # Create document tree with accesses
    root = factories.DocumentFactory(
        users=[(user, "owner"), (other_user, "editor")],
        title="Root",
    )
    child = factories.DocumentFactory(
        parent=root,
        title="Child",
    )
    factories.UserDocumentAccessFactory(document=child, user=other_user, role="reader")

    # Duplicate with descendants and accesses
    response = client.post(
        f"/api/v1.0/documents/{root.id!s}/duplicate/",
        {"with_descendants": True, "with_accesses": True},
        format="json",
    )

    assert response.status_code == 201
    duplicated_root = models.Document.objects.get(id=response.json()["id"])

    # Check root accesses (should be duplicated)
    root_accesses = duplicated_root.accesses.order_by("user_id")
    assert root_accesses.count() == 2
    assert root_accesses.get(user=user).role == "owner"
    assert root_accesses.get(user=other_user).role == "editor"

    # Check child accesses (should be duplicated)
    dup_children = duplicated_root.get_children()
    dup_child = dup_children.first()
    child_accesses = dup_child.accesses.order_by("user_id")
    assert child_accesses.count() == 1
    assert child_accesses.get(user=other_user).role == "reader"


@pytest.mark.parametrize("role", ["editor", "reader"])
def test_api_documents_duplicate_with_descendants_non_root_document_becomes_root(role):
    """
    When duplicating a non-root document with descendants as a reader/editor,
    it should become a root document and still duplicate its children.
    """
    user = factories.UserFactory()
    client = APIClient()
    client.force_login(user)

    parent = factories.DocumentFactory(users=[(user, "owner")])
    child = factories.DocumentFactory(
        parent=parent,
        users=[(user, role)],
        title="Sub Document",
    )
    grandchild = factories.DocumentFactory(
        parent=child,
        title="Grandchild",
    )

    assert child.is_child_of(parent)

    # Duplicate the child (non-root) with descendants
    response = client.post(
        f"/api/v1.0/documents/{child.id!s}/duplicate/",
        {"with_descendants": True},
        format="json",
    )

    assert response.status_code == 201
    duplicated_child = models.Document.objects.get(id=response.json()["id"])

    assert duplicated_child.title == "Copy of Sub Document"

    dup_grandchildren = duplicated_child.get_children()
    assert dup_grandchildren.count() == 1
    dup_grandchild = dup_grandchildren.first()
    assert dup_grandchild.title == "Copy of Grandchild"
    assert dup_grandchild.duplicated_from == grandchild


def test_api_documents_duplicate_without_descendants_should_not_duplicate_children():
    """
    When with_descendants is not set or False, children should not be duplicated.
    """
    user = factories.UserFactory()
    client = APIClient()
    client.force_login(user)

    # Create document tree
    root = factories.DocumentFactory(
        users=[(user, "owner")],
        title="Root",
    )
    child = factories.DocumentFactory(
        parent=root,
        title="Child",
    )

    initial_count = models.Document.objects.count()
    assert initial_count == 2

    # Duplicate without descendants (default behavior)
    response = client.post(
        f"/api/v1.0/documents/{root.id!s}/duplicate/",
        format="json",
    )

    assert response.status_code == 201
    duplicated_root = models.Document.objects.get(id=response.json()["id"])

    # Only root should be duplicated, not children
    assert models.Document.objects.count() == 3
    assert duplicated_root.get_children().count() == 0


def test_api_documents_duplicate_with_descendants_preserves_link_configuration():
    """
    Duplicating with descendants should preserve link configuration (link_reach, link_role)
    for all children when with_accesses is True.
    """
    user = factories.UserFactory()
    client = APIClient()
    client.force_login(user)

    # Create document tree with specific link configurations
    root = factories.DocumentFactory(
        users=[(user, "owner")],
        title="Root",
        link_reach="public",
        link_role="reader",
    )
    child = factories.DocumentFactory(
        parent=root,
        title="Child",
        link_reach="restricted",
        link_role="editor",
    )

    # Duplicate with descendants and accesses
    response = client.post(
        f"/api/v1.0/documents/{root.id!s}/duplicate/",
        {"with_descendants": True, "with_accesses": True},
        format="json",
    )

    assert response.status_code == 201
    duplicated_root = models.Document.objects.get(id=response.json()["id"])

    # Check root link configuration
    assert duplicated_root.link_reach == root.link_reach
    assert duplicated_root.link_role == root.link_role

    # Check child link configuration
    dup_children = duplicated_root.get_children()
    dup_child = dup_children.first()
    assert dup_child.link_reach == child.link_reach
    assert dup_child.link_role == child.link_role


def test_api_documents_duplicate_with_descendants_complex_tree():
    """
    Test duplication of a complex tree structure with multiple branches.
    """
    user = factories.UserFactory()
    client = APIClient()
    client.force_login(user)

    # Create a complex tree:
    #        root
    #       /    \
    #      c1    c2
    #     /  \     \
    #   gc1  gc2   gc3
    root = factories.DocumentFactory(
        users=[(user, "owner")],
        title="Root",
    )
    child1 = factories.DocumentFactory(parent=root, title="Child 1")
    child2 = factories.DocumentFactory(parent=root, title="Child 2")
    _grandchild1 = factories.DocumentFactory(parent=child1, title="GrandChild 1")
    _grandchild2 = factories.DocumentFactory(parent=child1, title="GrandChild 2")
    _grandchild3 = factories.DocumentFactory(parent=child2, title="GrandChild 3")

    initial_count = models.Document.objects.count()
    assert initial_count == 6

    # Duplicate with descendants
    response = client.post(
        f"/api/v1.0/documents/{root.id!s}/duplicate/",
        {"with_descendants": True},
        format="json",
    )

    assert response.status_code == 201
    duplicated_root = models.Document.objects.get(id=response.json()["id"])

    # All documents should be duplicated
    assert models.Document.objects.count() == 12

    # Check structure is preserved
    dup_children = duplicated_root.get_children().order_by("title")
    assert dup_children.count() == 2

    dup_child1 = dup_children.first()
    assert dup_child1.title == "Copy of Child 1"
    dup_grandchildren1 = dup_child1.get_children().order_by("title")
    assert dup_grandchildren1.count() == 2
    assert dup_grandchildren1.first().title == "Copy of GrandChild 1"
    assert dup_grandchildren1.last().title == "Copy of GrandChild 2"

    dup_child2 = dup_children.last()
    assert dup_child2.title == "Copy of Child 2"
    dup_grandchildren2 = dup_child2.get_children()
    assert dup_grandchildren2.count() == 1
    assert dup_grandchildren2.first().title == "Copy of GrandChild 3"
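In short, the call shape all of these duplication tests revolve around is the sketch below, with the flags as exercised above; `client` and `root` stand for the objects set up in the individual tests.

response = client.post(
    f"/api/v1.0/documents/{root.id!s}/duplicate/",
    {"with_descendants": True, "with_accesses": True},
    format="json",
)
duplicated_root_id = response.json()["id"]  # id of the new copy's root document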
@@ -16,7 +16,16 @@ fake = Faker()
 pytestmark = pytest.mark.django_db


-def test_api_documents_list_filter_and_access_rights():
+@pytest.mark.parametrize(
+    "title_search_field",
+    # for integration with indexer search we must have
+    # the same filtering behaviour with "q" and "title" parameters
+    [
+        ("title"),
+        ("q"),
+    ],
+)
+def test_api_documents_list_filter_and_access_rights(title_search_field):
     """Filtering on querystring parameters should respect access rights."""
     user = factories.UserFactory()
     client = APIClient()
@@ -76,7 +85,7 @@ def test_api_documents_list_filter_and_access_rights():

     filters = {
         "link_reach": random.choice([None, *models.LinkReachChoices.values]),
-        "title": random.choice([None, *word_list]),
+        title_search_field: random.choice([None, *word_list]),
         "favorite": random.choice([None, True, False]),
         "creator": random.choice([None, user, other_user]),
         "ordering": random.choice(
@@ -29,6 +29,7 @@ def test_api_documents_retrieve_anonymous_public_standalone():
         "abilities": {
             "accesses_manage": False,
             "accesses_view": False,
+            "ai_proxy": False,
             "ai_transform": False,
             "ai_translate": False,
             "attachment_upload": document.link_role == "editor",
@@ -58,6 +59,7 @@ def test_api_documents_retrieve_anonymous_public_standalone():
             "partial_update": document.link_role == "editor",
             "restore": False,
             "retrieve": True,
+            "search": True,
             "tree": True,
             "update": document.link_role == "editor",
             "versions_destroy": False,
@@ -107,6 +109,7 @@ def test_api_documents_retrieve_anonymous_public_parent():
         "abilities": {
             "accesses_manage": False,
             "accesses_view": False,
+            "ai_proxy": False,
             "ai_transform": False,
             "ai_translate": False,
             "attachment_upload": grand_parent.link_role == "editor",
@@ -134,6 +137,7 @@ def test_api_documents_retrieve_anonymous_public_parent():
             "partial_update": grand_parent.link_role == "editor",
             "restore": False,
             "retrieve": True,
+            "search": True,
             "tree": True,
             "update": grand_parent.link_role == "editor",
             "versions_destroy": False,
@@ -215,6 +219,7 @@ def test_api_documents_retrieve_authenticated_unrelated_public_or_authenticated(
         "abilities": {
             "accesses_manage": False,
             "accesses_view": False,
+            "ai_proxy": document.link_role == "editor",
             "ai_transform": document.link_role == "editor",
             "ai_translate": document.link_role == "editor",
             "attachment_upload": document.link_role == "editor",
@@ -243,6 +248,7 @@ def test_api_documents_retrieve_authenticated_unrelated_public_or_authenticated(
             "partial_update": document.link_role == "editor",
             "restore": False,
             "retrieve": True,
+            "search": True,
             "tree": True,
             "update": document.link_role == "editor",
             "versions_destroy": False,
@@ -300,6 +306,7 @@ def test_api_documents_retrieve_authenticated_public_or_authenticated_parent(rea
         "abilities": {
             "accesses_manage": False,
             "accesses_view": False,
+            "ai_proxy": grand_parent.link_role == "editor",
             "ai_transform": grand_parent.link_role == "editor",
             "ai_translate": grand_parent.link_role == "editor",
             "attachment_upload": grand_parent.link_role == "editor",
@@ -326,6 +333,7 @@ def test_api_documents_retrieve_authenticated_public_or_authenticated_parent(rea
             "partial_update": grand_parent.link_role == "editor",
             "restore": False,
             "retrieve": True,
+            "search": True,
             "tree": True,
             "update": grand_parent.link_role == "editor",
             "versions_destroy": False,
@@ -498,6 +506,7 @@ def test_api_documents_retrieve_authenticated_related_parent():
         "abilities": {
             "accesses_manage": access.role in ["administrator", "owner"],
             "accesses_view": True,
+            "ai_proxy": access.role not in ["reader", "commenter"],
             "ai_transform": access.role not in ["reader", "commenter"],
             "ai_translate": access.role not in ["reader", "commenter"],
             "attachment_upload": access.role not in ["reader", "commenter"],
@@ -524,6 +533,7 @@ def test_api_documents_retrieve_authenticated_related_parent():
             "partial_update": access.role not in ["reader", "commenter"],
             "restore": access.role == "owner",
             "retrieve": True,
+            "search": True,
             "tree": True,
             "update": access.role not in ["reader", "commenter"],
             "versions_destroy": access.role in ["administrator", "owner"],
@@ -1057,3 +1067,48 @@ def test_api_documents_retrieve_permanently_deleted_related(role, depth):

    assert response.status_code == 404
    assert response.json() == {"detail": "Not found."}


def test_api_documents_retrieve_without_content():
    """
    Test retrieve using without_content query string should remove the content in the response
    """

    user = factories.UserFactory()

    document = factories.DocumentFactory(creator=user, users=[(user, "owner")])

    client = APIClient()
    client.force_login(user)

    with mock.patch("core.models.Document.content") as mock_document_content:
        response = client.get(
            f"/api/v1.0/documents/{document.id!s}/?without_content=true"
        )

    assert response.status_code == 200

    payload = response.json()
    assert "content" not in payload
    mock_document_content.assert_not_called()


def test_api_documents_retrieve_without_content_invalid_value():
    """
    Test retrieve using without_content query string but an invalid value
    should return a 400
    """

    user = factories.UserFactory()

    document = factories.DocumentFactory(creator=user, users=[(user, "owner")])

    client = APIClient()
    client.force_login(user)

    response = client.get(
        f"/api/v1.0/documents/{document.id!s}/?without_content=invalid-value"
    )
    assert response.status_code == 400

    assert response.json() == ["Must be a valid boolean."]
@@ -1,46 +1,40 @@
 """
-Tests for Documents API endpoint in impress's core app: list
+Tests for Documents API endpoint in impress's core app: search
 """

-import random
-from json import loads as json_loads
+from unittest import mock

-from django.test import RequestFactory
-
 import pytest
 import responses
 from faker import Faker
+from rest_framework import response as drf_response
 from rest_framework.test import APIClient
+from waffle.testutils import override_flag

-from core import factories, models
+from core import factories
+from core.enums import FeatureFlag, SearchType
 from core.services.search_indexers import get_document_indexer

 fake = Faker()
 pytestmark = pytest.mark.django_db


-def build_search_url(**kwargs):
-    """Build absolute uri for search endpoint with ORDERED query arguments"""
-    return (
-        RequestFactory()
-        .get("/api/v1.0/documents/search/", dict(sorted(kwargs.items())))
-        .build_absolute_uri()
-    )
+@pytest.fixture(autouse=True)
+def enable_flag_find_hybrid_search():
+    """Enable flag_find_hybrid_search for all tests in this module."""
+    with override_flag(FeatureFlag.FLAG_FIND_HYBRID_SEARCH, active=True):
+        yield


-@pytest.mark.parametrize("role", models.LinkRoleChoices.values)
-@pytest.mark.parametrize("reach", models.LinkReachChoices.values)
+@mock.patch("core.services.search_indexers.FindDocumentIndexer.search_query")
 @responses.activate
-def test_api_documents_search_anonymous(reach, role, indexer_settings):
+def test_api_documents_search_anonymous(search_query, indexer_settings):
     """
-    Anonymous users should not be allowed to search documents whatever the
-    link reach and link role
+    Anonymous users should be allowed to search documents with Find.
     """
-    indexer_settings.SEARCH_INDEXER_QUERY_URL = "http://find/api/v1.0/search"
+    indexer_settings.SEARCH_URL = "http://find/api/v1.0/search"

-    factories.DocumentFactory(link_reach=reach, link_role=role)
+    # mock Find response

-    # Find response
     responses.add(
         responses.POST,
         "http://find/api/v1.0/search",
@@ -48,7 +42,23 @@ def test_api_documents_search_anonymous(reach, role, indexer_settings):
         status=200,
     )

-    response = APIClient().get("/api/v1.0/documents/search/", data={"q": "alpha"})
+    q = "alpha"
+    response = APIClient().get("/api/v1.0/documents/search/", data={"q": q})
+
+    assert search_query.call_count == 1
+    assert search_query.call_args[1] == {
+        "data": {
+            "q": q,
+            "visited": [],
+            "services": ["docs"],
+            "nb_results": 50,
+            "order_by": "updated_at",
+            "order_direction": "desc",
+            "path": None,
+            "search_type": SearchType.HYBRID,
+        },
+        "token": None,
+    }

     assert response.status_code == 200
     assert response.json() == {
@@ -59,115 +69,163 @@ def test_api_documents_search_anonymous(reach, role, indexer_settings):
     }


-def test_api_documents_search_endpoint_is_none(indexer_settings):
-    """
-    Missing SEARCH_INDEXER_QUERY_URL, so the indexer is not properly configured.
-    Should fallback on title filter
-    """
-    indexer_settings.SEARCH_INDEXER_QUERY_URL = None
-
-    assert get_document_indexer() is None
-
-    user = factories.UserFactory()
-    document = factories.DocumentFactory(title="alpha")
-    access = factories.UserDocumentAccessFactory(document=document, user=user)
-
-    client = APIClient()
-    client.force_login(user)
-
-    response = client.get("/api/v1.0/documents/search/", data={"q": "alpha"})
-
-    assert response.status_code == 200
-    content = response.json()
-    results = content.pop("results")
-    assert content == {
-        "count": 1,
-        "next": None,
-        "previous": None,
-    }
-    assert len(results) == 1
-    assert results[0] == {
-        "id": str(document.id),
-        "abilities": document.get_abilities(user),
-        "ancestors_link_reach": None,
-        "ancestors_link_role": None,
-        "computed_link_reach": document.computed_link_reach,
-        "computed_link_role": document.computed_link_role,
-        "created_at": document.created_at.isoformat().replace("+00:00", "Z"),
-        "creator": str(document.creator.id),
-        "depth": 1,
-        "excerpt": document.excerpt,
-        "link_reach": document.link_reach,
-        "link_role": document.link_role,
-        "nb_accesses_ancestors": 1,
-        "nb_accesses_direct": 1,
-        "numchild": 0,
-        "path": document.path,
-        "title": document.title,
-        "updated_at": document.updated_at.isoformat().replace("+00:00", "Z"),
-        "deleted_at": None,
-        "user_role": access.role,
-    }
+@mock.patch("core.api.viewsets.DocumentViewSet.list")
+def test_api_documents_search_fall_back_on_search_list(mock_list, settings):
+    """
+    When indexer is not configured and no path is provided,
+    should fall back on list method
+    """
+    assert get_document_indexer() is None
+    assert settings.OIDC_STORE_REFRESH_TOKEN is False
+    assert settings.OIDC_STORE_ACCESS_TOKEN is False
+
+    user = factories.UserFactory()
+    client = APIClient()
+    client.force_login(
+        user, backend="core.authentication.backends.OIDCAuthenticationBackend"
+    )
+
+    mocked_response = {
+        "count": 0,
+        "next": None,
+        "previous": None,
+        "results": [{"title": "mocked list result"}],
+    }
+    mock_list.return_value = drf_response.Response(mocked_response)
+
+    q = "alpha"
+    response = client.get("/api/v1.0/documents/search/", data={"q": q})
+
+    assert response.status_code == 200
+
+    assert mock_list.call_count == 1
+    assert mock_list.call_args[0][0].GET.get("q") == q
+    assert response.json() == mocked_response
+
+
+@mock.patch("core.api.viewsets.DocumentViewSet._list_descendants")
+def test_api_documents_search_fallback_on_search_list_sub_docs(
+    mock_list_descendants, settings
+):
+    """
+    When indexer is not configured and path parameter is provided,
+    should call _list_descendants() method
+    """
+    assert get_document_indexer() is None
+    assert settings.OIDC_STORE_REFRESH_TOKEN is False
+    assert settings.OIDC_STORE_ACCESS_TOKEN is False
+
+    user = factories.UserFactory()
+    client = APIClient()
+    client.force_login(
+        user, backend="core.authentication.backends.OIDCAuthenticationBackend"
+    )
+
+    parent = factories.DocumentFactory(title="parent", users=[user])
+
+    mocked_response = {
+        "count": 0,
+        "next": None,
+        "previous": None,
+        "results": [{"title": "mocked _list_descendants result"}],
+    }
+    mock_list_descendants.return_value = drf_response.Response(mocked_response)
+
+    q = "alpha"
+    response = client.get(
+        "/api/v1.0/documents/search/", data={"q": q, "path": parent.path}
+    )
+
+    mock_list_descendants.assert_called_with(
+        mock.ANY, {"q": "alpha", "path": parent.path}
+    )
+    assert response.json() == mocked_response
+
+
+@mock.patch("core.api.viewsets.DocumentViewSet._title_search")
+def test_api_documents_search_indexer_crashes(mock_title_search, indexer_settings):
|
||||||
|
"""
|
||||||
|
When indexer is configured but crashes -> falls back on title_search
|
||||||
|
"""
|
||||||
|
# indexer is properly configured
|
||||||
|
indexer_settings.SEARCH_URL = None
|
||||||
|
assert get_document_indexer() is None
|
||||||
|
# but returns an error when the query is sent
|
||||||
|
responses.add(
|
||||||
|
responses.POST,
|
||||||
|
"http://find/api/v1.0/search",
|
||||||
|
json=[{"error": "Some indexer error"}],
|
||||||
|
status=404,
|
||||||
|
)
|
||||||
|
|
||||||
|
user = factories.UserFactory()
|
||||||
|
client = APIClient()
|
||||||
|
client.force_login(
|
||||||
|
user, backend="core.authentication.backends.OIDCAuthenticationBackend"
|
||||||
|
)
|
||||||
|
|
||||||
|
mocked_response = {
|
||||||
|
"count": 0,
|
||||||
|
"next": None,
|
||||||
|
"previous": None,
|
||||||
|
"results": [{"title": "mocked title_search result"}],
|
||||||
|
}
|
||||||
|
mock_title_search.return_value = drf_response.Response(mocked_response)
|
||||||
|
|
||||||
|
parent = factories.DocumentFactory(title="parent", users=[user])
|
||||||
|
q = "alpha"
|
||||||
|
response = client.get(
|
||||||
|
"/api/v1.0/documents/search/", data={"q": "alpha", "path": parent.path}
|
||||||
|
)
|
||||||
|
|
||||||
|
# the search endpoint did not crash
|
||||||
|
assert response.status_code == 200
|
||||||
|
# fallback on title_search
|
||||||
|
assert mock_title_search.call_count == 1
|
||||||
|
assert mock_title_search.call_args[0][0].GET.get("q") == q
|
||||||
|
assert mock_title_search.call_args[0][0].GET.get("path") == parent.path
|
||||||
|
assert response.json() == mocked_response
|
||||||
|
|
||||||
|
|
||||||
@responses.activate
|
@responses.activate
|
||||||
def test_api_documents_search_invalid_params(indexer_settings):
|
def test_api_documents_search_invalid_params(indexer_settings):
|
||||||
"""Validate the format of documents as returned by the search view."""
|
"""Validate the format of documents as returned by the search view."""
|
||||||
indexer_settings.SEARCH_INDEXER_QUERY_URL = "http://find/api/v1.0/search"
|
indexer_settings.SEARCH_URL = "http://find/api/v1.0/search"
|
||||||
|
assert get_document_indexer() is not None
|
||||||
|
|
||||||
user = factories.UserFactory()
|
user = factories.UserFactory()
|
||||||
|
|
||||||
client = APIClient()
|
client = APIClient()
|
||||||
client.force_login(user)
|
client.force_login(
|
||||||
|
user, backend="core.authentication.backends.OIDCAuthenticationBackend"
|
||||||
|
)
|
||||||
|
|
||||||
response = client.get("/api/v1.0/documents/search/")
|
response = client.get("/api/v1.0/documents/search/")
|
||||||
|
|
||||||
assert response.status_code == 400
|
assert response.status_code == 400
|
||||||
assert response.json() == {"q": ["This field is required."]}
|
assert response.json() == {"q": ["This field is required."]}
|
||||||
|
|
||||||
response = client.get("/api/v1.0/documents/search/", data={"q": " "})
|
|
||||||
|
|
||||||
assert response.status_code == 400
|
|
||||||
assert response.json() == {"q": ["This field may not be blank."]}
|
|
||||||
|
|
||||||
response = client.get(
|
|
||||||
"/api/v1.0/documents/search/", data={"q": "any", "page": "NaN"}
|
|
||||||
)
|
|
||||||
|
|
||||||
assert response.status_code == 400
|
|
||||||
assert response.json() == {"page": ["A valid integer is required."]}
|
|
||||||
|
|
||||||
|
|
||||||
@responses.activate
|
@responses.activate
|
||||||
def test_api_documents_search_format(indexer_settings):
|
def test_api_documents_search_success(indexer_settings):
|
||||||
"""Validate the format of documents as returned by the search view."""
|
"""Validate the format of documents as returned by the search view."""
|
||||||
indexer_settings.SEARCH_INDEXER_QUERY_URL = "http://find/api/v1.0/search"
|
indexer_settings.SEARCH_URL = "http://find/api/v1.0/search"
|
||||||
|
|
||||||
assert get_document_indexer() is not None
|
assert get_document_indexer() is not None
|
||||||
|
|
||||||
user = factories.UserFactory()
|
document = {"id": "doc-123", "title": "alpha", "path": "path/to/alpha.pdf"}
|
||||||
|
|
||||||
client = APIClient()
|
|
||||||
client.force_login(user)
|
|
||||||
|
|
||||||
user_a, user_b, user_c = factories.UserFactory.create_batch(3)
|
|
||||||
document = factories.DocumentFactory(
|
|
||||||
title="alpha",
|
|
||||||
users=(user_a, user_c),
|
|
||||||
link_traces=(user, user_b),
|
|
||||||
)
|
|
||||||
access = factories.UserDocumentAccessFactory(document=document, user=user)
|
|
||||||
|
|
||||||
# Find response
|
# Find response
|
||||||
responses.add(
|
responses.add(
|
||||||
responses.POST,
|
responses.POST,
|
||||||
"http://find/api/v1.0/search",
|
"http://find/api/v1.0/search",
|
||||||
json=[
|
json=[
|
||||||
{"_id": str(document.pk)},
|
{
|
||||||
|
"_id": str(document["id"]),
|
||||||
|
"_source": {"title": document["title"], "path": document["path"]},
|
||||||
|
},
|
||||||
],
|
],
|
||||||
status=200,
|
status=200,
|
||||||
)
|
)
|
||||||
response = client.get("/api/v1.0/documents/search/", data={"q": "alpha"})
|
response = APIClient().get("/api/v1.0/documents/search/", data={"q": "alpha"})
|
||||||
|
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
content = response.json()
|
content = response.json()
|
||||||
@@ -177,249 +235,6 @@ def test_api_documents_search_format(indexer_settings):
|
|||||||
"next": None,
|
"next": None,
|
||||||
"previous": None,
|
"previous": None,
|
||||||
}
|
}
|
||||||
assert len(results) == 1
|
assert results == [
|
||||||
assert results[0] == {
|
{"id": document["id"], "title": document["title"], "path": document["path"]}
|
||||||
"id": str(document.id),
|
]
|
||||||
"abilities": document.get_abilities(user),
|
|
||||||
"ancestors_link_reach": None,
|
|
||||||
"ancestors_link_role": None,
|
|
||||||
"computed_link_reach": document.computed_link_reach,
|
|
||||||
"computed_link_role": document.computed_link_role,
|
|
||||||
"created_at": document.created_at.isoformat().replace("+00:00", "Z"),
|
|
||||||
"creator": str(document.creator.id),
|
|
||||||
"depth": 1,
|
|
||||||
"excerpt": document.excerpt,
|
|
||||||
"link_reach": document.link_reach,
|
|
||||||
"link_role": document.link_role,
|
|
||||||
"nb_accesses_ancestors": 3,
|
|
||||||
"nb_accesses_direct": 3,
|
|
||||||
"numchild": 0,
|
|
||||||
"path": document.path,
|
|
||||||
"title": document.title,
|
|
||||||
"updated_at": document.updated_at.isoformat().replace("+00:00", "Z"),
|
|
||||||
"deleted_at": None,
|
|
||||||
"user_role": access.role,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@responses.activate
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
"pagination, status, expected",
|
|
||||||
(
|
|
||||||
(
|
|
||||||
{"page": 1, "page_size": 10},
|
|
||||||
200,
|
|
||||||
{
|
|
||||||
"count": 10,
|
|
||||||
"previous": None,
|
|
||||||
"next": None,
|
|
||||||
"range": (0, None),
|
|
||||||
},
|
|
||||||
),
|
|
||||||
(
|
|
||||||
{},
|
|
||||||
200,
|
|
||||||
{
|
|
||||||
"count": 10,
|
|
||||||
"previous": None,
|
|
||||||
"next": None,
|
|
||||||
"range": (0, None),
|
|
||||||
"api_page_size": 21, # default page_size is 20
|
|
||||||
},
|
|
||||||
),
|
|
||||||
(
|
|
||||||
{"page": 2, "page_size": 10},
|
|
||||||
404,
|
|
||||||
{},
|
|
||||||
),
|
|
||||||
(
|
|
||||||
{"page": 1, "page_size": 5},
|
|
||||||
200,
|
|
||||||
{
|
|
||||||
"count": 10,
|
|
||||||
"previous": None,
|
|
||||||
"next": {"page": 2, "page_size": 5},
|
|
||||||
"range": (0, 5),
|
|
||||||
},
|
|
||||||
),
|
|
||||||
(
|
|
||||||
{"page": 2, "page_size": 5},
|
|
||||||
200,
|
|
||||||
{
|
|
||||||
"count": 10,
|
|
||||||
"previous": {"page_size": 5},
|
|
||||||
"next": None,
|
|
||||||
"range": (5, None),
|
|
||||||
},
|
|
||||||
),
|
|
||||||
({"page": 3, "page_size": 5}, 404, {}),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
def test_api_documents_search_pagination(
|
|
||||||
indexer_settings, pagination, status, expected
|
|
||||||
):
|
|
||||||
"""Documents should be ordered by descending "score" by default"""
|
|
||||||
indexer_settings.SEARCH_INDEXER_QUERY_URL = "http://find/api/v1.0/search"
|
|
||||||
|
|
||||||
assert get_document_indexer() is not None
|
|
||||||
|
|
||||||
user = factories.UserFactory()
|
|
||||||
|
|
||||||
client = APIClient()
|
|
||||||
client.force_login(user)
|
|
||||||
|
|
||||||
docs = factories.DocumentFactory.create_batch(10, title="alpha", users=[user])
|
|
||||||
|
|
||||||
docs_by_uuid = {str(doc.pk): doc for doc in docs}
|
|
||||||
api_results = [{"_id": id} for id in docs_by_uuid.keys()]
|
|
||||||
|
|
||||||
# reorder randomly to simulate score ordering
|
|
||||||
random.shuffle(api_results)
|
|
||||||
|
|
||||||
# Find response
|
|
||||||
# pylint: disable-next=assignment-from-none
|
|
||||||
api_search = responses.add(
|
|
||||||
responses.POST,
|
|
||||||
"http://find/api/v1.0/search",
|
|
||||||
json=api_results,
|
|
||||||
status=200,
|
|
||||||
)
|
|
||||||
|
|
||||||
response = client.get(
|
|
||||||
"/api/v1.0/documents/search/",
|
|
||||||
data={
|
|
||||||
"q": "alpha",
|
|
||||||
**pagination,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
assert response.status_code == status
|
|
||||||
|
|
||||||
if response.status_code < 300:
|
|
||||||
previous_url = (
|
|
||||||
build_search_url(q="alpha", **expected["previous"])
|
|
||||||
if expected["previous"]
|
|
||||||
else None
|
|
||||||
)
|
|
||||||
next_url = (
|
|
||||||
build_search_url(q="alpha", **expected["next"])
|
|
||||||
if expected["next"]
|
|
||||||
else None
|
|
||||||
)
|
|
||||||
start, end = expected["range"]
|
|
||||||
|
|
||||||
content = response.json()
|
|
||||||
|
|
||||||
assert content["count"] == expected["count"]
|
|
||||||
assert content["previous"] == previous_url
|
|
||||||
assert content["next"] == next_url
|
|
||||||
|
|
||||||
results = content.pop("results")
|
|
||||||
|
|
||||||
# The find api results ordering by score is kept
|
|
||||||
assert [r["id"] for r in results] == [r["_id"] for r in api_results[start:end]]
|
|
||||||
|
|
||||||
# Check the query parameters.
|
|
||||||
assert api_search.call_count == 1
|
|
||||||
assert api_search.calls[0].response.status_code == 200
|
|
||||||
assert json_loads(api_search.calls[0].request.body) == {
|
|
||||||
"q": "alpha",
|
|
||||||
"visited": [],
|
|
||||||
"services": ["docs"],
|
|
||||||
"nb_results": 50,
|
|
||||||
"order_by": "updated_at",
|
|
||||||
"order_direction": "desc",
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@responses.activate
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
"pagination, status, expected",
|
|
||||||
(
|
|
||||||
(
|
|
||||||
{"page": 1, "page_size": 10},
|
|
||||||
200,
|
|
||||||
{"count": 10, "previous": None, "next": None, "range": (0, None)},
|
|
||||||
),
|
|
||||||
(
|
|
||||||
{},
|
|
||||||
200,
|
|
||||||
{"count": 10, "previous": None, "next": None, "range": (0, None)},
|
|
||||||
),
|
|
||||||
(
|
|
||||||
{"page": 2, "page_size": 10},
|
|
||||||
404,
|
|
||||||
{},
|
|
||||||
),
|
|
||||||
(
|
|
||||||
{"page": 1, "page_size": 5},
|
|
||||||
200,
|
|
||||||
{
|
|
||||||
"count": 10,
|
|
||||||
"previous": None,
|
|
||||||
"next": {"page": 2, "page_size": 5},
|
|
||||||
"range": (0, 5),
|
|
||||||
},
|
|
||||||
),
|
|
||||||
(
|
|
||||||
{"page": 2, "page_size": 5},
|
|
||||||
200,
|
|
||||||
{
|
|
||||||
"count": 10,
|
|
||||||
"previous": {"page_size": 5},
|
|
||||||
"next": None,
|
|
||||||
"range": (5, None),
|
|
||||||
},
|
|
||||||
),
|
|
||||||
({"page": 3, "page_size": 5}, 404, {}),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
def test_api_documents_search_pagination_endpoint_is_none(
|
|
||||||
indexer_settings, pagination, status, expected
|
|
||||||
):
|
|
||||||
"""Documents should be ordered by descending "-updated_at" by default"""
|
|
||||||
indexer_settings.SEARCH_INDEXER_QUERY_URL = None
|
|
||||||
|
|
||||||
assert get_document_indexer() is None
|
|
||||||
|
|
||||||
user = factories.UserFactory()
|
|
||||||
|
|
||||||
client = APIClient()
|
|
||||||
client.force_login(user)
|
|
||||||
|
|
||||||
factories.DocumentFactory.create_batch(10, title="alpha", users=[user])
|
|
||||||
|
|
||||||
response = client.get(
|
|
||||||
"/api/v1.0/documents/search/",
|
|
||||||
data={
|
|
||||||
"q": "alpha",
|
|
||||||
**pagination,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
assert response.status_code == status
|
|
||||||
|
|
||||||
if response.status_code < 300:
|
|
||||||
previous_url = (
|
|
||||||
build_search_url(q="alpha", **expected["previous"])
|
|
||||||
if expected["previous"]
|
|
||||||
else None
|
|
||||||
)
|
|
||||||
next_url = (
|
|
||||||
build_search_url(q="alpha", **expected["next"])
|
|
||||||
if expected["next"]
|
|
||||||
else None
|
|
||||||
)
|
|
||||||
queryset = models.Document.objects.order_by("-updated_at")
|
|
||||||
start, end = expected["range"]
|
|
||||||
expected_results = [str(d.pk) for d in queryset[start:end]]
|
|
||||||
|
|
||||||
content = response.json()
|
|
||||||
|
|
||||||
assert content["count"] == expected["count"]
|
|
||||||
assert content["previous"] == previous_url
|
|
||||||
assert content["next"] == next_url
|
|
||||||
|
|
||||||
results = content.pop("results")
|
|
||||||
|
|
||||||
assert [r["id"] for r in results] == expected_results
|
|
||||||
|
|||||||
@@ -0,0 +1,956 @@
|
|||||||
|
"""
|
||||||
|
Tests for search API endpoint in impress's core app when indexer is not
|
||||||
|
available and a path param is given.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import random
|
||||||
|
|
||||||
|
from django.contrib.auth.models import AnonymousUser
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from rest_framework.test import APIClient
|
||||||
|
|
||||||
|
from core import factories
|
||||||
|
from core.api.filters import remove_accents
|
||||||
|
|
||||||
|
pytestmark = pytest.mark.django_db
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
|
||||||
|
def disable_indexer(indexer_settings):
|
||||||
|
"""Disable search indexer for all tests in this file."""
|
||||||
|
indexer_settings.SEARCH_INDEXER_CLASS = None
|
||||||
|
|
||||||
|
|
||||||
|
def test_api_documents_search_descendants_list_anonymous_public_standalone():
|
||||||
|
"""Anonymous users should be allowed to retrieve the descendants of a public document."""
|
||||||
|
document = factories.DocumentFactory(link_reach="public", title="doc parent")
|
||||||
|
child1, child2 = factories.DocumentFactory.create_batch(
|
||||||
|
2, parent=document, title="doc child"
|
||||||
|
)
|
||||||
|
grand_child = factories.DocumentFactory(parent=child1, title="doc grand child")
|
||||||
|
|
||||||
|
factories.UserDocumentAccessFactory(document=child1)
|
||||||
|
|
||||||
|
response = APIClient().get(
|
||||||
|
"/api/v1.0/documents/search/", data={"q": "doc", "path": document.path}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json() == {
|
||||||
|
"count": 4,
|
||||||
|
"next": None,
|
||||||
|
"previous": None,
|
||||||
|
"results": [
|
||||||
|
{
|
||||||
|
# the search should include the parent document itself
|
||||||
|
"abilities": document.get_abilities(AnonymousUser()),
|
||||||
|
"ancestors_link_role": None,
|
||||||
|
"ancestors_link_reach": None,
|
||||||
|
"computed_link_reach": document.computed_link_reach,
|
||||||
|
"computed_link_role": document.computed_link_role,
|
||||||
|
"created_at": document.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(document.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 1,
|
||||||
|
"excerpt": document.excerpt,
|
||||||
|
"id": str(document.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": document.link_reach,
|
||||||
|
"link_role": document.link_role,
|
||||||
|
"numchild": 2,
|
||||||
|
"nb_accesses_ancestors": 0,
|
||||||
|
"nb_accesses_direct": 0,
|
||||||
|
"path": document.path,
|
||||||
|
"title": document.title,
|
||||||
|
"updated_at": document.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": None,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"abilities": child1.get_abilities(AnonymousUser()),
|
||||||
|
"ancestors_link_reach": document.link_reach,
|
||||||
|
"ancestors_link_role": document.link_role,
|
||||||
|
"computed_link_reach": child1.computed_link_reach,
|
||||||
|
"computed_link_role": child1.computed_link_role,
|
||||||
|
"created_at": child1.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(child1.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 2,
|
||||||
|
"excerpt": child1.excerpt,
|
||||||
|
"id": str(child1.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": child1.link_reach,
|
||||||
|
"link_role": child1.link_role,
|
||||||
|
"numchild": 1,
|
||||||
|
"nb_accesses_ancestors": 1,
|
||||||
|
"nb_accesses_direct": 1,
|
||||||
|
"path": child1.path,
|
||||||
|
"title": child1.title,
|
||||||
|
"updated_at": child1.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": None,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"abilities": grand_child.get_abilities(AnonymousUser()),
|
||||||
|
"ancestors_link_reach": document.link_reach,
|
||||||
|
"ancestors_link_role": document.link_role
|
||||||
|
if (child1.link_reach == "public" and child1.link_role == "editor")
|
||||||
|
else document.link_role,
|
||||||
|
"computed_link_reach": "public",
|
||||||
|
"computed_link_role": grand_child.computed_link_role,
|
||||||
|
"created_at": grand_child.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(grand_child.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 3,
|
||||||
|
"excerpt": grand_child.excerpt,
|
||||||
|
"id": str(grand_child.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": grand_child.link_reach,
|
||||||
|
"link_role": grand_child.link_role,
|
||||||
|
"numchild": 0,
|
||||||
|
"nb_accesses_ancestors": 1,
|
||||||
|
"nb_accesses_direct": 0,
|
||||||
|
"path": grand_child.path,
|
||||||
|
"title": grand_child.title,
|
||||||
|
"updated_at": grand_child.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": None,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"abilities": child2.get_abilities(AnonymousUser()),
|
||||||
|
"ancestors_link_reach": document.link_reach,
|
||||||
|
"ancestors_link_role": document.link_role,
|
||||||
|
"computed_link_reach": "public",
|
||||||
|
"computed_link_role": child2.computed_link_role,
|
||||||
|
"created_at": child2.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(child2.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 2,
|
||||||
|
"excerpt": child2.excerpt,
|
||||||
|
"id": str(child2.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": child2.link_reach,
|
||||||
|
"link_role": child2.link_role,
|
||||||
|
"numchild": 0,
|
||||||
|
"nb_accesses_ancestors": 0,
|
||||||
|
"nb_accesses_direct": 0,
|
||||||
|
"path": child2.path,
|
||||||
|
"title": child2.title,
|
||||||
|
"updated_at": child2.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": None,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_api_documents_search_descendants_list_anonymous_public_parent():
|
||||||
|
"""
|
||||||
|
Anonymous users should be allowed to retrieve the descendants of a document who
|
||||||
|
has a public ancestor.
|
||||||
|
"""
|
||||||
|
grand_parent = factories.DocumentFactory(
|
||||||
|
link_reach="public", title="grand parent doc"
|
||||||
|
)
|
||||||
|
parent = factories.DocumentFactory(
|
||||||
|
parent=grand_parent,
|
||||||
|
link_reach=random.choice(["authenticated", "restricted"]),
|
||||||
|
title="parent doc",
|
||||||
|
)
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=random.choice(["authenticated", "restricted"]),
|
||||||
|
parent=parent,
|
||||||
|
title="document",
|
||||||
|
)
|
||||||
|
child1, child2 = factories.DocumentFactory.create_batch(
|
||||||
|
2, parent=document, title="child doc"
|
||||||
|
)
|
||||||
|
grand_child = factories.DocumentFactory(parent=child1, title="grand child doc")
|
||||||
|
|
||||||
|
factories.UserDocumentAccessFactory(document=child1)
|
||||||
|
|
||||||
|
response = APIClient().get(
|
||||||
|
"/api/v1.0/documents/search/", data={"q": "doc", "path": document.path}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json() == {
|
||||||
|
"count": 4,
|
||||||
|
"next": None,
|
||||||
|
"previous": None,
|
||||||
|
"results": [
|
||||||
|
{
|
||||||
|
# the search should include the parent document itself
|
||||||
|
"abilities": document.get_abilities(AnonymousUser()),
|
||||||
|
"ancestors_link_reach": "public",
|
||||||
|
"ancestors_link_role": grand_parent.link_role,
|
||||||
|
"computed_link_reach": document.computed_link_reach,
|
||||||
|
"computed_link_role": document.computed_link_role,
|
||||||
|
"created_at": document.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(document.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 3,
|
||||||
|
"excerpt": document.excerpt,
|
||||||
|
"id": str(document.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": document.link_reach,
|
||||||
|
"link_role": document.link_role,
|
||||||
|
"numchild": 2,
|
||||||
|
"nb_accesses_ancestors": 0,
|
||||||
|
"nb_accesses_direct": 0,
|
||||||
|
"path": document.path,
|
||||||
|
"title": document.title,
|
||||||
|
"updated_at": document.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": None,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"abilities": child1.get_abilities(AnonymousUser()),
|
||||||
|
"ancestors_link_reach": "public",
|
||||||
|
"ancestors_link_role": grand_parent.link_role,
|
||||||
|
"computed_link_reach": child1.computed_link_reach,
|
||||||
|
"computed_link_role": child1.computed_link_role,
|
||||||
|
"created_at": child1.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(child1.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 4,
|
||||||
|
"excerpt": child1.excerpt,
|
||||||
|
"id": str(child1.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": child1.link_reach,
|
||||||
|
"link_role": child1.link_role,
|
||||||
|
"numchild": 1,
|
||||||
|
"nb_accesses_ancestors": 1,
|
||||||
|
"nb_accesses_direct": 1,
|
||||||
|
"path": child1.path,
|
||||||
|
"title": child1.title,
|
||||||
|
"updated_at": child1.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": None,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"abilities": grand_child.get_abilities(AnonymousUser()),
|
||||||
|
"ancestors_link_reach": "public",
|
||||||
|
"ancestors_link_role": grand_child.ancestors_link_role,
|
||||||
|
"computed_link_reach": "public",
|
||||||
|
"computed_link_role": grand_child.computed_link_role,
|
||||||
|
"created_at": grand_child.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(grand_child.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 5,
|
||||||
|
"excerpt": grand_child.excerpt,
|
||||||
|
"id": str(grand_child.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": grand_child.link_reach,
|
||||||
|
"link_role": grand_child.link_role,
|
||||||
|
"numchild": 0,
|
||||||
|
"nb_accesses_ancestors": 1,
|
||||||
|
"nb_accesses_direct": 0,
|
||||||
|
"path": grand_child.path,
|
||||||
|
"title": grand_child.title,
|
||||||
|
"updated_at": grand_child.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": None,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"abilities": child2.get_abilities(AnonymousUser()),
|
||||||
|
"ancestors_link_reach": "public",
|
||||||
|
"ancestors_link_role": grand_parent.link_role,
|
||||||
|
"computed_link_reach": "public",
|
||||||
|
"computed_link_role": child2.computed_link_role,
|
||||||
|
"created_at": child2.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(child2.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 4,
|
||||||
|
"excerpt": child2.excerpt,
|
||||||
|
"id": str(child2.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": child2.link_reach,
|
||||||
|
"link_role": child2.link_role,
|
||||||
|
"numchild": 0,
|
||||||
|
"nb_accesses_ancestors": 0,
|
||||||
|
"nb_accesses_direct": 0,
|
||||||
|
"path": child2.path,
|
||||||
|
"title": child2.title,
|
||||||
|
"updated_at": child2.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": None,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("reach", ["restricted", "authenticated"])
|
||||||
|
def test_api_documents_search_descendants_list_anonymous_restricted_or_authenticated(
|
||||||
|
reach,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Anonymous users should not be able to retrieve descendants of a document that is not public.
|
||||||
|
"""
|
||||||
|
document = factories.DocumentFactory(title="parent", link_reach=reach)
|
||||||
|
child = factories.DocumentFactory(title="child", parent=document)
|
||||||
|
_grand_child = factories.DocumentFactory(title="grand child", parent=child)
|
||||||
|
|
||||||
|
response = APIClient().get(
|
||||||
|
"/api/v1.0/documents/search/", data={"q": "child", "path": document.path}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
assert response.json() == {
|
||||||
|
"detail": "You do not have permission to search within this document."
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("reach", ["public", "authenticated"])
|
||||||
|
def test_api_documents_search_descendants_list_authenticated_unrelated_public_or_authenticated(
|
||||||
|
reach,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Authenticated users should be able to retrieve the descendants of a public/authenticated
|
||||||
|
document to which they are not related.
|
||||||
|
"""
|
||||||
|
user = factories.UserFactory()
|
||||||
|
client = APIClient()
|
||||||
|
client.force_login(user)
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(link_reach=reach, title="parent")
|
||||||
|
child1, child2 = factories.DocumentFactory.create_batch(
|
||||||
|
2, parent=document, link_reach="restricted", title="child"
|
||||||
|
)
|
||||||
|
grand_child = factories.DocumentFactory(parent=child1, title="grand child")
|
||||||
|
|
||||||
|
factories.UserDocumentAccessFactory(document=child1)
|
||||||
|
|
||||||
|
response = client.get(
|
||||||
|
"/api/v1.0/documents/search/", data={"q": "child", "path": document.path}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json() == {
|
||||||
|
"count": 3,
|
||||||
|
"next": None,
|
||||||
|
"previous": None,
|
||||||
|
"results": [
|
||||||
|
{
|
||||||
|
"abilities": child1.get_abilities(user),
|
||||||
|
"ancestors_link_reach": reach,
|
||||||
|
"ancestors_link_role": document.link_role,
|
||||||
|
"computed_link_reach": child1.computed_link_reach,
|
||||||
|
"computed_link_role": child1.computed_link_role,
|
||||||
|
"created_at": child1.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(child1.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 2,
|
||||||
|
"excerpt": child1.excerpt,
|
||||||
|
"id": str(child1.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": child1.link_reach,
|
||||||
|
"link_role": child1.link_role,
|
||||||
|
"numchild": 1,
|
||||||
|
"nb_accesses_ancestors": 1,
|
||||||
|
"nb_accesses_direct": 1,
|
||||||
|
"path": child1.path,
|
||||||
|
"title": child1.title,
|
||||||
|
"updated_at": child1.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": None,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"abilities": grand_child.get_abilities(user),
|
||||||
|
"ancestors_link_reach": reach,
|
||||||
|
"ancestors_link_role": document.link_role,
|
||||||
|
"computed_link_reach": grand_child.computed_link_reach,
|
||||||
|
"computed_link_role": grand_child.computed_link_role,
|
||||||
|
"created_at": grand_child.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(grand_child.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 3,
|
||||||
|
"excerpt": grand_child.excerpt,
|
||||||
|
"id": str(grand_child.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": grand_child.link_reach,
|
||||||
|
"link_role": grand_child.link_role,
|
||||||
|
"numchild": 0,
|
||||||
|
"nb_accesses_ancestors": 1,
|
||||||
|
"nb_accesses_direct": 0,
|
||||||
|
"path": grand_child.path,
|
||||||
|
"title": grand_child.title,
|
||||||
|
"updated_at": grand_child.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": None,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"abilities": child2.get_abilities(user),
|
||||||
|
"ancestors_link_reach": reach,
|
||||||
|
"ancestors_link_role": document.link_role,
|
||||||
|
"computed_link_reach": child2.computed_link_reach,
|
||||||
|
"computed_link_role": child2.computed_link_role,
|
||||||
|
"created_at": child2.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(child2.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 2,
|
||||||
|
"excerpt": child2.excerpt,
|
||||||
|
"id": str(child2.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": child2.link_reach,
|
||||||
|
"link_role": child2.link_role,
|
||||||
|
"numchild": 0,
|
||||||
|
"nb_accesses_ancestors": 0,
|
||||||
|
"nb_accesses_direct": 0,
|
||||||
|
"path": child2.path,
|
||||||
|
"title": child2.title,
|
||||||
|
"updated_at": child2.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": None,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("reach", ["public", "authenticated"])
|
||||||
|
def test_api_documents_search_descendants_list_authenticated_public_or_authenticated_parent(
|
||||||
|
reach,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Authenticated users should be allowed to retrieve the descendants of a document who
|
||||||
|
has a public or authenticated ancestor.
|
||||||
|
"""
|
||||||
|
user = factories.UserFactory()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.force_login(user)
|
||||||
|
|
||||||
|
grand_parent = factories.DocumentFactory(link_reach=reach, title="grand parent")
|
||||||
|
parent = factories.DocumentFactory(
|
||||||
|
parent=grand_parent, link_reach="restricted", title="parent"
|
||||||
|
)
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach="restricted", parent=parent, title="document"
|
||||||
|
)
|
||||||
|
child1, child2 = factories.DocumentFactory.create_batch(
|
||||||
|
2, parent=document, link_reach="restricted", title="child"
|
||||||
|
)
|
||||||
|
grand_child = factories.DocumentFactory(parent=child1, title="grand child")
|
||||||
|
|
||||||
|
factories.UserDocumentAccessFactory(document=child1)
|
||||||
|
|
||||||
|
response = client.get(
|
||||||
|
"/api/v1.0/documents/search/", data={"q": "child", "path": document.path}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json() == {
|
||||||
|
"count": 3,
|
||||||
|
"next": None,
|
||||||
|
"previous": None,
|
||||||
|
"results": [
|
||||||
|
{
|
||||||
|
"abilities": child1.get_abilities(user),
|
||||||
|
"ancestors_link_reach": reach,
|
||||||
|
"ancestors_link_role": grand_parent.link_role,
|
||||||
|
"computed_link_reach": child1.computed_link_reach,
|
||||||
|
"computed_link_role": child1.computed_link_role,
|
||||||
|
"created_at": child1.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(child1.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 4,
|
||||||
|
"excerpt": child1.excerpt,
|
||||||
|
"id": str(child1.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": child1.link_reach,
|
||||||
|
"link_role": child1.link_role,
|
||||||
|
"numchild": 1,
|
||||||
|
"nb_accesses_ancestors": 1,
|
||||||
|
"nb_accesses_direct": 1,
|
||||||
|
"path": child1.path,
|
||||||
|
"title": child1.title,
|
||||||
|
"updated_at": child1.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": None,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"abilities": grand_child.get_abilities(user),
|
||||||
|
"ancestors_link_reach": reach,
|
||||||
|
"ancestors_link_role": grand_parent.link_role,
|
||||||
|
"computed_link_reach": grand_child.computed_link_reach,
|
||||||
|
"computed_link_role": grand_child.computed_link_role,
|
||||||
|
"created_at": grand_child.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(grand_child.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 5,
|
||||||
|
"excerpt": grand_child.excerpt,
|
||||||
|
"id": str(grand_child.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": grand_child.link_reach,
|
||||||
|
"link_role": grand_child.link_role,
|
||||||
|
"numchild": 0,
|
||||||
|
"nb_accesses_ancestors": 1,
|
||||||
|
"nb_accesses_direct": 0,
|
||||||
|
"path": grand_child.path,
|
||||||
|
"title": grand_child.title,
|
||||||
|
"updated_at": grand_child.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": None,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"abilities": child2.get_abilities(user),
|
||||||
|
"ancestors_link_reach": reach,
|
||||||
|
"ancestors_link_role": grand_parent.link_role,
|
||||||
|
"computed_link_reach": child2.computed_link_reach,
|
||||||
|
"computed_link_role": child2.computed_link_role,
|
||||||
|
"created_at": child2.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(child2.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 4,
|
||||||
|
"excerpt": child2.excerpt,
|
||||||
|
"id": str(child2.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": child2.link_reach,
|
||||||
|
"link_role": child2.link_role,
|
||||||
|
"numchild": 0,
|
||||||
|
"nb_accesses_ancestors": 0,
|
||||||
|
"nb_accesses_direct": 0,
|
||||||
|
"path": child2.path,
|
||||||
|
"title": child2.title,
|
||||||
|
"updated_at": child2.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": None,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_api_documents_search_descendants_list_authenticated_unrelated_restricted():
|
||||||
|
"""
|
||||||
|
Authenticated users should not be allowed to retrieve the descendants of a document that is
|
||||||
|
restricted and to which they are not related.
|
||||||
|
"""
|
||||||
|
user = factories.UserFactory(with_owned_document=True)
|
||||||
|
client = APIClient()
|
||||||
|
client.force_login(user)
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(link_reach="restricted", title="parent")
|
||||||
|
child1, _child2 = factories.DocumentFactory.create_batch(
|
||||||
|
2, parent=document, title="child"
|
||||||
|
)
|
||||||
|
_grand_child = factories.DocumentFactory(parent=child1, title="grand child")
|
||||||
|
|
||||||
|
factories.UserDocumentAccessFactory(document=child1)
|
||||||
|
|
||||||
|
response = client.get(
|
||||||
|
"/api/v1.0/documents/search/", data={"q": "child", "path": document.path}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
assert response.json() == {
|
||||||
|
"detail": "You do not have permission to search within this document."
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_api_documents_search_descendants_list_authenticated_related_direct():
|
||||||
|
"""
|
||||||
|
Authenticated users should be allowed to retrieve the descendants of a document
|
||||||
|
to which they are directly related whatever the role.
|
||||||
|
"""
|
||||||
|
user = factories.UserFactory()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.force_login(user)
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(title="parent")
|
||||||
|
access = factories.UserDocumentAccessFactory(document=document, user=user)
|
||||||
|
factories.UserDocumentAccessFactory(document=document)
|
||||||
|
|
||||||
|
child1, child2 = factories.DocumentFactory.create_batch(
|
||||||
|
2, parent=document, title="child"
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(document=child1)
|
||||||
|
|
||||||
|
grand_child = factories.DocumentFactory(parent=child1, title="grand child")
|
||||||
|
|
||||||
|
response = client.get(
|
||||||
|
"/api/v1.0/documents/search/", data={"q": "child", "path": document.path}
|
||||||
|
)
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json() == {
|
||||||
|
"count": 3,
|
||||||
|
"next": None,
|
||||||
|
"previous": None,
|
||||||
|
"results": [
|
||||||
|
{
|
||||||
|
"abilities": child1.get_abilities(user),
|
||||||
|
"ancestors_link_reach": child1.ancestors_link_reach,
|
||||||
|
"ancestors_link_role": child1.ancestors_link_role,
|
||||||
|
"computed_link_reach": child1.computed_link_reach,
|
||||||
|
"computed_link_role": child1.computed_link_role,
|
||||||
|
"created_at": child1.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(child1.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 2,
|
||||||
|
"excerpt": child1.excerpt,
|
||||||
|
"id": str(child1.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": child1.link_reach,
|
||||||
|
"link_role": child1.link_role,
|
||||||
|
"numchild": 1,
|
||||||
|
"nb_accesses_ancestors": 3,
|
||||||
|
"nb_accesses_direct": 1,
|
||||||
|
"path": child1.path,
|
||||||
|
"title": child1.title,
|
||||||
|
"updated_at": child1.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": access.role,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"abilities": grand_child.get_abilities(user),
|
||||||
|
"ancestors_link_reach": grand_child.ancestors_link_reach,
|
||||||
|
"ancestors_link_role": grand_child.ancestors_link_role,
|
||||||
|
"computed_link_reach": grand_child.computed_link_reach,
|
||||||
|
"computed_link_role": grand_child.computed_link_role,
|
||||||
|
"created_at": grand_child.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(grand_child.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 3,
|
||||||
|
"excerpt": grand_child.excerpt,
|
||||||
|
"id": str(grand_child.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": grand_child.link_reach,
|
||||||
|
"link_role": grand_child.link_role,
|
||||||
|
"numchild": 0,
|
||||||
|
"nb_accesses_ancestors": 3,
|
||||||
|
"nb_accesses_direct": 0,
|
||||||
|
"path": grand_child.path,
|
||||||
|
"title": grand_child.title,
|
||||||
|
"updated_at": grand_child.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": access.role,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"abilities": child2.get_abilities(user),
|
||||||
|
"ancestors_link_reach": child2.ancestors_link_reach,
|
||||||
|
"ancestors_link_role": child2.ancestors_link_role,
|
||||||
|
"computed_link_reach": child2.computed_link_reach,
|
||||||
|
"computed_link_role": child2.computed_link_role,
|
||||||
|
"created_at": child2.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(child2.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 2,
|
||||||
|
"excerpt": child2.excerpt,
|
||||||
|
"id": str(child2.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": child2.link_reach,
|
||||||
|
"link_role": child2.link_role,
|
||||||
|
"numchild": 0,
|
||||||
|
"nb_accesses_ancestors": 2,
|
||||||
|
"nb_accesses_direct": 0,
|
||||||
|
"path": child2.path,
|
||||||
|
"title": child2.title,
|
||||||
|
"updated_at": child2.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": access.role,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_api_documents_search_descendants_list_authenticated_related_parent():
|
||||||
|
"""
|
||||||
|
Authenticated users should be allowed to retrieve the descendants of a document if they
|
||||||
|
are related to one of its ancestors whatever the role.
|
||||||
|
"""
|
||||||
|
user = factories.UserFactory()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.force_login(user)
|
||||||
|
|
||||||
|
grand_parent = factories.DocumentFactory(link_reach="restricted", title="parent")
|
||||||
|
grand_parent_access = factories.UserDocumentAccessFactory(
|
||||||
|
document=grand_parent, user=user
|
||||||
|
)
|
||||||
|
|
||||||
|
parent = factories.DocumentFactory(
|
||||||
|
parent=grand_parent, link_reach="restricted", title="parent"
|
||||||
|
)
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
parent=parent, link_reach="restricted", title="document"
|
||||||
|
)
|
||||||
|
|
||||||
|
child1, child2 = factories.DocumentFactory.create_batch(
|
||||||
|
2, parent=document, title="child"
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(document=child1)
|
||||||
|
|
||||||
|
grand_child = factories.DocumentFactory(parent=child1, title="grand child")
|
||||||
|
|
||||||
|
response = client.get(
|
||||||
|
"/api/v1.0/documents/search/", data={"q": "child", "path": document.path}
|
||||||
|
)
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json() == {
|
||||||
|
"count": 3,
|
||||||
|
"next": None,
|
||||||
|
"previous": None,
|
||||||
|
"results": [
|
||||||
|
{
|
||||||
|
"abilities": child1.get_abilities(user),
|
||||||
|
"ancestors_link_reach": child1.ancestors_link_reach,
|
||||||
|
"ancestors_link_role": child1.ancestors_link_role,
|
||||||
|
"computed_link_reach": child1.computed_link_reach,
|
||||||
|
"computed_link_role": child1.computed_link_role,
|
||||||
|
"created_at": child1.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(child1.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 4,
|
||||||
|
"excerpt": child1.excerpt,
|
||||||
|
"id": str(child1.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": child1.link_reach,
|
||||||
|
"link_role": child1.link_role,
|
||||||
|
"numchild": 1,
|
||||||
|
"nb_accesses_ancestors": 2,
|
||||||
|
"nb_accesses_direct": 1,
|
||||||
|
"path": child1.path,
|
||||||
|
"title": child1.title,
|
||||||
|
"updated_at": child1.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": grand_parent_access.role,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"abilities": grand_child.get_abilities(user),
|
||||||
|
"ancestors_link_reach": grand_child.ancestors_link_reach,
|
||||||
|
"ancestors_link_role": grand_child.ancestors_link_role,
|
||||||
|
"computed_link_reach": grand_child.computed_link_reach,
|
||||||
|
"computed_link_role": grand_child.computed_link_role,
|
||||||
|
"created_at": grand_child.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(grand_child.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 5,
|
||||||
|
"excerpt": grand_child.excerpt,
|
||||||
|
"id": str(grand_child.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": grand_child.link_reach,
|
||||||
|
"link_role": grand_child.link_role,
|
||||||
|
"numchild": 0,
|
||||||
|
"nb_accesses_ancestors": 2,
|
||||||
|
"nb_accesses_direct": 0,
|
||||||
|
"path": grand_child.path,
|
||||||
|
"title": grand_child.title,
|
||||||
|
"updated_at": grand_child.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": grand_parent_access.role,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"abilities": child2.get_abilities(user),
|
||||||
|
"ancestors_link_reach": child2.ancestors_link_reach,
|
||||||
|
"ancestors_link_role": child2.ancestors_link_role,
|
||||||
|
"computed_link_reach": child2.computed_link_reach,
|
||||||
|
"computed_link_role": child2.computed_link_role,
|
||||||
|
"created_at": child2.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(child2.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 4,
|
||||||
|
"excerpt": child2.excerpt,
|
||||||
|
"id": str(child2.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": child2.link_reach,
|
||||||
|
"link_role": child2.link_role,
|
||||||
|
"numchild": 0,
|
||||||
|
"nb_accesses_ancestors": 1,
|
||||||
|
"nb_accesses_direct": 0,
|
||||||
|
"path": child2.path,
|
||||||
|
"title": child2.title,
|
||||||
|
"updated_at": child2.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": grand_parent_access.role,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_api_documents_search_descendants_list_authenticated_related_child():
|
||||||
|
"""
|
||||||
|
Authenticated users should not be allowed to retrieve all the descendants of a document
|
||||||
|
as a result of being related to one of its children.
|
||||||
|
"""
|
||||||
|
user = factories.UserFactory()
|
||||||
|
client = APIClient()
|
||||||
|
client.force_login(user)
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(link_reach="restricted")
|
||||||
|
child1, _child2 = factories.DocumentFactory.create_batch(2, parent=document)
|
||||||
|
_grand_child = factories.DocumentFactory(parent=child1)
|
||||||
|
|
||||||
|
factories.UserDocumentAccessFactory(document=child1, user=user)
|
||||||
|
factories.UserDocumentAccessFactory(document=document)
|
||||||
|
|
||||||
|
response = client.get(
|
||||||
|
"/api/v1.0/documents/search/", data={"q": "doc", "path": document.path}
|
||||||
|
)
|
||||||
|
assert response.status_code == 403
|
||||||
|
assert response.json() == {
|
||||||
|
"detail": "You do not have permission to search within this document."
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_api_documents_search_descendants_list_authenticated_related_team_none(
|
||||||
|
mock_user_teams,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Authenticated users should not be able to retrieve the descendants of a restricted document
|
||||||
|
related to teams in which the user is not.
|
||||||
|
"""
|
||||||
|
mock_user_teams.return_value = []
|
||||||
|
|
||||||
|
user = factories.UserFactory(with_owned_document=True)
|
||||||
|
client = APIClient()
|
||||||
|
client.force_login(user)
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(link_reach="restricted", title="document")
|
||||||
|
factories.DocumentFactory.create_batch(2, parent=document, title="child")
|
||||||
|
|
||||||
|
factories.TeamDocumentAccessFactory(document=document, team="myteam")
|
||||||
|
|
||||||
|
response = client.get(
|
||||||
|
"/api/v1.0/documents/search/", data={"q": "doc", "path": document.path}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
assert response.json() == {
|
||||||
|
"detail": "You do not have permission to search within this document."
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_api_documents_search_descendants_list_authenticated_related_team_members(
|
||||||
|
mock_user_teams,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Authenticated users should be allowed to retrieve the descendants of a document to which they
|
||||||
|
are related via a team whatever the role.
|
||||||
|
"""
|
||||||
|
mock_user_teams.return_value = ["myteam"]
|
||||||
|
|
||||||
|
user = factories.UserFactory()
|
||||||
|
client = APIClient()
|
||||||
|
client.force_login(user)
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(link_reach="restricted", title="parent")
|
||||||
|
child1, child2 = factories.DocumentFactory.create_batch(
|
||||||
|
2, parent=document, title="child"
|
||||||
|
)
|
||||||
|
grand_child = factories.DocumentFactory(parent=child1, title="grand child")
|
||||||
|
|
||||||
|
access = factories.TeamDocumentAccessFactory(document=document, team="myteam")
|
||||||
|
|
||||||
|
response = client.get(
|
||||||
|
"/api/v1.0/documents/search/", data={"q": "child", "path": document.path}
|
||||||
|
)
|
||||||
|
|
||||||
|
# pylint: disable=R0801
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json() == {
|
||||||
|
"count": 3,
|
||||||
|
"next": None,
|
||||||
|
"previous": None,
|
||||||
|
"results": [
|
||||||
|
{
|
||||||
|
"abilities": child1.get_abilities(user),
|
||||||
|
"ancestors_link_reach": child1.ancestors_link_reach,
|
||||||
|
"ancestors_link_role": child1.ancestors_link_role,
|
||||||
|
"computed_link_reach": child1.computed_link_reach,
|
||||||
|
"computed_link_role": child1.computed_link_role,
|
||||||
|
"created_at": child1.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(child1.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 2,
|
||||||
|
"excerpt": child1.excerpt,
|
||||||
|
"id": str(child1.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": child1.link_reach,
|
||||||
|
"link_role": child1.link_role,
|
||||||
|
"numchild": 1,
|
||||||
|
"nb_accesses_ancestors": 1,
|
||||||
|
"nb_accesses_direct": 0,
|
||||||
|
"path": child1.path,
|
||||||
|
"title": child1.title,
|
||||||
|
"updated_at": child1.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": access.role,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"abilities": grand_child.get_abilities(user),
|
||||||
|
"ancestors_link_reach": grand_child.ancestors_link_reach,
|
||||||
|
"ancestors_link_role": grand_child.ancestors_link_role,
|
||||||
|
"computed_link_reach": grand_child.computed_link_reach,
|
||||||
|
"computed_link_role": grand_child.computed_link_role,
|
||||||
|
"created_at": grand_child.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(grand_child.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 3,
|
||||||
|
"excerpt": grand_child.excerpt,
|
||||||
|
"id": str(grand_child.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": grand_child.link_reach,
|
||||||
|
"link_role": grand_child.link_role,
|
||||||
|
"numchild": 0,
|
||||||
|
"nb_accesses_ancestors": 1,
|
||||||
|
"nb_accesses_direct": 0,
|
||||||
|
"path": grand_child.path,
|
||||||
|
"title": grand_child.title,
|
||||||
|
"updated_at": grand_child.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": access.role,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"abilities": child2.get_abilities(user),
|
||||||
|
"ancestors_link_reach": child2.ancestors_link_reach,
|
||||||
|
"ancestors_link_role": child2.ancestors_link_role,
|
||||||
|
"computed_link_reach": child2.computed_link_reach,
|
||||||
|
"computed_link_role": child2.computed_link_role,
|
||||||
|
"created_at": child2.created_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"creator": str(child2.creator.id),
|
||||||
|
"deleted_at": None,
|
||||||
|
"depth": 2,
|
||||||
|
"excerpt": child2.excerpt,
|
||||||
|
"id": str(child2.id),
|
||||||
|
"is_favorite": False,
|
||||||
|
"link_reach": child2.link_reach,
|
||||||
|
"link_role": child2.link_role,
|
||||||
|
"numchild": 0,
|
||||||
|
"nb_accesses_ancestors": 1,
|
||||||
|
"nb_accesses_direct": 0,
|
||||||
|
"path": child2.path,
|
||||||
|
"title": child2.title,
|
||||||
|
"updated_at": child2.updated_at.isoformat().replace("+00:00", "Z"),
|
||||||
|
"user_role": access.role,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"query,nb_results",
|
||||||
|
[
|
||||||
|
("", 7), # Empty string
|
||||||
|
("Project Alpha", 1), # Exact match
|
||||||
|
("project", 2), # Partial match (case-insensitive)
|
||||||
|
("Guide", 2), # Word match within a title
|
||||||
|
("Special", 0), # No match (nonexistent keyword)
|
||||||
|
("2024", 2), # Match by numeric keyword
|
||||||
|
("velo", 1), # Accent-insensitive match (velo vs vélo)
|
||||||
|
("bêta", 1), # Accent-insensitive match (bêta vs beta)
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_api_documents_search_descendants_search_on_title(query, nb_results):
|
||||||
|
"""Authenticated users should be able to search documents by their unaccented title."""
|
||||||
|
user = factories.UserFactory()
|
||||||
|
client = APIClient()
|
||||||
|
client.force_login(user)
|
||||||
|
|
||||||
|
parent = factories.DocumentFactory(users=[user])
|
||||||
|
|
||||||
|
# Create documents with predefined titles
|
||||||
|
titles = [
|
||||||
|
"Project Alpha Documentation",
|
||||||
|
"Project Beta Overview",
|
||||||
|
"User Guide",
|
||||||
|
"Financial Report 2024",
|
||||||
|
"Annual Review 2024",
|
||||||
|
"Guide du vélo urbain", # <-- Title with accent for accent-insensitive test
|
||||||
|
]
|
||||||
|
for title in titles:
|
||||||
|
factories.DocumentFactory(title=title, parent=parent)
|
||||||
|
|
||||||
|
# Perform the search query
|
||||||
|
response = client.get(
|
||||||
|
"/api/v1.0/documents/search/", data={"q": query, "path": parent.path}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
results = response.json()["results"]
|
||||||
|
assert len(results) == nb_results
|
||||||
|
|
||||||
|
# Ensure all results contain the query in their title
|
||||||
|
for result in results:
|
||||||
|
assert (
|
||||||
|
remove_accents(query).lower().strip()
|
||||||
|
in remove_accents(result["title"]).lower()
|
||||||
|
)
|
||||||
@@ -0,0 +1,90 @@
"""
Tests for Find search feature flags
"""

from unittest import mock

from django.http import HttpResponse

import pytest
import responses
from rest_framework.test import APIClient
from waffle.testutils import override_flag

from core.enums import FeatureFlag, SearchType
from core.services.search_indexers import get_document_indexer

pytestmark = pytest.mark.django_db


@responses.activate
@mock.patch("core.api.viewsets.DocumentViewSet._title_search")
@mock.patch("core.api.viewsets.DocumentViewSet._search_with_indexer")
@pytest.mark.parametrize(
    "activated_flags,"
    "expected_search_type,"
    "expected_search_with_indexer_called,"
    "expected_title_search_called",
    [
        ([], SearchType.TITLE, False, True),
        ([FeatureFlag.FLAG_FIND_HYBRID_SEARCH], SearchType.HYBRID, True, False),
        (
            [
                FeatureFlag.FLAG_FIND_HYBRID_SEARCH,
                FeatureFlag.FLAG_FIND_FULL_TEXT_SEARCH,
            ],
            SearchType.HYBRID,
            True,
            False,
        ),
        ([FeatureFlag.FLAG_FIND_FULL_TEXT_SEARCH], SearchType.FULL_TEXT, True, False),
    ],
)
# pylint: disable=too-many-arguments, too-many-positional-arguments
def test_api_documents_search_success(  # noqa: PLR0913
    mock_search_with_indexer,
    mock_title_search,
    activated_flags,
    expected_search_type,
    expected_search_with_indexer_called,
    expected_title_search_called,
    indexer_settings,
):
    """
    Test that the API endpoint for searching documents returns a successful response
    with the expected search type according to the activated feature flags,
    and that the appropriate search method is called.
    """
    assert get_document_indexer() is not None

    mock_search_with_indexer.return_value = HttpResponse()
    mock_title_search.return_value = HttpResponse()

    with override_flag(
        FeatureFlag.FLAG_FIND_HYBRID_SEARCH,
        active=FeatureFlag.FLAG_FIND_HYBRID_SEARCH in activated_flags,
    ):
        with override_flag(
            FeatureFlag.FLAG_FIND_FULL_TEXT_SEARCH,
            active=FeatureFlag.FLAG_FIND_FULL_TEXT_SEARCH in activated_flags,
        ):
            response = APIClient().get(
                "/api/v1.0/documents/search/", data={"q": "alpha"}
            )

    assert response.status_code == 200

    if expected_search_with_indexer_called:
        mock_search_with_indexer.assert_called_once()
        assert (
            mock_search_with_indexer.call_args.kwargs["search_type"]
            == expected_search_type
        )
    else:
        assert not mock_search_with_indexer.called

    if expected_title_search_called:
        assert SearchType.TITLE == expected_search_type
        mock_title_search.assert_called_once()
    else:
        assert not mock_title_search.called
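# A hedged sketch (not the project's actual viewset code) of the dispatch the test above
# exercises: hybrid search wins over full-text, and title search is the fallback when no
# Find flag is active. The method names mirror the mock targets used in the test; the
# body and the string-valued FeatureFlag enum are assumptions.
import waffle

from core.enums import FeatureFlag, SearchType


class DocumentSearchDispatchSketch:
    """Minimal illustration of flag-driven search dispatch."""

    def search(self, request):
        if waffle.flag_is_active(request, FeatureFlag.FLAG_FIND_HYBRID_SEARCH):
            return self._search_with_indexer(request, search_type=SearchType.HYBRID)
        if waffle.flag_is_active(request, FeatureFlag.FLAG_FIND_FULL_TEXT_SEARCH):
            return self._search_with_indexer(request, search_type=SearchType.FULL_TEXT)
        return self._title_search(request)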
@@ -72,6 +72,7 @@ def test_api_documents_trashbin_format():
            "abilities": {
                "accesses_manage": False,
                "accesses_view": False,
                "ai_proxy": False,
                "ai_transform": False,
                "ai_translate": False,
                "attachment_upload": False,
@@ -100,6 +101,7 @@ def test_api_documents_trashbin_format():
                "partial_update": False,
                "restore": True,
                "retrieve": True,
                "search": False,
                "tree": True,
                "update": False,
                "versions_destroy": False,
@@ -1,8 +1,10 @@
"""
Tests for Documents API endpoint in impress's core app: update
"""
# pylint: disable=too-many-lines

import random
from unittest.mock import patch

from django.contrib.auth.models import AnonymousUser
from django.core.cache import cache
@@ -17,6 +19,25 @@ from core.tests.conftest import TEAM, USER, VIA
pytestmark = pytest.mark.django_db

# A valid Yjs document derived from YDOC_HELLO_WORLD_BASE64 with "Hello" replaced by "World",
# used in PATCH tests to guarantee a real content change distinct from what DocumentFactory
# produces.
YDOC_UPDATED_CONTENT_BASE64 = (
    "AR717vLVDgAHAQ5kb2N1bWVudC1zdG9yZQMKYmxvY2tHcm91cAcA9e7y1Q4AAw5ibG9ja0NvbnRh"
    "aW5lcgcA9e7y1Q4BAwdoZWFkaW5nBwD17vLVDgIGBgD17vLVDgMGaXRhbGljAnt9hPXu8tUOBAVX"
    "b3JsZIb17vLVDgkGaXRhbGljBG51bGwoAPXu8tUOAg10ZXh0QWxpZ25tZW50AXcEbGVmdCgA9e7y"
    "1Q4CBWxldmVsAX0BKAD17vLVDgECaWQBdyQwNGQ2MjM0MS04MzI2LTQyMzYtYTA4My00ODdlMjZm"
    "YWQyMzAoAPXu8tUOAQl0ZXh0Q29sb3IBdwdkZWZhdWx0KAD17vLVDgEPYmFja2dyb3VuZENvbG9y"
    "AXcHZGVmYXVsdIf17vLVDgEDDmJsb2NrQ29udGFpbmVyBwD17vLVDhADDmJ1bGxldExpc3RJdGVt"
    "BwD17vLVDhEGBAD17vLVDhIBd4b17vLVDhMEYm9sZAJ7fYT17vLVDhQCb3KG9e7y1Q4WBGJvbGQE"
    "bnVsbIT17vLVDhcCbGQoAPXu8tUOEQ10ZXh0QWxpZ25tZW50AXcEbGVmdCgA9e7y1Q4QAmlkAXck"
    "ZDM1MWUwNjgtM2U1NS00MjI2LThlYTUtYWJiMjYzMTk4ZTJhKAD17vLVDhAJdGV4dENvbG9yAXcH"
    "ZGVmYXVsdCgA9e7y1Q4QD2JhY2tncm91bmRDb2xvcgF3B2RlZmF1bHSH9e7y1Q4QAw5ibG9ja0Nv"
    "bnRhaW5lcgcA9e7y1Q4eAwlwYXJhZ3JhcGgoAPXu8tUOHw10ZXh0QWxpZ25tZW50AXcEbGVmdCgA"
    "9e7y1Q4eAmlkAXckODk3MDBjMDctZTBlMS00ZmUwLWFjYTItODQ5MzIwOWE3ZTQyKAD17vLVDh4J"
    "dGV4dENvbG9yAXcHZGVmYXVsdCgA9e7y1Q4eD2JhY2tncm91bmRDb2xvcgF3B2RlZmF1bHQA"
)


@pytest.mark.parametrize("via_parent", [True, False])
@pytest.mark.parametrize(
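# A hedged sketch of the kind of base64 validation that produces the
# "Invalid base64 content." error asserted in the update/patch tests below.
# The real serializer implementation in the project may differ.
import base64

from rest_framework import serializers


def validate_content_sketch(value: str) -> str:
    """Reject content that is not valid base64 before it reaches the model."""
    try:
        base64.b64decode(value, validate=True)
    except (ValueError, TypeError) as err:
        raise serializers.ValidationError("Invalid base64 content.") from err
    return value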
@@ -330,6 +351,7 @@ def test_api_documents_update_authenticated_no_websocket(settings):
    ws_resp = responses.get(endpoint_url, json={"count": 0, "exists": False})

    assert cache.get(f"docs:no-websocket:{document.id}") is None
    old_path = document.path

    response = client.put(
        f"/api/v1.0/documents/{document.id!s}/",
@@ -338,6 +360,8 @@ def test_api_documents_update_authenticated_no_websocket(settings):
    )
    assert response.status_code == 200

    document.refresh_from_db()
    assert document.path == old_path
    assert cache.get(f"docs:no-websocket:{document.id}") == session_key
    assert ws_resp.call_count == 1

@@ -446,6 +470,7 @@ def test_api_documents_update_user_connected_to_websocket(settings):
    ws_resp = responses.get(endpoint_url, json={"count": 3, "exists": True})

    assert cache.get(f"docs:no-websocket:{document.id}") is None
    old_path = document.path

    response = client.put(
        f"/api/v1.0/documents/{document.id!s}/",
@@ -453,6 +478,9 @@ def test_api_documents_update_user_connected_to_websocket(settings):
        format="json",
    )
    assert response.status_code == 200

    document.refresh_from_db()
    assert document.path == old_path
    assert cache.get(f"docs:no-websocket:{document.id}") is None
    assert ws_resp.call_count == 1

@@ -486,6 +514,7 @@ def test_api_documents_update_websocket_server_unreachable_fallback_to_no_websoc
    ws_resp = responses.get(endpoint_url, status=500)

    assert cache.get(f"docs:no-websocket:{document.id}") is None
    old_path = document.path

    response = client.put(
        f"/api/v1.0/documents/{document.id!s}/",
@@ -494,6 +523,8 @@ def test_api_documents_update_websocket_server_unreachable_fallback_to_no_websoc
    )
    assert response.status_code == 200

    document.refresh_from_db()
    assert document.path == old_path
    assert cache.get(f"docs:no-websocket:{document.id}") == session_key
    assert ws_resp.call_count == 1

@@ -605,6 +636,7 @@ def test_api_documents_update_force_websocket_param_to_true(settings):
    ws_resp = responses.get(endpoint_url, status=500)

    assert cache.get(f"docs:no-websocket:{document.id}") is None
    old_path = document.path

    response = client.put(
        f"/api/v1.0/documents/{document.id!s}/",
@@ -613,6 +645,8 @@ def test_api_documents_update_force_websocket_param_to_true(settings):
    )
    assert response.status_code == 200

    document.refresh_from_db()
    assert document.path == old_path
    assert cache.get(f"docs:no-websocket:{document.id}") is None
    assert ws_resp.call_count == 0

@@ -643,6 +677,7 @@ def test_api_documents_update_feature_flag_disabled(settings):
    ws_resp = responses.get(endpoint_url, status=500)

    assert cache.get(f"docs:no-websocket:{document.id}") is None
    old_path = document.path

    response = client.put(
        f"/api/v1.0/documents/{document.id!s}/",
@@ -651,6 +686,8 @@ def test_api_documents_update_feature_flag_disabled(settings):
    )
    assert response.status_code == 200

    document.refresh_from_db()
    assert document.path == old_path
    assert cache.get(f"docs:no-websocket:{document.id}") is None
    assert ws_resp.call_count == 0

@@ -716,3 +753,724 @@ def test_api_documents_update_invalid_content():
    )
    assert response.status_code == 400
    assert response.json() == {"content": ["Invalid base64 content."]}


# =============================================================================
# PATCH tests
# =============================================================================


@pytest.mark.parametrize("via_parent", [True, False])
@pytest.mark.parametrize(
    "reach, role",
    [
        ("restricted", "reader"),
        ("restricted", "editor"),
        ("authenticated", "reader"),
        ("authenticated", "editor"),
        ("public", "reader"),
    ],
)
def test_api_documents_patch_anonymous_forbidden(reach, role, via_parent):
    """
    Anonymous users should not be allowed to patch a document when link
    configuration does not allow it.
    """
    if via_parent:
        grand_parent = factories.DocumentFactory(link_reach=reach, link_role=role)
        parent = factories.DocumentFactory(parent=grand_parent, link_reach="restricted")
        document = factories.DocumentFactory(parent=parent, link_reach="restricted")
    else:
        document = factories.DocumentFactory(link_reach=reach, link_role=role)

    old_document_values = serializers.DocumentSerializer(instance=document).data
    new_content = YDOC_UPDATED_CONTENT_BASE64

    response = APIClient().patch(
        f"/api/v1.0/documents/{document.id!s}/",
        {"content": new_content},
        format="json",
    )
    assert response.status_code == 401
    assert response.json() == {
        "detail": "Authentication credentials were not provided."
    }

    document.refresh_from_db()
    assert serializers.DocumentSerializer(instance=document).data == old_document_values


@pytest.mark.parametrize("via_parent", [True, False])
@pytest.mark.parametrize(
    "reach,role",
    [
        ("public", "reader"),
        ("authenticated", "reader"),
        ("restricted", "reader"),
        ("restricted", "editor"),
    ],
)
def test_api_documents_patch_authenticated_unrelated_forbidden(reach, role, via_parent):
    """
    Authenticated users should not be allowed to patch a document to which
    they are not related if the link configuration does not allow it.
    """
    user = factories.UserFactory(with_owned_document=True)

    client = APIClient()
    client.force_login(user)

    if via_parent:
        grand_parent = factories.DocumentFactory(link_reach=reach, link_role=role)
        parent = factories.DocumentFactory(parent=grand_parent, link_reach="restricted")
        document = factories.DocumentFactory(parent=parent, link_reach="restricted")
    else:
        document = factories.DocumentFactory(link_reach=reach, link_role=role)

    old_document_values = serializers.DocumentSerializer(instance=document).data
    new_content = YDOC_UPDATED_CONTENT_BASE64

    response = client.patch(
        f"/api/v1.0/documents/{document.id!s}/",
        {"content": new_content},
        format="json",
    )

    assert response.status_code == 403
    assert response.json() == {
        "detail": "You do not have permission to perform this action."
    }

    document.refresh_from_db()
    assert serializers.DocumentSerializer(instance=document).data == old_document_values


@pytest.mark.parametrize("via_parent", [True, False])
@pytest.mark.parametrize(
    "is_authenticated,reach,role",
    [
        (False, "public", "editor"),
        (True, "public", "editor"),
        (True, "authenticated", "editor"),
    ],
)
def test_api_documents_patch_anonymous_or_authenticated_unrelated(
    is_authenticated, reach, role, via_parent
):
    """
    Anonymous and authenticated users should be able to patch a document to which
    they are not related if the link configuration allows it.
    """
    client = APIClient()

    if is_authenticated:
        user = factories.UserFactory(with_owned_document=True)
        client.force_login(user)

    if via_parent:
        grand_parent = factories.DocumentFactory(link_reach=reach, link_role=role)
        parent = factories.DocumentFactory(parent=grand_parent, link_reach="restricted")
        document = factories.DocumentFactory(parent=parent, link_reach="restricted")
    else:
        document = factories.DocumentFactory(link_reach=reach, link_role=role)

    old_document_values = serializers.DocumentSerializer(instance=document).data
    old_path = document.path
    new_content = YDOC_UPDATED_CONTENT_BASE64

    response = client.patch(
        f"/api/v1.0/documents/{document.id!s}/",
        {"content": new_content, "websocket": True},
        format="json",
    )
    assert response.status_code == 200

    # Using document.refresh_from_db does not work because the content is cached.
    # Force reloading it by fetching the document from the database.
    document = models.Document.objects.get(id=document.id)
    assert document.path == old_path
    assert document.content == new_content
    document_values = serializers.DocumentSerializer(instance=document).data
    for key in [
        "id",
        "title",
        "link_reach",
        "link_role",
        "creator",
        "depth",
        "numchild",
        "path",
    ]:
        assert document_values[key] == old_document_values[key]


@pytest.mark.parametrize("via_parent", [True, False])
@pytest.mark.parametrize("via", VIA)
def test_api_documents_patch_authenticated_reader(via, via_parent, mock_user_teams):
    """Users who are readers of a document should not be allowed to patch it."""
    user = factories.UserFactory(with_owned_document=True)

    client = APIClient()
    client.force_login(user)

    if via_parent:
        grand_parent = factories.DocumentFactory(link_reach="restricted")
        parent = factories.DocumentFactory(parent=grand_parent, link_reach="restricted")
        document = factories.DocumentFactory(parent=parent, link_reach="restricted")
        access_document = grand_parent
    else:
        document = factories.DocumentFactory(link_reach="restricted")
        access_document = document

    if via == USER:
        factories.UserDocumentAccessFactory(
            document=access_document, user=user, role="reader"
        )
    elif via == TEAM:
        mock_user_teams.return_value = ["lasuite", "unknown"]
        factories.TeamDocumentAccessFactory(
            document=access_document, team="lasuite", role="reader"
        )

    old_document_values = serializers.DocumentSerializer(instance=document).data
    new_content = YDOC_UPDATED_CONTENT_BASE64

    response = client.patch(
        f"/api/v1.0/documents/{document.id!s}/",
        {"content": new_content},
        format="json",
    )

    assert response.status_code == 403
    assert response.json() == {
        "detail": "You do not have permission to perform this action."
    }

    document.refresh_from_db()
    assert serializers.DocumentSerializer(instance=document).data == old_document_values


@pytest.mark.parametrize("via_parent", [True, False])
@pytest.mark.parametrize("role", ["editor", "administrator", "owner"])
@pytest.mark.parametrize("via", VIA)
def test_api_documents_patch_authenticated_editor_administrator_or_owner(
    via, role, via_parent, mock_user_teams
):
    """A user who is editor, administrator or owner of a document should be allowed to patch it."""
    user = factories.UserFactory(with_owned_document=True)

    client = APIClient()
    client.force_login(user)

    if via_parent:
        grand_parent = factories.DocumentFactory(link_reach="restricted")
        parent = factories.DocumentFactory(parent=grand_parent, link_reach="restricted")
        document = factories.DocumentFactory(parent=parent, link_reach="restricted")
        access_document = grand_parent
    else:
        document = factories.DocumentFactory(link_reach="restricted")
        access_document = document

    if via == USER:
        factories.UserDocumentAccessFactory(
            document=access_document, user=user, role=role
        )
    elif via == TEAM:
        mock_user_teams.return_value = ["lasuite", "unknown"]
        factories.TeamDocumentAccessFactory(
            document=access_document, team="lasuite", role=role
        )

    old_document_values = serializers.DocumentSerializer(instance=document).data
    old_path = document.path
    new_content = YDOC_UPDATED_CONTENT_BASE64

    response = client.patch(
        f"/api/v1.0/documents/{document.id!s}/",
        {"content": new_content, "websocket": True},
        format="json",
    )
    assert response.status_code == 200

    # Using document.refresh_from_db does not work because the content is cached.
    # Force reloading it by fetching the document from the database.
    document = models.Document.objects.get(id=document.id)
    assert document.path == old_path
    assert document.content == new_content
    document_values = serializers.DocumentSerializer(instance=document).data
    for key in [
        "id",
        "title",
        "link_reach",
        "link_role",
        "creator",
        "depth",
        "numchild",
        "path",
        "nb_accesses_ancestors",
        "nb_accesses_direct",
    ]:
        assert document_values[key] == old_document_values[key]


@responses.activate
def test_api_documents_patch_authenticated_no_websocket(settings):
    """
    When a user patches the document, not connected to the websocket and is the first to update,
    the document should be updated.
    """
    user = factories.UserFactory(with_owned_document=True)
    client = APIClient()
    client.force_login(user)
    session_key = client.session.session_key

    document = factories.DocumentFactory(users=[(user, "editor")])
    new_content = YDOC_UPDATED_CONTENT_BASE64

    settings.COLLABORATION_API_URL = "http://example.com/"
    settings.COLLABORATION_SERVER_SECRET = "secret-token"
    settings.COLLABORATION_WS_NOT_CONNECTED_READY_ONLY = True
    endpoint_url = (
        f"{settings.COLLABORATION_API_URL}get-connections/"
        f"?room={document.id}&sessionKey={session_key}"
    )
    ws_resp = responses.get(endpoint_url, json={"count": 0, "exists": False})

    assert cache.get(f"docs:no-websocket:{document.id}") is None
    old_path = document.path

    response = client.patch(
        f"/api/v1.0/documents/{document.id!s}/",
        {"content": new_content},
        format="json",
    )
    assert response.status_code == 200

    # Using document.refresh_from_db does not work because the content is cached.
    # Force reloading it by fetching the document from the database.
    document = models.Document.objects.get(id=document.id)
    assert document.path == old_path
    assert document.content == new_content
    assert cache.get(f"docs:no-websocket:{document.id}") == session_key
    assert ws_resp.call_count == 1


@responses.activate
def test_api_documents_patch_authenticated_no_websocket_user_already_editing(settings):
    """
    When a user patches the document, not connected to the websocket and is not the first to
    update, the document should not be updated.
    """
    user = factories.UserFactory(with_owned_document=True)
    client = APIClient()
    client.force_login(user)
    session_key = client.session.session_key

    document = factories.DocumentFactory(users=[(user, "editor")])
    new_content = YDOC_UPDATED_CONTENT_BASE64

    settings.COLLABORATION_API_URL = "http://example.com/"
    settings.COLLABORATION_SERVER_SECRET = "secret-token"
    settings.COLLABORATION_WS_NOT_CONNECTED_READY_ONLY = True
    endpoint_url = (
        f"{settings.COLLABORATION_API_URL}get-connections/"
        f"?room={document.id}&sessionKey={session_key}"
    )
    ws_resp = responses.get(endpoint_url, json={"count": 0, "exists": False})

    cache.set(f"docs:no-websocket:{document.id}", "other_session_key")

    response = client.patch(
        f"/api/v1.0/documents/{document.id!s}/",
        {"content": new_content},
        format="json",
    )
    assert response.status_code == 403
    assert response.json() == {"detail": "You are not allowed to edit this document."}

    assert ws_resp.call_count == 1


@responses.activate
def test_api_documents_patch_no_websocket_other_user_connected_to_websocket(settings):
    """
    When a user patches the document, not connected to the websocket and another user is connected
    to the websocket, the document should not be updated.
    """
    user = factories.UserFactory(with_owned_document=True)
    client = APIClient()
    client.force_login(user)
    session_key = client.session.session_key

    document = factories.DocumentFactory(users=[(user, "editor")])
    new_content = YDOC_UPDATED_CONTENT_BASE64

    settings.COLLABORATION_API_URL = "http://example.com/"
    settings.COLLABORATION_SERVER_SECRET = "secret-token"
    settings.COLLABORATION_WS_NOT_CONNECTED_READY_ONLY = True
    endpoint_url = (
        f"{settings.COLLABORATION_API_URL}get-connections/"
        f"?room={document.id}&sessionKey={session_key}"
    )
    ws_resp = responses.get(endpoint_url, json={"count": 3, "exists": False})

    assert cache.get(f"docs:no-websocket:{document.id}") is None

    response = client.patch(
        f"/api/v1.0/documents/{document.id!s}/",
        {"content": new_content},
        format="json",
    )
    assert response.status_code == 403
    assert response.json() == {"detail": "You are not allowed to edit this document."}
    assert cache.get(f"docs:no-websocket:{document.id}") is None
    assert ws_resp.call_count == 1


@responses.activate
def test_api_documents_patch_user_connected_to_websocket(settings):
    """
    When a user patches the document while connected to the websocket, the document should be
    updated.
    """
    user = factories.UserFactory(with_owned_document=True)
    client = APIClient()
    client.force_login(user)
    session_key = client.session.session_key

    document = factories.DocumentFactory(users=[(user, "editor")])
    new_content = YDOC_UPDATED_CONTENT_BASE64

    settings.COLLABORATION_API_URL = "http://example.com/"
    settings.COLLABORATION_SERVER_SECRET = "secret-token"
    settings.COLLABORATION_WS_NOT_CONNECTED_READY_ONLY = True
    endpoint_url = (
        f"{settings.COLLABORATION_API_URL}get-connections/"
        f"?room={document.id}&sessionKey={session_key}"
    )
    ws_resp = responses.get(endpoint_url, json={"count": 3, "exists": True})

    assert cache.get(f"docs:no-websocket:{document.id}") is None
    old_path = document.path

    response = client.patch(
        f"/api/v1.0/documents/{document.id!s}/",
        {"content": new_content},
        format="json",
    )
    assert response.status_code == 200

    # Using document.refresh_from_db does not work because the content is cached.
    # Force reloading it by fetching the document from the database.
    document = models.Document.objects.get(id=document.id)
    assert document.path == old_path
    assert document.content == new_content
    assert cache.get(f"docs:no-websocket:{document.id}") is None
    assert ws_resp.call_count == 1


@responses.activate
def test_api_documents_patch_websocket_server_unreachable_fallback_to_no_websocket(
    settings,
):
    """
    When the websocket server is unreachable, the patch should be applied as if the user were
    not connected to the websocket.
    """
    user = factories.UserFactory(with_owned_document=True)
    client = APIClient()
    client.force_login(user)
    session_key = client.session.session_key

    document = factories.DocumentFactory(users=[(user, "editor")])
    new_content = YDOC_UPDATED_CONTENT_BASE64

    settings.COLLABORATION_API_URL = "http://example.com/"
    settings.COLLABORATION_SERVER_SECRET = "secret-token"
    settings.COLLABORATION_WS_NOT_CONNECTED_READY_ONLY = True
    endpoint_url = (
        f"{settings.COLLABORATION_API_URL}get-connections/"
        f"?room={document.id}&sessionKey={session_key}"
    )
    ws_resp = responses.get(endpoint_url, status=500)

    assert cache.get(f"docs:no-websocket:{document.id}") is None
    old_path = document.path

    response = client.patch(
        f"/api/v1.0/documents/{document.id!s}/",
        {"content": new_content},
        format="json",
    )
    assert response.status_code == 200

    # Using document.refresh_from_db does not work because the content is cached.
    # Force reloading it by fetching the document from the database.
    document = models.Document.objects.get(id=document.id)
    assert document.path == old_path
    assert document.content == new_content
    assert cache.get(f"docs:no-websocket:{document.id}") == session_key
    assert ws_resp.call_count == 1


@responses.activate
def test_api_documents_patch_websocket_server_unreachable_fallback_to_no_websocket_other_users(
    settings,
):
    """
    When the websocket server is unreachable, the behavior falls back to no-websocket.
    If another user is already editing, the patch must be denied.
    """
    user = factories.UserFactory(with_owned_document=True)
    client = APIClient()
    client.force_login(user)
    session_key = client.session.session_key

    document = factories.DocumentFactory(users=[(user, "editor")])
    new_content = YDOC_UPDATED_CONTENT_BASE64

    settings.COLLABORATION_API_URL = "http://example.com/"
    settings.COLLABORATION_SERVER_SECRET = "secret-token"
    settings.COLLABORATION_WS_NOT_CONNECTED_READY_ONLY = True
    endpoint_url = (
        f"{settings.COLLABORATION_API_URL}get-connections/"
        f"?room={document.id}&sessionKey={session_key}"
    )
    ws_resp = responses.get(endpoint_url, status=500)

    cache.set(f"docs:no-websocket:{document.id}", "other_session_key")

    response = client.patch(
        f"/api/v1.0/documents/{document.id!s}/",
        {"content": new_content},
        format="json",
    )
    assert response.status_code == 403

    assert cache.get(f"docs:no-websocket:{document.id}") == "other_session_key"
    assert ws_resp.call_count == 1


@responses.activate
def test_api_documents_patch_websocket_server_room_not_found_fallback_to_no_websocket_other_users(
    settings,
):
    """
    When the WebSocket server does not have the room created, the logic should fall back to
    no-WebSocket. If another user is already editing, the patch must be denied.
    """
    user = factories.UserFactory(with_owned_document=True)
    client = APIClient()
    client.force_login(user)
    session_key = client.session.session_key

    document = factories.DocumentFactory(users=[(user, "editor")])
    new_content = YDOC_UPDATED_CONTENT_BASE64

    settings.COLLABORATION_API_URL = "http://example.com/"
    settings.COLLABORATION_SERVER_SECRET = "secret-token"
    settings.COLLABORATION_WS_NOT_CONNECTED_READY_ONLY = True
    endpoint_url = (
        f"{settings.COLLABORATION_API_URL}get-connections/"
        f"?room={document.id}&sessionKey={session_key}"
    )
    ws_resp = responses.get(endpoint_url, status=404)

    cache.set(f"docs:no-websocket:{document.id}", "other_session_key")

    response = client.patch(
        f"/api/v1.0/documents/{document.id!s}/",
        {"content": new_content},
        format="json",
    )
    assert response.status_code == 403

    assert cache.get(f"docs:no-websocket:{document.id}") == "other_session_key"
    assert ws_resp.call_count == 1


@responses.activate
def test_api_documents_patch_force_websocket_param_to_true(settings):
    """
    When the websocket parameter is set to true, the patch should be applied without any check.
    """
    user = factories.UserFactory(with_owned_document=True)
    client = APIClient()
    client.force_login(user)
    session_key = client.session.session_key

    document = factories.DocumentFactory(users=[(user, "editor")])
    new_content = YDOC_UPDATED_CONTENT_BASE64

    settings.COLLABORATION_API_URL = "http://example.com/"
    settings.COLLABORATION_SERVER_SECRET = "secret-token"
    endpoint_url = (
        f"{settings.COLLABORATION_API_URL}get-connections/"
        f"?room={document.id}&sessionKey={session_key}"
    )
    ws_resp = responses.get(endpoint_url, status=500)

    assert cache.get(f"docs:no-websocket:{document.id}") is None
    old_path = document.path

    response = client.patch(
        f"/api/v1.0/documents/{document.id!s}/",
        {"content": new_content, "websocket": True},
        format="json",
    )
    assert response.status_code == 200

    # Using document.refresh_from_db does not work because the content is cached.
    # Force reloading it by fetching the document from the database.
    document = models.Document.objects.get(id=document.id)
    assert document.path == old_path
    assert document.content == new_content
    assert cache.get(f"docs:no-websocket:{document.id}") is None
    assert ws_resp.call_count == 0


@responses.activate
def test_api_documents_patch_feature_flag_disabled(settings):
    """
    When the feature flag is disabled, the patch should be applied without any check.
    """
    user = factories.UserFactory(with_owned_document=True)
    client = APIClient()
    client.force_login(user)
    session_key = client.session.session_key

    document = factories.DocumentFactory(users=[(user, "editor")])
    new_content = YDOC_UPDATED_CONTENT_BASE64

    settings.COLLABORATION_API_URL = "http://example.com/"
    settings.COLLABORATION_SERVER_SECRET = "secret-token"
    settings.COLLABORATION_WS_NOT_CONNECTED_READY_ONLY = False
    endpoint_url = (
        f"{settings.COLLABORATION_API_URL}get-connections/"
        f"?room={document.id}&sessionKey={session_key}"
    )
    ws_resp = responses.get(endpoint_url, status=500)

    assert cache.get(f"docs:no-websocket:{document.id}") is None
    old_path = document.path

    response = client.patch(
        f"/api/v1.0/documents/{document.id!s}/",
        {"content": new_content},
        format="json",
    )
    assert response.status_code == 200

    # Using document.refresh_from_db does not work because the content is cached.
    # Force reloading it by fetching the document from the database.
    document = models.Document.objects.get(id=document.id)
    assert document.path == old_path
    assert document.content == new_content
    assert cache.get(f"docs:no-websocket:{document.id}") is None
    assert ws_resp.call_count == 0


@pytest.mark.parametrize("via", VIA)
def test_api_documents_patch_administrator_or_owner_of_another(via, mock_user_teams):
    """
    Being administrator or owner of a document should not grant authorization to patch
    another document.
    """
    user = factories.UserFactory(with_owned_document=True)

    client = APIClient()
    client.force_login(user)

    document = factories.DocumentFactory()
    if via == USER:
        factories.UserDocumentAccessFactory(
            document=document, user=user, role=random.choice(["administrator", "owner"])
        )
    elif via == TEAM:
        mock_user_teams.return_value = ["lasuite", "unknown"]
        factories.TeamDocumentAccessFactory(
            document=document,
            team="lasuite",
            role=random.choice(["administrator", "owner"]),
        )

    other_document = factories.DocumentFactory(title="Old title", link_role="reader")
    old_document_values = serializers.DocumentSerializer(instance=other_document).data
    new_content = YDOC_UPDATED_CONTENT_BASE64

    response = client.patch(
        f"/api/v1.0/documents/{other_document.id!s}/",
        {"content": new_content},
        format="json",
    )

    assert response.status_code == 403

    other_document.refresh_from_db()
    assert (
        serializers.DocumentSerializer(instance=other_document).data
        == old_document_values
    )


def test_api_documents_patch_invalid_content():
    """
    Patching a document with content that is not base64 encoded should raise a validation error.
    """
    user = factories.UserFactory(with_owned_document=True)
    client = APIClient()
    client.force_login(user)

    document = factories.DocumentFactory(users=[[user, "owner"]])

    response = client.patch(
        f"/api/v1.0/documents/{document.id!s}/",
        {"content": "invalid content"},
        format="json",
    )
    assert response.status_code == 400
    assert response.json() == {"content": ["Invalid base64 content."]}


@responses.activate
def test_api_documents_patch_empty_body(settings):
    """
    When the request data is empty, the document should not be updated.
    The `updated_at` property should not change, asserting that no update is made in the database.
    """
    user = factories.UserFactory()

    client = APIClient()
    client.force_login(user)
    session_key = client.session.session_key

    document = factories.DocumentFactory(users=[(user, "owner")], creator=user)
    document_updated_at = document.updated_at

    settings.COLLABORATION_API_URL = "http://example.com/"
    settings.COLLABORATION_SERVER_SECRET = "secret-token"
    settings.COLLABORATION_WS_NOT_CONNECTED_READY_ONLY = True
    endpoint_url = (
        f"{settings.COLLABORATION_API_URL}get-connections/"
        f"?room={document.id}&sessionKey={session_key}"
    )
    ws_resp = responses.get(endpoint_url, json={"count": 3, "exists": True})

    assert cache.get(f"docs:no-websocket:{document.id}") is None

    old_document_values = serializers.DocumentSerializer(instance=document).data

    with patch("core.models.Document.save") as mock_document_save:
        response = client.patch(
            f"/api/v1.0/documents/{document.id!s}/",
            content_type="application/json",
        )
        mock_document_save.assert_not_called()
    assert response.status_code == 200

    document = models.Document.objects.get(id=document.id)
    new_document_values = serializers.DocumentSerializer(instance=document).data
    assert new_document_values == old_document_values
    assert document_updated_at == document.updated_at
    assert cache.get(f"docs:no-websocket:{document.id}") is None
    assert ws_resp.call_count == 1
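# A hedged sketch (not the actual viewset code) of the editing gate the PATCH and PUT
# tests above describe: when COLLABORATION_WS_NOT_CONNECTED_READY_ONLY is enabled and the
# client did not declare a websocket connection, only one detached session may edit a
# document at a time, tracked through the "docs:no-websocket:<id>" cache key. The helper
# name and exact flow are assumptions inferred from the assertions in those tests.
from django.core.cache import cache


def can_edit_without_websocket(document_id, session_key, connection_count, is_connected):
    """Return True when this session is allowed to edit without being in the room."""
    if is_connected:
        # The editor is already in the collaboration room: nothing to gate.
        return True
    if connection_count > 0:
        # Other collaborators are connected through the websocket server: refuse.
        return False
    cache_key = f"docs:no-websocket:{document_id}"
    current_editor = cache.get(cache_key)
    if current_editor is None or current_editor == session_key:
        # Remember who holds the "detached editor" slot for this document.
        cache.set(cache_key, session_key)
        return True
    return False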
src/backend/core/tests/external_api/__init__.py (new empty file)
@@ -0,0 +1,772 @@
"""
Tests for the Resource Server API for documents.

Not testing external API endpoints that are already tested in the /api
because the resource server viewsets inherit from the api viewsets.

"""

from datetime import timedelta
from io import BytesIO
from unittest.mock import patch

from django.test import override_settings
from django.utils import timezone

import pytest
from rest_framework.test import APIClient

from core import factories, models
from core.services import mime_types

pytestmark = pytest.mark.django_db

# pylint: disable=unused-argument


def test_external_api_documents_retrieve_anonymous_public_standalone():
    """
    Anonymous users SHOULD NOT be allowed to retrieve a document from the external
    API if the resource server is not enabled.
    """
    document = factories.DocumentFactory(link_reach="public")

    response = APIClient().get(f"/external_api/v1.0/documents/{document.id!s}/")

    assert response.status_code == 404


def test_external_api_documents_list_connected_not_resource_server():
    """
    Connected users SHOULD NOT be allowed to list documents if the resource server is not enabled.
    """
    user = factories.UserFactory()
    client = APIClient()
    client.force_login(user)

    document = factories.DocumentFactory(link_reach=models.LinkReachChoices.RESTRICTED)
    factories.UserDocumentAccessFactory(document=document, user=user, role="reader")

    response = client.get("/external_api/v1.0/documents/")

    assert response.status_code == 404


def test_external_api_documents_list_connected_resource_server(
    user_token, resource_server_backend, user_specific_sub
):
    """Connected users should be allowed to list documents from a resource server."""
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")

    document = factories.DocumentFactory(link_reach=models.LinkReachChoices.RESTRICTED)
    factories.UserDocumentAccessFactory(
        document=document, user=user_specific_sub, role="reader"
    )

    response = client.get("/external_api/v1.0/documents/")

    assert response.status_code == 200


def test_external_api_documents_list_connected_resource_server_with_invalid_token(
    user_token, resource_server_backend
):
    """A user with an invalid sub SHOULD NOT be allowed to retrieve documents
    from a resource server."""
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")

    response = client.get("/external_api/v1.0/documents/")

    assert response.status_code == 401


def test_external_api_documents_retrieve_connected_resource_server_with_wrong_abilities(
    user_token, user_specific_sub, resource_server_backend
):
    """
    A user with wrong abilities SHOULD NOT be allowed to retrieve a document from
    a resource server.
    """

    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")

    document = factories.DocumentFactory(link_reach=models.LinkReachChoices.RESTRICTED)

    response = client.get(f"/external_api/v1.0/documents/{document.id!s}/")

    assert response.status_code == 403


def test_external_api_documents_retrieve_connected_resource_server_using_access_token(
    user_token, resource_server_backend, user_specific_sub
):
    """
    A user with an access token SHOULD be allowed to retrieve a document from
    a resource server.
    """

    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
    document = factories.DocumentFactory(link_reach=models.LinkReachChoices.RESTRICTED)
    factories.UserDocumentAccessFactory(
        document=document, user=user_specific_sub, role=models.LinkRoleChoices.READER
    )

    response = client.get(f"/external_api/v1.0/documents/{document.id!s}/")

    assert response.status_code == 200


def test_external_api_documents_create_root_success(
    user_token, resource_server_backend, user_specific_sub
):
    """
    Users with an access token should be able to create a root document through the resource
    server and should automatically be declared as the owner of the newly created document.
    """
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")

    response = client.post(
        "/external_api/v1.0/documents/",
        {
            "title": "Test Root Document",
        },
    )

    assert response.status_code == 201

    data = response.json()
    document = models.Document.objects.get(id=data["id"])

    assert document.title == "Test Root Document"
    assert document.creator == user_specific_sub
    assert document.accesses.filter(role="owner", user=user_specific_sub).exists()


def test_external_api_documents_create_subdocument_owner_success(
    user_token, resource_server_backend, user_specific_sub
):
    """
    Users with an access token SHOULD BE able to create a sub-document through the resource
    server when they have OWNER permissions on the parent document.
    The creator is set to the authenticated user, and permissions are inherited
    from the parent document.
    """
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")

    # Create a parent document first
    parent_document = factories.DocumentFactory(
        link_reach=models.LinkReachChoices.RESTRICTED,
        creator=user_specific_sub,
    )
    factories.UserDocumentAccessFactory(
        document=parent_document,
        user=user_specific_sub,
        role=models.RoleChoices.OWNER,
    )

    response = client.post(
        f"/external_api/v1.0/documents/{parent_document.id}/children/",
        {
            "title": "Test Sub Document",
        },
    )

    assert response.status_code == 201

    data = response.json()
    document = models.Document.objects.get(id=data["id"])

    assert document.title == "Test Sub Document"
    assert document.creator == user_specific_sub
    assert document.get_parent() == parent_document
    # Child documents inherit permissions from parent, no direct access needed
    assert not document.accesses.exists()


def test_external_api_documents_create_subdocument_editor_success(
    user_token, resource_server_backend, user_specific_sub
):
    """
    Users with an access token SHOULD BE able to create a sub-document through the resource
    server when they have EDITOR permissions on the parent document.
    Permissions are inherited from the parent document.
    """
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")

    # Create a parent document first
    parent_document = factories.DocumentFactory(
        link_reach=models.LinkReachChoices.RESTRICTED,
    )
    factories.UserDocumentAccessFactory(
        document=parent_document,
        user=user_specific_sub,
        role=models.RoleChoices.EDITOR,
    )

    response = client.post(
        f"/external_api/v1.0/documents/{parent_document.id}/children/",
        {
            "title": "Test Sub Document",
        },
    )

    assert response.status_code == 201

    data = response.json()
    document = models.Document.objects.get(id=data["id"])

    assert document.title == "Test Sub Document"
    assert document.creator == user_specific_sub
    assert document.get_parent() == parent_document
    # Child documents inherit permissions from parent, no direct access needed
    assert not document.accesses.exists()


def test_external_api_documents_create_subdocument_reader_not_allowed(
    user_token, resource_server_backend, user_specific_sub
):
    """
    Users with an access token SHOULD NOT be able to create a sub-document through the resource
    server when they have READER permissions on the parent document.
    """
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")

    # Create a parent document first
    parent_document = factories.DocumentFactory(
        link_reach=models.LinkReachChoices.RESTRICTED,
    )
    factories.UserDocumentAccessFactory(
        document=parent_document,
        user=user_specific_sub,
        role=models.RoleChoices.READER,
    )

    response = client.post(
        f"/external_api/v1.0/documents/{parent_document.id}/children/",
        {
            "title": "Test Sub Document",
        },
    )

    assert response.status_code == 403


@patch("core.services.converter_services.Converter.convert")
def test_external_api_documents_create_with_markdown_file_success(
    mock_convert, user_token, resource_server_backend, user_specific_sub
):
    """
    Users with an access token should be able to create documents through the resource
    server by uploading a Markdown file and should automatically be declared as the owner
    of the newly created document.
    """

    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")

    # Mock the conversion
    converted_yjs = "base64encodedyjscontent"
    mock_convert.return_value = converted_yjs

    # Create a fake Markdown file
    file_content = b"# Test Document\n\nThis is a test."
    file = BytesIO(file_content)
    file.name = "readme.md"

    response = client.post(
        "/external_api/v1.0/documents/",
        {
            "file": file,
        },
        format="multipart",
    )

    assert response.status_code == 201

    data = response.json()
    document = models.Document.objects.get(id=data["id"])

    assert document.title == "readme.md"
    assert document.content == converted_yjs
    assert document.accesses.filter(role="owner", user=user_specific_sub).exists()

    # Verify the converter was called correctly
    mock_convert.assert_called_once_with(
        file_content,
        content_type=mime_types.MARKDOWN,
        accept=mime_types.YJS,
    )


def test_external_api_documents_list_with_multiple_roles(
    user_token, resource_server_backend, user_specific_sub
):
    """
    List all documents accessible to a user with different roles and verify
    that associated permissions are correctly returned in the response.
    """
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")

    # Create documents with different roles for the user
    owner_document = factories.DocumentFactory(
        title="Owner Document",
        link_reach=models.LinkReachChoices.RESTRICTED,
        creator=user_specific_sub,
    )
    factories.UserDocumentAccessFactory(
        document=owner_document,
        user=user_specific_sub,
        role=models.RoleChoices.OWNER,
    )

    editor_document = factories.DocumentFactory(
        title="Editor Document",
        link_reach=models.LinkReachChoices.RESTRICTED,
    )
    factories.UserDocumentAccessFactory(
        document=editor_document,
        user=user_specific_sub,
        role=models.RoleChoices.EDITOR,
    )

    reader_document = factories.DocumentFactory(
        title="Reader Document",
        link_reach=models.LinkReachChoices.RESTRICTED,
    )
    factories.UserDocumentAccessFactory(
        document=reader_document,
        user=user_specific_sub,
        role=models.RoleChoices.READER,
    )

    # Create a document the user should NOT have access to
    other_document = factories.DocumentFactory(
        title="Other Document",
        link_reach=models.LinkReachChoices.RESTRICTED,
    )
    other_user = factories.UserFactory()
    factories.UserDocumentAccessFactory(
        document=other_document,
        user=other_user,
        role=models.RoleChoices.OWNER,
    )

    response = client.get("/external_api/v1.0/documents/")

    assert response.status_code == 200
    data = response.json()

    # Verify the response contains results
    assert "results" in data
    results = data["results"]

    # Verify the user can see exactly 3 documents (owner, editor, reader)
    result_ids = {result["id"] for result in results}
    assert len(results) == 3
    assert str(owner_document.id) in result_ids
    assert str(editor_document.id) in result_ids
    assert str(reader_document.id) in result_ids
    assert str(other_document.id) not in result_ids

    # Verify each document has the correct user_role field indicating permission level
    for result in results:
        if result["id"] == str(owner_document.id):
            assert result["title"] == "Owner Document"
            assert result["user_role"] == models.RoleChoices.OWNER
        elif result["id"] == str(editor_document.id):
            assert result["title"] == "Editor Document"
            assert result["user_role"] == models.RoleChoices.EDITOR
        elif result["id"] == str(reader_document.id):
            assert result["title"] == "Reader Document"
            assert result["user_role"] == models.RoleChoices.READER


def test_external_api_documents_duplicate_allowed(
    user_token, resource_server_backend, user_specific_sub
):
    """
    Connected users CAN DUPLICATE a document from a resource server
    when they have the required permissions on the document,
    as this action bypasses the permission checks.
    """
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")

    document = factories.DocumentFactory(
        link_reach=models.LinkReachChoices.RESTRICTED,
        creator=user_specific_sub,
    )
    factories.UserDocumentAccessFactory(
        document=document,
        user=user_specific_sub,
        role=models.RoleChoices.OWNER,
    )

    response = client.post(
        f"/external_api/v1.0/documents/{document.id!s}/duplicate/",
    )

    assert response.status_code == 201


# NOT allowed actions on resource server.


def test_external_api_documents_put_not_allowed(
    user_token, resource_server_backend, user_specific_sub
):
    """
    Connected users SHOULD NOT be allowed to PUT a document from a resource server.
    """
    client = APIClient()
    client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")

    document = factories.DocumentFactory(
        link_reach=models.LinkReachChoices.RESTRICTED,
        creator=user_specific_sub,
    )
    factories.UserDocumentAccessFactory(
        document=document,
        user=user_specific_sub,
        role=models.RoleChoices.OWNER,
    )

    response = client.put(
        f"/external_api/v1.0/documents/{document.id!s}/", {"title": "new title"}
    )

    assert response.status_code == 403


def test_external_api_document_delete_not_allowed(
    user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to delete a document from a resource server.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document,
|
||||||
|
user=user_specific_sub,
|
||||||
|
role=models.RoleChoices.OWNER,
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.delete(f"/external_api/v1.0/documents/{document.id!s}/")
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_documents_move_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to MOVE a document from a resource server.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document,
|
||||||
|
user=user_specific_sub,
|
||||||
|
role=models.RoleChoices.OWNER,
|
||||||
|
)
|
||||||
|
|
||||||
|
new_parent = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=new_parent,
|
||||||
|
user=user_specific_sub,
|
||||||
|
role=models.RoleChoices.OWNER,
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.post(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/move/",
|
||||||
|
{"target_document_id": new_parent.id},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_documents_restore_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to restore a document from a resource server.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document,
|
||||||
|
user=user_specific_sub,
|
||||||
|
role=models.RoleChoices.OWNER,
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.post(f"/external_api/v1.0/documents/{document.id!s}/restore/")
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("role", models.LinkRoleChoices.values)
|
||||||
|
@pytest.mark.parametrize("reach", models.LinkReachChoices.values)
|
||||||
|
def test_external_api_documents_trashbin_not_allowed(
|
||||||
|
role, reach, user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to list documents from the trashbin,
|
||||||
|
regardless of the document link reach and user role, from a resource server.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=reach,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
deleted_at=timezone.now(),
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document,
|
||||||
|
user=user_specific_sub,
|
||||||
|
role=role,
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.get("/external_api/v1.0/documents/trashbin/")
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_documents_create_for_owner_not_allowed():
|
||||||
|
"""
|
||||||
|
Authenticated users SHOULD NOT be allowed to create documents
|
||||||
|
on behalf of other users.
|
||||||
|
This API endpoint is reserved for server-to-server calls.
|
||||||
|
"""
|
||||||
|
user = factories.UserFactory()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.force_login(user)
|
||||||
|
|
||||||
|
data = {
|
||||||
|
"title": "My Document",
|
||||||
|
"content": "Document content",
|
||||||
|
"sub": "123",
|
||||||
|
"email": "john.doe@example.com",
|
||||||
|
}
|
||||||
|
|
||||||
|
response = client.post(
|
||||||
|
"/external_api/v1.0/documents/create-for-owner/",
|
||||||
|
data,
|
||||||
|
format="json",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 401
|
||||||
|
assert not models.Document.objects.exists()
|
||||||
|
|
||||||
|
|
||||||
|
# Test overrides
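# Each override below enables one extra action in EXTERNAL_API["documents"]["actions"]
# on top of the read-only defaults, to check that the matching endpoint becomes available.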
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "retrieve", "children", "trashbin"],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_documents_trashbin_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to list soft-deleted documents from a resource server
|
||||||
|
when the trashbin action is enabled in EXTERNAL_API settings.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(link_reach=models.LinkReachChoices.RESTRICTED)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
document.soft_delete()
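# soft_delete() is expected to set deleted_at on the document, which is what
# makes it show up in the trashbin listing below.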
|
||||||
|
|
||||||
|
response = client.get("/external_api/v1.0/documents/trashbin/")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
content = response.json()
|
||||||
|
results = content.pop("results")
|
||||||
|
assert content == {
|
||||||
|
"count": 1,
|
||||||
|
"next": None,
|
||||||
|
"previous": None,
|
||||||
|
}
|
||||||
|
assert len(results) == 1
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "retrieve", "children", "destroy"],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_documents_delete_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to delete a document from a resource server
|
||||||
|
when the delete action is enabled in EXTERNAL_API settings.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(link_reach=models.LinkReachChoices.RESTRICTED)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.delete(f"/external_api/v1.0/documents/{document.id!s}/")
|
||||||
|
|
||||||
|
assert response.status_code == 204
|
||||||
|
# Verify the document is soft deleted
|
||||||
|
document.refresh_from_db()
|
||||||
|
assert document.deleted_at is not None
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
"update",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_documents_update_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to update a document from a resource server
|
||||||
|
when the update action is enabled in EXTERNAL_API settings.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(link_reach=models.LinkReachChoices.RESTRICTED)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
original_title = document.title
|
||||||
|
response = client.put(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/", {"title": "new title"}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
# Verify the document is updated
|
||||||
|
document.refresh_from_db()
|
||||||
|
assert document.title == "new title"
|
||||||
|
assert document.title != original_title
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "retrieve", "children", "move"],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_documents_move_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to move a document from a resource server
|
||||||
|
when the move action is enabled in EXTERNAL_API settings and they
|
||||||
|
have the required permissions on the document and the target location.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
parent = factories.DocumentFactory(
|
||||||
|
users=[(user_specific_sub, "owner")], teams=[("lasuite", "owner")]
|
||||||
|
)
|
||||||
|
# A document with no owner
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
parent=parent, users=[(user_specific_sub, "reader")]
|
||||||
|
)
|
||||||
|
target = factories.DocumentFactory()
|
||||||
|
|
||||||
|
response = client.post(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/move/",
|
||||||
|
data={"target_document_id": str(target.id), "position": "first-sibling"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json() == {"message": "Document moved successfully."}
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "retrieve", "children", "restore"],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_documents_restore_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to restore a recently soft-deleted document
|
||||||
|
from a resource server when the restore action is enabled in EXTERNAL_API
|
||||||
|
settings and they have the required permissions on the document.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
now = timezone.now() - timedelta(days=15)
|
||||||
|
document = factories.DocumentFactory(deleted_at=now)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role="owner"
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.post(f"/external_api/v1.0/documents/{document.id!s}/restore/")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json() == {"detail": "Document has been successfully restored."}
|
||||||
|
|
||||||
|
document.refresh_from_db()
|
||||||
|
assert document.deleted_at is None
|
||||||
|
assert document.ancestors_deleted_at is None
|
||||||
@@ -0,0 +1,681 @@
"""
Tests for the Resource Server API for documents accesses.

Not testing external API endpoints that are already tested in the /api
because the resource server viewsets inherit from the api viewsets.
"""

from django.test import override_settings

import pytest
import responses
from rest_framework.test import APIClient

from core import factories, models
from core.api import serializers
from core.tests.utils.urls import reload_urls

pytestmark = pytest.mark.django_db

# pylint: disable=unused-argument


def test_external_api_document_accesses_anonymous_public_standalone():
|
||||||
|
"""
|
||||||
|
Anonymous users SHOULD NOT be allowed to list document accesses
|
||||||
|
from external API if resource server is not enabled.
|
||||||
|
"""
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
)
|
||||||
|
|
||||||
|
response = APIClient().get(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/accesses/"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_document_accesses_list_connected_not_resource_server():
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to list document accesses
|
||||||
|
if resource server is not enabled.
|
||||||
|
"""
|
||||||
|
user = factories.UserFactory()
|
||||||
|
client = APIClient()
|
||||||
|
client.force_login(user)
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(link_reach=models.LinkReachChoices.RESTRICTED)
|
||||||
|
|
||||||
|
response = client.get(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/accesses/"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_access": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_document_accesses_list_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to list the accesses of
|
||||||
|
a document from a resource server.
|
||||||
|
"""
|
||||||
|
reload_urls()
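# reload_urls() rebuilds the URL configuration so that routes depending on the
# overridden EXTERNAL_API settings are re-registered for this test.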
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document,
|
||||||
|
user=user_specific_sub,
|
||||||
|
role=models.RoleChoices.OWNER,
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.get(f"/external_api/v1.0/documents/{document.id!s}/accesses/")
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_access": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_document_accesses_retrieve_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to retrieve a specific access of
|
||||||
|
a document from a resource server.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document,
|
||||||
|
user=user_specific_sub,
|
||||||
|
role=models.RoleChoices.OWNER,
|
||||||
|
)
|
||||||
|
|
||||||
|
access = factories.UserDocumentAccessFactory(document=document)
|
||||||
|
|
||||||
|
response = client.get(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/accesses/{access.id!s}/"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_access": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_documents_accesses_create_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to create an access for a document
|
||||||
|
from a resource server.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document,
|
||||||
|
user=user_specific_sub,
|
||||||
|
role=models.RoleChoices.OWNER,
|
||||||
|
)
|
||||||
|
|
||||||
|
other_user = factories.UserFactory()
|
||||||
|
|
||||||
|
response = client.post(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/accesses/",
|
||||||
|
{"user_id": other_user.id, "role": models.RoleChoices.READER},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_access": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_document_accesses_update_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to update an access for a
|
||||||
|
document from a resource server through PUT.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document,
|
||||||
|
user=user_specific_sub,
|
||||||
|
role=models.RoleChoices.OWNER,
|
||||||
|
)
|
||||||
|
|
||||||
|
other_user = factories.UserFactory()
|
||||||
|
access = factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=other_user, role=models.RoleChoices.READER
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.put(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/accesses/{access.id!s}/",
|
||||||
|
{"role": models.RoleChoices.EDITOR},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_access": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_document_accesses_partial_update_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to update an access
|
||||||
|
for a document from a resource server through PATCH.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document,
|
||||||
|
user=user_specific_sub,
|
||||||
|
role=models.RoleChoices.OWNER,
|
||||||
|
)
|
||||||
|
|
||||||
|
other_user = factories.UserFactory()
|
||||||
|
access = factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=other_user, role=models.RoleChoices.READER
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.patch(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/accesses/{access.id!s}/",
|
||||||
|
{"role": models.RoleChoices.EDITOR},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_access": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_documents_accesses_delete_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to delete an access for
|
||||||
|
a document from a resource server.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
access = factories.UserDocumentAccessFactory(
|
||||||
|
document=document,
|
||||||
|
user=user_specific_sub,
|
||||||
|
role=models.RoleChoices.OWNER,
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.delete(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/accesses/{access.id!s}/",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
# Overrides
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_access": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "retrieve"],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_document_accesses_list_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to list the accesses of a document from a resource server
|
||||||
|
when the list action is enabled in EXTERNAL_API document_access settings.
|
||||||
|
"""
|
||||||
|
|
||||||
|
reload_urls()
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED, creator=user_specific_sub
|
||||||
|
)
|
||||||
|
user_access = factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
# Create additional accesses
|
||||||
|
other_user = factories.UserFactory()
|
||||||
|
other_access = factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=other_user, role=models.RoleChoices.READER
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.get(f"/external_api/v1.0/documents/{document.id!s}/accesses/")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
|
||||||
|
access_ids = [entry["id"] for entry in data]
|
||||||
|
assert str(user_access.id) in access_ids
|
||||||
|
assert str(other_access.id) in access_ids
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_access": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "retrieve"],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_document_accesses_retrieve_can_be_allowed(
|
||||||
|
user_token,
|
||||||
|
resource_server_backend,
|
||||||
|
user_specific_sub,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
A user who is related to a document SHOULD be allowed to retrieve the
|
||||||
|
associated document user accesses.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory()
|
||||||
|
access = factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.get(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/accesses/{access.id!s}/",
|
||||||
|
)
|
||||||
|
data = response.json()
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert data["id"] == str(access.id)
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_access": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "create"],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_document_accesses_create_can_be_allowed(
|
||||||
|
user_token,
|
||||||
|
resource_server_backend,
|
||||||
|
user_specific_sub,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
A user who is related to a document SHOULD be allowed to create
|
||||||
|
a user access for the document.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory()
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
other_user = factories.UserFactory()
|
||||||
|
|
||||||
|
response = client.post(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/accesses/",
|
||||||
|
data={"user_id": other_user.id, "role": models.RoleChoices.READER},
|
||||||
|
)
|
||||||
|
data = response.json()
|
||||||
|
|
||||||
|
assert response.status_code == 201
|
||||||
|
assert data["role"] == models.RoleChoices.READER
|
||||||
|
assert str(data["user"]["id"]) == str(other_user.id)
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_access": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "update"],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_document_accesses_update_can_be_allowed(
|
||||||
|
user_token,
|
||||||
|
resource_server_backend,
|
||||||
|
user_specific_sub,
|
||||||
|
settings,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
A user who is related to a document SHOULD be allowed to update
|
||||||
|
a user access for the document through PUT.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory()
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
other_user = factories.UserFactory()
|
||||||
|
access = factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=other_user, role=models.RoleChoices.READER
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add the reset-connections endpoint to the existing mock
|
||||||
|
settings.COLLABORATION_API_URL = "http://example.com/"
|
||||||
|
settings.COLLABORATION_SERVER_SECRET = "secret-token"
|
||||||
|
endpoint_url = (
|
||||||
|
f"{settings.COLLABORATION_API_URL}reset-connections/?room={document.id}"
|
||||||
|
)
|
||||||
|
resource_server_backend.add(
|
||||||
|
responses.POST,
|
||||||
|
endpoint_url,
|
||||||
|
json={},
|
||||||
|
status=200,
|
||||||
|
)
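# Changing an access is expected to trigger a call to the collaboration server
# to reset live connections for the document room, hence the mocked endpoint above.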
|
||||||
|
|
||||||
|
old_values = serializers.DocumentAccessSerializer(instance=access).data
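# PUT expects a full payload, so the current serialized values are re-sent
# with only the role changed.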
|
||||||
|
|
||||||
|
# Update only the role field
|
||||||
|
response = client.put(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/accesses/{access.id!s}/",
|
||||||
|
{**old_values, "role": models.RoleChoices.EDITOR}, # type: ignore
|
||||||
|
format="json",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["role"] == models.RoleChoices.EDITOR
|
||||||
|
assert str(data["user"]["id"]) == str(other_user.id)
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_access": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "partial_update"],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_document_accesses_partial_update_can_be_allowed(
|
||||||
|
user_token,
|
||||||
|
resource_server_backend,
|
||||||
|
user_specific_sub,
|
||||||
|
settings,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
A user who is related to a document SHOULD be allowed to update
|
||||||
|
a user access for the document through PATCH.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory()
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
other_user = factories.UserFactory()
|
||||||
|
access = factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=other_user, role=models.RoleChoices.READER
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add the reset-connections endpoint to the existing mock
|
||||||
|
settings.COLLABORATION_API_URL = "http://example.com/"
|
||||||
|
settings.COLLABORATION_SERVER_SECRET = "secret-token"
|
||||||
|
endpoint_url = (
|
||||||
|
f"{settings.COLLABORATION_API_URL}reset-connections/?room={document.id}"
|
||||||
|
)
|
||||||
|
resource_server_backend.add(
|
||||||
|
responses.POST,
|
||||||
|
endpoint_url,
|
||||||
|
json={},
|
||||||
|
status=200,
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.patch(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/accesses/{access.id!s}/",
|
||||||
|
data={"role": models.RoleChoices.EDITOR},
|
||||||
|
)
|
||||||
|
data = response.json()
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert data["role"] == models.RoleChoices.EDITOR
|
||||||
|
assert str(data["user"]["id"]) == str(other_user.id)
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_access": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "destroy"],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_documents_accesses_delete_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub, settings
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to delete an access for
|
||||||
|
a document from a resource server when the destroy action is
|
||||||
|
enabled in settings.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document,
|
||||||
|
user=user_specific_sub,
|
||||||
|
role=models.RoleChoices.OWNER,
|
||||||
|
)
|
||||||
|
other_user = factories.UserFactory()
|
||||||
|
other_access = factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=other_user, role=models.RoleChoices.READER
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add the reset-connections endpoint to the existing mock
|
||||||
|
settings.COLLABORATION_API_URL = "http://example.com/"
|
||||||
|
settings.COLLABORATION_SERVER_SECRET = "secret-token"
|
||||||
|
endpoint_url = (
|
||||||
|
f"{settings.COLLABORATION_API_URL}reset-connections/?room={document.id}"
|
||||||
|
)
|
||||||
|
resource_server_backend.add(
|
||||||
|
responses.POST,
|
||||||
|
endpoint_url,
|
||||||
|
json={},
|
||||||
|
status=200,
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.delete(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/accesses/{other_access.id!s}/",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 204
|
||||||
@@ -0,0 +1,273 @@
"""
Tests for the Resource Server API for document AI features.

Not testing external API endpoints that are already tested in the /api
because the resource server viewsets inherit from the api viewsets.
"""

from unittest.mock import MagicMock, patch

from django.test import override_settings

import pytest
from rest_framework.test import APIClient

from core import factories, models
from core.tests.documents.test_api_documents_ai_proxy import (  # pylint: disable=unused-import
    ai_settings,
)

pytestmark = pytest.mark.django_db

# pylint: disable=unused-argument


def test_external_api_documents_ai_transform_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to access AI transform endpoints
|
||||||
|
from a resource server by default.
|
||||||
|
"""
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
response = client.post(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/ai-transform/",
|
||||||
|
{"text": "hello", "action": "prompt"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
assert response.json() == {
|
||||||
|
"detail": "You do not have permission to perform this action."
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_documents_ai_translate_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to access AI translate endpoints
|
||||||
|
from a resource server by default.
|
||||||
|
"""
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
response = client.post(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/ai-translate/",
|
||||||
|
{"text": "hello", "language": "es"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
assert response.json() == {
|
||||||
|
"detail": "You do not have permission to perform this action."
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_documents_ai_proxy_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to access AI proxy endpoints
|
||||||
|
from a resource server by default.
|
||||||
|
"""
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
response = client.post(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/ai-proxy/",
|
||||||
|
b"{}",
|
||||||
|
content_type="application/json",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
assert response.json() == {
|
||||||
|
"detail": "You do not have permission to perform this action."
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# Overrides
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
"ai_transform",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
@pytest.mark.usefixtures("ai_settings")
|
||||||
|
@patch("openai.resources.chat.completions.Completions.create")
|
||||||
|
def test_external_api_documents_ai_transform_can_be_allowed(
|
||||||
|
mock_create, user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Users SHOULD be allowed to transform a document using AI when the
|
||||||
|
corresponding action is enabled via EXTERNAL_API settings.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED, favorited_by=[user_specific_sub]
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
mock_create.return_value = MagicMock(
|
||||||
|
choices=[MagicMock(message=MagicMock(content="Salut"))]
|
||||||
|
)
|
||||||
|
|
||||||
|
url = f"/external_api/v1.0/documents/{document.id!s}/ai-transform/"
|
||||||
|
response = client.post(url, {"text": "Hello", "action": "prompt"})
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json() == {"answer": "Salut"}
|
||||||
|
# pylint: disable=line-too-long
|
||||||
|
mock_create.assert_called_once_with(
|
||||||
|
model="llama",
|
||||||
|
messages=[
|
||||||
|
{
|
||||||
|
"role": "system",
|
||||||
|
"content": (
|
||||||
|
"Answer the prompt using markdown formatting for structure and emphasis. "
|
||||||
|
"Return the content directly without wrapping it in code blocks or markdown delimiters. "
|
||||||
|
"Preserve the language and markdown formatting. "
|
||||||
|
"Do not provide any other information. "
|
||||||
|
"Preserve the language."
|
||||||
|
),
|
||||||
|
},
|
||||||
|
{"role": "user", "content": "Hello"},
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
"ai_translate",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
@pytest.mark.usefixtures("ai_settings")
|
||||||
|
@patch("openai.resources.chat.completions.Completions.create")
|
||||||
|
def test_external_api_documents_ai_translate_can_be_allowed(
|
||||||
|
mock_create, user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Users SHOULD be allowed to translate a document using AI when the
|
||||||
|
corresponding action is enabled via EXTERNAL_API settings.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED, favorited_by=[user_specific_sub]
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
mock_create.return_value = MagicMock(
|
||||||
|
choices=[MagicMock(message=MagicMock(content="Salut"))]
|
||||||
|
)
|
||||||
|
|
||||||
|
url = f"/external_api/v1.0/documents/{document.id!s}/ai-translate/"
|
||||||
|
response = client.post(url, {"text": "Hello", "language": "es-co"})
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json() == {"answer": "Salut"}
|
||||||
|
mock_create.assert_called_once_with(
|
||||||
|
model="llama",
|
||||||
|
messages=[
|
||||||
|
{
|
||||||
|
"role": "system",
|
||||||
|
"content": (
|
||||||
|
"Keep the same html structure and formatting. "
|
||||||
|
"Translate the content in the html to the "
|
||||||
|
"specified language Colombian Spanish. "
|
||||||
|
"Check the translation for accuracy and make any necessary corrections. "
|
||||||
|
"Do not provide any other information."
|
||||||
|
),
|
||||||
|
},
|
||||||
|
{"role": "user", "content": "Hello"},
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
"ai_proxy",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
@pytest.mark.usefixtures("ai_settings")
|
||||||
|
@patch("core.services.ai_services.AIService.stream")
|
||||||
|
def test_external_api_documents_ai_proxy_can_be_allowed(
|
||||||
|
mock_stream, user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Users SHOULD be allowed to use the AI proxy endpoint when the
|
||||||
|
corresponding action is enabled via EXTERNAL_API settings.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED, creator=user_specific_sub
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
mock_stream.return_value = iter(["data: response\n"])
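# The AI proxy streams server-sent events, so the mocked service returns an
# iterator of SSE-formatted chunks (matching the text/event-stream assertion below).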
|
||||||
|
|
||||||
|
url = f"/external_api/v1.0/documents/{document.id!s}/ai-proxy/"
|
||||||
|
response = client.post(
|
||||||
|
url,
|
||||||
|
b"{}",
|
||||||
|
content_type="application/json",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response["Content-Type"] == "text/event-stream" # type: ignore
|
||||||
|
mock_stream.assert_called_once()
|
||||||
@@ -0,0 +1,121 @@
"""
Tests for the Resource Server API for document attachments.

Not testing external API endpoints that are already tested in the /api
because the resource server viewsets inherit from the api viewsets.
"""

import re
import uuid
from urllib.parse import parse_qs, urlparse

from django.core.files.uploadedfile import SimpleUploadedFile
from django.test import override_settings

import pytest
from rest_framework.test import APIClient

from core import factories, models

pytestmark = pytest.mark.django_db

# pylint: disable=unused-argument


def test_external_api_documents_attachment_upload_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to upload attachments to a document
|
||||||
|
from a resource server.
|
||||||
|
"""
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
pixel = (
|
||||||
|
b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01\x08\x06\x00"
|
||||||
|
b"\x00\x00\x1f\x15\xc4\x89\x00\x00\x00\nIDATx\x9cc\xf8\xff\xff?\x00\x05\xfe\x02\xfe"
|
||||||
|
b"\xa7V\xbd\xfa\x00\x00\x00\x00IEND\xaeB`\x82"
|
||||||
|
)
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
file = SimpleUploadedFile(name="test.png", content=pixel, content_type="image/png")
|
||||||
|
|
||||||
|
response = client.post(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/attachment-upload/",
|
||||||
|
{"file": file},
|
||||||
|
format="multipart",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
assert response.json() == {
|
||||||
|
"detail": "You do not have permission to perform this action."
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
"attachment_upload",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_documents_attachment_upload_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to upload attachments to a document
|
||||||
|
from a resource server when the attachment_upload action is enabled in EXTERNAL_API settings.
|
||||||
|
"""
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
pixel = (
|
||||||
|
b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01\x08\x06\x00"
|
||||||
|
b"\x00\x00\x1f\x15\xc4\x89\x00\x00\x00\nIDATx\x9cc\xf8\xff\xff?\x00\x05\xfe\x02\xfe"
|
||||||
|
b"\xa7V\xbd\xfa\x00\x00\x00\x00IEND\xaeB`\x82"
|
||||||
|
)
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document,
|
||||||
|
user=user_specific_sub,
|
||||||
|
role=models.RoleChoices.OWNER,
|
||||||
|
)
|
||||||
|
|
||||||
|
file = SimpleUploadedFile(name="test.png", content=pixel, content_type="image/png")
|
||||||
|
|
||||||
|
response = client.post(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/attachment-upload/",
|
||||||
|
{"file": file},
|
||||||
|
format="multipart",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 201
|
||||||
|
|
||||||
|
pattern = re.compile(rf"^{document.id!s}/attachments/(.*)\.png")
|
||||||
|
url_parsed = urlparse(response.json()["file"])
|
||||||
|
assert url_parsed.path == f"/api/v1.0/documents/{document.id!s}/media-check/"
|
||||||
|
query = parse_qs(url_parsed.query)
|
||||||
|
assert query["key"][0] is not None
|
||||||
|
file_path = query["key"][0]
|
||||||
|
match = pattern.search(file_path)
|
||||||
|
file_id = match.group(1) # type: ignore
|
||||||
|
|
||||||
|
# Validate that file_id is a valid UUID
|
||||||
|
uuid.UUID(file_id)
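# uuid.UUID() raises ValueError if the extracted key segment is not a valid
# UUID, which would fail the test.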
|
||||||
@@ -0,0 +1,157 @@
"""
Tests for the Resource Server API for document favorites.

Not testing external API endpoints that are already tested in the /api
because the resource server viewsets inherit from the api viewsets.
"""

from django.test import override_settings

import pytest
from rest_framework.test import APIClient

from core import factories, models

pytestmark = pytest.mark.django_db

# pylint: disable=unused-argument


def test_external_api_documents_favorites_list_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to list their favorites
|
||||||
|
from a resource server, as favorite_list() bypasses permissions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.UserDocumentAccessFactory(
|
||||||
|
user=user_specific_sub,
|
||||||
|
role=models.RoleChoices.READER,
|
||||||
|
document__favorited_by=[user_specific_sub],
|
||||||
|
).document
|
||||||
|
|
||||||
|
response = client.get("/external_api/v1.0/documents/favorite_list/")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["count"] == 1
|
||||||
|
assert data["results"][0]["id"] == str(document.id)
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_documents_favorite_add_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
By default the "favorite" action is not permitted on the external API.
|
||||||
|
POST to the endpoint must return 403.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(link_reach=models.LinkReachChoices.RESTRICTED)
|
||||||
|
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.post(f"/external_api/v1.0/documents/{document.id!s}/favorite/")
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_documents_favorite_delete_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
By default the "favorite" action is not permitted on the external API.
|
||||||
|
DELETE to the endpoint must return 403.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(link_reach=models.LinkReachChoices.RESTRICTED)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.delete(f"/external_api/v1.0/documents/{document.id!s}/favorite/")
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
# Overrides
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
"favorite",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_documents_favorite_add_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Users SHOULD be allowed to POST to the favorite endpoint when the
|
||||||
|
corresponding action is enabled via EXTERNAL_API settings.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(link_reach=models.LinkReachChoices.RESTRICTED)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.post(f"/external_api/v1.0/documents/{document.id!s}/favorite/")
|
||||||
|
assert response.status_code == 201
|
||||||
|
assert models.DocumentFavorite.objects.filter(
|
||||||
|
document=document, user=user_specific_sub
|
||||||
|
).exists()
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
"favorite",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_documents_favorite_delete_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Users SHOULD be allowed to DELETE from the favorite endpoint when the
|
||||||
|
corresponding action is enabled via EXTERNAL_API settings.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED, favorited_by=[user_specific_sub]
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.delete(f"/external_api/v1.0/documents/{document.id!s}/favorite/")
|
||||||
|
assert response.status_code == 204
|
||||||
|
assert not models.DocumentFavorite.objects.filter(
|
||||||
|
document=document, user=user_specific_sub
|
||||||
|
).exists()
|
||||||
@@ -0,0 +1,474 @@
|
|||||||
|
"""
|
||||||
|
Tests for the Resource Server API for invitations.
|
||||||
|
|
||||||
|
Not testing external API endpoints that are already tested in the /api
|
||||||
|
because the resource server viewsets inherit from the api viewsets.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from django.test import override_settings
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from rest_framework.test import APIClient
|
||||||
|
|
||||||
|
from core import factories, models
|
||||||
|
from core.tests.utils.urls import reload_urls
|
||||||
|
|
||||||
|
pytestmark = pytest.mark.django_db
|
||||||
|
|
||||||
|
# pylint: disable=unused-argument
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_document_invitations_anonymous_public_standalone():
|
||||||
|
"""
|
||||||
|
Anonymous users SHOULD NOT be allowed to list invitations from external
|
||||||
|
API if resource server is not enabled.
|
||||||
|
"""
|
||||||
|
invitation = factories.InvitationFactory()
|
||||||
|
response = APIClient().get(
|
||||||
|
f"/external_api/v1.0/documents/{invitation.document.id!s}/invitations/"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_document_invitations_list_connected_not_resource_server():
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to list document invitations
|
||||||
|
if resource server is not enabled.
|
||||||
|
"""
|
||||||
|
user = factories.UserFactory()
|
||||||
|
client = APIClient()
|
||||||
|
client.force_login(user)
|
||||||
|
|
||||||
|
invitation = factories.InvitationFactory()
|
||||||
|
response = client.get(
|
||||||
|
f"/external_api/v1.0/documents/{invitation.document.id!s}/invitations/"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_invitation": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
def test_external_api_document_invitations_list_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to list document invitations
|
||||||
|
by default.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
invitation = factories.InvitationFactory()
|
||||||
|
response = client.get(
|
||||||
|
f"/external_api/v1.0/documents/{invitation.document.id!s}/invitations/"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_invitation": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
def test_external_api_document_invitations_retrieve_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to retrieve a document invitation
|
||||||
|
by default.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
invitation = factories.InvitationFactory()
|
||||||
|
document = invitation.document
|
||||||
|
|
||||||
|
response = client.get(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/invitations/{invitation.id!s}/"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_invitation": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
def test_external_api_document_invitations_create_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to create a document invitation
|
||||||
|
by default.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory()
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.post(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/invitations/",
|
||||||
|
{"email": "invited@example.com", "role": models.RoleChoices.READER},
|
||||||
|
format="json",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_invitation": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "retrieve"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
def test_external_api_document_invitations_partial_update_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to partially update a document invitation
|
||||||
|
by default.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory()
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
invitation = factories.InvitationFactory(
|
||||||
|
document=document, role=models.RoleChoices.READER
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.patch(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/invitations/{invitation.id!s}/",
|
||||||
|
{"role": models.RoleChoices.EDITOR},
|
||||||
|
format="json",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_invitation": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "retrieve"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
def test_external_api_document_invitations_delete_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to delete a document invitation
|
||||||
|
by default.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory()
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
invitation = factories.InvitationFactory(document=document)
|
||||||
|
|
||||||
|
response = client.delete(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/invitations/{invitation.id!s}/",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
# Overrides
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_invitation": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "retrieve"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
def test_external_api_document_invitations_list_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to list document invitations
|
||||||
|
when the action is explicitly enabled.
|
||||||
|
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory()
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
invitation = factories.InvitationFactory(document=document)
|
||||||
|
response = client.get(f"/external_api/v1.0/documents/{document.id!s}/invitations/")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["count"] == 1
|
||||||
|
assert data["results"][0]["id"] == str(invitation.id)
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_invitation": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "retrieve"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
def test_external_api_document_invitations_retrieve_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to retrieve a document invitation
|
||||||
|
when the action is explicitly enabled.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory()
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
invitation = factories.InvitationFactory(document=document)
|
||||||
|
|
||||||
|
response = client.get(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/invitations/{invitation.id!s}/"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["id"] == str(invitation.id)
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_invitation": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "retrieve", "create"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
def test_external_api_document_invitations_create_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to create a document invitation
|
||||||
|
when the create action is explicitly enabled.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory()
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.post(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/invitations/",
|
||||||
|
{"email": "invited@example.com", "role": models.RoleChoices.READER},
|
||||||
|
format="json",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 201
|
||||||
|
data = response.json()
|
||||||
|
assert data["email"] == "invited@example.com"
|
||||||
|
assert data["role"] == models.RoleChoices.READER
|
||||||
|
assert str(data["document"]) == str(document.id)
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_invitation": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "retrieve", "partial_update"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
def test_external_api_document_invitations_partial_update_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to partially update a document invitation
|
||||||
|
when the partial_update action is explicitly enabled.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory()
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
invitation = factories.InvitationFactory(
|
||||||
|
document=document, role=models.RoleChoices.READER
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.patch(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/invitations/{invitation.id!s}/",
|
||||||
|
{"role": models.RoleChoices.EDITOR},
|
||||||
|
format="json",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["role"] == models.RoleChoices.EDITOR
|
||||||
|
assert data["email"] == invitation.email
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"document_invitation": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "retrieve", "destroy"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
def test_external_api_document_invitations_delete_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to delete a document invitation
|
||||||
|
when the destroy action is explicitly enabled.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory()
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
invitation = factories.InvitationFactory(document=document)
|
||||||
|
|
||||||
|
response = client.delete(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/invitations/{invitation.id!s}/",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 204
|
||||||
@@ -0,0 +1,105 @@
|
|||||||
|
"""
|
||||||
|
Tests for the Resource Server API for document link configurations.
|
||||||
|
|
||||||
|
Not testing external API endpoints that are already tested in the /api
|
||||||
|
because the resource server viewsets inherit from the api viewsets.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from unittest.mock import patch
|
||||||
|
|
||||||
|
from django.test import override_settings
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from rest_framework.test import APIClient
|
||||||
|
|
||||||
|
from core import factories, models
|
||||||
|
|
||||||
|
pytestmark = pytest.mark.django_db
|
||||||
|
|
||||||
|
# pylint: disable=unused-argument
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_documents_link_configuration_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to update the link configuration of a document
|
||||||
|
from a resource server by default.
|
||||||
|
"""
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document,
|
||||||
|
user=user_specific_sub,
|
||||||
|
role=models.RoleChoices.OWNER,
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.put(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/link-configuration/"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
"link_configuration",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
COLLABORATION_API_URL="http://example.com/",
|
||||||
|
COLLABORATION_SERVER_SECRET="secret-token",
|
||||||
|
)
|
||||||
|
@patch("core.services.collaboration_services.CollaborationService.reset_connections")
|
||||||
|
def test_external_api_documents_link_configuration_can_be_allowed(
|
||||||
|
mock_reset, user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to update the link configuration of a document
|
||||||
|
from a resource server when the corresponding action is enabled in EXTERNAL_API settings.
|
||||||
|
"""
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document,
|
||||||
|
user=user_specific_sub,
|
||||||
|
role=models.RoleChoices.OWNER,
|
||||||
|
)
|
||||||
|
|
||||||
|
# attempt to change reach/role to a valid combination
|
||||||
|
new_data = {
|
||||||
|
"link_reach": models.LinkReachChoices.PUBLIC,
|
||||||
|
"link_role": models.LinkRoleChoices.EDITOR,
|
||||||
|
}
|
||||||
|
|
||||||
|
response = client.put(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/link-configuration/",
|
||||||
|
new_data,
|
||||||
|
format="json",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
# verify the document was updated in the database
|
||||||
|
document.refresh_from_db()
|
||||||
|
assert document.link_reach == models.LinkReachChoices.PUBLIC
|
||||||
|
assert document.link_role == models.LinkRoleChoices.EDITOR
|
||||||
@@ -0,0 +1,94 @@
|
|||||||
|
"""
|
||||||
|
Tests for the Resource Server API for document media authentication.
|
||||||
|
|
||||||
|
Not testing external API endpoints that are already tested in the /api
|
||||||
|
because the resource server viewsets inherit from the api viewsets.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from io import BytesIO
|
||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
from django.core.files.storage import default_storage
|
||||||
|
from django.test import override_settings
|
||||||
|
from django.utils import timezone
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from freezegun import freeze_time
|
||||||
|
from rest_framework.test import APIClient
|
||||||
|
|
||||||
|
from core import factories, models
|
||||||
|
from core.enums import DocumentAttachmentStatus
|
||||||
|
|
||||||
|
pytestmark = pytest.mark.django_db
|
||||||
|
|
||||||
|
# pylint: disable=unused-argument
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_documents_media_auth_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to access media auth endpoints
|
||||||
|
from a resource server by default.
|
||||||
|
"""
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
response = client.get("/external_api/v1.0/documents/media-auth/")
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
"media_auth",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_documents_media_auth_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to access media auth endpoints
|
||||||
|
from a resource server when the media_auth action is enabled in EXTERNAL_API settings.
|
||||||
|
"""
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document_id = uuid4()
|
||||||
|
filename = f"{uuid4()!s}.jpg"
|
||||||
|
key = f"{document_id!s}/attachments/{filename:s}"
|
||||||
|
media_url = f"http://localhost/media/{key:s}"
|
||||||
|
|
||||||
|
default_storage.connection.meta.client.put_object(
|
||||||
|
Bucket=default_storage.bucket_name,
|
||||||
|
Key=key,
|
||||||
|
Body=BytesIO(b"my prose"),
|
||||||
|
ContentType="text/plain",
|
||||||
|
Metadata={"status": DocumentAttachmentStatus.READY},
|
||||||
|
)
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
id=document_id, link_reach=models.LinkReachChoices.RESTRICTED, attachments=[key]
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.READER
|
||||||
|
)
|
||||||
|
|
||||||
|
now = timezone.now()
|
||||||
|
with freeze_time(now):
|
||||||
|
response = client.get(
|
||||||
|
"/external_api/v1.0/documents/media-auth/", HTTP_X_ORIGINAL_URL=media_url
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
@@ -0,0 +1,163 @@
|
|||||||
|
"""
|
||||||
|
Tests for the Resource Server API for document versions.
|
||||||
|
|
||||||
|
Not testing external API endpoints that are already tested in the /api
|
||||||
|
because the resource server viewsets inherit from the api viewsets.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import time
|
||||||
|
|
||||||
|
from django.test import override_settings
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from rest_framework.test import APIClient
|
||||||
|
|
||||||
|
from core import factories, models
|
||||||
|
|
||||||
|
pytestmark = pytest.mark.django_db
|
||||||
|
|
||||||
|
# pylint: disable=unused-argument
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_documents_versions_list_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to list the versions of a document
|
||||||
|
from a resource server by default.
|
||||||
|
"""
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(
|
||||||
|
link_reach=models.LinkReachChoices.RESTRICTED,
|
||||||
|
creator=user_specific_sub,
|
||||||
|
)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document,
|
||||||
|
user=user_specific_sub,
|
||||||
|
role=models.RoleChoices.OWNER,
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.get(f"/external_api/v1.0/documents/{document.id!s}/versions/")
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_documents_versions_detail_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to retrieve a specific version of a document
|
||||||
|
from a resource server by default.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(link_reach=models.LinkReachChoices.RESTRICTED)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
response = client.get(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/versions/1234/"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
# Overrides
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": ["list", "retrieve", "children", "versions_list"],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_documents_versions_list_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to list the versions of a document from a resource server
|
||||||
|
when the versions_list action is enabled in EXTERNAL_API settings.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(link_reach=models.LinkReachChoices.RESTRICTED)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add new versions to the document
|
||||||
|
for i in range(3):
|
||||||
|
document.content = f"new content {i:d}"
|
||||||
|
document.save()
|
||||||
|
|
||||||
|
response = client.get(f"/external_api/v1.0/documents/{document.id!s}/versions/")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
content = response.json()
|
||||||
|
assert content["count"] == 2
|
||||||
|
|
||||||
|
|
||||||
|
@override_settings(
|
||||||
|
EXTERNAL_API={
|
||||||
|
"documents": {
|
||||||
|
"enabled": True,
|
||||||
|
"actions": [
|
||||||
|
"list",
|
||||||
|
"retrieve",
|
||||||
|
"children",
|
||||||
|
"versions_list",
|
||||||
|
"versions_detail",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
def test_external_api_documents_versions_detail_can_be_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to retrieve a specific version of a document
|
||||||
|
from a resource server when the versions_detail action is enabled.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
document = factories.DocumentFactory(link_reach=models.LinkReachChoices.RESTRICTED)
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
document=document, user=user_specific_sub, role=models.RoleChoices.OWNER
|
||||||
|
)
|
||||||
|
|
||||||
|
# ensure the access datetime is earlier than the versions (MinIO precision is one second)
|
||||||
|
time.sleep(1)
|
||||||
|
|
||||||
|
# create several versions, spacing them out to get distinct LastModified values
|
||||||
|
for i in range(3):
|
||||||
|
document.content = f"new content {i:d}"
|
||||||
|
document.save()
|
||||||
|
time.sleep(1)
|
||||||
|
|
||||||
|
# call the list endpoint and verify basic structure
|
||||||
|
response = client.get(f"/external_api/v1.0/documents/{document.id!s}/versions/")
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
content = response.json()
|
||||||
|
# count should reflect two saved versions beyond the original
|
||||||
|
assert content.get("count") == 2
|
||||||
|
|
||||||
|
# pick the first version returned by the list (should be accessible)
|
||||||
|
version_id = content.get("versions")[0]["version_id"]
|
||||||
|
|
||||||
|
detailed_response = client.get(
|
||||||
|
f"/external_api/v1.0/documents/{document.id!s}/versions/{version_id}/"
|
||||||
|
)
|
||||||
|
assert detailed_response.status_code == 200
|
||||||
|
assert detailed_response.json()["content"] == "new content 1"
|
||||||
158
src/backend/core/tests/external_api/test_external_api_users.py
Normal file
@@ -0,0 +1,158 @@
|
|||||||
|
"""
|
||||||
|
Tests for the Resource Server API for users.
|
||||||
|
|
||||||
|
Not testing external API endpoints that are already tested in the /api
|
||||||
|
because the resource server viewsets inherit from the api viewsets.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from rest_framework.test import APIClient
|
||||||
|
|
||||||
|
from core import factories
|
||||||
|
from core.api import serializers
|
||||||
|
from core.tests.utils.urls import reload_urls
|
||||||
|
|
||||||
|
pytestmark = pytest.mark.django_db
|
||||||
|
|
||||||
|
# pylint: disable=unused-argument
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_users_me_anonymous_public_standalone():
|
||||||
|
"""
|
||||||
|
Anonymous users SHOULD NOT be allowed to retrieve their own user information from external
|
||||||
|
API if resource server is not enabled.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
response = APIClient().get("/external_api/v1.0/users/me/")
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_users_me_connected_not_allowed():
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to retrieve their own user information from external
|
||||||
|
API if resource server is not enabled.
|
||||||
|
"""
|
||||||
|
reload_urls()
|
||||||
|
user = factories.UserFactory()
|
||||||
|
client = APIClient()
|
||||||
|
client.force_login(user)
|
||||||
|
|
||||||
|
response = client.get("/external_api/v1.0/users/me/")
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_users_me_connected_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD be allowed to retrieve their own user information from external API
|
||||||
|
if resource server is enabled.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
response = client.get("/external_api/v1.0/users/me/")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["id"] == str(user_specific_sub.id)
|
||||||
|
assert data["email"] == user_specific_sub.email
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_users_me_connected_with_invalid_token_not_allowed(
|
||||||
|
user_token, resource_server_backend
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to retrieve their own user information from external API
|
||||||
|
if resource server is enabled with an invalid token.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
response = client.get("/external_api/v1.0/users/me/")
|
||||||
|
|
||||||
|
assert response.status_code == 401
|
||||||
|
|
||||||
|
|
||||||
|
# Actions not allowed on the resource server.
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_users_list_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to list users from a resource server.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
response = client.get("/external_api/v1.0/users/")
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_users_retrieve_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to retrieve a specific user from a resource server.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
other_user = factories.UserFactory()
|
||||||
|
|
||||||
|
response = client.get(f"/external_api/v1.0/users/{other_user.id!s}/")
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_users_put_patch_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to update or patch a user from a resource server.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
other_user = factories.UserFactory()
|
||||||
|
|
||||||
|
new_user_values = {
|
||||||
|
k: v
|
||||||
|
for k, v in serializers.UserSerializer(
|
||||||
|
instance=factories.UserFactory()
|
||||||
|
).data.items()
|
||||||
|
if v is not None
|
||||||
|
}
|
||||||
|
response = client.put(
|
||||||
|
f"/external_api/v1.0/users/{other_user.id!s}/", new_user_values
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
response = client.patch(
|
||||||
|
f"/external_api/v1.0/users/{other_user.id!s}/",
|
||||||
|
{"email": "new_email@example.com"},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
|
|
||||||
|
|
||||||
|
def test_external_api_users_delete_not_allowed(
|
||||||
|
user_token, resource_server_backend, user_specific_sub
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Connected users SHOULD NOT be allowed to delete a user from a resource server.
|
||||||
|
"""
|
||||||
|
client = APIClient()
|
||||||
|
client.credentials(HTTP_AUTHORIZATION=f"Bearer {user_token}")
|
||||||
|
|
||||||
|
other_user = factories.UserFactory()
|
||||||
|
|
||||||
|
response = client.delete(f"/external_api/v1.0/users/{other_user.id!s}/")
|
||||||
|
|
||||||
|
assert response.status_code == 403
|
||||||
@@ -1,7 +1,5 @@
|
|||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from core import models
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
def test_update_blank_title_migration(migrator):
|
def test_update_blank_title_migration(migrator):
|
||||||
|
|||||||
@@ -7,8 +7,6 @@ from django.core.files.storage import default_storage
|
|||||||
import pycrdt
|
import pycrdt
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from core import models
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.django_db
|
@pytest.mark.django_db
|
||||||
def test_populate_attachments_on_all_documents(migrator):
|
def test_populate_attachments_on_all_documents(migrator):
|
||||||
|
|||||||
@@ -0,0 +1,52 @@
|
|||||||
|
"""Module testing migration 0030 about adding is_first_connection to user model."""
|
||||||
|
|
||||||
|
from django.contrib.auth.hashers import make_password
|
||||||
|
|
||||||
|
import factory
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from core import models
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_set_is_first_connection_false(migrator):
|
||||||
|
"""
|
||||||
|
Test that once the migration adding is_first_connection column to user model is applied
|
||||||
|
all existing users have the value False.
|
||||||
|
"""
|
||||||
|
old_state = migrator.apply_initial_migration(
|
||||||
|
("core", "0029_userreconciliationcsvimport_userreconciliation")
|
||||||
|
)
|
||||||
|
OldUser = old_state.apps.get_model("core", "User")
|
||||||
|
|
||||||
|
old_user1 = OldUser.objects.create(
|
||||||
|
email="email1@example.com", sub="user1", password=make_password("password")
|
||||||
|
)
|
||||||
|
old_user2 = OldUser.objects.create(
|
||||||
|
email="email2@example.com", sub="user2", password=make_password("password")
|
||||||
|
)
|
||||||
|
|
||||||
|
assert hasattr(old_user1, "is_first_connection") is False
|
||||||
|
assert hasattr(old_user2, "is_first_connection") is False
|
||||||
|
|
||||||
|
# Apply the migration
|
||||||
|
new_state = migrator.apply_tested_migration(
|
||||||
|
("core", "0030_user_is_first_connection")
|
||||||
|
)
|
||||||
|
|
||||||
|
NewUser = new_state.apps.get_model("core", "User")
|
||||||
|
|
||||||
|
updated_user1 = NewUser.objects.get(id=old_user1.id)
|
||||||
|
|
||||||
|
assert updated_user1.is_first_connection is False
|
||||||
|
|
||||||
|
updated_user2 = NewUser.objects.get(id=old_user2.id)
|
||||||
|
|
||||||
|
assert updated_user2.is_first_connection is False
|
||||||
|
|
||||||
|
# create a new user after migration
|
||||||
|
|
||||||
|
new_user1 = NewUser.objects.create(
|
||||||
|
email="email3example.com", sub="user3", password=make_password("password")
|
||||||
|
)
|
||||||
|
assert new_user1.is_first_connection is True
|
||||||
@@ -0,0 +1,193 @@
|
|||||||
|
"""Module testing migration 0031_clean_onboarding_accesses."""
|
||||||
|
|
||||||
|
from django.contrib.auth.hashers import make_password
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
def create_user(OldUser, n):
|
||||||
|
"""Create a user with a unique sub and email based on the given index."""
|
||||||
|
return OldUser.objects.create(
|
||||||
|
email=f"user-{n}@example.com",
|
||||||
|
sub=f"user-{n}",
|
||||||
|
password=make_password("password"),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_clean_onboarding_accesses(migrator, settings):
|
||||||
|
"""Test migration 0031_clean_onboarding_accesses."""
|
||||||
|
old_state = migrator.apply_initial_migration(
|
||||||
|
("core", "0030_user_is_first_connection")
|
||||||
|
)
|
||||||
|
|
||||||
|
OldUser = old_state.apps.get_model("core", "User")
|
||||||
|
OldDocument = old_state.apps.get_model("core", "Document")
|
||||||
|
OldDocumentAccess = old_state.apps.get_model("core", "DocumentAccess")
|
||||||
|
|
||||||
|
# Create onboarding documents
|
||||||
|
onboarding_doc_1 = OldDocument.objects.create(
|
||||||
|
title="Onboarding Doc 1", depth=1, path="0000001", link_reach="public"
|
||||||
|
)
|
||||||
|
onboarding_doc_2 = OldDocument.objects.create(
|
||||||
|
title="Onboarding Doc 2", depth=1, path="0000002", link_reach="public"
|
||||||
|
)
|
||||||
|
onboarding_documents = [onboarding_doc_1, onboarding_doc_2]
|
||||||
|
|
||||||
|
settings.USER_ONBOARDING_DOCUMENTS = [str(doc.id) for doc in onboarding_documents]
|
||||||
|
|
||||||
|
# Create other documents
|
||||||
|
non_onboarding_doc_1 = OldDocument.objects.create(
|
||||||
|
title="Non-Onboarding Doc 1", depth=1, path="0000003", link_reach="public"
|
||||||
|
)
|
||||||
|
non_onboarding_doc_2 = OldDocument.objects.create(
|
||||||
|
title="Non-Onboarding Doc 2", depth=1, path="0000004", link_reach="public"
|
||||||
|
)
|
||||||
|
non_onboarding_doc_3 = OldDocument.objects.create(
|
||||||
|
title="Non-Onboarding Doc 3", depth=1, path="0000005", link_reach="public"
|
||||||
|
)
|
||||||
|
non_onboarding_documents = [
|
||||||
|
non_onboarding_doc_1,
|
||||||
|
non_onboarding_doc_2,
|
||||||
|
non_onboarding_doc_3,
|
||||||
|
]
|
||||||
|
|
||||||
|
all_documents = onboarding_documents + non_onboarding_documents
|
||||||
|
|
||||||
|
user_counter = 0
|
||||||
|
|
||||||
|
# For every document create privileged roles: owner and admin
|
||||||
|
for document in all_documents:
|
||||||
|
OldDocumentAccess.objects.create(
|
||||||
|
document=document,
|
||||||
|
user=create_user(OldUser, user_counter),
|
||||||
|
role="owner",
|
||||||
|
)
|
||||||
|
user_counter += 1
|
||||||
|
OldDocumentAccess.objects.create(
|
||||||
|
document=document,
|
||||||
|
user=create_user(OldUser, user_counter),
|
||||||
|
role="administrator",
|
||||||
|
)
|
||||||
|
user_counter += 1
|
||||||
|
|
||||||
|
# For every document, create non-privileged roles
|
||||||
|
for document in all_documents:
|
||||||
|
for role in ["reader", "editor", "commenter"]:
|
||||||
|
for _ in range(10):
|
||||||
|
OldDocumentAccess.objects.create(
|
||||||
|
document=document,
|
||||||
|
user=create_user(OldUser, user_counter),
|
||||||
|
role=role,
|
||||||
|
)
|
||||||
|
user_counter += 1
|
||||||
|
|
||||||
|
onboarding_ids = [doc.id for doc in onboarding_documents]
|
||||||
|
non_onboarding_ids = [doc.id for doc in non_onboarding_documents]
|
||||||
|
|
||||||
|
# All documents should have 32 accesses each, so 160 accesses total
|
||||||
|
assert OldDocumentAccess.objects.count() == 160
|
||||||
|
assert (
|
||||||
|
OldDocumentAccess.objects.filter(document_id__in=onboarding_ids)
|
||||||
|
.exclude(role__in=["administrator", "owner"])
|
||||||
|
.count()
|
||||||
|
== 60
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
OldDocumentAccess.objects.filter(
|
||||||
|
document_id__in=onboarding_ids, role__in=["administrator", "owner"]
|
||||||
|
).count()
|
||||||
|
== 4
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
OldDocumentAccess.objects.filter(document_id__in=non_onboarding_ids)
|
||||||
|
.exclude(role__in=["administrator", "owner"])
|
||||||
|
.count()
|
||||||
|
== 90
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
OldDocumentAccess.objects.filter(
|
||||||
|
document_id__in=non_onboarding_ids, role__in=["administrator", "owner"]
|
||||||
|
).count()
|
||||||
|
== 6
|
||||||
|
)
|
||||||
|
|
||||||
|
# Apply the migration
|
||||||
|
new_state = migrator.apply_tested_migration(
|
||||||
|
("core", "0031_clean_onboarding_accesses")
|
||||||
|
)
|
||||||
|
|
||||||
|
NewDocumentAccess = new_state.apps.get_model("core", "DocumentAccess")
|
||||||
|
|
||||||
|
# 60 accesses should have been removed (30 non-privileged for each onboarding doc)
|
||||||
|
assert NewDocumentAccess.objects.count() == 100
|
||||||
|
|
||||||
|
# Non-privileged roles should have been deleted on the onboarding documents
|
||||||
|
assert (
|
||||||
|
NewDocumentAccess.objects.filter(document_id__in=onboarding_ids)
|
||||||
|
.exclude(role__in=["administrator", "owner"])
|
||||||
|
.count()
|
||||||
|
== 0
|
||||||
|
)
|
||||||
|
|
||||||
|
# Privileged roles should have been kept
|
||||||
|
assert (
|
||||||
|
NewDocumentAccess.objects.filter(
|
||||||
|
document_id__in=onboarding_ids, role__in=["administrator", "owner"]
|
||||||
|
).count()
|
||||||
|
== 4
|
||||||
|
)
|
||||||
|
|
||||||
|
# On other documents, all accesses should remain
|
||||||
|
assert (
|
||||||
|
NewDocumentAccess.objects.filter(document_id__in=non_onboarding_ids)
|
||||||
|
.exclude(role__in=["administrator", "owner"])
|
||||||
|
.count()
|
||||||
|
== 90
|
||||||
|
)
|
||||||
|
|
||||||
|
# Privileged roles should have been kept
|
||||||
|
assert (
|
||||||
|
NewDocumentAccess.objects.filter(
|
||||||
|
document_id__in=non_onboarding_ids, role__in=["administrator", "owner"]
|
||||||
|
).count()
|
||||||
|
== 6
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_clean_onboarding_accesses_no_setting(migrator, settings):
|
||||||
|
"""Test migration 0031 does not delete any access when USER_ONBOARDING_DOCUMENTS is empty."""
|
||||||
|
old_state = migrator.apply_initial_migration(
|
||||||
|
("core", "0030_user_is_first_connection")
|
||||||
|
)
|
||||||
|
|
||||||
|
OldUser = old_state.apps.get_model("core", "User")
|
||||||
|
OldDocument = old_state.apps.get_model("core", "Document")
|
||||||
|
OldDocumentAccess = old_state.apps.get_model("core", "DocumentAccess")
|
||||||
|
|
||||||
|
settings.USER_ONBOARDING_DOCUMENTS = []
|
||||||
|
|
||||||
|
doc_1 = OldDocument.objects.create(title="Doc 1", depth=1, path="0000001")
|
||||||
|
doc_2 = OldDocument.objects.create(title="Doc 2", depth=1, path="0000002")
|
||||||
|
|
||||||
|
user_counter = 0
|
||||||
|
for document in [doc_1, doc_2]:
|
||||||
|
for role in ["owner", "administrator", "reader", "editor", "commenter"]:
|
||||||
|
OldDocumentAccess.objects.create(
|
||||||
|
document=document,
|
||||||
|
user=create_user(OldUser, user_counter),
|
||||||
|
role=role,
|
||||||
|
)
|
||||||
|
user_counter += 1
|
||||||
|
|
||||||
|
assert OldDocumentAccess.objects.count() == 10
|
||||||
|
|
||||||
|
new_state = migrator.apply_tested_migration(
|
||||||
|
("core", "0031_clean_onboarding_accesses")
|
||||||
|
)
|
||||||
|
|
||||||
|
NewDocumentAccess = new_state.apps.get_model("core", "DocumentAccess")
|
||||||
|
|
||||||
|
# No accesses should have been deleted
|
||||||
|
assert NewDocumentAccess.objects.count() == 10
|
||||||
@@ -19,7 +19,11 @@ pytestmark = pytest.mark.django_db
|
|||||||
|
|
||||||
|
|
||||||
@override_settings(
|
@override_settings(
|
||||||
|
AI_BOT={"name": "Test Bot", "color": "#000000"},
|
||||||
AI_FEATURE_ENABLED=False,
|
AI_FEATURE_ENABLED=False,
|
||||||
|
AI_FEATURE_BLOCKNOTE_ENABLED=False,
|
||||||
|
AI_FEATURE_LEGACY_ENABLED=False,
|
||||||
|
API_USERS_SEARCH_QUERY_MIN_LENGTH=6,
|
||||||
COLLABORATION_WS_URL="http://testcollab/",
|
COLLABORATION_WS_URL="http://testcollab/",
|
||||||
COLLABORATION_WS_NOT_CONNECTED_READY_ONLY=True,
|
COLLABORATION_WS_NOT_CONNECTED_READY_ONLY=True,
|
||||||
CRISP_WEBSITE_ID="123",
|
CRISP_WEBSITE_ID="123",
|
||||||
@@ -43,7 +47,11 @@ def test_api_config(is_authenticated):
|
|||||||
response = client.get("/api/v1.0/config/")
|
response = client.get("/api/v1.0/config/")
|
||||||
assert response.status_code == HTTP_200_OK
|
assert response.status_code == HTTP_200_OK
|
||||||
assert response.json() == {
|
assert response.json() == {
|
||||||
|
"AI_BOT": {"name": "Test Bot", "color": "#000000"},
|
||||||
"AI_FEATURE_ENABLED": False,
|
"AI_FEATURE_ENABLED": False,
|
||||||
|
"AI_FEATURE_BLOCKNOTE_ENABLED": False,
|
||||||
|
"AI_FEATURE_LEGACY_ENABLED": False,
|
||||||
|
"API_USERS_SEARCH_QUERY_MIN_LENGTH": 6,
|
||||||
"COLLABORATION_WS_URL": "http://testcollab/",
|
"COLLABORATION_WS_URL": "http://testcollab/",
|
||||||
"COLLABORATION_WS_NOT_CONNECTED_READY_ONLY": True,
|
"COLLABORATION_WS_NOT_CONNECTED_READY_ONLY": True,
|
||||||
"CONVERSION_FILE_EXTENSIONS_ALLOWED": [".docx", ".md"],
|
"CONVERSION_FILE_EXTENSIONS_ALLOWED": [".docx", ".md"],
|
||||||
@@ -53,6 +61,7 @@ def test_api_config(is_authenticated):
|
|||||||
"FRONTEND_CSS_URL": "http://testcss/",
|
"FRONTEND_CSS_URL": "http://testcss/",
|
||||||
"FRONTEND_HOMEPAGE_FEATURE_ENABLED": True,
|
"FRONTEND_HOMEPAGE_FEATURE_ENABLED": True,
|
||||||
"FRONTEND_JS_URL": "http://testjs/",
|
"FRONTEND_JS_URL": "http://testjs/",
|
||||||
|
"FRONTEND_SILENT_LOGIN_ENABLED": False,
|
||||||
"FRONTEND_THEME": "test-theme",
|
"FRONTEND_THEME": "test-theme",
|
||||||
"LANGUAGES": [
|
"LANGUAGES": [
|
||||||
["en-us", "English"],
|
["en-us", "English"],
|
||||||
|
|||||||
85
src/backend/core/tests/test_api_user_reconciliation.py
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
"""
|
||||||
|
Unit tests for the ReconciliationConfirmView API view.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from rest_framework.test import APIClient
|
||||||
|
|
||||||
|
from core import factories, models
|
||||||
|
|
||||||
|
pytestmark = pytest.mark.django_db
|
||||||
|
|
||||||
|
|
||||||
|
def test_reconciliation_confirm_view_sets_active_checked():
|
||||||
|
"""GETting the active confirmation endpoint should set active_email_checked."""
|
||||||
|
user = factories.UserFactory(email="user.confirm1@example.com")
|
||||||
|
other = factories.UserFactory(email="user.confirm2@example.com")
|
||||||
|
rec = models.UserReconciliation.objects.create(
|
||||||
|
active_email=user.email,
|
||||||
|
inactive_email=other.email,
|
||||||
|
active_user=user,
|
||||||
|
inactive_user=other,
|
||||||
|
active_email_checked=False,
|
||||||
|
inactive_email_checked=False,
|
||||||
|
status="ready",
|
||||||
|
)
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
conf_id = rec.active_email_confirmation_id
|
||||||
|
url = f"/api/{settings.API_VERSION}/user-reconciliations/active/{conf_id}/"
|
||||||
|
resp = client.get(url)
|
||||||
|
assert resp.status_code == 200
|
||||||
|
assert resp.json() == {"detail": "Confirmation received"}
|
||||||
|
|
||||||
|
rec.refresh_from_db()
|
||||||
|
assert rec.active_email_checked is True
|
||||||
|
|
||||||
|
|
||||||
|
def test_reconciliation_confirm_view_sets_inactive_checked():
|
||||||
|
"""GETting the inactive confirmation endpoint should set inactive_email_checked."""
|
||||||
|
user = factories.UserFactory(email="user.confirm3@example.com")
|
||||||
|
other = factories.UserFactory(email="user.confirm4@example.com")
|
||||||
|
rec = models.UserReconciliation.objects.create(
|
||||||
|
active_email=user.email,
|
||||||
|
inactive_email=other.email,
|
||||||
|
active_user=user,
|
||||||
|
inactive_user=other,
|
||||||
|
active_email_checked=False,
|
||||||
|
inactive_email_checked=False,
|
||||||
|
status="ready",
|
||||||
|
)
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
conf_id = rec.inactive_email_confirmation_id
|
||||||
|
url = f"/api/{settings.API_VERSION}/user-reconciliations/inactive/{conf_id}/"
|
||||||
|
resp = client.get(url)
|
||||||
|
assert resp.status_code == 200
|
||||||
|
assert resp.json() == {"detail": "Confirmation received"}
|
||||||
|
|
||||||
|
rec.refresh_from_db()
|
||||||
|
assert rec.inactive_email_checked is True
|
||||||
|
|
||||||
|
|
||||||
|
def test_reconciliation_confirm_view_invalid_user_type_returns_400():
|
||||||
|
"""GETting with an invalid user_type should return 400."""
|
||||||
|
client = APIClient()
|
||||||
|
# Use a valid uuid format but invalid user_type
|
||||||
|
|
||||||
|
url = f"/api/{settings.API_VERSION}/user-reconciliations/other/{uuid.uuid4()}/"
|
||||||
|
resp = client.get(url)
|
||||||
|
assert resp.status_code == 400
|
||||||
|
assert resp.json() == {"detail": "Invalid user_type"}
|
||||||
|
|
||||||
|
|
||||||
|
def test_reconciliation_confirm_view_not_found_returns_404():
|
||||||
|
"""GETting with a non-existing confirmation_id should return 404."""
|
||||||
|
client = APIClient()
|
||||||
|
|
||||||
|
url = f"/api/{settings.API_VERSION}/user-reconciliations/active/{uuid.uuid4()}/"
|
||||||
|
resp = client.get(url)
|
||||||
|
assert resp.status_code == 404
|
||||||
|
assert resp.json() == {"detail": "Reconciliation entry not found"}
|
||||||
@@ -2,6 +2,8 @@
|
|||||||
Test users API endpoints in the impress core app.
|
Test users API endpoints in the impress core app.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
from django.utils import timezone
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
from rest_framework.test import APIClient
|
from rest_framework.test import APIClient
|
||||||
|
|
||||||
@@ -46,7 +48,7 @@ def test_api_users_list_query_email():
|
|||||||
Only results with a Levenstein distance less than 3 with the query should be returned.
|
Only results with a Levenstein distance less than 3 with the query should be returned.
|
||||||
We want to match by Levenstein distance because we want to prevent typing errors.
|
We want to match by Levenstein distance because we want to prevent typing errors.
|
||||||
"""
|
"""
|
||||||
user = factories.UserFactory()
|
user = factories.UserFactory(email="user@example.com", full_name="Example User")
|
||||||
|
|
||||||
client = APIClient()
|
client = APIClient()
|
||||||
client.force_login(user)
|
client.force_login(user)
|
||||||
@@ -81,7 +83,7 @@ def test_api_users_list_query_email_with_internationalized_domain_names():
|
|||||||
Authenticated users should be able to list users and filter by email.
|
Authenticated users should be able to list users and filter by email.
|
||||||
It should work even if the email address contains an internationalized domain name.
|
It should work even if the email address contains an internationalized domain name.
|
||||||
"""
|
"""
|
||||||
user = factories.UserFactory()
|
user = factories.UserFactory(email="user@example.com", full_name="Example User")
|
||||||
|
|
||||||
client = APIClient()
|
client = APIClient()
|
||||||
client.force_login(user)
|
client.force_login(user)
|
||||||
@@ -121,12 +123,12 @@ def test_api_users_list_query_full_name():
|
|||||||
Authenticated users should be able to list users and filter by full name.
|
Authenticated users should be able to list users and filter by full name.
|
||||||
Only results with a Trigram similarity greater than 0.2 with the query should be returned.
|
Only results with a Trigram similarity greater than 0.2 with the query should be returned.
|
||||||
"""
|
"""
|
||||||
user = factories.UserFactory()
|
user = factories.UserFactory(email="user@example.com", full_name="Example User")
|
||||||
|
|
||||||
client = APIClient()
|
client = APIClient()
|
||||||
client.force_login(user)
|
client.force_login(user)
|
||||||
|
|
||||||
dave = factories.UserFactory(email="contact@work.com", full_name="David Bowman")
|
dave = factories.UserFactory(email="contact@example.com", full_name="David Bowman")
|
||||||
|
|
||||||
response = client.get(
|
response = client.get(
|
||||||
"/api/v1.0/users/?q=David",
|
"/api/v1.0/users/?q=David",
|
||||||
@@ -166,13 +168,13 @@ def test_api_users_list_query_accented_full_name():
|
|||||||
Authenticated users should be able to list users and filter by full name with accents.
|
Authenticated users should be able to list users and filter by full name with accents.
|
||||||
Only results with a Trigram similarity greater than 0.2 with the query should be returned.
|
Only results with a Trigram similarity greater than 0.2 with the query should be returned.
|
||||||
"""
|
"""
|
||||||
user = factories.UserFactory()
|
user = factories.UserFactory(email="user@example.com", full_name="Example User")
|
||||||
|
|
||||||
client = APIClient()
|
client = APIClient()
|
||||||
client.force_login(user)
|
client.force_login(user)
|
||||||
|
|
||||||
fred = factories.UserFactory(
|
fred = factories.UserFactory(
|
||||||
email="contact@work.com", full_name="Frédérique Lefèvre"
|
email="contact@example.com", full_name="Frédérique Lefèvre"
|
||||||
)
|
)
|
||||||
|
|
||||||
response = client.get("/api/v1.0/users/?q=Frédérique")
|
response = client.get("/api/v1.0/users/?q=Frédérique")
|
||||||
@@ -201,12 +203,82 @@ def test_api_users_list_query_accented_full_name():
|
|||||||
assert users == []
|
assert users == []
|
||||||
|
|
||||||
|
|
||||||
|
def test_api_users_list_sorted_by_closest_match():
|
||||||
|
"""
|
||||||
|
Authenticated users should be able to list users and the results should be
|
||||||
|
sorted by closest match to the query.
|
||||||
|
|
||||||
|
Sorting criteria are:
|
||||||
|
- Shared documents with the user (most recent first)
|
||||||
|
- Same full email domain (example.gouv.fr)
|
||||||
|
|
||||||
|
Addresses that match neither criterion should be excluded from the results.
|
||||||
|
|
||||||
|
Case in point: the logged-in user has recently shared documents
|
||||||
|
with pierre.dupont@beta.gouv.fr and less recently with pierre.durand@impots.gouv.fr.
|
||||||
|
|
||||||
|
Other users named Pierre also exist:
|
||||||
|
- pierre.thomas@example.com
|
||||||
|
- pierre.petit@anct.gouv.fr
|
||||||
|
- pierre.robert@culture.gouv.fr
|
||||||
|
|
||||||
|
The search results should be ordered as follows:
|
||||||
|
|
||||||
|
# Shared with first
|
||||||
|
- pierre.dupont@beta.gouv.fr # Most recent first
|
||||||
|
- pierre.durand@impots.gouv.fr
|
||||||
|
# Same full domain second
|
||||||
|
- pierre.petit@anct.gouv.fr
|
||||||
|
"""
|
||||||
|
|
||||||
|
user = factories.UserFactory(
|
||||||
|
email="martin.bernard@anct.gouv.fr", full_name="Martin Bernard"
|
||||||
|
)
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.force_login(user)
|
||||||
|
|
||||||
|
pierre_1 = factories.UserFactory(email="pierre.dupont@beta.gouv.fr")
|
||||||
|
pierre_2 = factories.UserFactory(email="pierre.durand@impots.gouv.fr")
|
||||||
|
_pierre_3 = factories.UserFactory(email="pierre.thomas@example.com")
|
||||||
|
pierre_4 = factories.UserFactory(email="pierre.petit@anct.gouv.fr")
|
||||||
|
_pierre_5 = factories.UserFactory(email="pierre.robert@culture.gouv.fr")
|
||||||
|
|
||||||
|
document_1 = factories.DocumentFactory(creator=user)
|
||||||
|
document_2 = factories.DocumentFactory(creator=user)
|
||||||
|
factories.UserDocumentAccessFactory(user=user, document=document_1)
|
||||||
|
factories.UserDocumentAccessFactory(user=user, document=document_2)
|
||||||
|
|
||||||
|
now = timezone.now()
|
||||||
|
last_week = now - timezone.timedelta(days=7)
|
||||||
|
last_month = now - timezone.timedelta(days=30)
|
||||||
|
|
||||||
|
# The factory cannot set the created_at directly, so we force it after creation
|
||||||
|
p1_d1 = factories.UserDocumentAccessFactory(user=pierre_1, document=document_1)
|
||||||
|
p1_d1.created_at = last_week
|
||||||
|
p1_d1.save()
|
||||||
|
|
||||||
|
p2_d2 = factories.UserDocumentAccessFactory(user=pierre_2, document=document_2)
|
||||||
|
p2_d2.created_at = last_month
|
||||||
|
p2_d2.save()
|
||||||
|
|
||||||
|
response = client.get("/api/v1.0/users/?q=Pierre")
|
||||||
|
assert response.status_code == 200
|
||||||
|
user_ids = [user["email"] for user in response.json()]
|
||||||
|
|
||||||
|
assert user_ids == [
|
||||||
|
str(pierre_1.email),
|
||||||
|
str(pierre_2.email),
|
||||||
|
str(pierre_4.email),
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
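The ordering this new test pins down can be implemented in several ways; the sketch below is only one illustration of ranking an already-filtered queryset by most recent shared document and then by matching email domain. The reverse accessor name (documentaccess) and the created_at field are assumptions, not the project's confirmed API.

# Illustrative sketch only: rank candidate users by the two criteria described in the
# docstring above. Field and accessor names are assumptions for the example.
from django.db.models import Case, F, IntegerField, Max, Q, Value, When


def rank_user_candidates(queryset, requesting_user):
    """Order users: most recently shared document first, then same email domain."""
    domain = requesting_user.email.split("@", 1)[1]
    shared_documents = requesting_user.documentaccess_set.values("document_id")
    return (
        queryset.annotate(
            last_shared=Max(
                "documentaccess__created_at",
                filter=Q(documentaccess__document_id__in=shared_documents),
            ),
            same_domain=Case(
                When(email__iendswith=f"@{domain}", then=Value(1)),
                default=Value(0),
                output_field=IntegerField(),
            ),
        )
        .filter(Q(last_shared__isnull=False) | Q(same_domain=1))
        .order_by(F("last_shared").desc(nulls_last=True), "-same_domain")
    )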
def test_api_users_list_limit(settings):
|
def test_api_users_list_limit(settings):
|
||||||
"""
|
"""
|
||||||
Authenticated users should be able to list users and the number of results
|
Authenticated users should be able to list users and the number of results
|
||||||
should be limited to 10.
|
should be limited to API_USERS_LIST_LIMIT (by default 5).
|
||||||
"""
|
"""
|
||||||
user = factories.UserFactory()
|
user = factories.UserFactory(email="user@example.com")
|
||||||
|
|
||||||
client = APIClient()
|
client = APIClient()
|
||||||
client.force_login(user)
|
client.force_login(user)
|
||||||
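For context only, a minimal sketch of how a settings-driven cap like the one this docstring now references could be applied; the setting name comes from the docstring, the helper itself is hypothetical.

# Hypothetical helper: cap search results with the API_USERS_LIST_LIMIT setting.
from django.conf import settings


def limit_user_results(queryset):
    """Return at most API_USERS_LIST_LIMIT users (assumed to default to 5)."""
    return queryset[: getattr(settings, "API_USERS_LIST_LIMIT", 5)]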
@@ -309,28 +381,16 @@ def test_api_users_list_query_email_exclude_doc_user():
|
|||||||
|
|
||||||
def test_api_users_list_query_short_queries():
|
def test_api_users_list_query_short_queries():
|
||||||
"""
|
"""
|
||||||
Queries shorter than 5 characters should return an empty result set.
|
If API_USERS_SEARCH_QUERY_MIN_LENGTH is not set, the default minimum length should be 3.
|
||||||
"""
|
"""
|
||||||
user = factories.UserFactory(email="paul@example.com", full_name="Paul")
|
user = factories.UserFactory(email="paul@example.com", full_name="Paul")
|
||||||
client = APIClient()
|
client = APIClient()
|
||||||
client.force_login(user)
|
client.force_login(user)
|
||||||
|
|
||||||
factories.UserFactory(email="john.doe@example.com")
|
factories.UserFactory(email="john.doe@example.com", full_name="John Doe")
|
||||||
factories.UserFactory(email="john.lennon@example.com")
|
factories.UserFactory(email="john.lennon@example.com", full_name="John Lennon")
|
||||||
|
|
||||||
response = client.get("/api/v1.0/users/?q=jo")
|
response = client.get("/api/v1.0/users/?q=joh")
|
||||||
assert response.status_code == 400
|
|
||||||
assert response.json() == {
|
|
||||||
"q": ["Ensure this value has at least 5 characters (it has 2)."]
|
|
||||||
}
|
|
||||||
|
|
||||||
response = client.get("/api/v1.0/users/?q=john")
|
|
||||||
assert response.status_code == 400
|
|
||||||
assert response.json() == {
|
|
||||||
"q": ["Ensure this value has at least 5 characters (it has 4)."]
|
|
||||||
}
|
|
||||||
|
|
||||||
response = client.get("/api/v1.0/users/?q=john.")
|
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert len(response.json()) == 2
|
assert len(response.json()) == 2
|
||||||
|
|
||||||
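A sketch, under assumptions, of the kind of query-parameter validation these assertions exercise; only the setting name and the minimum-length behaviour come from the surrounding tests, the serializer itself is illustrative.

# Illustrative sketch: validate the "q" query parameter against a configurable
# minimum length (assumed to default to 3, per the docstring above).
from django.conf import settings
from rest_framework import serializers


class UserSearchQuerySerializer(serializers.Serializer):
    """Validates the search query before hitting the database."""

    q = serializers.CharField(
        required=True,
        min_length=getattr(settings, "API_USERS_SEARCH_QUERY_MIN_LENGTH", 3),
    )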
@@ -356,7 +416,7 @@ def test_api_users_list_query_long_queries():
|
|||||||
|
|
||||||
def test_api_users_list_query_inactive():
|
def test_api_users_list_query_inactive():
|
||||||
"""Inactive users should not be listed."""
|
"""Inactive users should not be listed."""
|
||||||
user = factories.UserFactory()
|
user = factories.UserFactory(email="user@example.com", full_name="Example User")
|
||||||
client = APIClient()
|
client = APIClient()
|
||||||
client.force_login(user)
|
client.force_login(user)
|
||||||
|
|
||||||
@@ -400,6 +460,7 @@ def test_api_users_retrieve_me_authenticated():
|
|||||||
"full_name": user.full_name,
|
"full_name": user.full_name,
|
||||||
"language": user.language,
|
"language": user.language,
|
||||||
"short_name": user.short_name,
|
"short_name": user.short_name,
|
||||||
|
"is_first_connection": True,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -429,9 +490,37 @@ def test_api_users_retrieve_me_authenticated_empty_name():
|
|||||||
"full_name": "test_foo",
|
"full_name": "test_foo",
|
||||||
"language": user.language,
|
"language": user.language,
|
||||||
"short_name": "test_foo",
|
"short_name": "test_foo",
|
||||||
|
"is_first_connection": True,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_api_users_retrieve_me_onboarding():
|
||||||
|
"""
|
||||||
|
On first connection of a new user, the "is_first_connection" flag should be True.
|
||||||
|
|
||||||
|
The frontend can use this flag to trigger specific behavior for first time users,
|
||||||
|
e.g. showing an onboarding message, and update the flag to False after onboarding is done.
|
||||||
|
"""
|
||||||
|
user = factories.UserFactory()
|
||||||
|
|
||||||
|
client = APIClient()
|
||||||
|
client.force_login(user)
|
||||||
|
|
||||||
|
# First request: flag should be True
|
||||||
|
first_response = client.get("/api/v1.0/users/me/")
|
||||||
|
assert first_response.status_code == 200
|
||||||
|
assert first_response.json()["is_first_connection"] is True
|
||||||
|
|
||||||
|
update_response = client.post("/api/v1.0/users/onboarding-done/")
|
||||||
|
|
||||||
|
assert update_response.status_code == 200
|
||||||
|
|
||||||
|
# Second request: flag should be False
|
||||||
|
second_response = client.get("/api/v1.0/users/me/")
|
||||||
|
assert second_response.status_code == 200
|
||||||
|
assert second_response.json()["is_first_connection"] is False
|
||||||
|
|
||||||
|
|
||||||
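The two requests in the onboarding test above imply a user flag plus an endpoint that clears it; the sketch below shows one plausible shape for that action. The route and field name come from the test, the viewset wiring is assumed.

# Sketch only: a DRF action that could back POST /api/v1.0/users/onboarding-done/.
from rest_framework import decorators, response, viewsets


class UserViewSet(viewsets.GenericViewSet):
    @decorators.action(detail=False, methods=["post"], url_path="onboarding-done")
    def onboarding_done(self, request):
        """Mark onboarding as completed so "is_first_connection" becomes False."""
        user = request.user
        user.is_first_connection = False  # assumed to be a model field on User
        user.save(update_fields=["is_first_connection"])
        return response.Response({"is_first_connection": False})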
def test_api_users_retrieve_anonymous():
|
def test_api_users_retrieve_anonymous():
|
||||||
"""Anonymous users should not be allowed to retrieve a user."""
|
"""Anonymous users should not be allowed to retrieve a user."""
|
||||||
client = APIClient()
|
client = APIClient()
|
||||||
|
|||||||
@@ -0,0 +1,32 @@
|
|||||||
|
"""module testing the conditional_refresh_oidc_token utils."""
|
||||||
|
|
||||||
|
from unittest import mock
|
||||||
|
|
||||||
|
from core.api import utils
|
||||||
|
|
||||||
|
|
||||||
|
def test_refresh_oidc_access_token_storing_refresh_token_disabled(settings):
|
||||||
|
"""The method_decorator must not be called when OIDC_STORE_REFRESH_TOKEN is False."""
|
||||||
|
|
||||||
|
settings.OIDC_STORE_REFRESH_TOKEN = False
|
||||||
|
|
||||||
|
callback = mock.MagicMock()
|
||||||
|
|
||||||
|
with mock.patch.object(utils, "method_decorator") as mock_method_decorator:
|
||||||
|
result = utils.conditional_refresh_oidc_token(callback)
|
||||||
|
|
||||||
|
mock_method_decorator.assert_not_called()
|
||||||
|
assert result == callback
|
||||||
|
|
||||||
|
|
||||||
|
def test_refresh_oidc_access_token_storing_refresh_token_enabled(settings):
|
||||||
|
"""The method_decorator must not be called when OIDC_STORE_REFRESH_TOKEN is False."""
|
||||||
|
|
||||||
|
settings.OIDC_STORE_REFRESH_TOKEN = True
|
||||||
|
|
||||||
|
callback = mock.MagicMock()
|
||||||
|
|
||||||
|
with mock.patch.object(utils, "method_decorator") as mock_method_decorator:
|
||||||
|
utils.conditional_refresh_oidc_token(callback)
|
||||||
|
|
||||||
|
mock_method_decorator.assert_called_with(utils.refresh_oidc_access_token)
|
||||||
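Given only what these two tests assert, core.api.utils.conditional_refresh_oidc_token presumably looks something like the sketch below; the refresh_oidc_access_token decorator is assumed to be defined in the same module (a placeholder stands in for it here), and the real implementation may differ.

# Hedged reconstruction from the assertions above, not the project's confirmed code.
from django.conf import settings
from django.utils.decorators import method_decorator


def refresh_oidc_access_token(view_method):
    """Placeholder for the real decorator that refreshes the OIDC access token."""
    return view_method


def conditional_refresh_oidc_token(view_method):
    """Wrap the view with refresh_oidc_access_token only when refresh tokens are stored."""
    if not settings.OIDC_STORE_REFRESH_TOKEN:
        return view_method  # nothing to refresh, leave the view untouched
    return method_decorator(refresh_oidc_access_token)(view_method)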
@@ -155,6 +155,7 @@ def test_models_documents_get_abilities_forbidden(
|
|||||||
expected_abilities = {
|
expected_abilities = {
|
||||||
"accesses_manage": False,
|
"accesses_manage": False,
|
||||||
"accesses_view": False,
|
"accesses_view": False,
|
||||||
|
"ai_proxy": False,
|
||||||
"ai_transform": False,
|
"ai_transform": False,
|
||||||
"ai_translate": False,
|
"ai_translate": False,
|
||||||
"attachment_upload": False,
|
"attachment_upload": False,
|
||||||
@@ -188,6 +189,7 @@ def test_models_documents_get_abilities_forbidden(
|
|||||||
"versions_destroy": False,
|
"versions_destroy": False,
|
||||||
"versions_list": False,
|
"versions_list": False,
|
||||||
"versions_retrieve": False,
|
"versions_retrieve": False,
|
||||||
|
"search": False,
|
||||||
}
|
}
|
||||||
nb_queries = 1 if is_authenticated else 0
|
nb_queries = 1 if is_authenticated else 0
|
||||||
with django_assert_num_queries(nb_queries):
|
with django_assert_num_queries(nb_queries):
|
||||||
@@ -220,6 +222,7 @@ def test_models_documents_get_abilities_reader(
|
|||||||
expected_abilities = {
|
expected_abilities = {
|
||||||
"accesses_manage": False,
|
"accesses_manage": False,
|
||||||
"accesses_view": False,
|
"accesses_view": False,
|
||||||
|
"ai_proxy": False,
|
||||||
"ai_transform": False,
|
"ai_transform": False,
|
||||||
"ai_translate": False,
|
"ai_translate": False,
|
||||||
"attachment_upload": False,
|
"attachment_upload": False,
|
||||||
@@ -253,6 +256,7 @@ def test_models_documents_get_abilities_reader(
|
|||||||
"versions_destroy": False,
|
"versions_destroy": False,
|
||||||
"versions_list": False,
|
"versions_list": False,
|
||||||
"versions_retrieve": False,
|
"versions_retrieve": False,
|
||||||
|
"search": True,
|
||||||
}
|
}
|
||||||
nb_queries = 1 if is_authenticated else 0
|
nb_queries = 1 if is_authenticated else 0
|
||||||
with django_assert_num_queries(nb_queries):
|
with django_assert_num_queries(nb_queries):
|
||||||
@@ -290,6 +294,7 @@ def test_models_documents_get_abilities_commenter(
|
|||||||
expected_abilities = {
|
expected_abilities = {
|
||||||
"accesses_manage": False,
|
"accesses_manage": False,
|
||||||
"accesses_view": False,
|
"accesses_view": False,
|
||||||
|
"ai_proxy": False,
|
||||||
"ai_transform": False,
|
"ai_transform": False,
|
||||||
"ai_translate": False,
|
"ai_translate": False,
|
||||||
"attachment_upload": False,
|
"attachment_upload": False,
|
||||||
@@ -323,6 +328,7 @@ def test_models_documents_get_abilities_commenter(
|
|||||||
"versions_destroy": False,
|
"versions_destroy": False,
|
||||||
"versions_list": False,
|
"versions_list": False,
|
||||||
"versions_retrieve": False,
|
"versions_retrieve": False,
|
||||||
|
"search": True,
|
||||||
}
|
}
|
||||||
nb_queries = 1 if is_authenticated else 0
|
nb_queries = 1 if is_authenticated else 0
|
||||||
with django_assert_num_queries(nb_queries):
|
with django_assert_num_queries(nb_queries):
|
||||||
@@ -357,6 +363,7 @@ def test_models_documents_get_abilities_editor(
|
|||||||
expected_abilities = {
|
expected_abilities = {
|
||||||
"accesses_manage": False,
|
"accesses_manage": False,
|
||||||
"accesses_view": False,
|
"accesses_view": False,
|
||||||
|
"ai_proxy": is_authenticated,
|
||||||
"ai_transform": is_authenticated,
|
"ai_transform": is_authenticated,
|
||||||
"ai_translate": is_authenticated,
|
"ai_translate": is_authenticated,
|
||||||
"attachment_upload": True,
|
"attachment_upload": True,
|
||||||
@@ -390,6 +397,7 @@ def test_models_documents_get_abilities_editor(
|
|||||||
"versions_destroy": False,
|
"versions_destroy": False,
|
||||||
"versions_list": False,
|
"versions_list": False,
|
||||||
"versions_retrieve": False,
|
"versions_retrieve": False,
|
||||||
|
"search": True,
|
||||||
}
|
}
|
||||||
nb_queries = 1 if is_authenticated else 0
|
nb_queries = 1 if is_authenticated else 0
|
||||||
with django_assert_num_queries(nb_queries):
|
with django_assert_num_queries(nb_queries):
|
||||||
@@ -413,6 +421,7 @@ def test_models_documents_get_abilities_owner(django_assert_num_queries):
|
|||||||
expected_abilities = {
|
expected_abilities = {
|
||||||
"accesses_manage": True,
|
"accesses_manage": True,
|
||||||
"accesses_view": True,
|
"accesses_view": True,
|
||||||
|
"ai_proxy": True,
|
||||||
"ai_transform": True,
|
"ai_transform": True,
|
||||||
"ai_translate": True,
|
"ai_translate": True,
|
||||||
"attachment_upload": True,
|
"attachment_upload": True,
|
||||||
@@ -446,6 +455,7 @@ def test_models_documents_get_abilities_owner(django_assert_num_queries):
|
|||||||
"versions_destroy": True,
|
"versions_destroy": True,
|
||||||
"versions_list": True,
|
"versions_list": True,
|
||||||
"versions_retrieve": True,
|
"versions_retrieve": True,
|
||||||
|
"search": True,
|
||||||
}
|
}
|
||||||
with django_assert_num_queries(1):
|
with django_assert_num_queries(1):
|
||||||
assert document.get_abilities(user) == expected_abilities
|
assert document.get_abilities(user) == expected_abilities
|
||||||
@@ -455,6 +465,7 @@ def test_models_documents_get_abilities_owner(django_assert_num_queries):
|
|||||||
assert document.get_abilities(user) == {
|
assert document.get_abilities(user) == {
|
||||||
"accesses_manage": False,
|
"accesses_manage": False,
|
||||||
"accesses_view": False,
|
"accesses_view": False,
|
||||||
|
"ai_proxy": False,
|
||||||
"ai_transform": False,
|
"ai_transform": False,
|
||||||
"ai_translate": False,
|
"ai_translate": False,
|
||||||
"attachment_upload": False,
|
"attachment_upload": False,
|
||||||
@@ -488,6 +499,7 @@ def test_models_documents_get_abilities_owner(django_assert_num_queries):
|
|||||||
"versions_destroy": False,
|
"versions_destroy": False,
|
||||||
"versions_list": False,
|
"versions_list": False,
|
||||||
"versions_retrieve": False,
|
"versions_retrieve": False,
|
||||||
|
"search": False,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -501,6 +513,7 @@ def test_models_documents_get_abilities_administrator(django_assert_num_queries)
|
|||||||
expected_abilities = {
|
expected_abilities = {
|
||||||
"accesses_manage": True,
|
"accesses_manage": True,
|
||||||
"accesses_view": True,
|
"accesses_view": True,
|
||||||
|
"ai_proxy": True,
|
||||||
"ai_transform": True,
|
"ai_transform": True,
|
||||||
"ai_translate": True,
|
"ai_translate": True,
|
||||||
"attachment_upload": True,
|
"attachment_upload": True,
|
||||||
@@ -534,6 +547,7 @@ def test_models_documents_get_abilities_administrator(django_assert_num_queries)
|
|||||||
"versions_destroy": True,
|
"versions_destroy": True,
|
||||||
"versions_list": True,
|
"versions_list": True,
|
||||||
"versions_retrieve": True,
|
"versions_retrieve": True,
|
||||||
|
"search": True,
|
||||||
}
|
}
|
||||||
with django_assert_num_queries(1):
|
with django_assert_num_queries(1):
|
||||||
assert document.get_abilities(user) == expected_abilities
|
assert document.get_abilities(user) == expected_abilities
|
||||||
@@ -557,6 +571,7 @@ def test_models_documents_get_abilities_editor_user(django_assert_num_queries):
|
|||||||
expected_abilities = {
|
expected_abilities = {
|
||||||
"accesses_manage": False,
|
"accesses_manage": False,
|
||||||
"accesses_view": True,
|
"accesses_view": True,
|
||||||
|
"ai_proxy": True,
|
||||||
"ai_transform": True,
|
"ai_transform": True,
|
||||||
"ai_translate": True,
|
"ai_translate": True,
|
||||||
"attachment_upload": True,
|
"attachment_upload": True,
|
||||||
@@ -590,6 +605,7 @@ def test_models_documents_get_abilities_editor_user(django_assert_num_queries):
|
|||||||
"versions_destroy": False,
|
"versions_destroy": False,
|
||||||
"versions_list": True,
|
"versions_list": True,
|
||||||
"versions_retrieve": True,
|
"versions_retrieve": True,
|
||||||
|
"search": True,
|
||||||
}
|
}
|
||||||
with django_assert_num_queries(1):
|
with django_assert_num_queries(1):
|
||||||
assert document.get_abilities(user) == expected_abilities
|
assert document.get_abilities(user) == expected_abilities
|
||||||
@@ -620,6 +636,7 @@ def test_models_documents_get_abilities_reader_user(
|
|||||||
"accesses_view": True,
|
"accesses_view": True,
|
||||||
# If you get your editor rights from the link role and not your access role
|
# If you get your editor rights from the link role and not your access role
|
||||||
# You should not access AI if it's restricted to users with specific access
|
# You should not access AI if it's restricted to users with specific access
|
||||||
|
"ai_proxy": access_from_link and ai_access_setting != "restricted",
|
||||||
"ai_transform": access_from_link and ai_access_setting != "restricted",
|
"ai_transform": access_from_link and ai_access_setting != "restricted",
|
||||||
"ai_translate": access_from_link and ai_access_setting != "restricted",
|
"ai_translate": access_from_link and ai_access_setting != "restricted",
|
||||||
"attachment_upload": access_from_link,
|
"attachment_upload": access_from_link,
|
||||||
@@ -654,6 +671,7 @@ def test_models_documents_get_abilities_reader_user(
|
|||||||
"versions_destroy": False,
|
"versions_destroy": False,
|
||||||
"versions_list": True,
|
"versions_list": True,
|
||||||
"versions_retrieve": True,
|
"versions_retrieve": True,
|
||||||
|
"search": True,
|
||||||
}
|
}
|
||||||
|
|
||||||
with override_settings(AI_ALLOW_REACH_FROM=ai_access_setting):
|
with override_settings(AI_ALLOW_REACH_FROM=ai_access_setting):
|
||||||
@@ -686,6 +704,7 @@ def test_models_documents_get_abilities_commenter_user(
|
|||||||
"accesses_view": True,
|
"accesses_view": True,
|
||||||
# If you get your editor rights from the link role and not your access role
|
# If you get your editor rights from the link role and not your access role
|
||||||
# You should not access AI if it's restricted to users with specific access
|
# You should not access AI if it's restricted to users with specific access
|
||||||
|
"ai_proxy": access_from_link and ai_access_setting != "restricted",
|
||||||
"ai_transform": access_from_link and ai_access_setting != "restricted",
|
"ai_transform": access_from_link and ai_access_setting != "restricted",
|
||||||
"ai_translate": access_from_link and ai_access_setting != "restricted",
|
"ai_translate": access_from_link and ai_access_setting != "restricted",
|
||||||
"attachment_upload": access_from_link,
|
"attachment_upload": access_from_link,
|
||||||
@@ -719,6 +738,7 @@ def test_models_documents_get_abilities_commenter_user(
|
|||||||
"versions_destroy": False,
|
"versions_destroy": False,
|
||||||
"versions_list": True,
|
"versions_list": True,
|
||||||
"versions_retrieve": True,
|
"versions_retrieve": True,
|
||||||
|
"search": True,
|
||||||
}
|
}
|
||||||
|
|
||||||
with override_settings(AI_ALLOW_REACH_FROM=ai_access_setting):
|
with override_settings(AI_ALLOW_REACH_FROM=ai_access_setting):
|
||||||
@@ -747,6 +767,7 @@ def test_models_documents_get_abilities_preset_role(django_assert_num_queries):
|
|||||||
assert abilities == {
|
assert abilities == {
|
||||||
"accesses_manage": False,
|
"accesses_manage": False,
|
||||||
"accesses_view": True,
|
"accesses_view": True,
|
||||||
|
"ai_proxy": False,
|
||||||
"ai_transform": False,
|
"ai_transform": False,
|
||||||
"ai_translate": False,
|
"ai_translate": False,
|
||||||
"attachment_upload": False,
|
"attachment_upload": False,
|
||||||
@@ -780,6 +801,7 @@ def test_models_documents_get_abilities_preset_role(django_assert_num_queries):
|
|||||||
"versions_destroy": False,
|
"versions_destroy": False,
|
||||||
"versions_list": True,
|
"versions_list": True,
|
||||||
"versions_retrieve": True,
|
"versions_retrieve": True,
|
||||||
|
"search": True,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -878,6 +900,7 @@ def test_models_document_get_abilities_ai_access_authenticated(is_authenticated,
|
|||||||
document = factories.DocumentFactory(link_reach=reach, link_role="editor")
|
document = factories.DocumentFactory(link_reach=reach, link_role="editor")
|
||||||
|
|
||||||
abilities = document.get_abilities(user)
|
abilities = document.get_abilities(user)
|
||||||
|
assert abilities["ai_proxy"] is True
|
||||||
assert abilities["ai_transform"] is True
|
assert abilities["ai_transform"] is True
|
||||||
assert abilities["ai_translate"] is True
|
assert abilities["ai_translate"] is True
|
||||||
|
|
||||||
@@ -897,6 +920,7 @@ def test_models_document_get_abilities_ai_access_public(is_authenticated, reach)
|
|||||||
document = factories.DocumentFactory(link_reach=reach, link_role="editor")
|
document = factories.DocumentFactory(link_reach=reach, link_role="editor")
|
||||||
|
|
||||||
abilities = document.get_abilities(user)
|
abilities = document.get_abilities(user)
|
||||||
|
assert abilities["ai_proxy"] == is_authenticated
|
||||||
assert abilities["ai_transform"] == is_authenticated
|
assert abilities["ai_transform"] == is_authenticated
|
||||||
assert abilities["ai_translate"] == is_authenticated
|
assert abilities["ai_translate"] == is_authenticated
|
||||||
|
|
||||||
@@ -1021,7 +1045,10 @@ def test_models_documents__email_invitation__success():
|
|||||||
f"Test Sender (sender@example.com) invited you with the role "editor" "
|
f"Test Sender (sender@example.com) invited you with the role "editor" "
|
||||||
f"on the following document: {document.title}" in email_content
|
f"on the following document: {document.title}" in email_content
|
||||||
)
|
)
|
||||||
assert f"docs/{document.id}/" in email_content
|
assert (
|
||||||
|
f"docs/{document.id}/?utm_source=docssharelink&utm_campaign={document.id}"
|
||||||
|
in email_content
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
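The updated assertions expect the invitation link to carry UTM parameters; a minimal, hypothetical helper for building such a link is sketched below (the real template logic is not shown in this diff).

# Hypothetical helper, for illustration only: append the UTM parameters the
# assertions above look for to the document share link.
from urllib.parse import urlencode


def build_share_link(base_url: str, document_id: str) -> str:
    """Return the invitation URL with tracking parameters."""
    params = urlencode({"utm_source": "docssharelink", "utm_campaign": document_id})
    return f"{base_url}/docs/{document_id}/?{params}"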
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
@@ -1051,10 +1078,18 @@ def test_models_documents__email_invitation__url_app_param(email_url_app):
|
|||||||
|
|
||||||
# Determine expected domain
|
# Determine expected domain
|
||||||
if email_url_app:
|
if email_url_app:
|
||||||
assert f"https://test-example.com/docs/{document.id}/" in email_content
|
expected_url = (
|
||||||
|
f"https://test-example.com/docs/{document.id}/"
|
||||||
|
f"?utm_source=docssharelink&utm_campaign={document.id}"
|
||||||
|
)
|
||||||
|
assert expected_url in email_content
|
||||||
else:
|
else:
|
||||||
# Default Site domain is example.com
|
# Default Site domain is example.com
|
||||||
assert f"example.com/docs/{document.id}/" in email_content
|
expected_url = (
|
||||||
|
f"example.com/docs/{document.id}/"
|
||||||
|
f"?utm_source=docssharelink&utm_campaign={document.id}"
|
||||||
|
)
|
||||||
|
assert expected_url in email_content
|
||||||
|
|
||||||
|
|
||||||
def test_models_documents__email_invitation__success_empty_title():
|
def test_models_documents__email_invitation__success_empty_title():
|
||||||
@@ -1085,7 +1120,10 @@ def test_models_documents__email_invitation__success_empty_title():
|
|||||||
"Test Sender (sender@example.com) invited you with the role "editor" "
|
"Test Sender (sender@example.com) invited you with the role "editor" "
|
||||||
"on the following document: Untitled Document" in email_content
|
"on the following document: Untitled Document" in email_content
|
||||||
)
|
)
|
||||||
assert f"docs/{document.id}/" in email_content
|
assert (
|
||||||
|
f"docs/{document.id}/?utm_source=docssharelink&utm_campaign={document.id}"
|
||||||
|
in email_content
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_models_documents__email_invitation__success_fr():
|
def test_models_documents__email_invitation__success_fr():
|
||||||
@@ -1120,7 +1158,10 @@ def test_models_documents__email_invitation__success_fr():
|
|||||||
f"Test Sender2 (sender2@example.com) vous a invité avec le rôle "propriétaire" "
|
f"Test Sender2 (sender2@example.com) vous a invité avec le rôle "propriétaire" "
|
||||||
f"sur le document suivant : {document.title}" in email_content
|
f"sur le document suivant : {document.title}" in email_content
|
||||||
)
|
)
|
||||||
assert f"docs/{document.id}/" in email_content
|
assert (
|
||||||
|
f"docs/{document.id}/?utm_source=docssharelink&utm_campaign={document.id}"
|
||||||
|
in email_content
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@mock.patch(
|
@mock.patch(
|
||||||
|
|||||||
@@ -79,7 +79,7 @@ def test_models_invitations_is_expired():
|
|||||||
assert expired_invitation.is_expired is True
|
assert expired_invitation.is_expired is True
|
||||||
|
|
||||||
|
|
||||||
def test_models_invitationd_new_userd_convert_invitations_to_accesses():
|
def test_models_invitations_new_user_convert_invitations_to_accesses():
|
||||||
"""
|
"""
|
||||||
Upon creating a new user, invitations linked to the email
|
Upon creating a new user, invitations linked to the email
|
||||||
should be converted to accesses and then deleted.
|
should be converted to accesses and then deleted.
|
||||||
@@ -114,7 +114,7 @@ def test_models_invitationd_new_userd_convert_invitations_to_accesses():
|
|||||||
).exists() # the other invitation remains
|
).exists() # the other invitation remains
|
||||||
|
|
||||||
|
|
||||||
def test_models_invitationd_new_user_filter_expired_invitations():
|
def test_models_invitations_new_user_filter_expired_invitations():
|
||||||
"""
|
"""
|
||||||
Upon creating a new identity, valid invitations should be converted into accesses
|
Upon creating a new identity, valid invitations should be converted into accesses
|
||||||
and expired invitations should remain unchanged.
|
and expired invitations should remain unchanged.
|
||||||
@@ -145,7 +145,7 @@ def test_models_invitationd_new_user_filter_expired_invitations():
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("num_invitations, num_queries", [(0, 3), (1, 7), (20, 7)])
|
@pytest.mark.parametrize("num_invitations, num_queries", [(0, 3), (1, 7), (20, 7)])
|
||||||
def test_models_invitationd_new_userd_user_creation_constant_num_queries(
|
def test_models_invitations_new_user_user_creation_constant_num_queries(
|
||||||
django_assert_num_queries, num_invitations, num_queries
|
django_assert_num_queries, num_invitations, num_queries
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
|
|||||||
src/backend/core/tests/test_models_user_reconciliation.py (new file, 669 lines)
@@ -0,0 +1,669 @@
|
|||||||
|
"""
|
||||||
|
Unit tests for the UserReconciliationCsvImport model
|
||||||
|
"""
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from django.core import mail
|
||||||
|
from django.core.files.base import ContentFile
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from core import factories, models
|
||||||
|
from core.admin import process_reconciliation
|
||||||
|
from core.tasks.user_reconciliation import user_reconciliation_csv_import_job
|
||||||
|
|
||||||
|
pytestmark = pytest.mark.django_db
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(name="import_example_csv_basic")
|
||||||
|
def fixture_import_example_csv_basic():
|
||||||
|
"""
|
||||||
|
Import an example CSV file for user reconciliation
|
||||||
|
and return the created import object.
|
||||||
|
"""
|
||||||
|
# Create users referenced in the CSV
|
||||||
|
for i in range(40, 50):
|
||||||
|
factories.UserFactory(email=f"user.test{i}@example.com")
|
||||||
|
|
||||||
|
example_csv_path = Path(__file__).parent / "data/example_reconciliation_basic.csv"
|
||||||
|
with open(example_csv_path, "rb") as f:
|
||||||
|
csv_file = ContentFile(f.read(), name="example_reconciliation_basic.csv")
|
||||||
|
csv_import = models.UserReconciliationCsvImport(file=csv_file)
|
||||||
|
csv_import.save()
|
||||||
|
|
||||||
|
return csv_import
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(name="import_example_csv_grist_form")
|
||||||
|
def fixture_import_example_csv_grist_form():
|
||||||
|
"""
|
||||||
|
Import an example CSV file for user reconciliation
|
||||||
|
and return the created import object.
|
||||||
|
"""
|
||||||
|
# Create users referenced in the CSV
|
||||||
|
for i in range(10, 40):
|
||||||
|
factories.UserFactory(email=f"user.test{i}@example.com")
|
||||||
|
|
||||||
|
example_csv_path = (
|
||||||
|
Path(__file__).parent / "data/example_reconciliation_grist_form.csv"
|
||||||
|
)
|
||||||
|
with open(example_csv_path, "rb") as f:
|
||||||
|
csv_file = ContentFile(f.read(), name="example_reconciliation_grist_form.csv")
|
||||||
|
csv_import = models.UserReconciliationCsvImport(file=csv_file)
|
||||||
|
csv_import.save()
|
||||||
|
|
||||||
|
return csv_import
|
||||||
|
|
||||||
|
|
||||||
|
def test_user_reconciliation_csv_import_entry_is_created(import_example_csv_basic):
|
||||||
|
"""Test that a UserReconciliationCsvImport entry is created correctly."""
|
||||||
|
assert import_example_csv_basic.status == "pending"
|
||||||
|
assert import_example_csv_basic.file.name.endswith(
|
||||||
|
"example_reconciliation_basic.csv"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_user_reconciliation_csv_import_entry_is_created_grist_form(
|
||||||
|
import_example_csv_grist_form,
|
||||||
|
):
|
||||||
|
"""Test that a UserReconciliationCsvImport entry is created correctly."""
|
||||||
|
assert import_example_csv_grist_form.status == "pending"
|
||||||
|
assert import_example_csv_grist_form.file.name.endswith(
|
||||||
|
"example_reconciliation_grist_form.csv"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_incorrect_csv_format_handling():
|
||||||
|
"""Test that an incorrectly formatted CSV file is handled gracefully."""
|
||||||
|
example_csv_path = (
|
||||||
|
Path(__file__).parent / "data/example_reconciliation_missing_column.csv"
|
||||||
|
)
|
||||||
|
with open(example_csv_path, "rb") as f:
|
||||||
|
csv_file = ContentFile(
|
||||||
|
f.read(), name="example_reconciliation_missing_column.csv"
|
||||||
|
)
|
||||||
|
csv_import = models.UserReconciliationCsvImport(file=csv_file)
|
||||||
|
csv_import.save()
|
||||||
|
|
||||||
|
assert csv_import.status == "pending"
|
||||||
|
|
||||||
|
user_reconciliation_csv_import_job(csv_import.id)
|
||||||
|
csv_import.refresh_from_db()
|
||||||
|
|
||||||
|
assert (
|
||||||
|
"CSV is missing mandatory columns: active_email, inactive_email, id"
|
||||||
|
in csv_import.logs
|
||||||
|
)
|
||||||
|
assert csv_import.status == "error"
|
||||||
|
|
||||||
|
|
||||||
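The error path asserted above implies a header check before any row is processed; a small sketch of such a check, under assumptions about the job's structure, follows.

# Illustrative sketch of the mandatory-column check implied by the log message above.
import csv
import io

MANDATORY_COLUMNS = {"active_email", "inactive_email", "id"}


def missing_mandatory_columns(file_bytes: bytes) -> list[str]:
    """Return the mandatory CSV columns absent from the header, if any."""
    reader = csv.DictReader(io.StringIO(file_bytes.decode("utf-8")))
    return sorted(MANDATORY_COLUMNS - set(reader.fieldnames or []))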
|
def test_incorrect_email_format_handling():
|
||||||
|
"""Test that an incorrectly formatted CSV file is handled gracefully."""
|
||||||
|
example_csv_path = Path(__file__).parent / "data/example_reconciliation_error.csv"
|
||||||
|
with open(example_csv_path, "rb") as f:
|
||||||
|
csv_file = ContentFile(f.read(), name="example_reconciliation_error.csv")
|
||||||
|
csv_import = models.UserReconciliationCsvImport(file=csv_file)
|
||||||
|
csv_import.save()
|
||||||
|
|
||||||
|
assert csv_import.status == "pending"
|
||||||
|
|
||||||
|
user_reconciliation_csv_import_job(csv_import.id)
|
||||||
|
csv_import.refresh_from_db()
|
||||||
|
|
||||||
|
assert "Invalid inactive email address on row 40" in csv_import.logs
|
||||||
|
assert csv_import.status == "done"
|
||||||
|
|
||||||
|
# pylint: disable-next=no-member
|
||||||
|
assert len(mail.outbox) == 1
|
||||||
|
|
||||||
|
# pylint: disable-next=no-member
|
||||||
|
email = mail.outbox[0]
|
||||||
|
|
||||||
|
assert email.to == ["user.test40@example.com"]
|
||||||
|
email_content = " ".join(email.body.split())
|
||||||
|
|
||||||
|
assert "Reconciliation of your Docs accounts not completed" in email_content
|
||||||
|
|
||||||
|
|
||||||
|
def test_incorrect_csv_data_handling_grist_form():
|
||||||
|
"""Test that a CSV file with incorrect data is handled gracefully."""
|
||||||
|
example_csv_path = (
|
||||||
|
Path(__file__).parent / "data/example_reconciliation_grist_form_error.csv"
|
||||||
|
)
|
||||||
|
with open(example_csv_path, "rb") as f:
|
||||||
|
csv_file = ContentFile(
|
||||||
|
f.read(), name="example_reconciliation_grist_form_error.csv"
|
||||||
|
)
|
||||||
|
csv_import = models.UserReconciliationCsvImport(file=csv_file)
|
||||||
|
csv_import.save()
|
||||||
|
|
||||||
|
assert csv_import.status == "pending"
|
||||||
|
|
||||||
|
user_reconciliation_csv_import_job(csv_import.id)
|
||||||
|
csv_import.refresh_from_db()
|
||||||
|
|
||||||
|
assert (
|
||||||
|
"user.test20@example.com set as both active and inactive email"
|
||||||
|
in csv_import.logs
|
||||||
|
)
|
||||||
|
assert csv_import.status == "done"
|
||||||
|
|
||||||
|
|
||||||
|
def test_job_creates_reconciliation_entries(import_example_csv_basic):
|
||||||
|
"""Test that the CSV import job creates UserReconciliation entries."""
|
||||||
|
assert import_example_csv_basic.status == "pending"
|
||||||
|
user_reconciliation_csv_import_job(import_example_csv_basic.id)
|
||||||
|
|
||||||
|
# Verify the job status changed
|
||||||
|
import_example_csv_basic.refresh_from_db()
|
||||||
|
assert import_example_csv_basic.status == "done"
|
||||||
|
assert "Import completed successfully." in import_example_csv_basic.logs
|
||||||
|
assert "6 rows processed." in import_example_csv_basic.logs
|
||||||
|
assert "5 reconciliation entries created." in import_example_csv_basic.logs
|
||||||
|
|
||||||
|
# Verify reconciliation entries were created
|
||||||
|
reconciliations = models.UserReconciliation.objects.all()
|
||||||
|
assert reconciliations.count() == 5
|
||||||
|
|
||||||
|
|
||||||
|
def test_job_does_not_create_duplicated_reconciliation_entries(
|
||||||
|
import_example_csv_basic,
|
||||||
|
):
|
||||||
|
"""Test that the CSV import job doesn't create UserReconciliation entries
|
||||||
|
for source unique IDs that have already been processed."""
|
||||||
|
|
||||||
|
_already_created_entry = models.UserReconciliation.objects.create(
|
||||||
|
active_email="user.test40@example.com",
|
||||||
|
inactive_email="user.test41@example.com",
|
||||||
|
active_email_checked=0,
|
||||||
|
inactive_email_checked=0,
|
||||||
|
status="pending",
|
||||||
|
source_unique_id=1,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert import_example_csv_basic.status == "pending"
|
||||||
|
user_reconciliation_csv_import_job(import_example_csv_basic.id)
|
||||||
|
|
||||||
|
# Verify the job status changed
|
||||||
|
import_example_csv_basic.refresh_from_db()
|
||||||
|
assert import_example_csv_basic.status == "done"
|
||||||
|
assert "Import completed successfully." in import_example_csv_basic.logs
|
||||||
|
assert "6 rows processed." in import_example_csv_basic.logs
|
||||||
|
assert "4 reconciliation entries created." in import_example_csv_basic.logs
|
||||||
|
assert "1 rows were already processed." in import_example_csv_basic.logs
|
||||||
|
|
||||||
|
# Verify the correct number of reconciliation entries were created
|
||||||
|
reconciliations = models.UserReconciliation.objects.all()
|
||||||
|
assert reconciliations.count() == 5
|
||||||
|
|
||||||
|
|
||||||
|
def test_job_creates_reconciliation_entries_grist_form(import_example_csv_grist_form):
|
||||||
|
"""Test that the CSV import job creates UserReconciliation entries."""
|
||||||
|
assert import_example_csv_grist_form.status == "pending"
|
||||||
|
user_reconciliation_csv_import_job(import_example_csv_grist_form.id)
|
||||||
|
|
||||||
|
# Verify the job status changed
|
||||||
|
import_example_csv_grist_form.refresh_from_db()
|
||||||
|
assert "Import completed successfully" in import_example_csv_grist_form.logs
|
||||||
|
assert import_example_csv_grist_form.status == "done"
|
||||||
|
|
||||||
|
# Verify reconciliation entries were created
|
||||||
|
reconciliations = models.UserReconciliation.objects.all()
|
||||||
|
assert reconciliations.count() == 9
|
||||||
|
|
||||||
|
|
||||||
|
def test_csv_import_reconciliation_data_is_correct(import_example_csv_basic):
|
||||||
|
"""Test that the data in created UserReconciliation entries matches the CSV."""
|
||||||
|
user_reconciliation_csv_import_job(import_example_csv_basic.id)
|
||||||
|
|
||||||
|
reconciliations = models.UserReconciliation.objects.order_by("created_at")
|
||||||
|
first_entry = reconciliations.first()
|
||||||
|
|
||||||
|
assert first_entry.active_email == "user.test40@example.com"
|
||||||
|
assert first_entry.inactive_email == "user.test41@example.com"
|
||||||
|
assert first_entry.active_email_checked is False
|
||||||
|
assert first_entry.inactive_email_checked is False
|
||||||
|
|
||||||
|
for rec in reconciliations:
|
||||||
|
assert rec.status == "ready"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(name="user_reconciliation_users_and_docs")
|
||||||
|
def fixture_user_reconciliation_users_and_docs():
|
||||||
|
"""Fixture to create two users with overlapping document accesses
|
||||||
|
for reconciliation tests."""
|
||||||
|
user_1 = factories.UserFactory(email="user.test1@example.com")
|
||||||
|
user_2 = factories.UserFactory(email="user.test2@example.com")
|
||||||
|
|
||||||
|
# Create 10 distinct document accesses for each user
|
||||||
|
userdocs_u1 = [
|
||||||
|
factories.UserDocumentAccessFactory(user=user_1, role="editor")
|
||||||
|
for _ in range(10)
|
||||||
|
]
|
||||||
|
userdocs_u2 = [
|
||||||
|
factories.UserDocumentAccessFactory(user=user_2, role="editor")
|
||||||
|
for _ in range(10)
|
||||||
|
]
|
||||||
|
|
||||||
|
# Make the first 3 documents of each list shared with the other user
|
||||||
|
# with a lower role
|
||||||
|
for ud in userdocs_u1[0:3]:
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
user=user_2, document=ud.document, role="reader"
|
||||||
|
)
|
||||||
|
|
||||||
|
for ud in userdocs_u2[0:3]:
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
user=user_1, document=ud.document, role="reader"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Make the next 3 documents of each list shared with the other user
|
||||||
|
# with a higher role
|
||||||
|
for ud in userdocs_u1[3:6]:
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
user=user_2, document=ud.document, role="owner"
|
||||||
|
)
|
||||||
|
|
||||||
|
for ud in userdocs_u2[3:6]:
|
||||||
|
factories.UserDocumentAccessFactory(
|
||||||
|
user=user_1, document=ud.document, role="owner"
|
||||||
|
)
|
||||||
|
|
||||||
|
return (user_1, user_2, userdocs_u1, userdocs_u2)
|
||||||
|
|
||||||
|
|
||||||
|
def test_user_reconciliation_is_created(user_reconciliation_users_and_docs):
|
||||||
|
"""Test that a UserReconciliation entry can be created and saved."""
|
||||||
|
user_1, user_2, _userdocs_u1, _userdocs_u2 = user_reconciliation_users_and_docs
|
||||||
|
rec = models.UserReconciliation.objects.create(
|
||||||
|
active_email=user_1.email,
|
||||||
|
inactive_email=user_2.email,
|
||||||
|
active_email_checked=False,
|
||||||
|
inactive_email_checked=True,
|
||||||
|
active_email_confirmation_id=uuid.uuid4(),
|
||||||
|
inactive_email_confirmation_id=uuid.uuid4(),
|
||||||
|
status="pending",
|
||||||
|
)
|
||||||
|
|
||||||
|
rec.save()
|
||||||
|
assert rec.status == "ready"
|
||||||
|
|
||||||
|
|
||||||
|
def test_user_reconciliation_verification_emails_are_sent(
|
||||||
|
user_reconciliation_users_and_docs,
|
||||||
|
):
|
||||||
|
"""Test that both UserReconciliation verification emails are sent."""
|
||||||
|
user_1, user_2, _userdocs_u1, _userdocs_u2 = user_reconciliation_users_and_docs
|
||||||
|
rec = models.UserReconciliation.objects.create(
|
||||||
|
active_email=user_1.email,
|
||||||
|
inactive_email=user_2.email,
|
||||||
|
active_email_checked=False,
|
||||||
|
inactive_email_checked=False,
|
||||||
|
active_email_confirmation_id=uuid.uuid4(),
|
||||||
|
inactive_email_confirmation_id=uuid.uuid4(),
|
||||||
|
status="pending",
|
||||||
|
)
|
||||||
|
|
||||||
|
rec.save()
|
||||||
|
|
||||||
|
# pylint: disable-next=no-member
|
||||||
|
assert len(mail.outbox) == 2
|
||||||
|
|
||||||
|
# pylint: disable-next=no-member
|
||||||
|
email_1 = mail.outbox[0]
|
||||||
|
|
||||||
|
assert email_1.to == [user_1.email]
|
||||||
|
email_1_content = " ".join(email_1.body.split())
|
||||||
|
|
||||||
|
assert (
|
||||||
|
"You have requested a reconciliation of your user accounts on Docs."
|
||||||
|
in email_1_content
|
||||||
|
)
|
||||||
|
active_email_confirmation_id = rec.active_email_confirmation_id
|
||||||
|
inactive_email_confirmation_id = rec.inactive_email_confirmation_id
|
||||||
|
assert (
|
||||||
|
f"user-reconciliations/active/{active_email_confirmation_id}/"
|
||||||
|
in email_1_content
|
||||||
|
)
|
||||||
|
|
||||||
|
# pylint: disable-next=no-member
|
||||||
|
email_2 = mail.outbox[1]
|
||||||
|
|
||||||
|
assert email_2.to == [user_2.email]
|
||||||
|
email_2_content = " ".join(email_2.body.split())
|
||||||
|
|
||||||
|
assert (
|
||||||
|
"You have requested a reconciliation of your user accounts on Docs."
|
||||||
|
in email_2_content
|
||||||
|
)
|
||||||
|
|
||||||
|
assert (
|
||||||
|
f"user-reconciliations/inactive/{inactive_email_confirmation_id}/"
|
||||||
|
in email_2_content
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_user_reconciliation_only_starts_if_checks_are_made(
|
||||||
|
user_reconciliation_users_and_docs,
|
||||||
|
):
|
||||||
|
"""Test that the admin action does not process entries
|
||||||
|
unless both email checks are confirmed.
|
||||||
|
"""
|
||||||
|
user_1, user_2, _userdocs_u1, _userdocs_u2 = user_reconciliation_users_and_docs
|
||||||
|
|
||||||
|
# Create a reconciliation entry where only one email has been checked
|
||||||
|
rec = models.UserReconciliation.objects.create(
|
||||||
|
active_email=user_1.email,
|
||||||
|
inactive_email=user_2.email,
|
||||||
|
active_email_checked=True,
|
||||||
|
inactive_email_checked=False,
|
||||||
|
status="pending",
|
||||||
|
)
|
||||||
|
rec.save()
|
||||||
|
|
||||||
|
# Capture counts before running admin action
|
||||||
|
accesses_before_active = models.DocumentAccess.objects.filter(user=user_1).count()
|
||||||
|
accesses_before_inactive = models.DocumentAccess.objects.filter(user=user_2).count()
|
||||||
|
users_active_before = (user_1.is_active, user_2.is_active)
|
||||||
|
|
||||||
|
# Call the admin action with the queryset containing our single rec
|
||||||
|
qs = models.UserReconciliation.objects.filter(id=rec.id)
|
||||||
|
process_reconciliation(None, None, qs)
|
||||||
|
|
||||||
|
# Reload from DB and assert nothing was processed (checks prevent processing)
|
||||||
|
rec.refresh_from_db()
|
||||||
|
user_1.refresh_from_db()
|
||||||
|
user_2.refresh_from_db()
|
||||||
|
|
||||||
|
assert rec.status == "ready"
|
||||||
|
assert (
|
||||||
|
models.DocumentAccess.objects.filter(user=user_1).count()
|
||||||
|
== accesses_before_active
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
models.DocumentAccess.objects.filter(user=user_2).count()
|
||||||
|
== accesses_before_inactive
|
||||||
|
)
|
||||||
|
assert (user_1.is_active, user_2.is_active) == users_active_before
|
||||||
|
|
||||||
|
|
||||||
|
def test_process_reconciliation_updates_accesses(
|
||||||
|
user_reconciliation_users_and_docs,
|
||||||
|
):
|
||||||
|
"""Test that accesses are consolidated on the active user."""
|
||||||
|
user_1, user_2, userdocs_u1, userdocs_u2 = user_reconciliation_users_and_docs
|
||||||
|
|
||||||
|
u1_2 = userdocs_u1[2]
|
||||||
|
u1_5 = userdocs_u1[5]
|
||||||
|
u2doc1 = userdocs_u2[1].document
|
||||||
|
u2doc5 = userdocs_u2[5].document
|
||||||
|
|
||||||
|
rec = models.UserReconciliation.objects.create(
|
||||||
|
active_email=user_1.email,
|
||||||
|
inactive_email=user_2.email,
|
||||||
|
active_user=user_1,
|
||||||
|
inactive_user=user_2,
|
||||||
|
active_email_checked=True,
|
||||||
|
inactive_email_checked=True,
|
||||||
|
status="ready",
|
||||||
|
)
|
||||||
|
|
||||||
|
qs = models.UserReconciliation.objects.filter(id=rec.id)
|
||||||
|
process_reconciliation(None, None, qs)
|
||||||
|
|
||||||
|
rec.refresh_from_db()
|
||||||
|
user_1.refresh_from_db()
|
||||||
|
user_2.refresh_from_db()
|
||||||
|
u1_2.refresh_from_db(
|
||||||
|
from_queryset=models.DocumentAccess.objects.select_for_update()
|
||||||
|
)
|
||||||
|
u1_5.refresh_from_db(
|
||||||
|
from_queryset=models.DocumentAccess.objects.select_for_update()
|
||||||
|
)
|
||||||
|
|
||||||
|
# After processing, inactive user should have no accesses
|
||||||
|
# and active user should have one access per document in the union
|
||||||
|
# with the highest role
|
||||||
|
assert rec.status == "done"
|
||||||
|
assert "Requested update for 10 DocumentAccess items" in rec.logs
|
||||||
|
assert "and deletion for 12 DocumentAccess items" in rec.logs
|
||||||
|
assert models.DocumentAccess.objects.filter(user=user_2).count() == 0
|
||||||
|
assert models.DocumentAccess.objects.filter(user=user_1).count() == 20
|
||||||
|
assert u1_2.role == "editor"
|
||||||
|
assert u1_5.role == "owner"
|
||||||
|
|
||||||
|
assert (
|
||||||
|
models.DocumentAccess.objects.filter(user=user_1, document=u2doc1).first().role
|
||||||
|
== "editor"
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
models.DocumentAccess.objects.filter(user=user_1, document=u2doc5).first().role
|
||||||
|
== "owner"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert user_1.is_active is True
|
||||||
|
assert user_2.is_active is False
|
||||||
|
|
||||||
|
# pylint: disable-next=no-member
|
||||||
|
assert len(mail.outbox) == 1
|
||||||
|
|
||||||
|
# pylint: disable-next=no-member
|
||||||
|
email = mail.outbox[0]
|
||||||
|
|
||||||
|
assert email.to == [user_1.email]
|
||||||
|
email_content = " ".join(email.body.split())
|
||||||
|
|
||||||
|
assert "Your accounts have been merged" in email_content
|
||||||
|
|
||||||
|
|
||||||
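The counts and roles asserted above describe a merge of DocumentAccess rows onto the active user, keeping the most permissive role per document. The sketch below is one way to express that; it is not the actual process_reconciliation admin action, and the role ordering is an assumption.

# Sketch under assumptions: consolidate the inactive user's accesses onto the active
# user, keeping the highest role when both users already share a document.
from core import models

ROLE_PRIORITY = {"reader": 0, "commenter": 1, "editor": 2, "administrator": 3, "owner": 4}


def merge_document_accesses(active_user, inactive_user):
    """Transfer or deduplicate the inactive user's DocumentAccess rows."""
    active_by_doc = {
        access.document_id: access
        for access in models.DocumentAccess.objects.filter(user=active_user)
    }
    to_update, to_delete = [], []
    for access in models.DocumentAccess.objects.filter(user=inactive_user):
        existing = active_by_doc.get(access.document_id)
        if existing is None:
            access.user = active_user  # plain transfer, role unchanged
            to_update.append(access)
        elif ROLE_PRIORITY[access.role] > ROLE_PRIORITY[existing.role]:
            existing.role = access.role  # keep the most permissive role
            to_update.append(existing)
            to_delete.append(access.pk)
        else:
            to_delete.append(access.pk)  # duplicate with a lower or equal role
    models.DocumentAccess.objects.bulk_update(to_update, ["user", "role"])
    models.DocumentAccess.objects.filter(pk__in=to_delete).delete()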
|
def test_process_reconciliation_updates_linktraces(
|
||||||
|
user_reconciliation_users_and_docs,
|
||||||
|
):
|
||||||
|
"""Test that linktraces are consolidated on the active user."""
|
||||||
|
user_1, user_2, userdocs_u1, userdocs_u2 = user_reconciliation_users_and_docs
|
||||||
|
|
||||||
|
u1_2 = userdocs_u1[2]
|
||||||
|
u1_5 = userdocs_u1[5]
|
||||||
|
|
||||||
|
doc_both = u1_2.document
|
||||||
|
models.LinkTrace.objects.create(document=doc_both, user=user_1)
|
||||||
|
models.LinkTrace.objects.create(document=doc_both, user=user_2)
|
||||||
|
|
||||||
|
doc_inactive_only = userdocs_u2[4].document
|
||||||
|
models.LinkTrace.objects.create(
|
||||||
|
document=doc_inactive_only, user=user_2, is_masked=True
|
||||||
|
)
|
||||||
|
|
||||||
|
doc_active_only = userdocs_u1[4].document
|
||||||
|
models.LinkTrace.objects.create(document=doc_active_only, user=user_1)
|
||||||
|
|
||||||
|
rec = models.UserReconciliation.objects.create(
|
||||||
|
active_email=user_1.email,
|
||||||
|
inactive_email=user_2.email,
|
||||||
|
active_user=user_1,
|
||||||
|
inactive_user=user_2,
|
||||||
|
active_email_checked=True,
|
||||||
|
inactive_email_checked=True,
|
||||||
|
status="ready",
|
||||||
|
)
|
||||||
|
|
||||||
|
qs = models.UserReconciliation.objects.filter(id=rec.id)
|
||||||
|
process_reconciliation(None, None, qs)
|
||||||
|
|
||||||
|
rec.refresh_from_db()
|
||||||
|
user_1.refresh_from_db()
|
||||||
|
user_2.refresh_from_db()
|
||||||
|
u1_2.refresh_from_db(
|
||||||
|
from_queryset=models.DocumentAccess.objects.select_for_update()
|
||||||
|
)
|
||||||
|
u1_5.refresh_from_db(
|
||||||
|
from_queryset=models.DocumentAccess.objects.select_for_update()
|
||||||
|
)
|
||||||
|
|
||||||
|
# Inactive user should have no linktraces
|
||||||
|
assert models.LinkTrace.objects.filter(user=user_2).count() == 0
|
||||||
|
|
||||||
|
# doc_both should have a single LinkTrace owned by the active user
|
||||||
|
assert (
|
||||||
|
models.LinkTrace.objects.filter(user=user_1, document=doc_both).exists() is True
|
||||||
|
)
|
||||||
|
assert models.LinkTrace.objects.filter(user=user_1, document=doc_both).count() == 1
|
||||||
|
assert (
|
||||||
|
models.LinkTrace.objects.filter(user=user_2, document=doc_both).exists()
|
||||||
|
is False
|
||||||
|
)
|
||||||
|
|
||||||
|
# doc_inactive_only should now be linked to active user and preserve is_masked
|
||||||
|
lt = models.LinkTrace.objects.filter(
|
||||||
|
user=user_1, document=doc_inactive_only
|
||||||
|
).first()
|
||||||
|
assert lt is not None
|
||||||
|
assert lt.is_masked is True
|
||||||
|
|
||||||
|
# doc_active_only should still belong to active user
|
||||||
|
assert models.LinkTrace.objects.filter(
|
||||||
|
user=user_1, document=doc_active_only
|
||||||
|
).exists()
|
||||||
|
|
||||||
|
|
||||||
|
def test_process_reconciliation_updates_threads_comments_reactions(
|
||||||
|
user_reconciliation_users_and_docs,
|
||||||
|
):
|
||||||
|
"""Test that threads, comments and reactions are transferred/deduplicated
|
||||||
|
on reconciliation."""
|
||||||
|
user_1, user_2, _userdocs_u1, userdocs_u2 = user_reconciliation_users_and_docs
|
||||||
|
|
||||||
|
# Use a document from the inactive user's set
|
||||||
|
document = userdocs_u2[0].document
|
||||||
|
|
||||||
|
# Thread and comment created by inactive user -> should be moved to active
|
||||||
|
thread = factories.ThreadFactory(document=document, creator=user_2)
|
||||||
|
comment = factories.CommentFactory(thread=thread, user=user_2)
|
||||||
|
|
||||||
|
# Reaction where only inactive user reacted -> should be moved to active user
|
||||||
|
reaction_inactive_only = factories.ReactionFactory(comment=comment, users=[user_2])
|
||||||
|
|
||||||
|
# Reaction where both users reacted -> inactive user's participation should be removed
|
||||||
|
thread2 = factories.ThreadFactory(document=document, creator=user_1)
|
||||||
|
comment2 = factories.CommentFactory(thread=thread2, user=user_1)
|
||||||
|
reaction_both = factories.ReactionFactory(comment=comment2, users=[user_1, user_2])
|
||||||
|
|
||||||
|
# Reaction where only active user reacted -> unchanged
|
||||||
|
thread3 = factories.ThreadFactory(document=document, creator=user_1)
|
||||||
|
comment3 = factories.CommentFactory(thread=thread3, user=user_1)
|
||||||
|
reaction_active_only = factories.ReactionFactory(comment=comment3, users=[user_1])
|
||||||
|
|
||||||
|
rec = models.UserReconciliation.objects.create(
|
||||||
|
active_email=user_1.email,
|
||||||
|
inactive_email=user_2.email,
|
||||||
|
active_user=user_1,
|
||||||
|
inactive_user=user_2,
|
||||||
|
active_email_checked=True,
|
||||||
|
inactive_email_checked=True,
|
||||||
|
status="ready",
|
||||||
|
)
|
||||||
|
|
||||||
|
qs = models.UserReconciliation.objects.filter(id=rec.id)
|
||||||
|
process_reconciliation(None, None, qs)
|
||||||
|
|
||||||
|
# Refresh objects
|
||||||
|
thread.refresh_from_db()
|
||||||
|
comment.refresh_from_db()
|
||||||
|
reaction_inactive_only.refresh_from_db()
|
||||||
|
reaction_both.refresh_from_db()
|
||||||
|
reaction_active_only.refresh_from_db()
|
||||||
|
|
||||||
|
# Thread and comment creator should now be the active user
|
||||||
|
assert thread.creator == user_1
|
||||||
|
assert comment.user == user_1
|
||||||
|
|
||||||
|
# reaction_inactive_only: inactive user's participation should be removed and
|
||||||
|
# active user's participation added
|
||||||
|
reaction_inactive_only.refresh_from_db()
|
||||||
|
assert not reaction_inactive_only.users.filter(pk=user_2.pk).exists()
|
||||||
|
assert reaction_inactive_only.users.filter(pk=user_1.pk).exists()
|
||||||
|
|
||||||
|
# reaction_both: should end up with only active user's participation
|
||||||
|
assert reaction_both.users.filter(pk=user_2.pk).exists() is False
|
||||||
|
assert reaction_both.users.filter(pk=user_1.pk).exists() is True
|
||||||
|
|
||||||
|
# reaction_active_only should still have active user's participation
|
||||||
|
assert reaction_active_only.users.filter(pk=user_1.pk).exists()
|
||||||
|
|
||||||
|
|
||||||
|
def test_process_reconciliation_updates_favorites(
|
||||||
|
user_reconciliation_users_and_docs,
|
||||||
|
):
|
||||||
|
"""Test that favorites are consolidated on the active user."""
|
||||||
|
user_1, user_2, userdocs_u1, userdocs_u2 = user_reconciliation_users_and_docs
|
||||||
|
|
||||||
|
u1_2 = userdocs_u1[2]
|
||||||
|
u1_5 = userdocs_u1[5]
|
||||||
|
|
||||||
|
doc_both = u1_2.document
|
||||||
|
models.DocumentFavorite.objects.create(document=doc_both, user=user_1)
|
||||||
|
models.DocumentFavorite.objects.create(document=doc_both, user=user_2)
|
||||||
|
|
||||||
|
doc_inactive_only = userdocs_u2[4].document
|
||||||
|
models.DocumentFavorite.objects.create(document=doc_inactive_only, user=user_2)
|
||||||
|
|
||||||
|
doc_active_only = userdocs_u1[4].document
|
||||||
|
models.DocumentFavorite.objects.create(document=doc_active_only, user=user_1)
|
||||||
|
|
||||||
|
rec = models.UserReconciliation.objects.create(
|
||||||
|
active_email=user_1.email,
|
||||||
|
inactive_email=user_2.email,
|
||||||
|
active_user=user_1,
|
||||||
|
inactive_user=user_2,
|
||||||
|
active_email_checked=True,
|
||||||
|
inactive_email_checked=True,
|
||||||
|
status="ready",
|
||||||
|
)
|
||||||
|
|
||||||
|
qs = models.UserReconciliation.objects.filter(id=rec.id)
|
||||||
|
process_reconciliation(None, None, qs)
|
||||||
|
|
||||||
|
rec.refresh_from_db()
|
||||||
|
user_1.refresh_from_db()
|
||||||
|
user_2.refresh_from_db()
|
||||||
|
u1_2.refresh_from_db(
|
||||||
|
from_queryset=models.DocumentAccess.objects.select_for_update()
|
||||||
|
)
|
||||||
|
u1_5.refresh_from_db(
|
||||||
|
from_queryset=models.DocumentAccess.objects.select_for_update()
|
||||||
|
)
|
||||||
|
|
||||||
|
# Inactive user should have no document favorites
|
||||||
|
assert models.DocumentFavorite.objects.filter(user=user_2).count() == 0
|
||||||
|
|
||||||
|
# doc_both should have a single DocumentFavorite owned by the active user
|
||||||
|
assert (
|
||||||
|
models.DocumentFavorite.objects.filter(user=user_1, document=doc_both).exists()
|
||||||
|
is True
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
models.DocumentFavorite.objects.filter(user=user_1, document=doc_both).count()
|
||||||
|
== 1
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
models.DocumentFavorite.objects.filter(user=user_2, document=doc_both).exists()
|
||||||
|
is False
|
||||||
|
)
|
||||||
|
|
||||||
|
# doc_inactive_only should now be linked to active user
|
||||||
|
assert (
|
||||||
|
models.DocumentFavorite.objects.filter(
|
||||||
|
user=user_2, document=doc_inactive_only
|
||||||
|
).count()
|
||||||
|
== 0
|
||||||
|
)
|
||||||
|
assert models.DocumentFavorite.objects.filter(
|
||||||
|
user=user_1, document=doc_inactive_only
|
||||||
|
).exists()
|
||||||
|
|
||||||
|
# doc_active_only should still belong to active user
|
||||||
|
assert models.DocumentFavorite.objects.filter(
|
||||||
|
user=user_1, document=doc_active_only
|
||||||
|
).exists()
|
||||||
@@ -2,9 +2,12 @@
 Unit tests for the User model
 """

-from unittest import mock
+import uuid
+from concurrent.futures import ThreadPoolExecutor
+from unittest.mock import patch

 from django.core.exceptions import ValidationError
+from django.test.utils import override_settings

 import pytest
@@ -26,26 +29,6 @@ def test_models_users_id_unique():
         factories.UserFactory(id=user.id)


-def test_models_users_send_mail_main_existing():
-    """The "email_user' method should send mail to the user's email address."""
-    user = factories.UserFactory()
-
-    with mock.patch("django.core.mail.send_mail") as mock_send:
-        user.email_user("my subject", "my message")
-
-    mock_send.assert_called_once_with("my subject", "my message", None, [user.email])
-
-
-def test_models_users_send_mail_main_missing():
-    """The "email_user' method should fail if the user has no email address."""
-    user = factories.UserFactory(email=None)
-
-    with pytest.raises(ValueError) as excinfo:
-        user.email_user("my subject", "my message")
-
-    assert str(excinfo.value) == "User has no email address."
-
-
 @pytest.mark.parametrize(
     "sub,is_valid",
     [
@@ -96,3 +79,263 @@ def test_modes_users_convert_valid_invitations():
         id=invitation_other_document.id
     ).exists()
     assert models.Invitation.objects.filter(id=other_email_invitation.id).exists()
+
+
+@override_settings(USER_ONBOARDING_DOCUMENTS=[])
+def test_models_users_handle_onboarding_documents_access_empty_setting():
+    """
+    When USER_ONBOARDING_DOCUMENTS is empty, no accesses should be created.
+    """
+    user = factories.UserFactory()
+    assert models.DocumentAccess.objects.filter(user=user).count() == 0
+
+
+def test_models_users_handle_onboarding_document_link_trace_with_single_document():
+    """
+    When USER_ONBOARDING_DOCUMENTS has a valid document ID,
+    a LinkTrace should be created for the new user.
+
+    The document should be pinned as a favorite for the user.
+    """
+    document = factories.DocumentFactory(link_reach=models.LinkReachChoices.PUBLIC)
+
+    with override_settings(USER_ONBOARDING_DOCUMENTS=[str(document.id)]):
+        user = factories.UserFactory()
+
+    assert models.LinkTrace.objects.filter(user=user, document=document).count() == 1
+
+    user_favorites = models.DocumentFavorite.objects.filter(user=user)
+    assert user_favorites.count() == 1
+    assert user_favorites.filter(document=document).exists()
+
+
+def test_models_users_handle_onboarding_documents_access_with_multiple_documents():
+    """
+    When USER_ONBOARDING_DOCUMENTS has multiple valid document IDs,
+    accesses should be created for all documents.
+
+    All accesses should have the READER role.
+    All documents should be pinned as favorites for the user.
+    """
+    document1 = factories.DocumentFactory(
+        title="Document 1", link_reach=models.LinkReachChoices.PUBLIC
+    )
+    document2 = factories.DocumentFactory(
+        title="Document 2", link_reach=models.LinkReachChoices.AUTHENTICATED
+    )
+    document3 = factories.DocumentFactory(
+        title="Document 3", link_reach=models.LinkReachChoices.PUBLIC
+    )
+
+    with override_settings(
+        USER_ONBOARDING_DOCUMENTS=[
+            str(document1.id),
+            str(document2.id),
+            str(document3.id),
+        ]
+    ):
+        user = factories.UserFactory()
+
+    link_traces = models.LinkTrace.objects.filter(user=user)
+    assert link_traces.count() == 3
+
+    assert models.LinkTrace.objects.filter(user=user, document=document1).exists()
+    assert models.LinkTrace.objects.filter(user=user, document=document2).exists()
+    assert models.LinkTrace.objects.filter(user=user, document=document3).exists()
+
+    user_favorites = models.DocumentFavorite.objects.filter(user=user)
+    assert user_favorites.count() == 3
+    assert user_favorites.filter(document=document1).exists()
+    assert user_favorites.filter(document=document2).exists()
+    assert user_favorites.filter(document=document3).exists()
+
+
+def test_models_users_handle_onboarding_documents_access_with_invalid_document_id():
+    """
+    When USER_ONBOARDING_DOCUMENTS has an invalid document ID,
+    it should be skipped and logged, but not raise an exception.
+    """
+    invalid_id = uuid.uuid4()
+
+    with override_settings(USER_ONBOARDING_DOCUMENTS=[str(invalid_id)]):
+        with patch("core.models.logger") as mock_logger:
+            user = factories.UserFactory()
+
+    mock_logger.warning.assert_called_once()
+    call_args = mock_logger.warning.call_args
+    assert "Onboarding document with id" in call_args[0][0]
+
+    assert models.LinkTrace.objects.filter(user=user).count() == 0
+
+
+def test_models_users_handle_onboarding_documents_access_duplicate_prevention():
+    """
+    If the same document is listed multiple times in USER_ONBOARDING_DOCUMENTS,
+    it should only create one access (or handle duplicates gracefully).
+    """
+    document = factories.DocumentFactory(link_reach=models.LinkReachChoices.PUBLIC)
+
+    with override_settings(
+        USER_ONBOARDING_DOCUMENTS=[str(document.id), str(document.id)]
+    ):
+        user = factories.UserFactory()
+
+    link_traces = models.LinkTrace.objects.filter(user=user, document=document)
+
+    assert link_traces.count() == 1
+
+
+def test_models_users_handle_onboarding_documents_on_restricted_document_is_not_allowed():
+    """An onboarding document with restricted link reach must not be used."""
+
+    document = factories.DocumentFactory(link_reach=models.LinkReachChoices.RESTRICTED)
+    with override_settings(USER_ONBOARDING_DOCUMENTS=[str(document.id)]):
+        user = factories.UserFactory()
+
+    assert not models.LinkTrace.objects.filter(user=user, document=document).exists()
+
+
+@override_settings(USER_ONBOARDING_SANDBOX_DOCUMENT=None)
+def test_models_users_duplicate_onboarding_sandbox_document_no_setting():
+    """
+    When USER_ONBOARDING_SANDBOX_DOCUMENT is not set, no sandbox document should be created.
+    """
+    user = factories.UserFactory()
+
+    assert (
+        models.Document.objects.filter(creator=user, title__icontains="Sandbox").count()
+        == 0
+    )
+
+    initial_accesses = models.DocumentAccess.objects.filter(user=user).count()
+    assert initial_accesses == 0
+
+
+def test_models_users_duplicate_onboarding_sandbox_document_creates_sandbox():
+    """
+    When USER_ONBOARDING_SANDBOX_DOCUMENT is set with a valid template document,
+    a new sandbox document should be created for the user with OWNER access.
+    """
+    documents_before = factories.DocumentFactory.create_batch(20)
+    template_document = factories.DocumentFactory(title="Getting started with Docs")
+    documents_after = factories.DocumentFactory.create_batch(20)
+
+    all_documents = documents_before + [template_document] + documents_after
+
+    paths = {document.pk: document.path for document in all_documents}
+
+    with override_settings(USER_ONBOARDING_SANDBOX_DOCUMENT=str(template_document.id)):
+        user = factories.UserFactory()
+
+    sandbox_docs = models.Document.objects.filter(
+        creator=user, title="Getting started with Docs"
+    )
+    assert sandbox_docs.count() == 1
+
+    sandbox_doc = sandbox_docs.first()
+    assert sandbox_doc.creator == user
+    assert sandbox_doc.duplicated_from == template_document
+
+    access = models.DocumentAccess.objects.get(user=user, document=sandbox_doc)
+    assert access.role == models.RoleChoices.OWNER
+
+    for document in all_documents:
+        document.refresh_from_db()
+        assert document.path == paths[document.id]
+
+
+def test_models_users_duplicate_onboarding_sandbox_document_with_invalid_template_id():
+    """
+    When USER_ONBOARDING_SANDBOX_DOCUMENT has an invalid document ID,
+    it should be skipped and logged, but not raise an exception.
+    """
+    invalid_id = uuid.uuid4()
+
+    with override_settings(USER_ONBOARDING_SANDBOX_DOCUMENT=str(invalid_id)):
+        with patch("core.models.logger") as mock_logger:
+            user = factories.UserFactory()
+
+    mock_logger.warning.assert_called_once()
+    call_args = mock_logger.warning.call_args
+    assert "Onboarding sandbox document with id" in call_args[0][0]
+
+    sandbox_docs = models.Document.objects.filter(creator=user)
+    assert sandbox_docs.count() == 0
+
+
+def test_models_users_duplicate_onboarding_sandbox_document_creates_unique_sandbox_per_user():
+    """
+    Each new user should get their own independent sandbox document.
+    """
+    template_document = factories.DocumentFactory(title="Getting started with Docs")
+
+    with override_settings(USER_ONBOARDING_SANDBOX_DOCUMENT=str(template_document.id)):
+        user1 = factories.UserFactory()
+        user2 = factories.UserFactory()
+
+    sandbox_docs_user1 = models.Document.objects.filter(
+        creator=user1, title="Getting started with Docs"
+    )
+    sandbox_docs_user2 = models.Document.objects.filter(
+        creator=user2, title="Getting started with Docs"
+    )
+
+    assert sandbox_docs_user1.count() == 1
+    assert sandbox_docs_user2.count() == 1
+
+    assert sandbox_docs_user1.first().id != sandbox_docs_user2.first().id
+
+
+def test_models_users_duplicate_onboarding_sandbox_document_integration_with_other_methods():
+    """
+    Verify that sandbox creation works alongside other onboarding methods.
+    """
+    template_document = factories.DocumentFactory(title="Getting started with Docs")
+    onboarding_doc = factories.DocumentFactory(
+        title="Onboarding Document", link_reach=models.LinkReachChoices.AUTHENTICATED
+    )
+
+    with override_settings(
+        USER_ONBOARDING_SANDBOX_DOCUMENT=str(template_document.id),
+        USER_ONBOARDING_DOCUMENTS=[str(onboarding_doc.id)],
+    ):
+        user = factories.UserFactory()
+
+    sandbox_doc = models.Document.objects.filter(
+        creator=user, title="Getting started with Docs"
+    ).first()
+
+    assert models.DocumentAccess.objects.filter(user=user).count() == 1
+    assert models.LinkTrace.objects.filter(user=user).count() == 1
+
+    assert models.DocumentAccess.objects.filter(
+        document=sandbox_doc, user=user, role=models.RoleChoices.OWNER
+    ).exists()
+    assert models.LinkTrace.objects.filter(document=onboarding_doc, user=user).exists()
+
+
+@pytest.mark.django_db(transaction=True)
+def test_models_users_duplicate_onboarding_sandbox_race_condition():
+    """
+    It should be possible to create several documents at the same time
+    without causing any race conditions or data integrity issues.
+    """
+
+    def create_user():
+        return factories.UserFactory()
+
+    template_document = factories.DocumentFactory(title="Getting started with Docs")
+    with (
+        override_settings(
+            USER_ONBOARDING_SANDBOX_DOCUMENT=str(template_document.id),
+        ),
+        ThreadPoolExecutor(max_workers=2) as executor,
+    ):
+        future1 = executor.submit(create_user)
+        future2 = executor.submit(create_user)
+
+        user1 = future1.result()
+        user2 = future2.result()
+
+    assert isinstance(user1, models.User)
+    assert isinstance(user2, models.User)
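Note: the onboarding tests above are driven entirely by Django settings. A minimal sketch of how a deployment might configure them (the setting names come from the tests; the UUID values below are purely illustrative, and the actual wiring lives in the project settings module):

    # settings.py -- illustrative values only
    USER_ONBOARDING_DOCUMENTS = [
        # documents pinned and traced for every new user; restricted documents are skipped
        "3fa85f64-5717-4562-b3fc-2c963f66afa6",
    ]
    # template document duplicated once per new user, who becomes its owner
    USER_ONBOARDING_SANDBOX_DOCUMENT = "7c9e6679-7425-40de-944b-e07fc1f90ae7"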
@@ -1,7 +1,9 @@
 """
-Test ai API endpoints in the impress core app.
+Test AI services in the impress core app.
 """
+# pylint: disable=protected-access

+from collections.abc import AsyncIterator
 from unittest.mock import MagicMock, patch

 from django.core.exceptions import ImproperlyConfigured
@@ -9,12 +11,33 @@ from django.test.utils import override_settings

 import pytest
 from openai import OpenAIError
+from pydantic_ai.ui.vercel_ai.request_types import TextUIPart, UIMessage

-from core.services.ai_services import AIService
+from core.services.ai_services import (
+    BLOCKNOTE_TOOL_STRICT_PROMPT,
+    AIService,
+    convert_async_generator_to_sync,
+)

 pytestmark = pytest.mark.django_db
+
+
+@pytest.fixture(autouse=True)
+def ai_settings(settings):
+    """Fixture to set AI settings."""
+    settings.AI_MODEL = "llama"
+    settings.AI_BASE_URL = "http://example.com"
+    settings.AI_API_KEY = "test-key"
+    settings.AI_FEATURE_ENABLED = True
+    settings.AI_FEATURE_BLOCKNOTE_ENABLED = True
+    settings.AI_FEATURE_LEGACY_ENABLED = True
+    settings.LANGFUSE_PUBLIC_KEY = None
+    settings.AI_VERCEL_SDK_VERSION = 6
+
+
+# -- AIService.__init__ --
+
+
 @pytest.mark.parametrize(
     "setting_name, setting_value",
     [
@@ -23,22 +46,25 @@ pytestmark = pytest.mark.django_db
         ("AI_MODEL", None),
     ],
 )
-def test_api_ai_setting_missing(setting_name, setting_value):
+def test_services_ai_setting_missing(setting_name, setting_value, settings):
     """Setting should be set"""
+    setattr(settings, setting_name, setting_value)

-    with override_settings(**{setting_name: setting_value}):
-        with pytest.raises(
-            ImproperlyConfigured,
-            match="AI configuration not set",
-        ):
-            AIService()
+    with pytest.raises(
+        ImproperlyConfigured,
+        match="AI configuration not set",
+    ):
+        AIService()


+# -- AIService.transform --
+
+
 @override_settings(
     AI_BASE_URL="http://example.com", AI_API_KEY="test-key", AI_MODEL="test-model"
 )
 @patch("openai.resources.chat.completions.Completions.create")
-def test_api_ai__client_error(mock_create):
+def test_services_ai_client_error(mock_create):
     """Fail when the client raises an error"""

     mock_create.side_effect = OpenAIError("Mocked client error")
@@ -54,7 +80,7 @@ def test_api_ai__client_error(mock_create):
     AI_BASE_URL="http://example.com", AI_API_KEY="test-key", AI_MODEL="test-model"
 )
 @patch("openai.resources.chat.completions.Completions.create")
-def test_api_ai__client_invalid_response(mock_create):
+def test_services_ai_client_invalid_response(mock_create):
     """Fail when the client response is invalid"""

     mock_create.return_value = MagicMock(
@@ -72,7 +98,7 @@ def test_api_ai__client_invalid_response(mock_create):
     AI_BASE_URL="http://example.com", AI_API_KEY="test-key", AI_MODEL="test-model"
 )
 @patch("openai.resources.chat.completions.Completions.create")
-def test_api_ai__success(mock_create):
+def test_services_ai_success(mock_create):
     """The AI request should work as expected when called with valid arguments."""

     mock_create.return_value = MagicMock(
@@ -82,3 +108,483 @@ def test_api_ai__success(mock_create):
     response = AIService().transform("hello", "prompt")

     assert response == {"answer": "Salut"}
+
+
+# -- AIService.translate --
+
+
+@patch("openai.resources.chat.completions.Completions.create")
+def test_services_ai_translate_success(mock_create):
+    """Translate should call the AI API with the correct language prompt."""
+
+    mock_create.return_value = MagicMock(
+        choices=[MagicMock(message=MagicMock(content="Bonjour"))]
+    )
+
+    response = AIService().translate("<p>Hello</p>", "fr")
+
+    assert response == {"answer": "Bonjour"}
+    call_args = mock_create.call_args
+    system_content = call_args[1]["messages"][0]["content"]
+    assert "French" in system_content or "fr" in system_content
+
+
+@patch("openai.resources.chat.completions.Completions.create")
+def test_services_ai_translate_unknown_language(mock_create):
+    """Translate with an unknown language code should use the code as-is."""
+
+    mock_create.return_value = MagicMock(
+        choices=[MagicMock(message=MagicMock(content="Translated"))]
+    )
+
+    response = AIService().translate("<p>Hello</p>", "xx-unknown")
+
+    assert response == {"answer": "Translated"}
+    call_args = mock_create.call_args
+    system_content = call_args[1]["messages"][0]["content"]
+    assert "xx-unknown" in system_content
+
+
+# -- convert_async_generator_to_sync --
+
+
+def test_convert_async_generator_to_sync_basic():
+    """Should convert an async generator yielding items to a sync iterator."""
+
+    async def async_gen():
+        for item in ["hello", "world", "!"]:
+            yield item
+
+    result = list(convert_async_generator_to_sync(async_gen()))
+    assert result == ["hello", "world", "!"]
+
+
+def test_convert_async_generator_to_sync_empty():
+    """Should handle an empty async generator."""
+
+    async def async_gen():
+        # a bare "return" before "yield" makes this an async generator that yields nothing
+        return
+        yield
+
+    result = list(convert_async_generator_to_sync(async_gen()))
+    assert not result
+
+
+def test_convert_async_generator_to_sync_exception():
+    """Should propagate exceptions from the async generator."""
+
+    async def async_gen():
+        yield "first"
+        raise ValueError("async error")
+
+    sync_iter = convert_async_generator_to_sync(async_gen())
+    assert next(sync_iter) == "first"
+
+    with pytest.raises(ValueError, match="async error"):
+        next(sync_iter)
+
+
+# -- AIService.inject_document_state_messages --
+
+
+def test_inject_document_state_messages_no_metadata():
+    """Messages without documentState metadata should pass through unchanged."""
+    messages = [
+        UIMessage(role="user", id="msg-1", parts=[TextUIPart(text="Hello")]),
+    ]
+
+    result = AIService.inject_document_state_messages(messages)
+
+    assert len(result) == 1
+    assert result[0].id == "msg-1"
+
+
+def test_inject_document_state_messages_with_selection():
+    """A user message with documentState and selection should get an
+    assistant context message prepended."""
+    messages = [
+        UIMessage(
+            role="user",
+            id="msg-1",
+            parts=[TextUIPart(text="Fix this")],
+            metadata={
+                "documentState": {
+                    "selection": {"start": 0, "end": 5},
+                    "selectedBlocks": [{"type": "paragraph", "content": "Hello"}],
+                    "blocks": [
+                        {"type": "paragraph", "content": "Hello"},
+                        {"type": "paragraph", "content": "World"},
+                    ],
+                }
+            },
+        ),
+    ]
+
+    result = AIService.inject_document_state_messages(messages)
+
+    assert len(result) == 2
+    # First message should be the injected assistant context
+    assert result[0].role == "assistant"
+    assert result[0].id == "assistant-document-state-msg-1"
+    assert len(result[0].parts) == 4
+    assert "selection" in result[0].parts[0].text.lower()
+    # Second message should be the original user message
+    assert result[1].id == "msg-1"
+
+
+def test_inject_document_state_messages_without_selection():
+    """A user message with documentState but no selection should describe
+    the full document context."""
+    messages = [
+        UIMessage(
+            role="user",
+            id="msg-1",
+            parts=[TextUIPart(text="Summarize")],
+            metadata={
+                "documentState": {
+                    "selection": None,
+                    "blocks": [
+                        {"type": "paragraph", "content": "Hello"},
+                    ],
+                    "isEmptyDocument": False,
+                }
+            },
+        ),
+    ]
+
+    result = AIService.inject_document_state_messages(messages)
+
+    assert len(result) == 2
+    assistant_msg = result[0]
+    assert assistant_msg.role == "assistant"
+    assert len(assistant_msg.parts) == 2
+    assert "no active selection" in assistant_msg.parts[0].text.lower()
+    assert "prefer updating" in assistant_msg.parts[0].text.lower()
+
+
+def test_inject_document_state_messages_empty_document():
+    """When the document is empty, the injected message should instruct
+    updating the empty block first."""
+    messages = [
+        UIMessage(
+            role="user",
+            id="msg-1",
+            parts=[TextUIPart(text="Write something")],
+            metadata={
+                "documentState": {
+                    "selection": None,
+                    "blocks": [{"type": "paragraph", "content": ""}],
+                    "isEmptyDocument": True,
+                }
+            },
+        ),
+    ]
+
+    result = AIService.inject_document_state_messages(messages)
+
+    assert len(result) == 2
+    assistant_msg = result[0]
+    assert "update the empty block" in assistant_msg.parts[0].text.lower()
+
+
+def test_inject_document_state_messages_mixed():
+    """Only user messages with documentState get assistant context;
+    other messages pass through unchanged."""
+    messages = [
+        UIMessage(
+            role="assistant",
+            id="msg-0",
+            parts=[TextUIPart(text="Previous response")],
+        ),
+        UIMessage(
+            role="user",
+            id="msg-1",
+            parts=[TextUIPart(text="Hello")],
+        ),
+        UIMessage(
+            role="user",
+            id="msg-2",
+            parts=[TextUIPart(text="Fix this")],
+            metadata={
+                "documentState": {
+                    "selection": {"start": 0, "end": 5},
+                    "selectedBlocks": [{"type": "paragraph", "content": "Hello"}],
+                    "blocks": [{"type": "paragraph", "content": "Hello"}],
+                }
+            },
+        ),
+    ]
+
+    result = AIService.inject_document_state_messages(messages)
+
+    # 3 original + 1 injected assistant message before msg-2
+    assert len(result) == 4
+    assert result[0].id == "msg-0"
+    assert result[1].id == "msg-1"
+    assert result[2].role == "assistant"
+    assert result[2].id == "assistant-document-state-msg-2"
+    assert result[3].id == "msg-2"
+
+
+# -- AIService.tool_definitions_to_toolset --
+
+
+def test_tool_definitions_to_toolset():
+    """Should convert frontend tool definitions to an ExternalToolset."""
+    tool_definitions = {
+        "applyOperations": {
+            "description": "Apply operations to the document",
+            "inputSchema": {
+                "type": "object",
+                "properties": {
+                    "operations": {"type": "array"},
+                },
+            },
+            "outputSchema": {"type": "object"},
+        },
+        "insertBlocks": {
+            "description": "Insert blocks",
+            "inputSchema": {"type": "object"},
+        },
+    }
+
+    toolset = AIService.tool_definitions_to_toolset(tool_definitions)
+
+    # The ExternalToolset wraps ToolDefinition objects
+    assert toolset is not None
+    # Access internal tool definitions
+    tool_defs = toolset.tool_defs
+    assert len(tool_defs) == 2
+
+    names = {td.name for td in tool_defs}
+    assert names == {"applyOperations", "insertBlocks"}
+
+    for td in tool_defs:
+        assert td.kind == "external"
+        if td.name == "applyOperations":
+            assert td.description == "Apply operations to the document"
+            assert td.metadata == {"output_schema": {"type": "object"}}
+
+
+def test_tool_definitions_to_toolset_missing_fields():
+    """Should handle tool definitions with missing optional fields."""
+    tool_definitions = {
+        "myTool": {},
+    }
+
+    toolset = AIService.tool_definitions_to_toolset(tool_definitions)
+
+    tool_defs = toolset.tool_defs
+    assert len(tool_defs) == 1
+    assert tool_defs[0].name == "myTool"
+    assert tool_defs[0].description == ""
+    assert tool_defs[0].parameters_json_schema == {}
+    assert tool_defs[0].metadata == {"output_schema": None}
+
+
+# -- AIService.stream --
+
+
+@patch.object(AIService, "_build_async_stream")
+def test_services_ai_stream_sync_mode(mock_build, monkeypatch):
+    """In sync mode, stream() should return a sync iterator."""
+
+    async def mock_async_gen():
+        yield "chunk1"
+        yield "chunk2"
+
+    mock_build.return_value = mock_async_gen()
+    monkeypatch.setenv("PYTHON_SERVER_MODE", "sync")
+
+    service = AIService()
+    request = MagicMock()
+    result = service.stream(request)
+
+    # Should be a regular (sync) iterator, not async
+    assert not isinstance(result, AsyncIterator)
+    assert list(result) == ["chunk1", "chunk2"]
+    mock_build.assert_called_once_with(request)
+
+
+@patch.object(AIService, "_build_async_stream")
+def test_services_ai_stream_async_mode(mock_build, monkeypatch):
+    """In async mode, stream() should return the async iterator directly."""
+
+    async def mock_async_gen():
+        yield "chunk1"
+        yield "chunk2"
+
+    mock_async_iter = mock_async_gen()
+    mock_build.return_value = mock_async_iter
+    monkeypatch.setenv("PYTHON_SERVER_MODE", "async")
+
+    service = AIService()
+    request = MagicMock()
+    result = service.stream(request)
+
+    assert result is mock_async_iter
+    mock_build.assert_called_once_with(request)
+
+
+@patch.object(AIService, "_build_async_stream")
+def test_services_ai_stream_defaults_to_sync(mock_build, monkeypatch):
+    """When PYTHON_SERVER_MODE is not set, stream() should default to sync."""
+
+    async def mock_async_gen():
+        yield "data"
+
+    mock_build.return_value = mock_async_gen()
+    monkeypatch.delenv("PYTHON_SERVER_MODE", raising=False)
+
+    service = AIService()
+    request = MagicMock()
+    result = service.stream(request)
+
+    # Default should be sync mode
+    assert not isinstance(result, AsyncIterator)
+    assert list(result) == ["data"]
+
+
+# -- AIService._build_async_stream --
+
+
+@patch("core.services.ai_services.VercelAIAdapter")
+def test_services_ai_build_async_stream(mock_adapter_cls):
+    """_build_async_stream should build the pydantic-ai streaming pipeline."""
+
+    async def mock_encode():
+        yield "event-data"
+
+    mock_run_input = MagicMock()
+    mock_run_input.model_extra = None
+    mock_run_input.messages = []
+    mock_adapter_cls.build_run_input.return_value = mock_run_input
+
+    mock_adapter_instance = MagicMock()
+    mock_adapter_instance.run_stream.return_value = MagicMock()
+    mock_adapter_instance.encode_stream.return_value = mock_encode()
+    mock_adapter_cls.return_value = mock_adapter_instance
+
+    service = AIService()
+    request = MagicMock()
+    request.META = {"HTTP_ACCEPT": "text/event-stream"}
+    request.raw_body = b'{"messages": []}'
+
+    result = service._build_async_stream(request)
+    assert isinstance(result, AsyncIterator)
+    mock_adapter_cls.build_run_input.assert_called_once_with(b'{"messages": []}')
+    mock_adapter_instance.run_stream.assert_called_once()
+    mock_adapter_instance.encode_stream.assert_called_once()
+
+
+@patch("core.services.ai_services.VercelAIAdapter")
+def test_services_ai_build_async_stream_with_tool_definitions(mock_adapter_cls):
+    """_build_async_stream should build an ExternalToolset when
+    toolDefinitions are present in the request."""
+
+    async def mock_encode():
+        yield "event-data"
+
+    mock_run_input = MagicMock()
+    mock_run_input.model_extra = {
+        "toolDefinitions": {
+            "myTool": {
+                "description": "A tool",
+                "inputSchema": {"type": "object"},
+            }
+        }
+    }
+    mock_run_input.messages = []
+    mock_adapter_cls.build_run_input.return_value = mock_run_input
+
+    mock_adapter_instance = MagicMock()
+    mock_adapter_instance.run_stream.return_value = MagicMock()
+    mock_adapter_instance.encode_stream.return_value = mock_encode()
+    mock_adapter_cls.return_value = mock_adapter_instance
+
+    service = AIService()
+    request = MagicMock()
+    request.META = {}
+    request.raw_body = b"{}"
+
+    service._build_async_stream(request)
+    # run_stream should have been called with a toolset
+    call_kwargs = mock_adapter_instance.run_stream.call_args[1]
+    assert call_kwargs["toolsets"] is not None
+    assert len(call_kwargs["toolsets"]) == 1
+
+
+@patch("core.services.ai_services.VercelAIAdapter")
+def test_services_ai_build_async_stream_with_tool_definitions_required_system_prompt(
+    mock_adapter_cls,
+):
+    """The presence of the applyDocumentOperations tool must force the addition
+    of a system prompt"""
+
+    async def mock_encode():
+        yield "event-data"
+
+    mock_run_input = MagicMock()
+    mock_run_input.model_extra = {
+        "toolDefinitions": {
+            "applyDocumentOperations": {
+                "description": "A tool",
+                "inputSchema": {"type": "object"},
+            }
+        }
+    }
+    mock_run_input.messages = []
+    mock_adapter_cls.build_run_input.return_value = mock_run_input
+
+    mock_adapter_instance = MagicMock()
+    mock_adapter_instance.run_stream.return_value = MagicMock()
+    mock_adapter_instance.encode_stream.return_value = mock_encode()
+    mock_adapter_cls.return_value = mock_adapter_instance
+
+    service = AIService()
+    request = MagicMock()
+    request.META = {}
+    request.raw_body = b"{}"
+
+    service._build_async_stream(request)
+    # run_stream should have been called with a toolset
+    call_kwargs = mock_adapter_instance.run_stream.call_args[1]
+    assert call_kwargs["toolsets"] is not None
+    assert len(call_kwargs["toolsets"]) == 1
+    assert len(mock_run_input.messages) == 1
+    assert mock_run_input.messages[0].id == "system-force-tool-usage"
+    assert mock_run_input.messages[0].role == "system"
+    assert mock_run_input.messages[0].parts[0].text == BLOCKNOTE_TOOL_STRICT_PROMPT
+
+
+@patch("core.services.ai_services.Agent")
+@patch("core.services.ai_services.VercelAIAdapter")
+def test_services_ai_build_async_stream_langfuse_enabled(
+    mock_adapter_cls, mock_agent_cls, settings
+):
+    """When LANGFUSE_PUBLIC_KEY is set, instrument should be enabled."""
+    settings.LANGFUSE_PUBLIC_KEY = "pk-test-123"
+
+    async def mock_encode():
+        yield "data"
+
+    mock_run_input = MagicMock()
+    mock_run_input.model_extra = None
+    mock_run_input.messages = []
+    mock_adapter_cls.build_run_input.return_value = mock_run_input
+
+    mock_adapter_instance = MagicMock()
+    mock_adapter_instance.run_stream.return_value = MagicMock()
+    mock_adapter_instance.encode_stream.return_value = mock_encode()
+    mock_adapter_cls.return_value = mock_adapter_instance
+
+    service = AIService()
+    request = MagicMock()
+    request.META = {}
+    request.raw_body = b"{}"
+
+    service._build_async_stream(request)
+    mock_agent_cls.instrument_all.assert_called_once()
+    # Agent should be created with instrument=True
+    mock_agent_cls.assert_called_once()
+    assert mock_agent_cls.call_args[1]["instrument"] is True
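Note: the stream tests above rely on two behaviours that are only implied by the assertions: an async generator can be drained from synchronous (WSGI) code, and stream() picks sync or async delivery from the PYTHON_SERVER_MODE environment variable. A minimal sketch of both, assuming a private asyncio event loop; the actual implementation in core.services.ai_services may differ:

    import asyncio
    import os

    def convert_async_generator_to_sync(async_iterator):
        """Yield items from an async generator to synchronous callers."""
        loop = asyncio.new_event_loop()
        try:
            while True:
                try:
                    # drive the loop one step per item; exceptions propagate to the caller
                    yield loop.run_until_complete(async_iterator.__anext__())
                except StopAsyncIteration:
                    break
        finally:
            loop.close()

    def stream(self, request):
        """Return an async iterator under ASGI, a plain iterator under WSGI."""
        async_stream = self._build_async_stream(request)
        if os.environ.get("PYTHON_SERVER_MODE", "sync") == "async":
            return async_stream
        return convert_async_generator_to_sync(async_stream)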
@@ -1,5 +1,5 @@
 """
-Unit tests for the Document model
+Unit tests for FindDocumentIndexer
 """
 # pylint: disable=too-many-lines

@@ -12,7 +12,8 @@ from django.db import transaction
 import pytest

 from core import factories, models
-from core.services.search_indexers import SearchIndexer
+from core.enums import SearchType
+from core.services.search_indexers import FindDocumentIndexer

 pytestmark = pytest.mark.django_db

@@ -30,7 +31,7 @@ def reset_throttle():
     reset_batch_indexer_throttle()


-@mock.patch.object(SearchIndexer, "push")
+@mock.patch.object(FindDocumentIndexer, "push")
 @pytest.mark.usefixtures("indexer_settings")
 @pytest.mark.django_db(transaction=True)
 def test_models_documents_post_save_indexer(mock_push):
@@ -41,7 +42,7 @@ def test_models_documents_post_save_indexer(mock_push):
     accesses = {}
     data = [call.args[0] for call in mock_push.call_args_list]

-    indexer = SearchIndexer()
+    indexer = FindDocumentIndexer()

     assert len(data) == 1

@@ -64,14 +65,14 @@ def test_models_documents_post_save_indexer_no_batches(indexer_settings):
     """Test indexation task on document creation, no throttle"""
     indexer_settings.SEARCH_INDEXER_COUNTDOWN = 0

-    with mock.patch.object(SearchIndexer, "push") as mock_push:
+    with mock.patch.object(FindDocumentIndexer, "push") as mock_push:
         with transaction.atomic():
             doc1, doc2, doc3 = factories.DocumentFactory.create_batch(3)

     accesses = {}
     data = [call.args[0] for call in mock_push.call_args_list]

-    indexer = SearchIndexer()
+    indexer = FindDocumentIndexer()

     # 3 calls
     assert len(data) == 3
@@ -91,7 +92,7 @@ def test_models_documents_post_save_indexer_no_batches(indexer_settings):
     assert cache.get("file-batch-indexer-throttle") is None


-@mock.patch.object(SearchIndexer, "push")
+@mock.patch.object(FindDocumentIndexer, "push")
 @pytest.mark.django_db(transaction=True)
 def test_models_documents_post_save_indexer_not_configured(mock_push, indexer_settings):
     """Task should not start an indexation when disabled"""
@@ -106,13 +107,13 @@ def test_models_documents_post_save_indexer_not_configured(mock_push, indexer_se
     mock_push.assert_not_called()


-@mock.patch.object(SearchIndexer, "push")
+@mock.patch.object(FindDocumentIndexer, "push")
 @pytest.mark.django_db(transaction=True)
 def test_models_documents_post_save_indexer_wrongly_configured(
     mock_push, indexer_settings
 ):
     """Task should not start an indexation when disabled"""
-    indexer_settings.SEARCH_INDEXER_URL = None
+    indexer_settings.INDEXING_URL = None

     user = factories.UserFactory()

@@ -123,7 +124,7 @@ def test_models_documents_post_save_indexer_wrongly_configured(
     mock_push.assert_not_called()


-@mock.patch.object(SearchIndexer, "push")
+@mock.patch.object(FindDocumentIndexer, "push")
 @pytest.mark.usefixtures("indexer_settings")
 @pytest.mark.django_db(transaction=True)
 def test_models_documents_post_save_indexer_with_accesses(mock_push):
@@ -145,7 +146,7 @@ def test_models_documents_post_save_indexer_with_accesses(mock_push):

     data = [call.args[0] for call in mock_push.call_args_list]

-    indexer = SearchIndexer()
+    indexer = FindDocumentIndexer()

     assert len(data) == 1
     assert sorted(data[0], key=itemgetter("id")) == sorted(
@@ -158,7 +159,7 @@ def test_models_documents_post_save_indexer_with_accesses(mock_push):
     )


-@mock.patch.object(SearchIndexer, "push")
+@mock.patch.object(FindDocumentIndexer, "push")
 @pytest.mark.usefixtures("indexer_settings")
 @pytest.mark.django_db(transaction=True)
 def test_models_documents_post_save_indexer_deleted(mock_push):
@@ -207,7 +208,7 @@ def test_models_documents_post_save_indexer_deleted(mock_push):

     data = [call.args[0] for call in mock_push.call_args_list]

-    indexer = SearchIndexer()
+    indexer = FindDocumentIndexer()

     assert len(data) == 2

@@ -244,14 +245,14 @@ def test_models_documents_indexer_hard_deleted():
     factories.UserDocumentAccessFactory(document=doc, user=user)

     # Call task on deleted document.
-    with mock.patch.object(SearchIndexer, "push") as mock_push:
+    with mock.patch.object(FindDocumentIndexer, "push") as mock_push:
         doc.delete()

     # Hard deleted documents are not re-indexed.
     mock_push.assert_not_called()


-@mock.patch.object(SearchIndexer, "push")
+@mock.patch.object(FindDocumentIndexer, "push")
 @pytest.mark.usefixtures("indexer_settings")
 @pytest.mark.django_db(transaction=True)
 def test_models_documents_post_save_indexer_restored(mock_push):
@@ -308,7 +309,7 @@ def test_models_documents_post_save_indexer_restored(mock_push):

     data = [call.args[0] for call in mock_push.call_args_list]

-    indexer = SearchIndexer()
+    indexer = FindDocumentIndexer()

     # All docs are re-indexed
     assert len(data) == 2
@@ -337,16 +338,16 @@ def test_models_documents_post_save_indexer_restored(mock_push):
 @pytest.mark.usefixtures("indexer_settings")
 def test_models_documents_post_save_indexer_throttle():
     """Test indexation task skipping on document update"""
-    indexer = SearchIndexer()
+    indexer = FindDocumentIndexer()
     user = factories.UserFactory()

-    with mock.patch.object(SearchIndexer, "push"):
+    with mock.patch.object(FindDocumentIndexer, "push"):
         with transaction.atomic():
             docs = factories.DocumentFactory.create_batch(5, users=(user,))

     accesses = {str(item.path): {"users": [user.sub]} for item in docs}

-    with mock.patch.object(SearchIndexer, "push") as mock_push:
+    with mock.patch.object(FindDocumentIndexer, "push") as mock_push:
         # Simulate 1 running task
         cache.set("document-batch-indexer-throttle", 1)

@@ -359,7 +360,7 @@ def test_models_documents_post_save_indexer_throttle():

     assert [call.args[0] for call in mock_push.call_args_list] == []

-    with mock.patch.object(SearchIndexer, "push") as mock_push:
+    with mock.patch.object(FindDocumentIndexer, "push") as mock_push:
         # No waiting task
         cache.delete("document-batch-indexer-throttle")

@@ -389,7 +390,7 @@ def test_models_documents_access_post_save_indexer():
     """Test indexation task on DocumentAccess update"""
     users = factories.UserFactory.create_batch(3)

-    with mock.patch.object(SearchIndexer, "push"):
+    with mock.patch.object(FindDocumentIndexer, "push"):
         with transaction.atomic():
             doc = factories.DocumentFactory(users=users)
         doc_accesses = models.DocumentAccess.objects.filter(document=doc).order_by(
@@ -398,7 +399,7 @@ def test_models_documents_access_post_save_indexer():

     reset_batch_indexer_throttle()

-    with mock.patch.object(SearchIndexer, "push") as mock_push:
+    with mock.patch.object(FindDocumentIndexer, "push") as mock_push:
         with transaction.atomic():
             for doc_access in doc_accesses:
                 doc_access.save()
@@ -426,7 +427,7 @@ def test_models_items_access_post_save_indexer_no_throttle(indexer_settings):

     reset_batch_indexer_throttle()

-    with mock.patch.object(SearchIndexer, "push") as mock_push:
+    with mock.patch.object(FindDocumentIndexer, "push") as mock_push:
         with transaction.atomic():
             for doc_access in doc_accesses:
                 doc_access.save()
@@ -439,3 +440,77 @@ def test_models_items_access_post_save_indexer_no_throttle(indexer_settings):
     assert [len(d) for d in data] == [1] * 3
     # the same document is indexed 3 times
     assert [d[0]["id"] for d in data] == [str(doc.pk)] * 3
+
+
+@mock.patch.object(FindDocumentIndexer, "search_query")
+@pytest.mark.usefixtures("indexer_settings")
+def test_find_document_indexer_search(mock_search_query):
+    """Test search function of FindDocumentIndexer returns formatted results"""
+
+    # Mock API response from Find
+    hits = [
+        {
+            "_id": "doc-123",
+            "_source": {
+                "title": "Test Document",
+                "content": "This is test content",
+                "updated_at": "2024-01-01T00:00:00Z",
+                "path": "/some/path/doc-123",
+            },
+        },
+        {
+            "_id": "doc-456",
+            "_source": {
+                "title.fr": "Document de test",
+                "content": "Contenu de test",
+                "updated_at": "2024-01-02T00:00:00Z",
+            },
+        },
+    ]
+    mock_search_query.return_value = hits
+
+    q = "test"
+    token = "fake-token"
+    nb_results = 10
+    path = "/some/path/"
+    visited = ["doc-123"]
+    search_type = SearchType.HYBRID
+    results = FindDocumentIndexer().search(
+        q=q,
+        token=token,
+        nb_results=nb_results,
+        path=path,
+        visited=visited,
+        search_type=search_type,
+    )
+
+    mock_search_query.assert_called_once()
+    call_args = mock_search_query.call_args
+    assert call_args[1]["data"] == {
+        "q": q,
+        "visited": visited,
+        "services": ["docs"],
+        "nb_results": nb_results,
+        "order_by": "updated_at",
+        "order_direction": "desc",
+        "path": path,
+        "search_type": search_type,
+    }
+
+    assert len(results) == 2
+    assert results == [
+        {
+            "id": hits[0]["_id"],
+            "title": hits[0]["_source"]["title"],
+            "content": hits[0]["_source"]["content"],
+            "updated_at": hits[0]["_source"]["updated_at"],
+            "path": hits[0]["_source"]["path"],
+        },
+        {
+            "id": hits[1]["_id"],
+            "title": hits[1]["_source"]["title.fr"],
+            "title.fr": hits[1]["_source"]["title.fr"],  # <- Find response artefact
+            "content": hits[1]["_source"]["content"],
+            "updated_at": hits[1]["_source"]["updated_at"],
+        },
+    ]
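Note: the search test above pins down a thin mapping from Find's Elasticsearch-style hits to flat result dicts, including the "title.fr" artefact. A minimal sketch of that mapping, assuming the hit shape used in the test (the real FindDocumentIndexer.search may differ):

    def format_hits(hits):
        """Flatten Find hits, exposing "_id" as "id" and localized titles as "title"."""
        results = []
        for hit in hits:
            result = {"id": hit["_id"], **hit["_source"]}
            if "title" not in result:
                # localized titles come back as e.g. "title.fr"; surface one as "title"
                localized = sorted(key for key in result if key.startswith("title."))
                if localized:
                    result["title"] = result[localized[0]]
            results.append(result)
        return results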
@@ -15,7 +15,7 @@ from requests import HTTPError
|
|||||||
from core import factories, models, utils
|
from core import factories, models, utils
|
||||||
from core.services.search_indexers import (
|
from core.services.search_indexers import (
|
||||||
BaseDocumentIndexer,
|
BaseDocumentIndexer,
|
||||||
SearchIndexer,
|
FindDocumentIndexer,
|
||||||
get_document_indexer,
|
get_document_indexer,
|
||||||
get_visited_document_ids_of,
|
get_visited_document_ids_of,
|
||||||
)
|
)
|
||||||
@@ -78,41 +78,41 @@ def test_services_search_indexer_is_configured(indexer_settings):
|
|||||||
|
|
||||||
# Valid class
|
# Valid class
|
||||||
indexer_settings.SEARCH_INDEXER_CLASS = (
|
indexer_settings.SEARCH_INDEXER_CLASS = (
|
||||||
"core.services.search_indexers.SearchIndexer"
|
"core.services.search_indexers.FindDocumentIndexer"
|
||||||
)
|
)
|
||||||
|
|
||||||
get_document_indexer.cache_clear()
|
get_document_indexer.cache_clear()
|
||||||
assert get_document_indexer() is not None
|
assert get_document_indexer() is not None
|
||||||
|
|
||||||
indexer_settings.SEARCH_INDEXER_URL = ""
|
indexer_settings.INDEXING_URL = ""
|
||||||
|
|
||||||
# Invalid url
|
# Invalid url
|
||||||
get_document_indexer.cache_clear()
|
get_document_indexer.cache_clear()
|
||||||
assert not get_document_indexer()
|
assert not get_document_indexer()
|
||||||
|
|
||||||
|
|
||||||
def test_services_search_indexer_url_is_none(indexer_settings):
|
def test_services_indexing_url_is_none(indexer_settings):
|
||||||
"""
|
"""
|
||||||
Indexer should raise RuntimeError if SEARCH_INDEXER_URL is None or empty.
|
Indexer should raise RuntimeError if INDEXING_URL is None or empty.
|
||||||
"""
|
"""
|
||||||
indexer_settings.SEARCH_INDEXER_URL = None
|
indexer_settings.INDEXING_URL = None
|
||||||
|
|
||||||
with pytest.raises(ImproperlyConfigured) as exc_info:
|
with pytest.raises(ImproperlyConfigured) as exc_info:
|
||||||
SearchIndexer()
|
FindDocumentIndexer()
|
||||||
|
|
||||||
assert "SEARCH_INDEXER_URL must be set in Django settings." in str(exc_info.value)
|
assert "INDEXING_URL must be set in Django settings." in str(exc_info.value)
|
||||||
|
|
||||||
|
|
||||||
def test_services_search_indexer_url_is_empty(indexer_settings):
|
def test_services_indexing_url_is_empty(indexer_settings):
|
||||||
"""
|
"""
|
||||||
Indexer should raise RuntimeError if SEARCH_INDEXER_URL is empty string.
|
Indexer should raise RuntimeError if INDEXING_URL is empty string.
|
||||||
"""
|
"""
|
||||||
indexer_settings.SEARCH_INDEXER_URL = ""
|
indexer_settings.INDEXING_URL = ""
|
||||||
|
|
||||||
with pytest.raises(ImproperlyConfigured) as exc_info:
|
with pytest.raises(ImproperlyConfigured) as exc_info:
|
||||||
SearchIndexer()
|
FindDocumentIndexer()
|
||||||
|
|
||||||
assert "SEARCH_INDEXER_URL must be set in Django settings." in str(exc_info.value)
|
assert "INDEXING_URL must be set in Django settings." in str(exc_info.value)
|
||||||
|
|
||||||
|
|
||||||
def test_services_search_indexer_secret_is_none(indexer_settings):
|
def test_services_search_indexer_secret_is_none(indexer_settings):
|
||||||
@@ -122,7 +122,7 @@ def test_services_search_indexer_secret_is_none(indexer_settings):
|
|||||||
indexer_settings.SEARCH_INDEXER_SECRET = None
|
indexer_settings.SEARCH_INDEXER_SECRET = None
|
||||||
|
|
||||||
with pytest.raises(ImproperlyConfigured) as exc_info:
|
with pytest.raises(ImproperlyConfigured) as exc_info:
|
||||||
SearchIndexer()
|
FindDocumentIndexer()
|
||||||
|
|
||||||
assert "SEARCH_INDEXER_SECRET must be set in Django settings." in str(
|
assert "SEARCH_INDEXER_SECRET must be set in Django settings." in str(
|
||||||
exc_info.value
|
exc_info.value
|
||||||
@@ -136,39 +136,35 @@ def test_services_search_indexer_secret_is_empty(indexer_settings):
|
|||||||
indexer_settings.SEARCH_INDEXER_SECRET = ""
|
indexer_settings.SEARCH_INDEXER_SECRET = ""
|
||||||
|
|
||||||
with pytest.raises(ImproperlyConfigured) as exc_info:
|
with pytest.raises(ImproperlyConfigured) as exc_info:
|
||||||
SearchIndexer()
|
FindDocumentIndexer()
|
||||||
|
|
||||||
assert "SEARCH_INDEXER_SECRET must be set in Django settings." in str(
|
assert "SEARCH_INDEXER_SECRET must be set in Django settings." in str(
|
||||||
exc_info.value
|
exc_info.value
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_services_search_endpoint_is_none(indexer_settings):
|
def test_services_search_url_is_none(indexer_settings):
|
||||||
"""
|
"""
|
||||||
Indexer should raise RuntimeError if SEARCH_INDEXER_QUERY_URL is None.
|
Indexer should raise RuntimeError if SEARCH_URL is None.
|
||||||
"""
|
"""
|
||||||
indexer_settings.SEARCH_INDEXER_QUERY_URL = None
|
indexer_settings.SEARCH_URL = None
|
||||||
|
|
||||||
with pytest.raises(ImproperlyConfigured) as exc_info:
|
with pytest.raises(ImproperlyConfigured) as exc_info:
|
||||||
SearchIndexer()
|
FindDocumentIndexer()
|
||||||
|
|
||||||
assert "SEARCH_INDEXER_QUERY_URL must be set in Django settings." in str(
|
assert "SEARCH_URL must be set in Django settings." in str(exc_info.value)
|
||||||
exc_info.value
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_services_search_endpoint_is_empty(indexer_settings):
|
def test_services_search_url_is_empty(indexer_settings):
|
||||||
"""
|
"""
|
||||||
Indexer should raise RuntimeError if SEARCH_INDEXER_QUERY_URL is empty.
|
Indexer should raise RuntimeError if SEARCH_URL is empty.
|
||||||
"""
|
"""
|
||||||
indexer_settings.SEARCH_INDEXER_QUERY_URL = ""
|
indexer_settings.SEARCH_URL = ""
|
||||||
|
|
||||||
with pytest.raises(ImproperlyConfigured) as exc_info:
|
with pytest.raises(ImproperlyConfigured) as exc_info:
|
||||||
SearchIndexer()
|
FindDocumentIndexer()
|
||||||
|
|
||||||
assert "SEARCH_INDEXER_QUERY_URL must be set in Django settings." in str(
|
assert "SEARCH_URL must be set in Django settings." in str(exc_info.value)
|
||||||
exc_info.value
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.usefixtures("indexer_settings")
|
@pytest.mark.usefixtures("indexer_settings")
|
||||||
@@ -192,7 +188,7 @@ def test_services_search_indexers_serialize_document_returns_expected_json():
         }
     }

-    indexer = SearchIndexer()
+    indexer = FindDocumentIndexer()
     result = indexer.serialize_document(document, accesses)

     assert set(result.pop("users")) == {str(user_a.sub), str(user_b.sub)}
@@ -221,7 +217,7 @@ def test_services_search_indexers_serialize_document_deleted():
     parent.soft_delete()
     document.refresh_from_db()

-    indexer = SearchIndexer()
+    indexer = FindDocumentIndexer()
     result = indexer.serialize_document(document, {})

     assert result["is_active"] is False
@@ -232,7 +228,7 @@ def test_services_search_indexers_serialize_document_empty():
     """Empty documents returns empty content in the serialized json."""
     document = factories.DocumentFactory(content="", title=None)

-    indexer = SearchIndexer()
+    indexer = FindDocumentIndexer()
     result = indexer.serialize_document(document, {})

     assert result["content"] == ""
@@ -246,7 +242,7 @@ def test_services_search_indexers_index_errors(indexer_settings):
     """
     factories.DocumentFactory()

-    indexer_settings.SEARCH_INDEXER_URL = "http://app-find/api/v1.0/documents/index/"
+    indexer_settings.INDEXING_URL = "http://app-find/api/v1.0/documents/index/"

     responses.add(
         responses.POST,
@@ -256,10 +252,10 @@ def test_services_search_indexers_index_errors(indexer_settings):
     )

     with pytest.raises(HTTPError):
-        SearchIndexer().index()
+        FindDocumentIndexer().index()


-@patch.object(SearchIndexer, "push")
+@patch.object(FindDocumentIndexer, "push")
 def test_services_search_indexers_batches_pass_only_batch_accesses(
     mock_push, indexer_settings
 ):
@@ -276,7 +272,7 @@ def test_services_search_indexers_batches_pass_only_batch_accesses(
         access = factories.UserDocumentAccessFactory(document=document)
         expected_user_subs[str(document.id)] = str(access.user.sub)

-    assert SearchIndexer().index() == 5
+    assert FindDocumentIndexer().index() == 5

     # Should be 3 batches: 2 + 2 + 1
     assert mock_push.call_count == 3
@@ -299,7 +295,7 @@ def test_services_search_indexers_batches_pass_only_batch_accesses(
     assert seen_doc_ids == {str(d.id) for d in documents}


-@patch.object(SearchIndexer, "push")
+@patch.object(FindDocumentIndexer, "push")
 @pytest.mark.usefixtures("indexer_settings")
 def test_services_search_indexers_batch_size_argument(mock_push):
     """
@@ -314,7 +310,7 @@ def test_services_search_indexers_batch_size_argument(mock_push):
         access = factories.UserDocumentAccessFactory(document=document)
         expected_user_subs[str(document.id)] = str(access.user.sub)

-    assert SearchIndexer().index(batch_size=2) == 5
+    assert FindDocumentIndexer().index(batch_size=2) == 5

     # Should be 3 batches: 2 + 2 + 1
     assert mock_push.call_count == 3
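Note: the two batching tests above fix the observable contract of index(): documents are pushed in batches (size 2 here, via the default in the fixture or the batch_size argument), push() is called once per batch, and the return value counts the indexed documents; the empty-document skipping is pinned down by the tests that follow. A self-contained, hypothetical sketch of that loop (helper name, signature and default batch size are illustrative, not the repository's code):

    # Hypothetical sketch -- batching behaviour only, not the actual indexer.
    from typing import Callable, Iterable


    def index_in_batches(
        serialized_docs: Iterable[dict],
        push: Callable[[list], None],
        batch_size: int = 2,
    ) -> int:
        batch, indexed = [], 0
        for doc in serialized_docs:
            if not (doc.get("title") or doc.get("content")):
                continue  # documents with no title and no content are skipped
            batch.append(doc)
            indexed += 1
            if len(batch) == batch_size:
                push(batch)
                batch = []
        if batch:
            push(batch)  # final, possibly smaller batch (5 docs -> 2 + 2 + 1)
        return indexed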
@@ -337,7 +333,7 @@ def test_services_search_indexers_batch_size_argument(mock_push):
     assert seen_doc_ids == {str(d.id) for d in documents}


-@patch.object(SearchIndexer, "push")
+@patch.object(FindDocumentIndexer, "push")
 @pytest.mark.usefixtures("indexer_settings")
 def test_services_search_indexers_ignore_empty_documents(mock_push):
     """
@@ -349,7 +345,7 @@ def test_services_search_indexers_ignore_empty_documents(mock_push):
     empty_title = factories.DocumentFactory(title="")
     empty_content = factories.DocumentFactory(content="")

-    assert SearchIndexer().index() == 3
+    assert FindDocumentIndexer().index() == 3

     assert mock_push.call_count == 1

@@ -365,7 +361,7 @@ def test_services_search_indexers_ignore_empty_documents(mock_push):
     }


-@patch.object(SearchIndexer, "push")
+@patch.object(FindDocumentIndexer, "push")
 def test_services_search_indexers_skip_empty_batches(mock_push, indexer_settings):
     """
     Documents indexing batch can be empty if all the docs are empty.
@@ -377,14 +373,14 @@ def test_services_search_indexers_skip_empty_batches(mock_push, indexer_settings
     # Only empty docs
     factories.DocumentFactory.create_batch(5, content="", title="")

-    assert SearchIndexer().index() == 1
+    assert FindDocumentIndexer().index() == 1
     assert mock_push.call_count == 1

     results = [doc["id"] for doc in mock_push.call_args[0][0]]
     assert results == [str(document.id)]


-@patch.object(SearchIndexer, "push")
+@patch.object(FindDocumentIndexer, "push")
 @pytest.mark.usefixtures("indexer_settings")
 def test_services_search_indexers_ancestors_link_reach(mock_push):
     """Document accesses and reach should take into account ancestors link reaches."""
@@ -395,7 +391,7 @@ def test_services_search_indexers_ancestors_link_reach(mock_push):
     parent = factories.DocumentFactory(parent=grand_parent, link_reach="public")
     document = factories.DocumentFactory(parent=parent, link_reach="restricted")

-    assert SearchIndexer().index() == 4
+    assert FindDocumentIndexer().index() == 4

     results = {doc["id"]: doc for doc in mock_push.call_args[0][0]}
     assert len(results) == 4
@@ -405,7 +401,7 @@ def test_services_search_indexers_ancestors_link_reach(mock_push):
     assert results[str(document.id)]["reach"] == "public"


-@patch.object(SearchIndexer, "push")
+@patch.object(FindDocumentIndexer, "push")
 @pytest.mark.usefixtures("indexer_settings")
 def test_services_search_indexers_ancestors_users(mock_push):
     """Document accesses and reach should include users from ancestors."""
@@ -415,7 +411,7 @@ def test_services_search_indexers_ancestors_users(mock_push):
     parent = factories.DocumentFactory(parent=grand_parent, users=[user_p])
     document = factories.DocumentFactory(parent=parent, users=[user_d])

-    assert SearchIndexer().index() == 3
+    assert FindDocumentIndexer().index() == 3

     results = {doc["id"]: doc for doc in mock_push.call_args[0][0]}
     assert len(results) == 3
@@ -428,7 +424,7 @@ def test_services_search_indexers_ancestors_users(mock_push):
     }


-@patch.object(SearchIndexer, "push")
+@patch.object(FindDocumentIndexer, "push")
 @pytest.mark.usefixtures("indexer_settings")
 def test_services_search_indexers_ancestors_teams(mock_push):
     """Document accesses and reach should include teams from ancestors."""
@@ -436,7 +432,7 @@ def test_services_search_indexers_ancestors_teams(mock_push):
     parent = factories.DocumentFactory(parent=grand_parent, teams=["team_p"])
     document = factories.DocumentFactory(parent=parent, teams=["team_d"])

-    assert SearchIndexer().index() == 3
+    assert FindDocumentIndexer().index() == 3

     results = {doc["id"]: doc for doc in mock_push.call_args[0][0]}
     assert len(results) == 3
@@ -451,9 +447,9 @@ def test_push_uses_correct_url_and_data(mock_post, indexer_settings):
     push() should call requests.post with the correct URL from settings
     the timeout set to 10 seconds and the data as JSON.
     """
-    indexer_settings.SEARCH_INDEXER_URL = "http://example.com/index"
+    indexer_settings.INDEXING_URL = "http://example.com/index"

-    indexer = SearchIndexer()
+    indexer = FindDocumentIndexer()
     sample_data = [{"id": "123", "title": "Test"}]

     mock_response = mock_post.return_value
@@ -464,7 +460,7 @@ def test_push_uses_correct_url_and_data(mock_post, indexer_settings):
     mock_post.assert_called_once()
     args, kwargs = mock_post.call_args

-    assert args[0] == indexer_settings.SEARCH_INDEXER_URL
+    assert args[0] == indexer_settings.INDEXING_URL
     assert kwargs.get("json") == sample_data
     assert kwargs.get("timeout") == 10

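Note: test_push_uses_correct_url_and_data asserts the whole shape of push(): a single requests.post call, the URL taken positionally from the INDEXING_URL setting, the batch passed as json= and a 10-second timeout. A hypothetical sketch consistent with those assertions (authentication/secret handling is not asserted in the hunks shown and is deliberately left out):

    # Hypothetical sketch -- shows only what the test above asserts.
    import requests
    from django.conf import settings


    def push(batch):
        response = requests.post(settings.INDEXING_URL, json=batch, timeout=10)
        response.raise_for_status()  # surfaces HTTPError, as the index_errors test expects
        return response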
@@ -498,7 +494,7 @@ def test_get_visited_document_ids_of():
     factories.UserDocumentAccessFactory(user=user, document=doc2)

     # The second document have an access for the user
-    assert get_visited_document_ids_of(queryset, user) == [str(doc1.pk)]
+    assert get_visited_document_ids_of(queryset, user) == (str(doc1.pk),)


 @pytest.mark.usefixtures("indexer_settings")
@@ -532,7 +528,7 @@ def test_get_visited_document_ids_of_deleted():
     doc_deleted.soft_delete()

     # Only the first document is not deleted
-    assert get_visited_document_ids_of(queryset, user) == [str(doc.pk)]
+    assert get_visited_document_ids_of(queryset, user) == (str(doc.pk),)


 @responses.activate
@@ -542,9 +538,7 @@ def test_services_search_indexers_search_errors(indexer_settings):
     """
     factories.DocumentFactory()

-    indexer_settings.SEARCH_INDEXER_QUERY_URL = (
-        "http://app-find/api/v1.0/documents/search/"
-    )
+    indexer_settings.SEARCH_URL = "http://app-find/api/v1.0/documents/search/"

     responses.add(
         responses.POST,
@@ -554,17 +548,17 @@ def test_services_search_indexers_search_errors(indexer_settings):
     )

     with pytest.raises(HTTPError):
-        SearchIndexer().search("alpha", token="mytoken")
+        FindDocumentIndexer().search(q="alpha", token="mytoken")


 @patch("requests.post")
 def test_services_search_indexers_search(mock_post, indexer_settings):
     """
-    search() should call requests.post to SEARCH_INDEXER_QUERY_URL with the
+    search() should call requests.post to SEARCH_URL with the
     document ids from linktraces.
     """
     user = factories.UserFactory()
-    indexer = SearchIndexer()
+    indexer = FindDocumentIndexer()

     mock_response = mock_post.return_value
     mock_response.raise_for_status.return_value = None  # No error
@@ -578,11 +572,11 @@ def test_services_search_indexers_search(mock_post, indexer_settings):

     visited = get_visited_document_ids_of(models.Document.objects.all(), user)

-    indexer.search("alpha", visited=visited, token="mytoken")
+    indexer.search(q="alpha", visited=visited, token="mytoken")

     args, kwargs = mock_post.call_args

-    assert args[0] == indexer_settings.SEARCH_INDEXER_QUERY_URL
+    assert args[0] == indexer_settings.SEARCH_URL

     query_data = kwargs.get("json")
     assert query_data["q"] == "alpha"
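Note: the search tests above (and the nb_results test that follows) describe the query call: a POST to the SEARCH_URL setting whose JSON body carries the query under "q", the visited document ids returned by get_visited_document_ids_of() (now a tuple), and an nb_results cap defaulting to SEARCH_INDEXER_QUERY_LIMIT. The JSON key for the visited ids, the Authorization header and the timeout below are assumptions, not asserted in these hunks. A hypothetical sketch:

    # Hypothetical sketch -- request shape only; header, timeout and the
    # "visited" key name are assumptions.
    import requests
    from django.conf import settings


    def search(q, token, visited=(), nb_results=None):
        payload = {
            "q": q,
            "visited": list(visited),
            "nb_results": nb_results or settings.SEARCH_INDEXER_QUERY_LIMIT,
        }
        response = requests.post(
            settings.SEARCH_URL,
            json=payload,
            headers={"Authorization": f"Bearer {token}"},  # assumed
            timeout=10,  # assumed
        )
        response.raise_for_status()
        return response.json()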
@@ -605,7 +599,7 @@ def test_services_search_indexers_search_nb_results(mock_post, indexer_settings)
     indexer_settings.SEARCH_INDEXER_QUERY_LIMIT = 25

     user = factories.UserFactory()
-    indexer = SearchIndexer()
+    indexer = FindDocumentIndexer()

     mock_response = mock_post.return_value
     mock_response.raise_for_status.return_value = None  # No error
@@ -619,17 +613,65 @@ def test_services_search_indexers_search_nb_results(mock_post, indexer_settings)

     visited = get_visited_document_ids_of(models.Document.objects.all(), user)

-    indexer.search("alpha", visited=visited, token="mytoken")
+    indexer.search(q="alpha", visited=visited, token="mytoken")

     args, kwargs = mock_post.call_args

-    assert args[0] == indexer_settings.SEARCH_INDEXER_QUERY_URL
+    assert args[0] == indexer_settings.SEARCH_URL
     assert kwargs.get("json")["nb_results"] == 25

     # The argument overrides the setting value
-    indexer.search("alpha", visited=visited, token="mytoken", nb_results=109)
+    indexer.search(q="alpha", visited=visited, token="mytoken", nb_results=109)

     args, kwargs = mock_post.call_args

-    assert args[0] == indexer_settings.SEARCH_INDEXER_QUERY_URL
+    assert args[0] == indexer_settings.SEARCH_URL
     assert kwargs.get("json")["nb_results"] == 109
+
+
+def test_search_indexer_get_title_with_localized_field():
+    """Test extracting title from localized title field."""
+    source = {"title.extension": "Bonjour", "id": 1, "content": "test"}
+    result = FindDocumentIndexer.get_title(source)
+
+    assert result == "Bonjour"
+
+
+def test_search_indexer_get_title_with_multiple_localized_fields():
+    """Test that first matching localized title is returned."""
+    source = {"title.extension": "Bonjour", "title.en": "Hello", "id": 1}
+    result = FindDocumentIndexer.get_title(source)
+
+    assert result in ["Bonjour", "Hello"]
+
+
+def test_search_indexer_get_title_fallback_to_plain_title():
+    """Test fallback to plain 'title' field when no localized field exists."""
+    source = {"title": "Hello World", "id": 1}
+    result = FindDocumentIndexer.get_title(source)
+
+    assert result == "Hello World"
+
+
+def test_search_indexer_get_title_no_title_field():
+    """Test that empty string is returned when no title field exists."""
+    source = {"id": 1, "content": "test"}
+    result = FindDocumentIndexer.get_title(source)
+
+    assert result == ""
+
+
+def test_search_indexer_get_title_with_empty_localized_title():
+    """Test that fallback works when localized title is empty."""
+    source = {"title.extension": "", "title": "Fallback Title", "id": 1}
+    result = FindDocumentIndexer.get_title(source)
+
+    assert result == "Fallback Title"
+
+
+def test_search_indexer_get_title_with_multiple_extension():
+    """Test extracting title from title field with multiple extensions."""
+    source = {"title.extension_1.extension_2": "Bonjour", "id": 1, "content": "test"}
+    result = FindDocumentIndexer.get_title(source)
+
+    assert result == "Bonjour"
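Note: the new get_title() tests added at the end of this hunk encode a fallback order: any non-empty localized "title.<suffix>" entry (one or several dotted suffixes) wins, then the plain "title" field, and an empty string otherwise. The tests call the helper on the class itself, so it is a class-level/static method; the body below is a hypothetical illustration, not the project's code:

    # Hypothetical sketch of the fallback order the tests above encode.
    def get_title(source: dict) -> str:
        # Prefer any non-empty localized "title.<suffix>" entry...
        for key, value in source.items():
            if key.startswith("title.") and value:
                return value
        # ...then the plain "title" field, else an empty string.
        return source.get("title") or ""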
Some files were not shown because too many files have changed in this diff.