Compare commits


1 Commit

Author       SHA1         Message        Date
Anthony LC   f1b41cc4c4   POC override   2025-11-21 14:28:31 +01:00
410 changed files with 15020 additions and 32407 deletions

View File

@@ -1,24 +0,0 @@
name: 'Free Disk Space'
description: 'Free up disk space by removing large preinstalled items and cleaning up Docker'
runs:
using: "composite"
steps:
- name: Free disk space (Linux only)
if: runner.os == 'Linux'
shell: bash
run: |
echo "Disk usage before cleanup:"
df -h
# Remove large preinstalled items that are not used on GitHub-hosted runners
sudo rm -rf /usr/share/dotnet || true
sudo rm -rf /opt/ghc || true
sudo rm -rf /usr/local/lib/android || true
# Clean up Docker
docker system prune -af || true
docker volume prune -f || true
echo "Disk usage after cleanup:"
df -h

View File

@@ -5,13 +5,13 @@ on:
workflow_dispatch:
push:
branches:
- "main"
- 'main'
tags:
- "v*"
- 'v*'
pull_request:
branches:
- "main"
- "ci/trivy-fails"
- 'main'
- 'ci/trivy-fails'
env:
DOCKER_USER: 1001:127
@@ -20,97 +20,111 @@ jobs:
build-and-push-backend:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
-
name: Checkout repository
uses: actions/checkout@v4
- name: Docker meta
-
name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: lasuite/impress-backend
- name: Login to DockerHub
-
name: Login to DockerHub
if: github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview')
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_HUB_USER }}
password: ${{ secrets.DOCKER_HUB_PASSWORD }}
- name: Run trivy scan
-
name: Run trivy scan
uses: numerique-gouv/action-trivy-cache@main
with:
docker-build-args: "--target backend-production -f Dockerfile"
docker-image-name: "docker.io/lasuite/impress-backend:${{ github.sha }}"
- name: Build and push
docker-build-args: '--target backend-production -f Dockerfile'
docker-image-name: 'docker.io/lasuite/impress-backend:${{ github.sha }}'
-
name: Build and push
uses: docker/build-push-action@v6
with:
context: .
target: backend-production
build-args: DOCKER_USER=${{ env.DOCKER_USER }}
build-args: DOCKER_USER=${{ env.DOCKER_USER }}:-1000
push: ${{ github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview') }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- name: Cleanup Docker after build
if: always()
run: |
docker system prune -af
docker volume prune -f
build-and-push-frontend:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
-
name: Checkout repository
uses: actions/checkout@v4
- name: Docker meta
- name: Checkout custom code repository
uses: actions/checkout@v4
with:
repository: 'AntoLC/docs-customized'
ref: 'main'
path: docs-custom
-
name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: lasuite/impress-frontend
- name: Login to DockerHub
-
name: Login to DockerHub
if: github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview')
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_HUB_USER }}
password: ${{ secrets.DOCKER_HUB_PASSWORD }}
- name: Run trivy scan
-
name: Run trivy scan
uses: numerique-gouv/action-trivy-cache@main
with:
docker-build-args: "-f src/frontend/Dockerfile --target frontend-production"
docker-image-name: "docker.io/lasuite/impress-frontend:${{ github.sha }}"
- name: Build and push
docker-build-args: '-f src/frontend/Dockerfile --target frontend-production --build-arg CUSTOM_CODE=docs-custom'
docker-image-name: 'docker.io/lasuite/impress-frontend:${{ github.sha }}'
-
name: Build and push
uses: docker/build-push-action@v6
with:
context: .
file: ./src/frontend/Dockerfile
target: frontend-production
build-args: |
DOCKER_USER=${{ env.DOCKER_USER }}
DOCKER_USER=${{ env.DOCKER_USER }}:-1000
CUSTOM_CODE=docs-custom
PUBLISH_AS_MIT=false
push: ${{ github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview') }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- name: Cleanup Docker after build
if: always()
run: |
docker system prune -af
docker volume prune -f
build-and-push-y-provider:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
-
name: Checkout repository
uses: actions/checkout@v4
- name: Docker meta
-
name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: lasuite/impress-y-provider
- name: Login to DockerHub
-
name: Login to DockerHub
if: github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview')
run: echo "${{ secrets.DOCKER_HUB_PASSWORD }}" | docker login -u "${{ secrets.DOCKER_HUB_USER }}" --password-stdin
- name: Run trivy scan
-
name: Run trivy scan
uses: numerique-gouv/action-trivy-cache@main
with:
docker-build-args: "-f src/frontend/servers/y-provider/Dockerfile --target y-provider"
docker-image-name: "docker.io/lasuite/impress-y-provider:${{ github.sha }}"
- name: Build and push
docker-build-args: '-f src/frontend/servers/y-provider/Dockerfile --target y-provider'
docker-image-name: 'docker.io/lasuite/impress-y-provider:${{ github.sha }}'
-
name: Build and push
uses: docker/build-push-action@v6
with:
context: .
@@ -120,17 +134,11 @@ jobs:
push: ${{ github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview') }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- name: Cleanup Docker after build
if: always()
run: |
docker system prune -af
docker volume prune -f
notify-argocd:
needs:
- build-and-push-backend
- build-and-push-frontend
- build-and-push-y-provider
- build-and-push-backend
runs-on: ubuntu-latest
if: github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'preview')
steps:

View File

@@ -1,142 +0,0 @@
name: Build and Push to GHCR
run-name: Build and Push to GHCR
on:
workflow_dispatch:
push:
branches:
- "main"
tags:
- "v*"
env:
DOCKER_USER: 1001:127
REGISTRY: ghcr.io
jobs:
build-and-push-backend:
runs-on: ubuntu-latest
if: github.event.repository.fork == true
permissions:
contents: read
packages: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ github.repository }}/backend
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=sha
- name: Login to GHCR
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v6
with:
context: .
target: backend-production
build-args: DOCKER_USER=${{ env.DOCKER_USER }}
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- name: Cleanup Docker after build
if: always()
run: |
docker system prune -af
docker volume prune -f
build-and-push-frontend:
runs-on: ubuntu-latest
if: github.event.repository.fork == true
permissions:
contents: read
packages: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ github.repository }}/frontend
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=sha
- name: Login to GHCR
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v6
with:
context: .
file: ./src/frontend/Dockerfile
target: frontend-production
build-args: |
DOCKER_USER=${{ env.DOCKER_USER }}
PUBLISH_AS_MIT=false
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- name: Cleanup Docker after build
if: always()
run: |
docker system prune -af
docker volume prune -f
build-and-push-y-provider:
runs-on: ubuntu-latest
if: github.event.repository.fork == true
permissions:
contents: read
packages: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ github.repository }}/y-provider
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=sha
- name: Login to GHCR
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v6
with:
context: .
file: ./src/frontend/servers/y-provider/Dockerfile
target: y-provider
build-args: DOCKER_USER=${{ env.DOCKER_USER }}:-1000
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- name: Cleanup Docker after build
if: always()
run: |
docker system prune -af
docker volume prune -f

View File

@@ -19,8 +19,6 @@ jobs:
test-front:
needs: install-dependencies
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
@@ -43,8 +41,6 @@ jobs:
lint-front:
runs-on: ubuntu-latest
needs: install-dependencies
permissions:
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
@@ -89,9 +85,6 @@ jobs:
- name: Install Playwright Browsers
run: cd src/frontend/apps/e2e && yarn install --frozen-lockfile && yarn install-playwright chromium
- name: Free disk space before Docker
uses: ./.github/actions/free-disk-space
- name: Start Docker services
run: make bootstrap-e2e FLUSH_ARGS='--no-input'
@@ -131,9 +124,6 @@ jobs:
- name: Install Playwright Browsers
run: cd src/frontend/apps/e2e && yarn install --frozen-lockfile && yarn install-playwright firefox webkit chromium
- name: Free disk space before Docker
uses: ./.github/actions/free-disk-space
- name: Start Docker services
run: make bootstrap-e2e FLUSH_ARGS='--no-input'
@@ -197,38 +187,3 @@ jobs:
strip-hash: "[-_.][a-f0-9]{8,}(?=\\.(?:js|css|html)$)"
omit-unchanged: true
install-script: "yarn install --frozen-lockfile"
uikit-theme-checker:
runs-on: ubuntu-latest
needs: install-dependencies
permissions:
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: "22.x"
- name: Restore the frontend cache
uses: actions/cache@v4
with:
path: "src/frontend/**/node_modules"
key: front-node_modules-${{ hashFiles('src/frontend/**/yarn.lock') }}
fail-on-cache-miss: true
- name: Build theme
run: cd src/frontend/apps/impress && yarn build-theme
- name: Ensure theme is up to date
shell: bash
run: |
if [[ -n "$(git status --porcelain)" ]]; then
echo "Error: build-theme produced git changes (tracked or untracked)."
echo "--- git status --porcelain ---"
git status --porcelain
echo "--- git diff ---"
git --no-pager diff
exit 1
fi

View File

@@ -19,7 +19,7 @@ jobs:
if: github.event_name == 'pull_request' # Makes sense only for pull requests
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: show
@@ -27,7 +27,7 @@ jobs:
- name: Enforce absence of print statements in code
if: always()
run: |
! git diff origin/${{ github.event.pull_request.base.ref }}..HEAD -- src/backend ':(exclude)**/impress.yml' | grep "print("
! git diff origin/${{ github.event.pull_request.base.ref }}..HEAD -- . ':(exclude)**/impress.yml' | grep "print("
- name: Check absence of fixup commits
if: always()
run: |
@@ -46,7 +46,7 @@ jobs:
github.event_name == 'pull_request'
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
fetch-depth: 50
- name: Check that the CHANGELOG has been modified in the current branch
@@ -56,7 +56,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v2
- name: Check CHANGELOG max line length
run: |
max_line_length=$(cat CHANGELOG.md | grep -Ev "^\[.*\]: https://github.com" | wc -L)
@@ -70,7 +70,7 @@ jobs:
if: github.event_name == 'pull_request'
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v2
- name: Install codespell
run: pip install --user codespell
- name: Check for typos
@@ -92,7 +92,7 @@ jobs:
working-directory: src/backend
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v2
- name: Install Python
uses: actions/setup-python@v5
with:
@@ -202,7 +202,7 @@ jobs:
run: |
sudo apt-get update
sudo apt-get install -y gettext pandoc shared-mime-info
sudo wget https://raw.githubusercontent.com/suitenumerique/django-lasuite/refs/heads/main/assets/conf/mime.types -O /etc/mime.types
sudo wget https://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types -O /etc/mime.types
- name: Generate a MO file from strings extracted from the project
run: python manage.py compilemessages

.gitignore
View File

@@ -43,10 +43,6 @@ venv.bak/
env.d/development/*.local
env.d/terraform
# Docker
compose.override.yml
docker/auth/*.local
# npm
node_modules

View File

@@ -6,180 +6,12 @@ and this project adheres to
## [Unreleased]
### Added
- ✨(tracking) add UTM parameters to shared document links
- ✨(frontend) add floating bar with leftpanel collapse button #1876
- ✨(frontend) Can print a doc #1832
- ✨(backend) manage reconciliation requests for user accounts #1878
- 👷(CI) add GHCR workflow for forked repo testing #1851
### Changed
- ♿️(frontend) prevent dates from being focusable #1855
- ♿️(frontend) Focus main container after navigation #1854
- 💄(frontend) align colors and logo with ui-kit v2 #1869
- 🚸(backend) sort user search results by proximity with the active user #1802
- 🚸(oidc) ignore case when fallback on email #1880
### Fixed
- 🐛(frontend) fix broadcast store sync #1846
- 🐛(helm) use celery resources instead of backend resources
- 🐛(helm) reverse liveness and readiness for backend deployment
### Security
- 🔒️(secu) fix CVE-2026-26996 with minimatch #1900
## [v4.5.0] - 2026-01-28
### Added
- ✨(frontend) integrate configurable Waffle #1795
- ✨ Import of documents #1609
- 🚨(CI) gives warning if theme not updated #1811
- ✨(frontend) Add stat for Crisp #1824
- ✨(auth) add silent login #1690
- 🔧(project) add DJANGO_EMAIL_URL_APP environment variable #1825
### Changed
- ♿(frontend) improve accessibility:
- ♿️(frontend) fix subdoc opening and emoji pick focus #1745
- ✨(backend) add field for button label in email template #1817
### Fixed
- ✅(e2e) fix e2e test for other browsers #1799
- 🐛(export) fix export column NaN #1819
- 🐛(frontend) add fallback for unsupported Blocknote languages #1810
- 🐛(frontend) fix emojipicker closing in tree #1808
- 🐛(frontend) display children in favorite #1782
- 🐛(frontend) preserve typed text after @ on escape #1833
### Removed
- 🔥(project) remove all code related to template #1780
### Security
- 🔒️(trivy) fix vulnerability about jaraco.context #1806
## [v4.4.0] - 2026-01-13
### Added
- ✨(backend) add documents/all endpoint with descendants #1553
- ✅(export) add PDF regression tests #1762
- 📝(docs) Add language configuration documentation #1757
- 🔒(helm) Set default security context #1750
- ✨(backend) use langfuse to monitor AI actions #1776
### Changed
- ♿(frontend) improve accessibility:
- ♿(frontend) make html export accessible to screen reader users #1743
- ♿(frontend) add missing label and fix Axes errors to improve a11y #1693
### Fixed
- ✅(backend) reduce flakiness on backend test #1769
- 🐛(frontend) fix clickable main content regression #1773
- 🐛(backend) fix TRASHBIN_CUTOFF_DAYS type error #1778
- 💄(frontend) fix icon position in callout block #1779
### Security
- 🔒️(backend) validate more strictly url used by cors-proxy endpoint #1768
- 🔒️(frontend) fix props vulnerability in Interlinking #1792
## [v4.3.0] - 2026-01-05
### Added
- ✨(helm) redirecting system #1697
- 📱(frontend) add comments for smaller device #1737
- ✨(project) add custom js support via config #1759
### Changed
- 🥅(frontend) intercept 401 error on GET threads #1754
- 🦺(frontend) check content type pdf on PdfBlock #1756
- ✈️(frontend) pause Posthog when offline #1755
### Fixed
- 🐛(frontend) fix tables deletion #1739
- 🐛(frontend) fix children not display when first resize #1753
## [v4.2.0] - 2025-12-17
### Added
- ✨(backend) allow to create a new user in a marketing system #1707
- ✨(backend) add async indexation of documents on save (or access save) #1276
- ✨(backend) add debounce mechanism to limit indexation jobs #1276
- ✨(api) add API route to search for indexed documents in Find #1276
- 🥅(frontend) add boundary error page #1728
### Changed
- 🛂(backend) stop throttling collaboration servers #1730
- 🚸(backend) use unaccented full name for user search #1637
- 🌐(backend) internationalize demo #1644
- ♿(frontend) improve accessibility:
-Improve keyboard accessibility for the document tree #1681
### Fixed
- 🐛(frontend) paste content with comments from another document #1732
- 🐛(frontend) Select text + Go back one page crash the app #1733
- 🐛(frontend) fix versioning conflict #1742
## [v4.1.0] - 2025-12-09
### Added
- ⚡️(frontend) export html #1669
### Changed
- ♿(frontend) improve accessibility:
- ♿(frontend) add skip to content button for keyboard accessibility #1624
- ♿(frontend) fix toggle panel button a11y labels #1634
- 🔒️(frontend) remove dangerouslySetInnerHTML from codebase #1712
- ⚡️(frontend) improve Comments feature #1687
### Fixed
- 🐛(nginx) fix / location to handle new static pages #1682
- 🐛(frontend) rerendering during resize window #1715
## [v4.0.0] - 2025-12-01
### Added
- ✨ Add comments feature to the editor #1330
- ✨(backend) Comments on text editor #1330
- ✨(frontend) link to create new doc #1574
### Changed
- ⚡️(sw) stop to cache external resources likes videos #1655
- 💥(frontend) upgrade to ui-kit v2 #1605
- ⚡️(frontend) improve perf on upload and table of contents #1662
- ♿(frontend) improve accessibility:
- ♿(frontend) improve share modal button accessibility #1626
- ♿(frontend) improve screen reader support in DocShare modal #1628
### Fixed
- 🐛(frontend) fix toolbar not activated when reader #1640
- 🐛(frontend) preserve left panel width on window resize #1588
- 🐛(frontend) prevent duplicate as first character in title #1595
## [v3.10.0] - 2025-11-18
## [3.10.0] - 2025-11-18
### Added
@@ -208,18 +40,21 @@ and this project adheres to
### Security
- mitigate role escalation in the ask_for_access viewset #1580
- 🐛(frontend) preserve left panel width on window resize #1588
### Removed
- 🔥(backend) remove api managing templates
## [v3.9.0] - 2025-11-10
## [3.9.0] - 2025-11-10
### Added
- ✨(frontend) create skeleton component for DocEditor #1491
- ✨(frontend) add an EmojiPicker in the document tree and title #1381
- ✨(frontend) ajustable left panel #1456
- ✨ Add comments feature to the editor #1330
### Changed
@@ -239,13 +74,13 @@ and this project adheres to
- 🐛(frontend) button new doc UI fix #1557
- 🐛(frontend) interlinking UI fix #1557
## [v3.8.2] - 2025-10-17
## [3.8.2] - 2025-10-17
### Fixed
- 🐛(service-worker) fix sw registration and page reload logic #1500
## [v3.8.1] - 2025-10-17
## [3.8.1] - 2025-10-17
### Fixed
@@ -259,7 +94,7 @@ and this project adheres to
- 🔥(backend) remove treebeard form for the document admin #1470
## [v3.8.0] - 2025-10-14
## [3.8.0] - 2025-10-14
### Added
@@ -312,7 +147,7 @@ and this project adheres to
- 🔥(frontend) remove custom DividerBlock ##1375
## [v3.7.0] - 2025-09-12
## [3.7.0] - 2025-09-12
### Added
@@ -344,10 +179,11 @@ and this project adheres to
- 🐛(frontend) fix callout emoji list #1366
## [v3.6.0] - 2025-09-04
## [3.6.0] - 2025-09-04
### Added
- ✨(backend) Comments on text editor #1309
- 👷(CI) add bundle size check job #1268
- ✨(frontend) use title first emoji as doc icon in tree #1289
@@ -380,7 +216,7 @@ and this project adheres to
- 🐛(frontend) fix display bug on homepage #1332
- 🐛link role update #1287
## [v3.5.0] - 2025-07-31
## [3.5.0] - 2025-07-31
### Added
@@ -408,7 +244,7 @@ and this project adheres to
- 🐛(frontend) 401 redirection overridden #1214
- 🐛(frontend) include root parent in search #1243
## [v3.4.2] - 2025-07-18
## [3.4.2] - 2025-07-18
### Changed
@@ -418,7 +254,7 @@ and this project adheres to
- 🐛(backend) improve prompt to not use code blocks delimiter #1188
## [v3.4.1] - 2025-07-15
## [3.4.1] - 2025-07-15
### Fixed
@@ -429,7 +265,7 @@ and this project adheres to
- 🐛(frontend) fix crash share modal on grid options #1174
- 🐛(frontend) fix unfold subdocs not clickable at the bottom #1179
## [v3.4.0] - 2025-07-09
## [3.4.0] - 2025-07-09
### Added
@@ -473,7 +309,7 @@ and this project adheres to
- 🔥(frontend) remove Beta from logo #1095
## [v3.3.0] - 2025-05-06
## [3.3.0] - 2025-05-06
### Added
@@ -505,14 +341,14 @@ and this project adheres to
- 🔥(back) remove footer endpoint #948
## [v3.2.1] - 2025-05-06
## [3.2.1] - 2025-05-06
## Fixed
- 🐛(frontend) fix list copy paste #943
- 📝(doc) update contributing policy (commit signatures are now mandatory) #895
## [v3.2.0] - 2025-05-05
## [3.2.0] - 2025-05-05
## Added
@@ -539,7 +375,7 @@ and this project adheres to
- 🐛(backend) race condition create doc #633
- 🐛(frontend) fix breaklines in custom blocks #908
## [v3.1.0] - 2025-04-07
## [3.1.0] - 2025-04-07
## Added
@@ -557,7 +393,7 @@ and this project adheres to
- 🐛(back) validate document content in serializer #822
- 🐛(frontend) fix selection click past end of content #840
## [v3.0.0] - 2025-03-28
## [3.0.0] - 2025-03-28
## Added
@@ -573,7 +409,7 @@ and this project adheres to
- 🐛(backend) compute ancestor_links in get_abilities if needed #725
- 🔒️(back) restrict access to document accesses #801
## [v2.6.0] - 2025-03-21
## [2.6.0] - 2025-03-21
## Added
@@ -591,7 +427,7 @@ and this project adheres to
- 🔒️(back) throttle user list endpoint #636
- 🔒️(back) remove pagination and limit to 5 for user list endpoint #636
## [v2.5.0] - 2025-03-18
## [2.5.0] - 2025-03-18
## Added
@@ -621,7 +457,7 @@ and this project adheres to
- 🚨(helm) fix helmfile lint #736
- 🚚(frontend) redirect to 401 page when 401 error #759
## [v2.4.0] - 2025-03-06
## [2.4.0] - 2025-03-06
## Added
@@ -635,7 +471,7 @@ and this project adheres to
- 🐛(frontend) fix collaboration error #684
## [v2.3.0] - 2025-03-03
## [2.3.0] - 2025-03-03
## Added
@@ -662,7 +498,7 @@ and this project adheres to
- ♻️(frontend) improve table pdf rendering
- 🐛(email) invitation emails in receivers language
## [v2.2.0] - 2025-02-10
## [2.2.0] - 2025-02-10
## Added
@@ -681,7 +517,7 @@ and this project adheres to
- 🐛(frontend) fix cursor breakline #609
- 🐛(frontend) fix style pdf export #609
## [v2.1.0] - 2025-01-29
## [2.1.0] - 2025-01-29
## Added
@@ -710,14 +546,14 @@ and this project adheres to
- 🔥(backend) remove "content" field from list serializer # 516
## [v2.0.1] - 2025-01-17
## [2.0.1] - 2025-01-17
## Fixed
-🐛(frontend) share modal is shown when you don't have the abilities #557
-🐛(frontend) title copy break app #564
## [v2.0.0] - 2025-01-13
## [2.0.0] - 2025-01-13
## Added
@@ -748,7 +584,7 @@ and this project adheres to
- 🐛(frontend) hide search and create doc button if not authenticated #555
- 🐛(backend) race condition creation issue #556
## [v1.10.0] - 2024-12-17
## [1.10.0] - 2024-12-17
## Added
@@ -769,7 +605,7 @@ and this project adheres to
- 🐛(frontend) update doc editor height #481
- 💄(frontend) add doc search #485
## [v1.9.0] - 2024-12-11
## [1.9.0] - 2024-12-11
## Added
@@ -790,19 +626,19 @@ and this project adheres to
- 🐛(frontend) Fix hidden menu on Firefox #468
- 🐛(backend) fix sanitize problem IA #490
## [v1.8.2] - 2024-11-28
## [1.8.2] - 2024-11-28
## Changed
- ♻️(SW) change strategy html caching #460
## [v1.8.1] - 2024-11-27
## [1.8.1] - 2024-11-27
## Fixed
- 🐛(frontend) link not clickable and flickering firefox #457
## [v1.8.0] - 2024-11-25
## [1.8.0] - 2024-11-25
## Added
@@ -830,7 +666,7 @@ and this project adheres to
- 🐛(frontend) users have view access when revoked #387
- 🐛(frontend) fix placeholder editable when double clicks #454
## [v1.7.0] - 2024-10-24
## [1.7.0] - 2024-10-24
## Added
@@ -857,7 +693,7 @@ and this project adheres to
- 🔥(helm) remove infra related codes #366
## [v1.6.0] - 2024-10-17
## [1.6.0] - 2024-10-17
## Added
@@ -879,13 +715,13 @@ and this project adheres to
- 🐛(backend) fix nginx docker container #340
- 🐛(frontend) fix copy paste firefox #353
## [v1.5.1] - 2024-10-10
## [1.5.1] - 2024-10-10
## Fixed
- 🐛(db) fix users duplicate #316
## [v1.5.0] - 2024-10-09
## [1.5.0] - 2024-10-09
## Added
@@ -913,7 +749,7 @@ and this project adheres to
- 🔧(backend) fix configuration to avoid different ssl warning #297
- 🐛(frontend) fix editor break line not working #302
## [v1.4.0] - 2024-09-17
## [1.4.0] - 2024-09-17
## Added
@@ -933,7 +769,7 @@ and this project adheres to
- 🐛(backend) Fix forcing ID when creating a document via API endpoint #234
- 🐛 Rebuild frontend dev container from makefile #248
## [v1.3.0] - 2024-09-05
## [1.3.0] - 2024-09-05
## Added
@@ -957,14 +793,14 @@ and this project adheres to
- 🔥(frontend) remove saving modal #213
## [v1.2.1] - 2024-08-23
## [1.2.1] - 2024-08-23
## Changed
- ♻️ Change ordering docs datagrid #195
- 🔥(helm) use scaleway email #194
## [v1.2.0] - 2024-08-22
## [1.2.0] - 2024-08-22
## Added
@@ -990,7 +826,7 @@ and this project adheres to
- 🔥(helm) remove htaccess #181
## [v1.1.0] - 2024-07-15
## [1.1.0] - 2024-07-15
## Added
@@ -1005,7 +841,7 @@ and this project adheres to
- ♻️(frontend) create a doc from a modal #132
- ♻️(frontend) manage members from the share modal #140
## [v1.0.0] - 2024-07-02
## [1.0.0] - 2024-07-02
## Added
@@ -1043,20 +879,14 @@ and this project adheres to
- 💚(CI) Remove trigger workflow on push tags on CI (#68)
- 🔥(frontend) Remove coming soon page (#121)
## [v0.1.0] - 2024-05-24
## [0.1.0] - 2024-05-24
## Added
- ✨(frontend) Coming Soon page (#67)
- 🚀 Impress, project to manage your documents easily and collaboratively.
[unreleased]: https://github.com/suitenumerique/docs/compare/v4.5.0...main
[v4.5.0]: https://github.com/suitenumerique/docs/releases/v4.5.0
[v4.4.0]: https://github.com/suitenumerique/docs/releases/v4.4.0
[v4.3.0]: https://github.com/suitenumerique/docs/releases/v4.3.0
[v4.2.0]: https://github.com/suitenumerique/docs/releases/v4.2.0
[v4.1.0]: https://github.com/suitenumerique/docs/releases/v4.1.0
[v4.0.0]: https://github.com/suitenumerique/docs/releases/v4.0.0
[unreleased]: https://github.com/suitenumerique/docs/compare/v3.10.0...main
[v3.10.0]: https://github.com/suitenumerique/docs/releases/v3.10.0
[v3.9.0]: https://github.com/suitenumerique/docs/releases/v3.9.0
[v3.8.2]: https://github.com/suitenumerique/docs/releases/v3.8.2
@@ -1088,12 +918,12 @@ and this project adheres to
[v1.8.0]: https://github.com/suitenumerique/docs/releases/v1.8.0
[v1.7.0]: https://github.com/suitenumerique/docs/releases/v1.7.0
[v1.6.0]: https://github.com/suitenumerique/docs/releases/v1.6.0
[v1.5.1]: https://github.com/suitenumerique/docs/releases/v1.5.1
[v1.5.0]: https://github.com/suitenumerique/docs/releases/v1.5.0
[v1.4.0]: https://github.com/suitenumerique/docs/releases/v1.4.0
[v1.3.0]: https://github.com/suitenumerique/docs/releases/v1.3.0
[v1.2.1]: https://github.com/suitenumerique/docs/releases/v1.2.1
[v1.2.0]: https://github.com/suitenumerique/docs/releases/v1.2.0
[v1.1.0]: https://github.com/suitenumerique/docs/releases/v1.1.0
[v1.0.0]: https://github.com/suitenumerique/docs/releases/v1.0.0
[v0.1.0]: https://github.com/suitenumerique/docs/releases/v0.1.0
[1.5.1]: https://github.com/suitenumerique/docs/releases/v1.5.1
[1.5.0]: https://github.com/suitenumerique/docs/releases/v1.5.0
[1.4.0]: https://github.com/suitenumerique/docs/releases/v1.4.0
[1.3.0]: https://github.com/suitenumerique/docs/releases/v1.3.0
[1.2.1]: https://github.com/suitenumerique/docs/releases/v1.2.1
[1.2.0]: https://github.com/suitenumerique/docs/releases/v1.2.0
[1.1.0]: https://github.com/suitenumerique/docs/releases/v1.1.0
[1.0.0]: https://github.com/suitenumerique/docs/releases/v1.0.0
[0.1.0]: https://github.com/suitenumerique/docs/releases/v0.1.0

View File

@@ -4,7 +4,7 @@
FROM python:3.13.3-alpine AS base
# Upgrade pip to its latest release to speed up dependencies installation
RUN python -m pip install --upgrade pip
RUN python -m pip install --upgrade pip setuptools
# Upgrade system packages to install security updates
RUN apk update && apk upgrade --no-cache
@@ -36,7 +36,7 @@ COPY ./src/mail /mail/app
WORKDIR /mail/app
RUN yarn install --frozen-lockfile && \
yarn build
yarn build
# ---- static link collector ----
@@ -58,7 +58,7 @@ WORKDIR /app
# collectstatic
RUN DJANGO_CONFIGURATION=Build \
python manage.py collectstatic --noinput
python manage.py collectstatic --noinput
# Replace duplicated file by a symlink to decrease the overall size of the
# final image
@@ -81,7 +81,7 @@ RUN apk add --no-cache \
pango \
shared-mime-info
RUN wget https://raw.githubusercontent.com/suitenumerique/django-lasuite/refs/heads/main/assets/conf/mime.types -O /etc/mime.types
RUN wget https://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types -O /etc/mime.types
# Copy entrypoint
COPY ./docker/files/usr/local/bin/entrypoint /usr/local/bin/entrypoint
@@ -98,9 +98,9 @@ COPY --from=back-builder /install /usr/local
# when python is upgraded and the path to the certificate changes.
# The space between print and the ( is intended otherwise the git lint is failing
RUN mkdir /cert && \
path=`python -c 'import certifi;print (certifi.where())'` && \
mv $path /cert/ && \
ln -s /cert/cacert.pem $path
path=`python -c 'import certifi;print (certifi.where())'` && \
mv $path /cert/ && \
ln -s /cert/cacert.pem $path
# Copy impress application (see .dockerignore)
COPY ./src/backend /app/
@@ -109,7 +109,7 @@ WORKDIR /app
# Generate compiled translation messages
RUN DJANGO_CONFIGURATION=Build \
python manage.py compilemessages
python manage.py compilemessages
# We wrap commands run in this container by the following entrypoint that
@@ -138,7 +138,7 @@ USER ${DOCKER_USER}
# Target database host (e.g. database engine following docker compose services
# name) & port
ENV DB_HOST=postgresql \
DB_PORT=5432
DB_PORT=5432
# Run django development server
CMD ["python", "manage.py", "runserver", "0.0.0.0:8000"]

View File

@@ -213,7 +213,6 @@ logs: ## display app-dev logs (follow mode)
.PHONY: logs
run-backend: ## Start only the backend application and all needed services
@$(COMPOSE) up --force-recreate -d docspec
@$(COMPOSE) up --force-recreate -d celery-dev
@$(COMPOSE) up --force-recreate -d y-provider-development
@$(COMPOSE) up --force-recreate -d nginx
@@ -248,10 +247,6 @@ demo: ## flush db then create a demo for load testing purpose
@$(MANAGE) create_demo
.PHONY: demo
index: ## index all documents to remote search
@$(MANAGE) index
.PHONY: index
# Nota bene: Black should come after isort just in case they don't agree...
lint: ## lint back-end python sources
lint: \

View File

@@ -72,7 +72,7 @@ For some advanced features (ex: Export as PDF) Docs relies on XL packages from B
### Test it
You can test Docs on your browser by visiting this [demo document](https://docs.la-suite.eu/docs/9137bbb5-3e8a-4ff7-8a36-fcc4e8bd57f4/)
You can test Docs on your browser by visiting this [demo document](https://impress-preprod.beta.numerique.gouv.fr/docs/6ee5aac4-4fb9-457d-95bf-bb56c2467713/)
### Run Docs locally

View File

@@ -16,29 +16,6 @@ the following command inside your docker container:
## [Unreleased]
## [4.0.0] - 2025-11-26
- ⚠️ We updated `@gouvfr-lasuite/ui-kit` to `0.18.0`, so if you are customizing Docs with a css layer or with a custom template, you need to update your customization to follow the new design system structure.
More information about the changes in the design system can be found here:
- https://suitenumerique.github.io/cunningham/storybook/?path=/docs/migrating-from-v3-to-v4--docs
- https://github.com/suitenumerique/docs/pull/1605
- https://github.com/suitenumerique/docs/blob/main/docs/theming.md
- If you were using the `THEME_CUSTOMIZATION_FILE_PATH` and have overridden the header logo, you need to update your customization file to follow the new structure of the header, it is now:
```json
{
...,
"header": {
"icon": {
"src": "your_logo_src",
"width": "your_logo_width",
"height": "your_logo_height"
}
}
}
```
## [3.3.0] - 2025-05-22
⚠️ For some advanced features (ex: Export as PDF) Docs relies on XL packages from BlockNote. These are licenced under AGPL-3.0 and are not MIT compatible. You can perfectly use Docs without these packages by setting the environment variable `PUBLISH_AS_MIT` to true. That way you'll build an image of the application without the features that are not MIT compatible. Read the [environment variables documentation](/docs/env.md) for more information.

View File

@@ -8,7 +8,6 @@ docker_build(
dockerfile='../Dockerfile',
only=['./src/backend', './src/mail', './docker'],
target = 'backend-production',
build_args={'DOCKER_USER': '1000:1000'},
live_update=[
sync('../src/backend', '/app'),
run(
@@ -24,7 +23,6 @@ docker_build(
dockerfile='../src/frontend/servers/y-provider/Dockerfile',
only=['./src/frontend/', './docker/', './.dockerignore'],
target = 'y-provider',
build_args={'DOCKER_USER': '1000:1000'},
live_update=[
sync('../src/frontend/servers/y-provider/src', '/home/frontend/servers/y-provider/src'),
]
@@ -36,7 +34,6 @@ docker_build(
dockerfile='../src/frontend/Dockerfile',
only=['./src/frontend', './docker', './.dockerignore'],
target = 'impress',
build_args={'DOCKER_USER': '1000:1000'},
live_update=[
sync('../src/frontend', '/home/frontend'),
]

View File

@@ -1,6 +0,0 @@
#!/usr/bin/env bash
# shellcheck source=bin/_config.sh
source "$(dirname "${BASH_SOURCE[0]}")/_config.sh"
_dc_run app-dev python -c 'from cryptography.fernet import Fernet;import sys; sys.stdout.write("\n" + Fernet.generate_key().decode() + "\n");'

View File

@@ -72,11 +72,6 @@ services:
- env.d/development/postgresql.local
ports:
- "8071:8000"
networks:
default: {}
lasuite:
aliases:
- impress
volumes:
- ./src/backend:/app
- ./data/static:/data/static
@@ -97,9 +92,6 @@ services:
command: ["celery", "-A", "impress.celery_app", "worker", "-l", "DEBUG"]
environment:
- DJANGO_CONFIGURATION=Development
networks:
- default
- lasuite
env_file:
- env.d/development/common
- env.d/development/common.local
@@ -115,11 +107,6 @@ services:
image: nginx:1.25
ports:
- "8083:8083"
networks:
default: {}
lasuite:
aliases:
- nginx
volumes:
- ./docker/files/etc/nginx/conf.d:/etc/nginx/conf.d:ro
depends_on:
@@ -230,13 +217,3 @@ services:
kc_postgresql:
condition: service_healthy
restart: true
docspec:
image: ghcr.io/docspecio/api:2.6.3
ports:
- "4000:4000"
networks:
lasuite:
name: lasuite-network
driver: bridge

View File

@@ -12,7 +12,6 @@ flowchart TD
Back --> DB("Database (PostgreSQL)")
Back <--> Celery --> DB
Back ----> S3("Minio (S3)")
Back -- REST API --> Find
```
### Architecture decision records

Binary file not shown (removed image, 43 KiB)

View File

@@ -1,177 +0,0 @@
# Customization Guide 🛠
## Runtime Theming 🎨
### How to Use
To use this feature, simply set the `FRONTEND_CSS_URL` environment variable to the URL of your custom CSS file. For example:
```javascript
FRONTEND_CSS_URL=http://anything/custom-style.css
```
Once you've set this variable, Docs will load your custom CSS file and apply the styles to our frontend application.
### Benefits
This feature provides several benefits, including:
* **Easy customization** 🔄: With this feature, you can easily customize the look and feel of our application without requiring any code changes.
* **Flexibility** 🌈: You can use any CSS styles you like to create a custom theme that meets your needs.
* **Runtime theming** ⏱️: This feature allows you to change the theme of our application at runtime, without requiring a restart or recompilation.
### Example Use Case
Let's say you want to change the background color of our application to a custom color. You can create a custom CSS file with the following contents:
```css
body {
background-color: #3498db;
}
```
Then, set the `FRONTEND_CSS_URL` environment variable to the URL of your custom CSS file. Once you've done this, our application will load your custom CSS file and apply the styles, changing the background color to the custom color you specified.
----
## Runtime JavaScript Injection 🚀
### How to Use
To use this feature, simply set the `FRONTEND_JS_URL` environment variable to the URL of your custom JavaScript file. For example:
```javascript
FRONTEND_JS_URL=http://anything/custom-script.js
```
Once you've set this variable, Docs will load your custom JavaScript file and execute it in the browser, allowing you to modify the application's behavior at runtime.
### Benefits
This feature provides several benefits, including:
* **Dynamic customization** 🔄: With this feature, you can dynamically modify the behavior and appearance of our application without requiring any code changes.
* **Flexibility** 🌈: You can add custom functionality, modify existing features, or integrate third-party services.
* **Runtime injection** ⏱️: This feature allows you to inject JavaScript into the application at runtime, without requiring a restart or recompilation.
### Example Use Case
Let's say you want to add a custom menu to the application header. You can create a custom JavaScript file with the following contents:
```javascript
(function() {
'use strict';
function initCustomMenu() {
// Wait for the page to be fully loaded
const header = document.querySelector('header');
if (!header) return false;
// Create and inject your custom menu
const customMenu = document.createElement('div');
customMenu.innerHTML = '<button>Custom Menu</button>';
header.appendChild(customMenu);
console.log('Custom menu added successfully');
return true;
}
// Initialize when DOM is ready
if (document.readyState === 'loading') {
document.addEventListener('DOMContentLoaded', initCustomMenu);
} else {
initCustomMenu();
}
})();
```
Then, set the `FRONTEND_JS_URL` environment variable to the URL of your custom JavaScript file. Once you've done this, our application will load your custom JavaScript file and execute it, adding your custom menu to the header.
----
## **Your Docs icon** 📝
You can add your own Docs icon in the header from the theme customization file.
### Settings 🔧
```shellscript
THEME_CUSTOMIZATION_FILE_PATH=<path>
```
### Example of JSON
You can activate it with the `header.icon` configuration: https://github.com/suitenumerique/docs/blob/main/src/helm/env.d/dev/configuration/theme/demo.json
This configuration is optional. If not set, the default icon will be used.
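For illustration, a minimal `header.icon` entry could look like the sketch below (the values are placeholders; refer to the demo.json linked above for the authoritative structure):
```json
{
  "header": {
    "icon": {
      "src": "https://example.com/assets/your-logo.svg",
      "width": "110px",
      "height": "32px"
    }
  }
}
```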
----
## **Footer Configuration** 📝
The footer is configurable from the theme customization file.
### Settings 🔧
```shellscript
THEME_CUSTOMIZATION_FILE_PATH=<path>
```
### Example of JSON
The json must follow some rules: https://github.com/suitenumerique/docs/blob/main/src/helm/env.d/dev/configuration/theme/demo.json
`footer.default` is the fallback if the language is not supported.
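As a purely illustrative sketch of the language-keyed layout implied by the `footer.default` fallback (the keys inside each entry are hypothetical; the demo.json linked above remains the reference):
```json
{
  "footer": {
    "default": {
      "externalLinks": [
        { "label": "Help", "href": "https://example.com/help" }
      ]
    },
    "fr": {
      "externalLinks": [
        { "label": "Aide", "href": "https://example.com/aide" }
      ]
    }
  }
}
```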
---
Below is a visual example of a configured footer ⬇️:
![Footer Configuration Example](./assets/footer-configurable.png)
----
## **Custom Translations** 📝
The translations can be partially overridden from the theme customization file.
### Settings 🔧
```shellscript
THEME_CUSTOMIZATION_FILE_PATH=<path>
```
### Example of JSON
The json must follow some rules: https://github.com/suitenumerique/docs/blob/main/src/helm/env.d/dev/configuration/theme/demo.json
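As a rough, hypothetical sketch of a partial override (the exact key names must be checked against the demo.json linked above):
```json
{
  "translations": {
    "en": {
      "Docs": "My Docs"
    },
    "fr": {
      "Docs": "Mes Docs"
    }
  }
}
```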
----
## **Waffle Configuration** 🧇
The Waffle (La Gaufre) is a widget that displays a grid of services.
![Waffle Configuration Example](./assets/waffle.png)
### Settings 🔧
```shellscript
THEME_CUSTOMIZATION_FILE_PATH=<path>
```
### Configuration
The Waffle can be configured in the theme customization file with the `waffle` key.
### Available Properties
See: [LaGaufreV2Props](https://github.com/suitenumerique/ui-kit/blob/main/src/components/la-gaufre/LaGaufreV2.tsx#L49)
### Complete Example
From the theme customization file: https://github.com/suitenumerique/docs/blob/main/src/helm/env.d/dev/configuration/theme/demo.json
### Behavior
- If `data.services` is provided, the Waffle will display those services statically
- If no data is provided, services can be fetched dynamically from an API endpoint thanks to the `apiUrl` property
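For example, a static configuration could look like the sketch below (only the `waffle`, `data.services` and `apiUrl` names come from this guide and `LaGaufreV2Props`; the service fields are placeholders):
```json
{
  "waffle": {
    "data": {
      "services": [
        { "name": "Docs", "url": "https://docs.example.com" }
      ]
    }
  }
}
```
For the dynamic mode, omit `data` and set `"apiUrl": "https://example.com/api/services"` instead.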

View File

@@ -6,123 +6,103 @@ Here we describe all environment variables that can be set for the docs applicat
These are the environment variables you can set for the `impress-backend` container.
| Option | Description | default |
|-------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------|
| AI_ALLOW_REACH_FROM | Users that can use AI must be this level. options are "public", "authenticated", "restricted" | authenticated |
| AI_API_KEY | AI key to be used for AI Base url | |
| AI_BASE_URL | OpenAI compatible AI base url | |
| AI_FEATURE_ENABLED | Enable AI options | false |
| AI_MODEL | AI Model to use | |
| ALLOW_LOGOUT_GET_METHOD | Allow get logout method | true |
| API_USERS_LIST_LIMIT | Limit on API users | 5 |
| API_USERS_LIST_THROTTLE_RATE_BURST | Throttle rate for api on burst | 30/minute |
| API_USERS_LIST_THROTTLE_RATE_SUSTAINED | Throttle rate for api | 180/hour |
| API_USERS_SEARCH_QUERY_MIN_LENGTH | Minimum characters to insert to search a user | 3 |
| AWS_S3_ACCESS_KEY_ID | Access id for s3 endpoint | |
| AWS_S3_ENDPOINT_URL | S3 endpoint | |
| AWS_S3_REGION_NAME | Region name for s3 endpoint | |
| AWS_S3_SECRET_ACCESS_KEY | Access key for s3 endpoint | |
| AWS_S3_SIGNATURE_VERSION | S3 signature version (`s3v4` or `s3`) | s3v4 |
| AWS_STORAGE_BUCKET_NAME | Bucket name for s3 endpoint | impress-media-storage |
| CACHES_DEFAULT_TIMEOUT | Cache default timeout | 30 |
| CACHES_DEFAULT_KEY_PREFIX | The prefix used to every cache keys. | docs |
| COLLABORATION_API_URL | Collaboration api host | |
| COLLABORATION_SERVER_SECRET | Collaboration api secret | |
| COLLABORATION_WS_NOT_CONNECTED_READY_ONLY | Users not connected to the collaboration server cannot edit | false |
| COLLABORATION_WS_URL | Collaboration websocket url | |
| CONVERSION_API_CONTENT_FIELD | Conversion api content field | content |
| CONVERSION_API_ENDPOINT | Conversion API endpoint | convert |
| CONVERSION_API_SECURE | Require secure conversion api | false |
| CONVERSION_API_TIMEOUT | Conversion api timeout | 30 |
| CONVERSION_FILE_MAX_SIZE | The file max size allowed when uploaded to convert it | 20971520 (20MB) |
| CONVERSION_FILE_EXTENSIONS_ALLOWED | Extension list managed by the conversion service | [".docx", ".md"]
| CRISP_WEBSITE_ID | Crisp website id for support | |
| DB_ENGINE | Engine to use for database connections | django.db.backends.postgresql_psycopg2 |
| DB_HOST | Host of the database | localhost |
| DB_NAME | Name of the database | impress |
| DB_PASSWORD | Password to authenticate with | pass |
| DB_PORT | Port of the database | 5432 |
| DB_USER | User to authenticate with | dinum |
| DJANGO_ALLOWED_HOSTS | Allowed hosts | [] |
| DJANGO_CELERY_BROKER_TRANSPORT_OPTIONS | Celery broker transport options | {} |
| DJANGO_CELERY_BROKER_URL | Celery broker url | redis://redis:6379/0 |
| DJANGO_CORS_ALLOWED_ORIGINS | List of origins allowed for CORS | [] |
| DJANGO_CORS_ALLOWED_ORIGIN_REGEXES | List of origins allowed for CORS using regulair expressions | [] |
| DJANGO_CORS_ALLOW_ALL_ORIGINS | Allow all CORS origins | false |
| DJANGO_CSRF_TRUSTED_ORIGINS | CSRF trusted origins | [] |
| DJANGO_EMAIL_BACKEND | Email backend library | django.core.mail.backends.smtp.EmailBackend |
| DJANGO_EMAIL_BRAND_NAME | Brand name for email | |
| DJANGO_EMAIL_FROM | Email address used as sender | from@example.com |
| DJANGO_EMAIL_HOST | Hostname of email | |
| DJANGO_EMAIL_HOST_PASSWORD | Password to authenticate with on the email host | |
| DJANGO_EMAIL_HOST_USER | User to authenticate with on the email host | |
| DJANGO_EMAIL_LOGO_IMG | Logo for the email | |
| DJANGO_EMAIL_PORT | Port used to connect to email host | |
| DJANGO_EMAIL_URL_APP | Url used in the email to go to the app | |
| DJANGO_EMAIL_USE_SSL | Use ssl for email host connection | false |
| DJANGO_EMAIL_USE_TLS | Use tls for email host connection | false |
| DJANGO_SECRET_KEY | Secret key | |
| DJANGO_SERVER_TO_SERVER_API_TOKENS | | [] |
| DOCSPEC_API_URL | URL to endpoint of DocSpec conversion API | |
| DOCUMENT_IMAGE_MAX_SIZE | Maximum size of document in bytes | 10485760 |
| FRONTEND_CSS_URL | To add a external css file to the app | |
| FRONTEND_JS_URL | To add a external js file to the app | |
| FRONTEND_HOMEPAGE_FEATURE_ENABLED | Frontend feature flag to display the homepage | false |
| FRONTEND_THEME | Frontend theme to use | |
| LANGUAGE_CODE | Default language | en-us |
| LANGFUSE_SECRET_KEY | The Langfuse secret key used by the sdk | None |
| LANGFUSE_PUBLIC_KEY | The Langfuse public key used by the sdk | None |
| LANGFUSE_BASE_URL | The Langfuse base url used by the sdk | None |
| LASUITE_MARKETING_BACKEND | Backend used when SIGNUP_NEW_USER_TO_MARKETING_EMAIL is True. See https://github.com/suitenumerique/django-lasuite/blob/main/documentation/how-to-use-marketing-backend.md | lasuite.marketing.backends.dummy.DummyBackend |
| LASUITE_MARKETING_PARAMETERS | The parameters to configure LASUITE_MARKETING_BACKEND. See https://github.com/suitenumerique/django-lasuite/blob/main/documentation/how-to-use-marketing-backend.md | {} |
| LOGGING_LEVEL_LOGGERS_APP | Application logging level. options are "DEBUG", "INFO", "WARN", "ERROR", "CRITICAL" | INFO |
| LOGGING_LEVEL_LOGGERS_ROOT | Default logging level. options are "DEBUG", "INFO", "WARN", "ERROR", "CRITICAL" | INFO |
| LOGIN_REDIRECT_URL | Login redirect url | |
| LOGIN_REDIRECT_URL_FAILURE | Login redirect url on failure | |
| LOGOUT_REDIRECT_URL | Logout redirect url | |
| MALWARE_DETECTION_BACKEND | The malware detection backend use from the django-lasuite package | lasuite.malware_detection.backends.dummy.DummyBackend |
| MALWARE_DETECTION_PARAMETERS | A dict containing all the parameters to initiate the malware detection backend | {"callback_path": "core.malware_detection.malware_detection_callback",} |
| MEDIA_BASE_URL | | |
| NO_WEBSOCKET_CACHE_TIMEOUT | Cache used to store current editor session key when only users without websocket are editing a document | 120 |
| OIDC_ALLOW_DUPLICATE_EMAILS | Allow duplicate emails | false |
| OIDC_AUTH_REQUEST_EXTRA_PARAMS | OIDC extra auth parameters | {} |
| OIDC_CREATE_USER | Create used on OIDC | false |
| OIDC_FALLBACK_TO_EMAIL_FOR_IDENTIFICATION | Fallback to email for identification | true |
| OIDC_OP_AUTHORIZATION_ENDPOINT | Authorization endpoint for OIDC | |
| OIDC_OP_JWKS_ENDPOINT | JWKS endpoint for OIDC | |
| OIDC_OP_LOGOUT_ENDPOINT | Logout endpoint for OIDC | |
| OIDC_OP_TOKEN_ENDPOINT | Token endpoint for OIDC | |
| OIDC_OP_USER_ENDPOINT | User endpoint for OIDC | |
| OIDC_REDIRECT_ALLOWED_HOSTS | Allowed hosts for OIDC redirect url | [] |
| OIDC_REDIRECT_REQUIRE_HTTPS | Require https for OIDC redirect url | false |
| OIDC_RP_CLIENT_ID | Client id used for OIDC | impress |
| OIDC_RP_CLIENT_SECRET | Client secret used for OIDC | |
| OIDC_RP_SCOPES | Scopes requested for OIDC | openid email |
| OIDC_RP_SIGN_ALGO | verification algorithm used OIDC tokens | RS256 |
| OIDC_STORE_ID_TOKEN | Store OIDC token | true |
| OIDC_USERINFO_FULLNAME_FIELDS | OIDC token claims to create full name | ["first_name", "last_name"] |
| OIDC_USERINFO_SHORTNAME_FIELD | OIDC token claims to create shortname | first_name |
| OIDC_USE_NONCE | Use nonce for OIDC | true |
| POSTHOG_KEY | Posthog key for analytics | |
| REDIS_URL | Cache url | redis://redis:6379/1 |
| SEARCH_INDEXER_BATCH_SIZE | Size of each batch for indexation of all documents | 100000 |
| SEARCH_INDEXER_CLASS | Class of the backend for document indexation & search | |
| SEARCH_INDEXER_COUNTDOWN | Minimum debounce delay of indexation jobs (in seconds) | 1 |
| SEARCH_INDEXER_QUERY_LIMIT | Maximum number of results expected from search endpoint | 50 |
| SEARCH_INDEXER_SECRET | Token for indexation queries | |
| SEARCH_INDEXER_URL | Find application endpoint for indexation | |
| SENTRY_DSN | Sentry host | |
| SESSION_COOKIE_AGE | duration of the cookie session | 60*60*12 |
| SIGNUP_NEW_USER_TO_MARKETING_EMAIL | Register new user to the marketing onboarding. If True, see env LASUITE_MARKETING_* system | False |
| SPECTACULAR_SETTINGS_ENABLE_DJANGO_DEPLOY_CHECK | | false |
| STORAGES_STATICFILES_BACKEND | | whitenoise.storage.CompressedManifestStaticFilesStorage |
| THEME_CUSTOMIZATION_CACHE_TIMEOUT | Cache duration for the customization settings | 86400 |
| THEME_CUSTOMIZATION_FILE_PATH | Full path to the file customizing the theme. An example is provided in src/backend/impress/configuration/theme/default.json | BASE_DIR/impress/configuration/theme/default.json |
| TRASHBIN_CUTOFF_DAYS | Trashbin cutoff | 30 |
| USER_OIDC_ESSENTIAL_CLAIMS | Essential claims in OIDC token | [] |
| USER_RECONCILIATION_FORM_URL | URL of a third-party form for user reconciliation requests | |
| Y_PROVIDER_API_BASE_URL | Y Provider url | |
| Y_PROVIDER_API_KEY | Y provider API key | |
| Option | Description | default |
|-------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------|
| AI_ALLOW_REACH_FROM | Users that can use AI must be this level. options are "public", "authenticated", "restricted" | authenticated |
| AI_API_KEY | AI key to be used for AI Base url | |
| AI_BASE_URL | OpenAI compatible AI base url | |
| AI_FEATURE_ENABLED | Enable AI options | false |
| AI_MODEL | AI Model to use | |
| ALLOW_LOGOUT_GET_METHOD | Allow get logout method | true |
| API_USERS_LIST_LIMIT | Limit on API users | 5 |
| API_USERS_LIST_THROTTLE_RATE_BURST | Throttle rate for api on burst | 30/minute |
| API_USERS_LIST_THROTTLE_RATE_SUSTAINED | Throttle rate for api | 180/hour |
| AWS_S3_ACCESS_KEY_ID | Access id for s3 endpoint | |
| AWS_S3_ENDPOINT_URL | S3 endpoint | |
| AWS_S3_REGION_NAME | Region name for s3 endpoint | |
| AWS_S3_SECRET_ACCESS_KEY | Access key for s3 endpoint | |
| AWS_STORAGE_BUCKET_NAME | Bucket name for s3 endpoint | impress-media-storage |
| CACHES_DEFAULT_TIMEOUT | Cache default timeout | 30 |
| CACHES_KEY_PREFIX | The prefix used to every cache keys. | docs |
| COLLABORATION_API_URL | Collaboration api host | |
| COLLABORATION_SERVER_SECRET | Collaboration api secret | |
| COLLABORATION_WS_NOT_CONNECTED_READY_ONLY | Users not connected to the collaboration server cannot edit | false |
| COLLABORATION_WS_URL | Collaboration websocket url | |
| CONVERSION_API_CONTENT_FIELD | Conversion api content field | content |
| CONVERSION_API_ENDPOINT | Conversion API endpoint | convert |
| CONVERSION_API_SECURE | Require secure conversion api | false |
| CONVERSION_API_TIMEOUT | Conversion api timeout | 30 |
| CRISP_WEBSITE_ID | Crisp website id for support | |
| DB_ENGINE | Engine to use for database connections | django.db.backends.postgresql_psycopg2 |
| DB_HOST | Host of the database | localhost |
| DB_NAME | Name of the database | impress |
| DB_PASSWORD | Password to authenticate with | pass |
| DB_PORT | Port of the database | 5432 |
| DB_USER | User to authenticate with | dinum |
| DJANGO_ALLOWED_HOSTS | Allowed hosts | [] |
| DJANGO_CELERY_BROKER_TRANSPORT_OPTIONS | Celery broker transport options | {} |
| DJANGO_CELERY_BROKER_URL | Celery broker url | redis://redis:6379/0 |
| DJANGO_CORS_ALLOW_ALL_ORIGINS | Allow all CORS origins | false |
| DJANGO_CORS_ALLOWED_ORIGIN_REGEXES | List of origins allowed for CORS using regulair expressions | [] |
| DJANGO_CORS_ALLOWED_ORIGINS | List of origins allowed for CORS | [] |
| DJANGO_CSRF_TRUSTED_ORIGINS | CSRF trusted origins | [] |
| DJANGO_EMAIL_BACKEND | Email backend library | django.core.mail.backends.smtp.EmailBackend |
| DJANGO_EMAIL_BRAND_NAME | Brand name for email | |
| DJANGO_EMAIL_FROM | Email address used as sender | from@example.com |
| DJANGO_EMAIL_HOST | Hostname of email | |
| DJANGO_EMAIL_HOST_PASSWORD | Password to authenticate with on the email host | |
| DJANGO_EMAIL_HOST_USER | User to authenticate with on the email host | |
| DJANGO_EMAIL_LOGO_IMG | Logo for the email | |
| DJANGO_EMAIL_PORT | Port used to connect to email host | |
| DJANGO_EMAIL_USE_SSL | Use ssl for email host connection | false |
| DJANGO_EMAIL_USE_TLS | Use tls for email host connection | false |
| DJANGO_SECRET_KEY | Secret key | |
| DJANGO_SERVER_TO_SERVER_API_TOKENS | | [] |
| DOCUMENT_IMAGE_MAX_SIZE | Maximum size of document in bytes | 10485760 |
| FRONTEND_CSS_URL | To add a external css file to the app | |
| FRONTEND_HOMEPAGE_FEATURE_ENABLED | Frontend feature flag to display the homepage | false |
| FRONTEND_THEME | Frontend theme to use | |
| LANGUAGE_CODE | Default language | en-us |
| LOGGING_LEVEL_LOGGERS_APP | Application logging level. options are "DEBUG", "INFO", "WARN", "ERROR", "CRITICAL" | INFO |
| LOGGING_LEVEL_LOGGERS_ROOT | Default logging level. options are "DEBUG", "INFO", "WARN", "ERROR", "CRITICAL" | INFO |
| LOGIN_REDIRECT_URL | Login redirect url | |
| LOGIN_REDIRECT_URL_FAILURE | Login redirect url on failure | |
| LOGOUT_REDIRECT_URL | Logout redirect url | |
| MALWARE_DETECTION_BACKEND | The malware detection backend use from the django-lasuite package | lasuite.malware_detection.backends.dummy.DummyBackend |
| MALWARE_DETECTION_PARAMETERS | A dict containing all the parameters to initiate the malware detection backend | {"callback_path": "core.malware_detection.malware_detection_callback",} |
| MEDIA_BASE_URL | | |
| NO_WEBSOCKET_CACHE_TIMEOUT | Cache used to store current editor session key when only users without websocket are editing a document | 120 |
| OIDC_ALLOW_DUPLICATE_EMAILS | Allow duplicate emails | false |
| OIDC_AUTH_REQUEST_EXTRA_PARAMS | OIDC extra auth parameters | {} |
| OIDC_CREATE_USER | Create used on OIDC | false |
| OIDC_FALLBACK_TO_EMAIL_FOR_IDENTIFICATION | Fallback to email for identification | true |
| OIDC_OP_AUTHORIZATION_ENDPOINT | Authorization endpoint for OIDC | |
| OIDC_OP_JWKS_ENDPOINT | JWKS endpoint for OIDC | |
| OIDC_OP_LOGOUT_ENDPOINT | Logout endpoint for OIDC | |
| OIDC_OP_TOKEN_ENDPOINT | Token endpoint for OIDC | |
| OIDC_OP_USER_ENDPOINT | User endpoint for OIDC | |
| OIDC_REDIRECT_ALLOWED_HOSTS | Allowed hosts for OIDC redirect url | [] |
| OIDC_REDIRECT_REQUIRE_HTTPS | Require https for OIDC redirect url | false |
| OIDC_RP_CLIENT_ID | Client id used for OIDC | impress |
| OIDC_RP_CLIENT_SECRET | Client secret used for OIDC | |
| OIDC_RP_SCOPES | Scopes requested for OIDC | openid email |
| OIDC_RP_SIGN_ALGO | verification algorithm used OIDC tokens | RS256 |
| OIDC_STORE_ID_TOKEN | Store OIDC token | true |
| OIDC_USE_NONCE | Use nonce for OIDC | true |
| OIDC_USERINFO_FULLNAME_FIELDS | OIDC token claims to create full name | ["first_name", "last_name"] |
| OIDC_USERINFO_SHORTNAME_FIELD | OIDC token claims to create shortname | first_name |
| POSTHOG_KEY | Posthog key for analytics | |
| REDIS_URL | Cache url | redis://redis:6379/1 |
| SENTRY_DSN | Sentry host | |
| SESSION_COOKIE_AGE | duration of the cookie session | 60*60*12 |
| SPECTACULAR_SETTINGS_ENABLE_DJANGO_DEPLOY_CHECK | | false |
| STORAGES_STATICFILES_BACKEND | | whitenoise.storage.CompressedManifestStaticFilesStorage |
| THEME_CUSTOMIZATION_CACHE_TIMEOUT | Cache duration for the customization settings | 86400 |
| THEME_CUSTOMIZATION_FILE_PATH | Full path to the file customizing the theme. An example is provided in src/backend/impress/configuration/theme/default.json | BASE_DIR/impress/configuration/theme/default.json |
| TRASHBIN_CUTOFF_DAYS | Trashbin cutoff | 30 |
| USER_OIDC_ESSENTIAL_CLAIMS | Essential claims in OIDC token | [] |
| Y_PROVIDER_API_BASE_URL | Y Provider url | |
| Y_PROVIDER_API_KEY | Y provider API key | |
## impress-frontend image

View File

@@ -1,12 +1,3 @@
djangoSecretKey: &djangoSecretKey "lkjsdlfkjsldkfjslkdfjslkdjfslkdjf"
djangoSuperUserEmail: admin@example.com
djangoSuperUserPass: admin
aiApiKey: changeme
aiBaseUrl: changeme
oidc:
clientId: impress
clientSecret: ThisIsAnExampleKeyForDevPurposeOnly
image:
repository: lasuite/impress-backend
pullPolicy: Always
@@ -15,100 +6,86 @@ image:
backend:
replicas: 1
envVars:
COLLABORATION_API_URL: https://impress.127.0.0.1.nip.io/collaboration/api/
COLLABORATION_SERVER_SECRET: my-secret
DJANGO_CSRF_TRUSTED_ORIGINS: https://docs.127.0.0.1.nip.io
DJANGO_CSRF_TRUSTED_ORIGINS: https://impress.127.0.0.1.nip.io
DJANGO_CONFIGURATION: Feature
DJANGO_ALLOWED_HOSTS: docs.127.0.0.1.nip.io
DJANGO_ALLOWED_HOSTS: impress.127.0.0.1.nip.io
DJANGO_SERVER_TO_SERVER_API_TOKENS: secret-api-key
DJANGO_SECRET_KEY: *djangoSecretKey
DJANGO_SECRET_KEY: AgoodOrAbadKey
DJANGO_SETTINGS_MODULE: impress.settings
DJANGO_SUPERUSER_PASSWORD: admin
DJANGO_EMAIL_BRAND_NAME: "La Suite Numérique"
DJANGO_EMAIL_HOST: "mailcatcher"
DJANGO_EMAIL_LOGO_IMG: https://docs.127.0.0.1.nip.io/assets/logo-suite-numerique.png
DJANGO_EMAIL_LOGO_IMG: https://impress.127.0.0.1.nip.io/assets/logo-suite-numerique.png
DJANGO_EMAIL_PORT: 1025
DJANGO_EMAIL_URL_APP: https://docs.127.0.0.1.nip.io
DJANGO_EMAIL_USE_SSL: False
LOGGING_LEVEL_HANDLERS_CONSOLE: ERROR
LOGGING_LEVEL_LOGGERS_ROOT: INFO
LOGGING_LEVEL_LOGGERS_APP: INFO
OIDC_USERINFO_SHORTNAME_FIELD: "given_name"
OIDC_USERINFO_FULLNAME_FIELDS: "given_name,usual_name"
OIDC_OP_JWKS_ENDPOINT: https://docs-keycloak.127.0.0.1.nip.io/realms/docs/protocol/openid-connect/certs
OIDC_OP_AUTHORIZATION_ENDPOINT: https://docs-keycloak.127.0.0.1.nip.io/realms/docs/protocol/openid-connect/auth
OIDC_OP_TOKEN_ENDPOINT: https://docs-keycloak.127.0.0.1.nip.io/realms/docs/protocol/openid-connect/token
OIDC_OP_USER_ENDPOINT: https://docs-keycloak.127.0.0.1.nip.io/realms/docs/protocol/openid-connect/userinfo
OIDC_OP_LOGOUT_ENDPOINT: https://docs-keycloak.127.0.0.1.nip.io/realms/docs/protocol/openid-connect/logout
OIDC_RP_CLIENT_ID: docs
OIDC_OP_JWKS_ENDPOINT: https://keycloak.127.0.0.1.nip.io/realms/impress/protocol/openid-connect/certs
OIDC_OP_AUTHORIZATION_ENDPOINT: https://keycloak.127.0.0.1.nip.io/realms/impress/protocol/openid-connect/auth
OIDC_OP_TOKEN_ENDPOINT: https://keycloak.127.0.0.1.nip.io/realms/impress/protocol/openid-connect/token
OIDC_OP_USER_ENDPOINT: https://keycloak.127.0.0.1.nip.io/realms/impress/protocol/openid-connect/userinfo
OIDC_OP_LOGOUT_ENDPOINT: https://keycloak.127.0.0.1.nip.io/realms/impress/protocol/openid-connect/logout
OIDC_RP_CLIENT_ID: impress
OIDC_RP_CLIENT_SECRET: ThisIsAnExampleKeyForDevPurposeOnly
OIDC_RP_SIGN_ALGO: RS256
OIDC_RP_SCOPES: "openid email"
LOGIN_REDIRECT_URL: https://docs.127.0.0.1.nip.io
LOGIN_REDIRECT_URL_FAILURE: https://docs.127.0.0.1.nip.io
LOGOUT_REDIRECT_URL: https://docs.127.0.0.1.nip.io
DB_HOST: postgresql-dev-backend-postgres
DB_NAME:
secretKeyRef:
name: postgresql-dev-backend-postgres
key: database
DB_USER:
secretKeyRef:
name: postgresql-dev-backend-postgres
key: username
DB_PASSWORD:
secretKeyRef:
name: postgresql-dev-backend-postgres
key: password
OIDC_VERIFY_SSL: False
OIDC_USERINFO_SHORTNAME_FIELD: "given_name"
OIDC_USERINFO_FULLNAME_FIELDS: "given_name,usual_name"
OIDC_REDIRECT_ALLOWED_HOSTS: https://impress.127.0.0.1.nip.io
OIDC_AUTH_REQUEST_EXTRA_PARAMS: "{'acr_values': 'eidas1'}"
LOGIN_REDIRECT_URL: https://impress.127.0.0.1.nip.io
LOGIN_REDIRECT_URL_FAILURE: https://impress.127.0.0.1.nip.io
LOGOUT_REDIRECT_URL: https://impress.127.0.0.1.nip.io
POSTHOG_KEY: "{'id': 'posthog_key', 'host': 'https://product.impress.127.0.0.1.nip.io'}"
DB_HOST: postgresql
DB_NAME: impress
DB_USER: dinum
DB_PASSWORD: pass
DB_PORT: 5432
REDIS_URL: redis://user:pass@redis-dev-backend-redis:6379/1
DJANGO_CELERY_BROKER_URL: redis://user:pass@redis-dev-backend-redis:6379/1
AWS_S3_ENDPOINT_URL: http://minio-dev-backend-minio.impress.svc.cluster.local:9000
AWS_S3_ACCESS_KEY_ID: dinum
REDIS_URL: redis://default:pass@redis-master:6379/1
AWS_S3_ENDPOINT_URL: http://minio.impress.svc.cluster.local:9000
AWS_S3_ACCESS_KEY_ID: root
AWS_S3_SECRET_ACCESS_KEY: password
AWS_STORAGE_BUCKET_NAME: docs-media-storage
AWS_STORAGE_BUCKET_NAME: impress-media-storage
STORAGES_STATICFILES_BACKEND: django.contrib.staticfiles.storage.StaticFilesStorage
USER_RECONCILIATION_FORM_URL: https://docs.127.0.0.1.nip.io
Y_PROVIDER_API_BASE_URL: http://impress-y-provider:443/api/
Y_PROVIDER_API_KEY: my-secret
CACHES_KEY_PREFIX: "{{ now | unixEpoch }}"
migrate:
command:
- "/bin/sh"
- "-c"
- |
while ! python manage.py check --database default > /dev/null 2>&1
do
echo "Database not ready"
sleep 2
done
echo "Database is ready"
python manage.py migrate --no-input
python manage.py migrate --no-input &&
python manage.py create_demo --force
restartPolicy: Never
command:
- "gunicorn"
- "-c"
- "/usr/local/etc/gunicorn/impress.py"
- "impress.wsgi:application"
- "--reload"
createsuperuser:
command:
- "/bin/sh"
- "-c"
- |
while ! python manage.py check --database default > /dev/null 2>&1
do
echo "Database not ready"
sleep 2
done
echo "Database is ready"
python manage.py createsuperuser --email admin@example.com --password admin
restartPolicy: Never
# Extra volume mounts to manage our local custom CA and avoid to set ssl_verify: false
# Extra volume to manage our local custom CA and avoid to set ssl_verify: false
extraVolumeMounts:
- name: certs
mountPath: /cert/cacert.pem
subPath: cacert.pem
# Extra volumes to manage our local custom CA and avoid to set ssl_verify: false
# Extra volume to manage our local custom CA and avoid to set ssl_verify: false
extraVolumes:
- name: certs
configMap:
@@ -117,7 +94,12 @@ backend:
- key: cacert.pem
path: cacert.pem
frontend:
envVars:
PORT: 8080
NEXT_PUBLIC_API_ORIGIN: https://impress.127.0.0.1.nip.io
replicas: 1
image:
repository: lasuite/impress-frontend
pullPolicy: Always
@@ -132,47 +114,60 @@ yProvider:
tag: "latest"
envVars:
COLLABORATION_BACKEND_BASE_URL: https://docs.127.0.0.1.nip.io
COLLABORATION_LOGGING: true
COLLABORATION_SERVER_ORIGIN: https://docs.127.0.0.1.nip.io
COLLABORATION_SERVER_ORIGIN: https://impress.127.0.0.1.nip.io
COLLABORATION_SERVER_SECRET: my-secret
Y_PROVIDER_API_KEY: my-secret
COLLABORATION_BACKEND_BASE_URL: https://impress.127.0.0.1.nip.io
NODE_EXTRA_CA_CERTS: /usr/local/share/ca-certificates/cacert.pem
ingress:
enabled: true
host: docs.127.0.0.1.nip.io
annotations:
nginx.ingress.kubernetes.io/proxy-body-size: 100m
# Mount the certificate so yProvider can establish tls with the backend
extraVolumeMounts:
- name: certs
mountPath: /usr/local/share/ca-certificates/cacert.pem
subPath: cacert.pem
ingressCollaborationWS:
enabled: true
host: docs.127.0.0.1.nip.io
ingressCollaborationApi:
enabled: true
host: docs.127.0.0.1.nip.io
ingressAdmin:
enabled: true
host: docs.127.0.0.1.nip.io
extraVolumes:
- name: certs
configMap:
name: certifi
items:
- key: cacert.pem
path: cacert.pem
posthog:
ingress:
enabled: false
ingressAssets:
enabled: false
ingress:
enabled: true
host: impress.127.0.0.1.nip.io
ingressCollaborationWS:
enabled: true
host: impress.127.0.0.1.nip.io
ingressCollaborationApi:
enabled: true
host: impress.127.0.0.1.nip.io
ingressAdmin:
enabled: true
host: impress.127.0.0.1.nip.io
ingressMedia:
enabled: true
host: docs.127.0.0.1.nip.io
host: impress.127.0.0.1.nip.io
annotations:
nginx.ingress.kubernetes.io/auth-url: https://docs.127.0.0.1.nip.io/api/v1.0/documents/media-auth/
nginx.ingress.kubernetes.io/auth-url: https://impress.127.0.0.1.nip.io/api/v1.0/documents/media-auth/
nginx.ingress.kubernetes.io/auth-response-headers: "Authorization, X-Amz-Date, X-Amz-Content-SHA256"
nginx.ingress.kubernetes.io/upstream-vhost: minio-dev-backend-minio.impress.svc.cluster.local:9000
nginx.ingress.kubernetes.io/rewrite-target: /docs-media-storage/$1
nginx.ingress.kubernetes.io/upstream-vhost: minio.impress.svc.cluster.local:9000
nginx.ingress.kubernetes.io/rewrite-target: /impress-media-storage/$1
serviceMedia:
host: minio-dev-backend-minio.impress.svc.cluster.local
host: minio.impress.svc.cluster.local
port: 9000

File diff suppressed because it is too large

View File

@@ -1,24 +1,8 @@
minio:
auth:
rootUser: root
rootPassword: password
provisioning:
enabled: true
image: minio/minio
name: minio
# serviceNameOverride: docs-minio
ingress:
enabled: true
hostname: docs-minio.127.0.0.1.nip.io
tls:
enabled: true
secretName: docs-tls
consoleIngress:
enabled: true
hostname: docs-minio-console.127.0.0.1.nip.io
tls:
enabled: true
secretName: docs-tls
api:
port: 80
username: dinum
password: password
bucket: docs-media-storage
versioning: true
size: 1Gi
buckets:
- name: impress-media-storage
versioning: true

View File

@@ -1,9 +1,7 @@
postgres:
enabled: true
name: postgres
#serviceNameOverride: postgres
image: postgres:16-alpine
auth:
username: dinum
password: pass
database: dinum
size: 1Gi
database: impress
tls:
enabled: true
autoGenerated: true

View File

@@ -1,7 +1,4 @@
redis:
enabled: true
name: redis
#serviceNameOverride: redis
image: redis:8.2-alpine
username: user
password: pass
auth:
password: pass
architecture: standalone

View File

@@ -127,7 +127,6 @@ DJANGO_EMAIL_FROM=<your email address>
DJANGO_EMAIL_BRAND_NAME=<brand name used in email templates> # e.g. "La Suite Numérique"
DJANGO_EMAIL_LOGO_IMG=<logo image to use in email templates.> # e.g. "https://docs.yourdomain.tld/assets/logo-suite-numerique.png"
DJANGO_EMAIL_URL_APP=<url used in email templates to go to the app> # e.g. "https://docs.yourdomain.tld"
```
### AI

View File

@@ -7,7 +7,7 @@ This document is a step-by-step guide that describes how to install Docs on a k8
- k8s cluster with an nginx-ingress controller
- an OIDC provider (if you don't have one, we provide an example)
- a PostgreSQL server (if you don't have one, we provide an example)
- a Redis server (if you don't have one, we provide an example)
- a Memcached server (if you don't have one, we provide an example)
- an S3 bucket (if you don't have one, we provide an example)
### Test cluster
@@ -100,66 +100,50 @@ When your k8s cluster is ready (the ingress nginx controller is up), you can sta
Please remember that `*.127.0.0.1.nip.io` will always resolve to `127.0.0.1`, except in the k8s cluster where we configure CoreDNS to answer with the ingress-nginx service IP.
The namespace `impress` is already created; you can work in it and configure your kubectl CLI to use it by default.
```
$ kubectl config set-context --current --namespace=impress
```
## Preparation
We provide our own helm chart for all development dependencies; it is available at https://github.com/suitenumerique/helm-dev-backend
This chart is for development purposes only and is not ready for production use.
You can install it on your cluster to deploy keycloak, minio, postgresql and redis.
### What do you use to authenticate your users?
Docs uses OIDC, so if you already have an OIDC provider, obtain the necessary information to use it. In the next step, we will see how to configure Django (and thus Docs) to use it. If you do not have a provider, we will show you how to deploy a local Keycloak instance (this is not a production deployment, just a demo).
```
$ helm install --repo https://suitenumerique.github.io/helm-dev-backend -f docs/examples/helm/keycloak.values.yaml keycloak dev-backend
$ kubectl create namespace impress
$ kubectl config set-context --current --namespace=impress
$ helm install keycloak oci://registry-1.docker.io/bitnamicharts/keycloak -f examples/keycloak.values.yaml
$ #wait until
$ kubectl get pods
NAME READY STATUS RESTARTS AGE
keycloak-dev-backend-keycloak-0 1/1 Running 0 20s
keycloak-dev-backend-keycloak-pg-0 1/1 Running 0 20s
$ kubectl get po
NAME READY STATUS RESTARTS AGE
keycloak-0 1/1 Running 0 6m48s
keycloak-postgresql-0 1/1 Running 0 6m48s
```
From here the important information you will need are:
```yaml
OIDC_OP_JWKS_ENDPOINT: https://docs-keycloak.127.0.0.1.nip.io/realms/impress/protocol/openid-connect/certs
OIDC_OP_AUTHORIZATION_ENDPOINT: https://docs-keycloak.127.0.0.1.nip.io/realms/impress/protocol/openid-connect/auth
OIDC_OP_TOKEN_ENDPOINT: https://docs-keycloak.127.0.0.1.nip.io/realms/impress/protocol/openid-connect/token
OIDC_OP_USER_ENDPOINT: https://docs-keycloak.127.0.0.1.nip.io/realms/impress/protocol/openid-connect/userinfo
OIDC_OP_LOGOUT_ENDPOINT: https://docs-keycloak.127.0.0.1.nip.io/realms/impress/protocol/openid-connect/logout
OIDC_OP_JWKS_ENDPOINT: https://keycloak.127.0.0.1.nip.io/realms/impress/protocol/openid-connect/certs
OIDC_OP_AUTHORIZATION_ENDPOINT: https://keycloak.127.0.0.1.nip.io/realms/impress/protocol/openid-connect/auth
OIDC_OP_TOKEN_ENDPOINT: https://keycloak.127.0.0.1.nip.io/realms/impress/protocol/openid-connect/token
OIDC_OP_USER_ENDPOINT: https://keycloak.127.0.0.1.nip.io/realms/impress/protocol/openid-connect/userinfo
OIDC_OP_LOGOUT_ENDPOINT: https://keycloak.127.0.0.1.nip.io/realms/impress/protocol/openid-connect/logout
OIDC_RP_CLIENT_ID: impress
OIDC_RP_CLIENT_SECRET: ThisIsAnExampleKeyForDevPurposeOnly
OIDC_RP_SIGN_ALGO: RS256
OIDC_RP_SCOPES: "openid email"
```
You can find these values in **examples/helm/keycloak.values.yaml**
You can find these values in **examples/keycloak.values.yaml**
### Find redis server connection values
Docs needs a Redis server, so we start by deploying one:
```
$ helm install --repo https://suitenumerique.github.io/helm-dev-backend -f docs/examples/helm/redis.values.yaml redis dev-backend
$ kubectl get pods
NAME READY STATUS RESTARTS AGE
keycloak-dev-backend-keycloak-0 1/1 Running 0 113s
keycloak-dev-backend-keycloak-pg-0 1/1 Running 0 113s
redis-dev-backend-redis-68c9f66786-4dgxj 1/1 Running 0 2s
```
From here the important information you will need are:
```yaml
REDIS_URL: redis://user:pass@redis-dev-backend-redis:6379/1
DJANGO_CELERY_BROKER_URL: redis://user:pass@redis-dev-backend-redis:6379/1
$ helm install redis oci://registry-1.docker.io/bitnamicharts/redis -f examples/redis.values.yaml
$ kubectl get po
NAME READY STATUS RESTARTS AGE
keycloak-0 1/1 Running 0 26m
keycloak-postgresql-0 1/1 Running 0 26m
redis-master-0 1/1 Running 0 35s
```
### Find postgresql connection values
@@ -167,32 +151,22 @@ DJANGO_CELERY_BROKER_URL: redis://user:pass@redis-dev-backend-redis:6379/1
Docs uses a PostgreSQL database as its backend, so if you have a provider, obtain the necessary information to use it. If you don't, you can install a PostgreSQL testing environment as follows:
```
$ helm install --repo https://suitenumerique.github.io/helm-dev-backend -f docs/examples/helm/postgresql.values.yaml postgresql dev-backend
$ kubectl get pods
NAME READY STATUS RESTARTS AGE
keycloak-dev-backend-keycloak-0 1/1 Running 0 3m42s
keycloak-dev-backend-keycloak-pg-0 1/1 Running 0 3m42s
postgresql-dev-backend-postgres-0 1/1 Running 0 13s
redis-dev-backend-redis-68c9f66786-4dgxj 1/1 Running 0 111s
$ helm install postgresql oci://registry-1.docker.io/bitnamicharts/postgresql -f examples/postgresql.values.yaml
$ kubectl get po
NAME READY STATUS RESTARTS AGE
keycloak-0 1/1 Running 0 28m
keycloak-postgresql-0 1/1 Running 0 28m
postgresql-0 1/1 Running 0 14m
redis-master-0 1/1 Running 0 42s
```
From here the important information you will need are:
```yaml
DB_HOST: postgresql-dev-backend-postgres
DB_NAME:
secretKeyRef:
name: postgresql-dev-backend-postgres
key: database
DB_USER:
secretKeyRef:
name: postgresql-dev-backend-postgres
key: username
DB_PASSWORD:
secretKeyRef:
name: postgresql-dev-backend-postgres
key: password
DB_HOST: postgres-postgresql
DB_NAME: impress
DB_USER: dinum
DB_PASSWORD: pass
DB_PORT: 5432
```
@@ -201,15 +175,15 @@ DB_PORT: 5432
Docs uses an S3 bucket to store documents, so if you have a provider, obtain the necessary information to use it. If you don't, you can install a local MinIO testing environment as follows:
```
$ helm install --repo https://suitenumerique.github.io/helm-dev-backend -f docs/examples/helm/minio.values.yaml minio dev-backend
$ kubectl get pods
NAME READY STATUS RESTARTS AGE
keycloak-dev-backend-keycloak-0 1/1 Running 0 6m12s
keycloak-dev-backend-keycloak-pg-0 1/1 Running 0 6m12s
minio-dev-backend-minio-0 1/1 Running 0 10s
postgresql-dev-backend-postgres-0 1/1 Running 0 2m43s
redis-dev-backend-redis-68c9f66786-4dgxj 1/1 Running 0 4m21s
$ helm install minio oci://registry-1.docker.io/bitnamicharts/minio -f examples/minio.values.yaml
$ kubectl get po
NAME READY STATUS RESTARTS AGE
keycloak-0 1/1 Running 0 38m
keycloak-postgresql-0 1/1 Running 0 38m
minio-84f5c66895-bbhsk 1/1 Running 0 42s
minio-provisioning-2b5sq 0/1 Completed 0 42s
postgresql-0 1/1 Running 0 24m
redis-master-0 1/1 Running 0 10m
```
## Deployment
@@ -219,18 +193,20 @@ Now you are ready to deploy Docs without AI. AI requires more dependencies (Open
```
$ helm repo add impress https://suitenumerique.github.io/docs/
$ helm repo update
$ helm install impress impress/docs -f docs/examples/helm/impress.values.yaml
$ helm install impress impress/docs -f examples/impress.values.yaml
$ kubectl get po
NAME READY STATUS RESTARTS AGE
impress-docs-backend-8494fb797d-8k8wt 1/1 Running 0 6m45s
impress-docs-celery-worker-764b5dd98f-9qd6v 1/1 Running 0 6m45s
impress-docs-frontend-5b69b65cc4-s8pps 1/1 Running 0 6m45s
impress-docs-y-provider-5fc7ccd8cc-6ttrf 1/1 Running 0 6m45s
keycloak-dev-backend-keycloak-0 1/1 Running 0 24m
keycloak-dev-backend-keycloak-pg-0 1/1 Running 0 24m
minio-dev-backend-minio-0 1/1 Running 0 8m24s
postgresql-dev-backend-postgres-0 1/1 Running 0 20m
redis-dev-backend-redis-68c9f66786-4dgxj 1/1 Running 0 22m
NAME READY STATUS RESTARTS AGE
impress-docs-backend-96558758d-xtkbp 0/1 Running 0 79s
impress-docs-backend-createsuperuser-r7ltc 0/1 Completed 0 79s
impress-docs-backend-migrate-c949s 0/1 Completed 0 79s
impress-docs-frontend-6749f644f7-p5s42 1/1 Running 0 79s
impress-docs-y-provider-6947fd8f54-78f2l 1/1 Running 0 79s
keycloak-0 1/1 Running 0 48m
keycloak-postgresql-0 1/1 Running 0 48m
minio-84f5c66895-bbhsk 1/1 Running 0 10m
minio-provisioning-2b5sq 0/1 Completed 0 10m
postgresql-0 1/1 Running 0 34m
redis-master-0 1/1 Running 0 20m
```
## Test your deployment
@@ -239,15 +215,13 @@ In order to test your deployment you have to log into your instance. If you excl
```
$ kubectl get ingress
NAME CLASS HOSTS ADDRESS PORTS AGE
impress-docs <none> docs.127.0.0.1.nip.io localhost 80, 443 7m9s
impress-docs-admin <none> docs.127.0.0.1.nip.io localhost 80, 443 7m9s
impress-docs-collaboration-api <none> docs.127.0.0.1.nip.io localhost 80, 443 7m9s
impress-docs-media <none> docs.127.0.0.1.nip.io localhost 80, 443 7m9s
impress-docs-ws <none> docs.127.0.0.1.nip.io localhost 80, 443 7m9s
keycloak-dev-backend-keycloak <none> docs-keycloak.127.0.0.1.nip.io localhost 80, 443 24m
minio-dev-backend-minio-api <none> docs-minio.127.0.0.1.nip.io localhost 80, 443 8m48s
minio-dev-backend-minio-console <none> docs-minio-console.127.0.0.1.nip.io localhost 80, 443 8m48s
NAME CLASS HOSTS ADDRESS PORTS AGE
impress-docs <none> impress.127.0.0.1.nip.io localhost 80, 443 114s
impress-docs-admin <none> impress.127.0.0.1.nip.io localhost 80, 443 114s
impress-docs-collaboration-api <none> impress.127.0.0.1.nip.io localhost 80, 443 114s
impress-docs-media <none> impress.127.0.0.1.nip.io localhost 80, 443 114s
impress-docs-ws <none> impress.127.0.0.1.nip.io localhost 80, 443 114s
keycloak <none> keycloak.127.0.0.1.nip.io localhost 80 49m
```
You can use Docs at https://docs.127.0.0.1.nip.io. The provisioning user in Keycloak is docs/docs.
You can use Docs at https://impress.127.0.0.1.nip.io. The provisioning user in Keycloak is impress/impress.
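As a quick sanity check before logging in, you can probe the ingress from your machine; a rough sketch with curl (the -k flag is only needed if your locally generated CA is not in your trust store):
```bash
# Rough smoke test of the deployed ingress – expects an HTTP status code back
curl -sk -o /dev/null -w "%{http_code}\n" https://impress.127.0.0.1.nip.io/
```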

View File

@@ -1,180 +0,0 @@
# Language Configuration (2025-12)
This document explains how to configure and override the available languages in the Docs application.
## Default Languages
By default, the application supports the following languages (in priority order):
- English (en-us)
- French (fr-fr)
- German (de-de)
- Dutch (nl-nl)
- Spanish (es-es)
The default configuration is defined in `src/backend/impress/settings.py`:
```python
LANGUAGES = values.SingleNestedTupleValue(
(
("en-us", "English"),
("fr-fr", "Français"),
("de-de", "Deutsch"),
("nl-nl", "Nederlands"),
("es-es", "Español"),
)
)
```
## Overriding Languages
### Using Environment Variables
You can override the available languages by setting the `DJANGO_LANGUAGES` environment variable. This is the recommended approach for customizing language support without modifying the source code.
#### Format
The `DJANGO_LANGUAGES` variable expects a semicolon-separated list of language configurations, where each language is defined as `code,Display Name`:
```
DJANGO_LANGUAGES=code1,Name1;code2,Name2;code3,Name3
```
#### Example Configurations
**Example 1: English and French only**
```bash
DJANGO_LANGUAGES=en-us,English;fr-fr,Français
```
**Example 2: Add Italian and Chinese**
```bash
DJANGO_LANGUAGES=en-us,English;fr-fr,Français;de-de,Deutsch;it-it,Italiano;zh-cn,中文
```
**Example 3: Custom subset of languages**
```bash
DJANGO_LANGUAGES=fr-fr,Français;de-de,Deutsch;es-es,Español
```
### Configuration Files
#### Development Environment
For local development, you can set the `DJANGO_LANGUAGES` variable in your environment configuration file:
**File:** `env.d/development/common.local`
```bash
DJANGO_LANGUAGES=en-us,English;fr-fr,Français;de-de,Deutsch;it-it,Italiano;zh-cn,中文;
```
#### Production Environment
For production deployments, add the variable to your production environment configuration:
**File:** `env.d/production.dist/common`
```bash
DJANGO_LANGUAGES=en-us,English;fr-fr,Français
```
#### Docker Compose
When using Docker Compose, you can set the environment variable in your `compose.yml` or `compose.override.yml` file:
```yaml
services:
app:
environment:
- DJANGO_LANGUAGES=en-us,English;fr-fr,Français;de-de,Deutsch
```
## Important Considerations
### Language Codes
- Use standard language codes (ISO 639-1 with optional region codes)
- Format: `language-region` (e.g., `en-us`, `fr-fr`, `de-de`)
- Use lowercase for language codes and region identifiers
### Priority Order
Languages are listed in priority order. The first language in the list is used as the fallback language throughout the application when a specific translation is not available.
### Translation Availability
Before adding a new language, ensure that:
1. Translation files exist for that language in the `src/backend/locale/` directory
2. The frontend application has corresponding translation files
3. All required messages have been translated
#### Available Languages
The following languages have translation files available in `src/backend/locale/`:
- `br_FR` - Breton (France)
- `cn_CN` - Chinese (China) - *Note: Use `zh-cn` in DJANGO_LANGUAGES*
- `de_DE` - German (Germany) - Use `de-de`
- `en_US` - English (United States) - Use `en-us`
- `es_ES` - Spanish (Spain) - Use `es-es`
- `fr_FR` - French (France) - Use `fr-fr`
- `it_IT` - Italian (Italy) - Use `it-it`
- `nl_NL` - Dutch (Netherlands) - Use `nl-nl`
- `pt_PT` - Portuguese (Portugal) - Use `pt-pt`
- `ru_RU` - Russian (Russia) - Use `ru-ru`
- `sl_SI` - Slovenian (Slovenia) - Use `sl-si`
- `sv_SE` - Swedish (Sweden) - Use `sv-se`
- `tr_TR` - Turkish (Turkey) - Use `tr-tr`
- `uk_UA` - Ukrainian (Ukraine) - Use `uk-ua`
- `zh_CN` - Chinese (China) - Use `zh-cn`
**Note:** When configuring `DJANGO_LANGUAGES`, use lowercase with hyphens (e.g., `pt-pt`, `ru-ru`) rather than the directory name format.
### Translation Management
We use [Crowdin](https://crowdin.com/) to manage translations for the Docs application. Crowdin allows our community to contribute translations and helps maintain consistency across all supported languages.
**Want to add a new language or improve existing translations?**
If you would like us to support a new language or want to contribute to translations, please get in touch with the project maintainers. We can add new languages to our Crowdin project and coordinate translation efforts with the community.
### Cookie and Session
The application stores the user's language preference in a cookie named `docs_language`. The cookie path is set to `/` by default.
## Testing Language Configuration
After changing the language configuration:
1. Restart the application services
2. Verify the language selector displays the correct languages
3. Test switching between different languages
4. Confirm that content is displayed in the selected language
## Troubleshooting
### Languages not appearing
- Verify the environment variable is correctly formatted (semicolon-separated, comma between code and name)
- Check that there are no trailing spaces in language codes or names
- Ensure the application was restarted after changing the configuration
### Missing translations
If you add a new language but see untranslated text:
1. Check if translation files exist in `src/backend/locale/<language_code>/LC_MESSAGES/`
2. Run Django's `makemessages` and `compilemessages` commands to generate/update translations (see the sketch after this list)
3. Verify frontend translation files are available
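As a rough sketch, the Django commands for a single locale look like this (run from the backend directory; `pt_PT` is just an example locale):
```bash
# Example only: regenerate and compile translations for one locale
python manage.py makemessages --locale pt_PT
python manage.py compilemessages --locale pt_PT
```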
## Related Configuration
- `LANGUAGE_CODE`: Default language code (default: `en-us`)
- `LANGUAGE_COOKIE_NAME`: Cookie name for storing user language preference (default: `docs_language`)
- `LANGUAGE_COOKIE_PATH`: Cookie path (default: `/`)

View File

@@ -1,41 +0,0 @@
# Setup the Find search for Impress
This configuration enables the fulltext search feature for Docs:
- Each save on **core.Document** or **core.DocumentAccess** will trigger the indexer
- The `api/v1.0/documents/search/` endpoint will work as a proxy to the Find API for fulltext search.
## Create an index service for Docs
Configure a **Service** for the Docs application with these settings:
- **Name**: `docs`<br>_request.auth.name of the Docs application._
- **Client id**: `impress`<br>_Name of the token audience or client_id of the Docs application._
See [how-to-use-indexer.md](how-to-use-indexer.md) for details.
## Configure settings of Docs
Add these Django settings to the Docs application to enable the feature.
```shell
SEARCH_INDEXER_CLASS="core.services.search_indexers.FindDocumentIndexer"
SEARCH_INDEXER_COUNTDOWN=10 # Debounce delay in seconds for the indexer calls.
# The token from service "docs" of Find application (development).
SEARCH_INDEXER_SECRET="find-api-key-for-docs-with-exactly-50-chars-length"
SEARCH_INDEXER_URL="http://find:8000/api/v1.0/documents/index/"
# Search endpoint. Uses the OIDC token for authentication
SEARCH_INDEXER_QUERY_URL="http://find:8000/api/v1.0/documents/search/"
# Maximum number of results expected from the search endpoint
SEARCH_INDEXER_QUERY_LIMIT=50
```
We also need to enable **OIDC token** refresh, or authentication will fail quickly.
```shell
# Store OIDC tokens in the session
OIDC_STORE_ACCESS_TOKEN = True # Store the access token in the session
OIDC_STORE_REFRESH_TOKEN = True # Store the encrypted refresh token in the session
OIDC_STORE_REFRESH_TOKEN_KEY = "your-32-byte-encryption-key==" # Must be a valid Fernet key (32 url-safe base64-encoded bytes)
```
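Once configured, the search proxy can be exercised directly. Below is a rough curl sketch, assuming session-cookie authentication; the host and cookie value are placeholders, and the query parameters follow the search serializer (`q`, `page`, `page_size`):
```bash
# Hypothetical request to the fulltext search proxy
curl -s "https://docs.example.tld/api/v1.0/documents/search/?q=budget&page=1&page_size=20" \
  -H "Cookie: sessionid=<session-cookie>"
```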

View File

@@ -97,17 +97,6 @@ Production deployments differ significantly from development environments. The t
| 5433 | PostgreSQL (Keycloak) |
| 1081 | MailCatcher |
**With fulltext search service**
| Port | Service |
| --------- | --------------------- |
| 8081 | Find (Django) |
| 9200 | Opensearch |
| 9600 | Opensearch admin |
| 5601 | Opensearch dashboard |
| 25432 | PostgreSQL (Find) |
## 6. Sizing Guidelines
**RAM**: start at 8 GB for dev / 16 GB for staging / 32 GB for prod. Postgres and Keycloak are the first to OOM; scale them first.

docs/theming.md Normal file
View File

@@ -0,0 +1,88 @@
# Runtime Theming 🎨
### How to Use
To use this feature, simply set the `FRONTEND_CSS_URL` environment variable to the URL of your custom CSS file. For example:
```shellscript
FRONTEND_CSS_URL=http://anything/custom-style.css
```
Once you've set this variable, our application will load your custom CSS file and apply the styles to our frontend application.
### Benefits
This feature provides several benefits, including:
* **Easy customization** 🔄: With this feature, you can easily customize the look and feel of our application without requiring any code changes.
* **Flexibility** 🌈: You can use any CSS styles you like to create a custom theme that meets your needs.
* **Runtime theming** ⏱️: This feature allows you to change the theme of our application at runtime, without requiring a restart or recompilation.
### Example Use Case
Let's say you want to change the background color of our application to a custom color. You can create a custom CSS file with the following contents:
```css
body {
background-color: #3498db;
}
```
Then, set the `FRONTEND_CSS_URL` environment variable to the URL of your custom CSS file. Once you've done this, our application will load your custom CSS file and apply the styles, changing the background color to the custom color you specified.
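As a quick local sketch (the port and paths are placeholders), you could serve that stylesheet with any static file server and point the variable at it:
```bash
# Hypothetical local test: serve the custom stylesheet and point Docs at it
python3 -m http.server 8000 --directory ./my-theme &
export FRONTEND_CSS_URL=http://localhost:8000/custom-style.css
```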
----
# **Your logo** 📝
You can add your own logo in the header from the theme customization file.
### Settings 🔧
```shellscript
THEME_CUSTOMIZATION_FILE_PATH=<path>
```
### Example of JSON
You can activate it with the `header.logo` configuration: https://github.com/suitenumerique/docs/blob/main/src/helm/env.d/dev/configuration/theme/demo.json
This configuration is optional. If not set, the default logo will be used.
----
# **Footer Configuration** 📝
The footer is configurable from the theme customization file.
### Settings 🔧
```shellscript
THEME_CUSTOMIZATION_FILE_PATH=<path>
```
### Example of JSON
The JSON must follow some rules: https://github.com/suitenumerique/docs/blob/main/src/helm/env.d/dev/configuration/theme/demo.json
`footer.default` is the fallback if the language is not supported.
---
Below is a visual example of a configured footer ⬇️:
![Footer Configuration Example](./assets/footer-configurable.png)
----
# **Custom Translations** 📝
The translations can be partially overridden from the theme customization file.
### Settings 🔧
```shellscript
THEME_CUSTOMIZATION_FILE_PATH=<path>
```
### Example of JSON
The JSON must follow some rules: https://github.com/suitenumerique/docs/blob/main/src/helm/env.d/dev/configuration/theme/demo.json

View File

@@ -1,30 +0,0 @@
# User account reconciliation
It is possible to merge user accounts based on their email addresses.
Docs does not provide an internal way for users to request such a merge, but it allows importing a CSV from an external form
(e.g. one made with Grist) in the Django admin panel (in "Core" > "User reconciliation CSV imports" > "Add user reconciliation").
## CSV file format
The CSV must contain the following mandatory columns:
- `active_email`: the email of the user that will remain active after the process.
- `inactive_email`: the email of the user(s) that will be merged into the active user. It is possible to indicate several emails, so the user only has to make one request even if they have more than two accounts.
- `id`: a unique row id, so that entries already processed in a previous import are ignored.
The following columns are optional: `active_email_checked` and `inactive_email_checked` (both must contain `0` (False) or `1` (True), and both default to False).
If present, they indicate that the source form has a way to validate that the user making the request actually controls the email addresses, which skips the need to send confirmation emails (see below).
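A minimal illustrative CSV is shown below (all ids and addresses are made up; the exact syntax for listing several inactive emails in one row depends on your form export):
```csv
id,active_email,inactive_email,active_email_checked,inactive_email_checked
1,alice@example.org,alice.old@example.org,1,1
2,bob@example.org,bob.legacy@example.org,0,0
```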
Once the CSV file is processed, this will create entries in "Core" > "User reconciliations" and send verification emails to validate that the user making the request actually controls the email addresses (unless `active_email_checked` and `inactive_email_checked` were set to `1` in the CSV)
In "Core" > "User reconciliations", an admin can then select all rows they wish to process and check the action "Process selected user reconciliations". Only rows that have the status `ready` and for which both emails have been validated will be processed.
## Settings
If there is a problem with the reconciliation attempt (e.g., one of the addresses given by the user does not match an existing account), the error email can include a link back to the reconciliation form. This is configured through the following environment variable:
```env
USER_RECONCILIATION_FORM_URL=<url used in the email for reconciliation with errors to allow a new request>
# e.g. "https://yourgristinstance.tld/xxxx/UserReconciliationForm"
```

View File

@@ -20,7 +20,6 @@ DJANGO_EMAIL_BRAND_NAME="La Suite Numérique"
DJANGO_EMAIL_HOST="mailcatcher"
DJANGO_EMAIL_LOGO_IMG="http://localhost:3000/assets/logo-suite-numerique.png"
DJANGO_EMAIL_PORT=1025
DJANGO_EMAIL_URL_APP="http://localhost:3000"
# Backend url
IMPRESS_BASE_URL="http://localhost:8072"
@@ -37,7 +36,6 @@ OIDC_OP_JWKS_ENDPOINT=http://nginx:8083/realms/impress/protocol/openid-connect/c
OIDC_OP_AUTHORIZATION_ENDPOINT=http://localhost:8083/realms/impress/protocol/openid-connect/auth
OIDC_OP_TOKEN_ENDPOINT=http://nginx:8083/realms/impress/protocol/openid-connect/token
OIDC_OP_USER_ENDPOINT=http://nginx:8083/realms/impress/protocol/openid-connect/userinfo
OIDC_OP_INTROSPECTION_ENDPOINT=http://nginx:8083/realms/impress/protocol/openid-connect/token/introspect
OIDC_RP_CLIENT_ID=impress
OIDC_RP_CLIENT_SECRET=ThisIsAnExampleKeyForDevPurposeOnly
@@ -48,20 +46,9 @@ LOGIN_REDIRECT_URL=http://localhost:3000
LOGIN_REDIRECT_URL_FAILURE=http://localhost:3000
LOGOUT_REDIRECT_URL=http://localhost:3000
OIDC_REDIRECT_ALLOWED_HOSTS="localhost:8083,localhost:3000"
OIDC_REDIRECT_ALLOWED_HOSTS=["http://localhost:8083", "http://localhost:3000"]
OIDC_AUTH_REQUEST_EXTRA_PARAMS={"acr_values": "eidas1"}
# Store OIDC tokens in the session. Needed by search/ endpoint.
# OIDC_STORE_ACCESS_TOKEN = True
# OIDC_STORE_REFRESH_TOKEN = True # Store the encrypted refresh token in the session.
# Must be a valid Fernet key (32 url-safe base64-encoded bytes)
# To create one, use the bin/fernetkey command.
# OIDC_STORE_REFRESH_TOKEN_KEY="your-32-byte-encryption-key=="
# User reconciliation
USER_RECONCILIATION_FORM_URL=http://localhost:3000
# AI
AI_FEATURE_ENABLED=true
AI_BASE_URL=https://openaiendpoint.com
@@ -80,13 +67,5 @@ DJANGO_SERVER_TO_SERVER_API_TOKENS=server-api-token
Y_PROVIDER_API_BASE_URL=http://y-provider-development:4444/api/
Y_PROVIDER_API_KEY=yprovider-api-key
DOCSPEC_API_URL=http://docspec:4000/conversion
# Theme customization
THEME_CUSTOMIZATION_CACHE_TIMEOUT=15
# Indexer (disabled)
# SEARCH_INDEXER_CLASS="core.services.search_indexers.SearchIndexer"
SEARCH_INDEXER_SECRET=find-api-key-for-docs-with-exactly-50-chars-length # Key generated by create_demo in Find app.
SEARCH_INDEXER_URL="http://find:8000/api/v1.0/documents/index/"
SEARCH_INDEXER_QUERY_URL="http://find:8000/api/v1.0/documents/search/"
THEME_CUSTOMIZATION_CACHE_TIMEOUT=15

View File

@@ -6,4 +6,4 @@ Y_PROVIDER_API_BASE_URL=http://y-provider:4444/api/
# Throttle
API_DOCUMENT_THROTTLE_RATE=1000/min
API_CONFIG_THROTTLE_RATE=1000/min
API_CONFIG_THROTTLE_RATE=1000/min

View File

@@ -24,8 +24,7 @@ DJANGO_EMAIL_FROM=<your email address>
#DJANGO_EMAIL_USE_SSL=true # A flag to enable or disable SSL for email sending.
DJANGO_EMAIL_BRAND_NAME="La Suite Numérique"
DJANGO_EMAIL_LOGO_IMG="https://${DOCS_HOST}/assets/logo-suite-numerique.png"
DJANGO_EMAIL_URL_APP="https://${DOCS_HOST}"
DJANGO_EMAIL_LOGO_IMG="https://${DOCS_HOST}/assets/logo-suite-numerique.png"
# Media
AWS_S3_ENDPOINT_URL=https://${S3_HOST}
@@ -53,9 +52,6 @@ LOGOUT_REDIRECT_URL=https://${DOCS_HOST}
OIDC_REDIRECT_ALLOWED_HOSTS=["https://${DOCS_HOST}"]
# User reconciliation
#USER_RECONCILIATION_FORM_URL=https://${DOCS_HOST}
# AI
#AI_FEATURE_ENABLED=true # is false by default
#AI_BASE_URL=https://openaiendpoint.com

View File

@@ -25,30 +25,15 @@
"matchPackageNames": ["pylint"],
"allowedVersions": "<4.0.0"
},
{
"groupName": "allowed django versions",
"matchManagers": ["pep621"],
"matchPackageNames": ["django"],
"allowedVersions": "<6.0.0"
},
{
"groupName": "allowed celery versions",
"matchManagers": ["pep621"],
"matchPackageNames": ["celery"],
"allowedVersions": "<5.6.0"
},
{
"enabled": false,
"groupName": "ignored js dependencies",
"matchManagers": ["npm"],
"matchPackageNames": [
"@next/eslint-plugin-next",
"eslint-config-next",
"docx",
"fetch-mock",
"next",
"node",
"node-fetch",
"react-resizable-panels",
"workbox-webpack-plugin"
]
}

View File

@@ -1,14 +1,20 @@
"""Admin classes and registrations for core app."""
from django.contrib import admin, messages
from django.contrib import admin
from django.contrib.auth import admin as auth_admin
from django.shortcuts import redirect
from django.utils.translation import gettext_lazy as _
from treebeard.admin import TreeAdmin
from core import models
from core.tasks.user_reconciliation import user_reconciliation_csv_import_job
from . import models
class TemplateAccessInline(admin.TabularInline):
"""Inline admin class for template accesses."""
autocomplete_fields = ["user"]
model = models.TemplateAccess
extra = 0
@admin.register(models.User)
@@ -63,6 +69,7 @@ class UserAdmin(auth_admin.UserAdmin):
},
),
)
inlines = (TemplateAccessInline,)
list_display = (
"id",
"sub",
@@ -97,46 +104,15 @@ class UserAdmin(auth_admin.UserAdmin):
search_fields = ("id", "sub", "admin_email", "email", "full_name")
@admin.register(models.UserReconciliationCsvImport)
class UserReconciliationCsvImportAdmin(admin.ModelAdmin):
"""Admin class for UserReconciliationCsvImport model."""
@admin.register(models.Template)
class TemplateAdmin(admin.ModelAdmin):
"""Template admin interface declaration."""
list_display = ("id", "__str__", "created_at", "status")
def save_model(self, request, obj, form, change):
"""Override save_model to trigger the import task on creation."""
super().save_model(request, obj, form, change)
if not change:
user_reconciliation_csv_import_job.delay(obj.pk)
messages.success(request, _("Import job created and queued."))
return redirect("..")
@admin.action(description=_("Process selected user reconciliations"))
def process_reconciliation(_modeladmin, _request, queryset):
"""
Admin action to process selected user reconciliations.
The action will process only entries that are ready and have both emails checked.
"""
processable_entries = queryset.filter(
status="ready", active_email_checked=True, inactive_email_checked=True
)
for entry in processable_entries:
entry.process_reconciliation_request()
@admin.register(models.UserReconciliation)
class UserReconciliationAdmin(admin.ModelAdmin):
"""Admin class for UserReconciliation model."""
list_display = ["id", "__str__", "created_at", "status"]
actions = [process_reconciliation]
inlines = (TemplateAccessInline,)
class DocumentAccessInline(admin.TabularInline):
"""Inline admin class for document accesses."""
"""Inline admin class for template accesses."""
autocomplete_fields = ["user"]
model = models.DocumentAccess

View File

@@ -2,7 +2,6 @@
import unicodedata
from django.conf import settings
from django.utils.translation import gettext_lazy as _
import django_filters
@@ -136,6 +135,4 @@ class UserSearchFilter(django_filters.FilterSet):
Custom filter for searching users.
"""
q = django_filters.CharFilter(
min_length=settings.API_USERS_SEARCH_QUERY_MIN_LENGTH, max_length=254
)
q = django_filters.CharFilter(min_length=5, max_length=254)

View File

@@ -98,10 +98,10 @@ class CanCreateInvitationPermission(permissions.BasePermission):
class ResourceWithAccessPermission(permissions.BasePermission):
"""A permission class for invitations."""
"""A permission class for templates and invitations."""
def has_permission(self, request, view):
"""check create permission."""
"""check create permission for templates."""
return request.user.is_authenticated or view.action != "create"
def has_object_permission(self, request, view, obj):

View File

@@ -4,7 +4,6 @@
import binascii
import mimetypes
from base64 import b64decode
from os.path import splitext
from django.conf import settings
from django.db.models import Q
@@ -16,11 +15,10 @@ import magic
from rest_framework import serializers
from core import choices, enums, models, utils, validators
from core.services import mime_types
from core.services.ai_services import AI_ACTIONS
from core.services.converter_services import (
ConversionError,
Converter,
YdocConverter,
)
@@ -61,6 +59,30 @@ class UserLightSerializer(UserSerializer):
read_only_fields = ["full_name", "short_name"]
class TemplateAccessSerializer(serializers.ModelSerializer):
"""Serialize template accesses."""
abilities = serializers.SerializerMethodField(read_only=True)
class Meta:
model = models.TemplateAccess
resource_field_name = "template"
fields = ["id", "user", "team", "role", "abilities"]
read_only_fields = ["id", "abilities"]
def get_abilities(self, instance) -> dict:
"""Return abilities of the logged-in user on the instance."""
request = self.context.get("request")
if request:
return instance.get_abilities(request.user)
return {}
def update(self, instance, validated_data):
"""Make "user" field is readonly but only on update."""
validated_data.pop("user", None)
return super().update(instance, validated_data)
class ListDocumentSerializer(serializers.ModelSerializer):
"""Serialize documents with limited fields for display in lists."""
@@ -166,9 +188,6 @@ class DocumentSerializer(ListDocumentSerializer):
content = serializers.CharField(required=False)
websocket = serializers.BooleanField(required=False, write_only=True)
file = serializers.FileField(
required=False, write_only=True, allow_null=True, max_length=255
)
class Meta:
model = models.Document
@@ -185,7 +204,6 @@ class DocumentSerializer(ListDocumentSerializer):
"deleted_at",
"depth",
"excerpt",
"file",
"is_favorite",
"link_role",
"link_reach",
@@ -255,30 +273,6 @@ class DocumentSerializer(ListDocumentSerializer):
return value
def validate_file(self, file):
"""Add file size and type constraints as defined in settings."""
if not file:
return None
# Validate file size
if file.size > settings.CONVERSION_FILE_MAX_SIZE:
max_size = settings.CONVERSION_FILE_MAX_SIZE // (1024 * 1024)
raise serializers.ValidationError(
f"File size exceeds the maximum limit of {max_size:d} MB."
)
_name, extension = splitext(file.name)
if extension.lower() not in settings.CONVERSION_FILE_EXTENSIONS_ALLOWED:
raise serializers.ValidationError(
(
f"File extension {extension} is not allowed. Allowed extensions"
f" are: {settings.CONVERSION_FILE_EXTENSIONS_ALLOWED}."
)
)
return file
def save(self, **kwargs):
"""
Process the content field to extract attachment keys and update the document's
@@ -467,9 +461,7 @@ class ServerCreateDocumentSerializer(serializers.Serializer):
language = user.language or language
try:
document_content = Converter().convert(
validated_data["content"], mime_types.MARKDOWN, mime_types.YJS
)
document_content = YdocConverter().convert(validated_data["content"])
except ConversionError as err:
raise serializers.ValidationError(
{"content": ["Could not convert content"]}
@@ -668,6 +660,52 @@ class FileUploadSerializer(serializers.Serializer):
return attrs
class TemplateSerializer(serializers.ModelSerializer):
"""Serialize templates."""
abilities = serializers.SerializerMethodField(read_only=True)
accesses = TemplateAccessSerializer(many=True, read_only=True)
class Meta:
model = models.Template
fields = [
"id",
"title",
"accesses",
"abilities",
"css",
"code",
"is_public",
]
read_only_fields = ["id", "accesses", "abilities"]
def get_abilities(self, document) -> dict:
"""Return abilities of the logged-in user on the instance."""
request = self.context.get("request")
if request:
return document.get_abilities(request.user)
return {}
# pylint: disable=abstract-method
class DocumentGenerationSerializer(serializers.Serializer):
"""Serializer to receive a request to generate a document on a template."""
body = serializers.CharField(label=_("Body"))
body_type = serializers.ChoiceField(
choices=["html", "markdown"],
label=_("Body type"),
required=False,
default="html",
)
format = serializers.ChoiceField(
choices=["pdf", "docx"],
label=_("Format"),
required=False,
default="pdf",
)
class InvitationSerializer(serializers.ModelSerializer):
"""Serialize invitations."""
@@ -975,13 +1013,3 @@ class ThreadSerializer(serializers.ModelSerializer):
if request:
return thread.get_abilities(request.user)
return {}
class SearchDocumentSerializer(serializers.Serializer):
"""Serializer for fulltext search requests through Find application"""
q = serializers.CharField(required=True, allow_blank=False, trim_whitespace=True)
page_size = serializers.IntegerField(
required=False, min_value=1, max_value=50, default=20
)
page = serializers.IntegerField(required=False, min_value=1, default=1)

View File

@@ -1,8 +1,5 @@
"""Throttling modules for the API."""
from django.conf import settings
from lasuite.drf.throttling import MonitoredScopedRateThrottle
from rest_framework.throttling import UserRateThrottle
from sentry_sdk import capture_message
@@ -22,30 +19,3 @@ class UserListThrottleSustained(UserRateThrottle):
"""Throttle for the user list endpoint."""
scope = "user_list_sustained"
class DocumentThrottle(MonitoredScopedRateThrottle):
"""
Throttle for document-related endpoints, with an exception for requests from the
collaboration server.
"""
scope = "document"
def allow_request(self, request, view):
"""
Override to skip throttling for requests from the collaboration server.
Verifies the X-Y-Provider-Key header contains a valid Y_PROVIDER_API_KEY.
Using a custom header instead of Authorization to avoid triggering
authentication middleware.
"""
y_provider_header = request.headers.get("X-Y-Provider-Key", "")
# Check if this is a valid y-provider request and exempt from throttling
y_provider_key = getattr(settings, "Y_PROVIDER_API_KEY", None)
if y_provider_key and y_provider_header == y_provider_key:
return True
return super().allow_request(request, view)

View File

@@ -1,12 +1,9 @@
"""API endpoints"""
# pylint: disable=too-many-lines
import base64
import ipaddress
import json
import logging
import socket
import uuid
from collections import defaultdict
from urllib.parse import unquote, urlencode, urlparse
@@ -21,11 +18,10 @@ from django.core.validators import URLValidator
from django.db import connection, transaction
from django.db import models as db
from django.db.models.expressions import RawSQL
from django.db.models.functions import Greatest, Left, Length
from django.db.models.functions import Left, Length
from django.http import Http404, StreamingHttpResponse
from django.urls import reverse
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.utils.functional import cached_property
from django.utils.text import capfirst, slugify
from django.utils.translation import gettext_lazy as _
@@ -36,46 +32,28 @@ from botocore.exceptions import ClientError
from csp.constants import NONE
from csp.decorators import csp_update
from lasuite.malware_detection import malware_detection
from lasuite.oidc_login.decorators import refresh_oidc_access_token
from lasuite.tools.email import get_domain_from_email
from rest_framework import filters, status, viewsets
from rest_framework import response as drf_response
from rest_framework.permissions import AllowAny
from rest_framework.views import APIView
from core import authentication, choices, enums, models
from core.api.filters import remove_accents
from core.services import mime_types
from core.services.ai_services import AIService
from core.services.collaboration_services import CollaborationService
from core.services.converter_services import (
ConversionError,
Converter,
)
from core.services.converter_services import (
ServiceUnavailableError as YProviderServiceUnavailableError,
)
from core.services.converter_services import (
ValidationError as YProviderValidationError,
)
from core.services.search_indexers import (
get_document_indexer,
get_visited_document_ids_of,
from core.services.converter_services import (
YdocConverter,
)
from core.tasks.mail import send_ask_for_access_mail
from core.utils import (
extract_attachments,
filter_descendants,
users_sharing_documents_with,
)
from core.utils import extract_attachments, filter_descendants
from . import permissions, serializers, utils
from .filters import DocumentFilter, ListDocumentFilter, UserSearchFilter
from .throttling import (
DocumentThrottle,
UserListThrottleBurst,
UserListThrottleSustained,
)
from .throttling import UserListThrottleBurst, UserListThrottleSustained
logger = logging.getLogger(__name__)
@@ -210,15 +188,13 @@ class UserViewSet(
queryset = queryset.exclude(documentaccess__document_id=document_id)
filter_data = filterset.form.cleaned_data
query = remove_accents(filter_data["q"])
query = filter_data["q"]
# For emails, match emails by Levenshtein distance to prevent typing errors
if "@" in query:
return (
queryset.annotate(
distance=RawSQL(
"levenshtein(unaccent(email::text), %s::text)", (query,)
)
distance=RawSQL("levenshtein(email::text, %s::text)", (query,))
)
.filter(distance__lte=3)
.order_by("distance", "email")[: settings.API_USERS_LIST_LIMIT]
@@ -226,80 +202,14 @@ class UserViewSet(
# Use trigram similarity for non-email-like queries
# For performance reasons we filter first by similarity, which relies on an
# index, then only calculate precise similarity scores for sorting purposes.
#
# Additionally results are reordered to prefer users "closer" to the current
# user: users they recently shared documents with, then same email domain.
# To achieve that without complex SQL, we build a proximity score in Python
# and return the top N results.
# For security reasons, users that match neither of these proximity criteria
# are not returned at all, to prevent email enumeration.
current_user = self.request.user
shared_map = users_sharing_documents_with(current_user)
user_email_domain = get_domain_from_email(current_user.email) or ""
candidates = list(
queryset.annotate(
sim_email=TrigramSimilarity("email", query),
sim_name=TrigramSimilarity("full_name", query),
)
.annotate(similarity=Greatest("sim_email", "sim_name"))
# index, then only calculate precise similarity scores for sorting purposes
return (
queryset.filter(email__trigram_word_similar=query)
.annotate(similarity=TrigramSimilarity("email", query))
.filter(similarity__gt=0.2)
.order_by("-similarity")
.order_by("-similarity", "email")[: settings.API_USERS_LIST_LIMIT]
)
# Keep only users that either share documents with the current user
# or have an email with the same domain as the current user.
filtered_candidates = []
for u in candidates:
candidate_domain = get_domain_from_email(u.email) or ""
if shared_map.get(u.id) or (
user_email_domain and candidate_domain == user_email_domain
):
filtered_candidates.append(u)
candidates = filtered_candidates
# Build ordering key for each candidate
def _sort_key(u):
# shared priority: most recent first
# Use shared_last_at timestamp numeric for secondary ordering when shared.
shared_last_at = shared_map.get(u.id)
if shared_last_at:
is_shared = 1
shared_score = int(shared_last_at.timestamp())
else:
is_shared = 0
shared_score = 0
# domain proximity
candidate_email_domain = get_domain_from_email(u.email) or ""
same_full_domain = (
1
if candidate_email_domain
and candidate_email_domain == user_email_domain
else 0
)
# similarity fallback
sim = getattr(u, "similarity", 0) or 0
return (
is_shared,
shared_score,
same_full_domain,
sim,
)
# Sort candidates by the key descending and return top N as a queryset-like
# list. Keep return type consistent with previous behavior (QuerySet slice
# was returned) by returning a list of model instances.
candidates.sort(key=_sort_key, reverse=True)
return candidates[: settings.API_USERS_LIST_LIMIT]
@drf.decorators.action(
detail=False,
methods=["get"],
@@ -317,59 +227,6 @@ class UserViewSet(
)
class ReconciliationConfirmView(APIView):
"""API endpoint to confirm user reconciliation emails.
GET /user-reconciliations/{user_type}/{confirmation_id}/
Marks `active_email_checked` or `inactive_email_checked` to True.
"""
permission_classes = [AllowAny]
def get(self, request, user_type, confirmation_id):
"""
Check the confirmation ID and mark the corresponding email as checked.
"""
try:
# validate UUID
uuid_obj = uuid.UUID(str(confirmation_id))
except ValueError:
return drf_response.Response(
{"detail": "Badly formatted confirmation id"},
status=status.HTTP_400_BAD_REQUEST,
)
if user_type not in ("active", "inactive"):
return drf_response.Response(
{"detail": "Invalid user_type"}, status=status.HTTP_400_BAD_REQUEST
)
lookup = (
{"active_email_confirmation_id": uuid_obj}
if user_type == "active"
else {"inactive_email_confirmation_id": uuid_obj}
)
try:
rec = models.UserReconciliation.objects.get(**lookup)
except models.UserReconciliation.DoesNotExist:
return drf_response.Response(
{"detail": "Reconciliation entry not found"},
status=status.HTTP_404_NOT_FOUND,
)
field_name = (
"active_email_checked"
if user_type == "active"
else "inactive_email_checked"
)
if not getattr(rec, field_name):
setattr(rec, field_name, True)
rec.save()
return drf_response.Response({"detail": "Confirmation received"})
class ResourceAccessViewsetMixin:
"""Mixin with methods common to all access viewsets."""
@@ -508,18 +365,15 @@ class DocumentViewSet(
permission_classes = [
permissions.DocumentPermission,
]
throttle_classes = [DocumentThrottle]
throttle_scope = "document"
queryset = models.Document.objects.select_related("creator").all()
serializer_class = serializers.DocumentSerializer
ai_translate_serializer_class = serializers.AITranslateSerializer
all_serializer_class = serializers.ListDocumentSerializer
children_serializer_class = serializers.ListDocumentSerializer
descendants_serializer_class = serializers.ListDocumentSerializer
list_serializer_class = serializers.ListDocumentSerializer
trashbin_serializer_class = serializers.ListDocumentSerializer
tree_serializer_class = serializers.ListDocumentSerializer
search_serializer_class = serializers.ListDocumentSerializer
def get_queryset(self):
"""Get queryset performing all annotation and filtering on the document tree structure."""
@@ -650,28 +504,6 @@ class DocumentViewSet(
"IN SHARE ROW EXCLUSIVE MODE;"
)
# Remove file from validated_data as it's not a model field
# Process it if present
uploaded_file = serializer.validated_data.pop("file", None)
# If a file is uploaded, convert it to Yjs format and set as content
if uploaded_file:
try:
file_content = uploaded_file.read()
converter = Converter()
converted_content = converter.convert(
file_content,
content_type=uploaded_file.content_type,
accept=mime_types.YJS,
)
serializer.validated_data["content"] = converted_content
serializer.validated_data["title"] = uploaded_file.name
except ConversionError as err:
raise drf.exceptions.ValidationError(
{"file": ["Could not convert file content"]}
) from err
obj = models.Document.add_root(
creator=self.request.user,
**serializer.validated_data,
@@ -773,29 +605,12 @@ class DocumentViewSet(
"""Get list of favorite documents for the current user."""
user = request.user
queryset = self.get_queryset()
# Among the results, we may have documents that are ancestors/descendants
# of each other. In this case we want to keep only the highest ancestors.
root_paths = utils.filter_root_paths(
queryset.order_by("path").values_list("path", flat=True),
skip_sorting=True,
)
path_list = db.Q()
for path in root_paths:
path_list |= db.Q(path__startswith=path)
favorite_documents_ids = models.DocumentFavorite.objects.filter(
user=user
).values_list("document_id", flat=True)
queryset = self.queryset.filter(path_list)
queryset = self.filter_queryset(self.get_queryset())
queryset = queryset.filter(id__in=favorite_documents_ids)
queryset = queryset.annotate_user_roles(user)
queryset = queryset.annotate(
is_favorite=db.Value(True, output_field=db.BooleanField())
)
return self.get_response_for_queryset(queryset)
@drf.decorators.action(
@@ -1023,60 +838,6 @@ class DocumentViewSet(
},
)
@drf.decorators.action(
detail=False,
methods=["get"],
)
def all(self, request, *args, **kwargs):
"""
Returns all documents (including descendants) that the user has access to.
Unlike the list endpoint which only returns top-level documents, this endpoint
returns all documents including children, grandchildren, etc.
"""
user = self.request.user
accessible_documents = self.get_queryset()
accessible_paths = list(accessible_documents.values_list("path", flat=True))
if not accessible_paths:
return self.get_response_for_queryset(self.queryset.none())
# Build query to include all descendants using path prefix matching
descendants_clause = db.Q()
for path in accessible_paths:
descendants_clause |= db.Q(path__startswith=path)
queryset = self.queryset.filter(
descendants_clause, ancestors_deleted_at__isnull=True
)
# Apply existing filters
filterset = ListDocumentFilter(
self.request.GET, queryset=queryset, request=self.request
)
if not filterset.is_valid():
raise drf.exceptions.ValidationError(filterset.errors)
filter_data = filterset.form.cleaned_data
# Filter as early as possible on fields that are available on the model
for field in ["is_creator_me", "title"]:
queryset = filterset.filters[field].filter(queryset, filter_data[field])
queryset = queryset.annotate_user_roles(user)
# Annotate favorite status and filter if applicable as late as possible
queryset = queryset.annotate_is_favorite(user)
for field in ["is_favorite", "is_masked"]:
queryset = filterset.filters[field].filter(queryset, filter_data[field])
# Apply ordering only now that everything is filtered and annotated
queryset = filters.OrderingFilter().filter_queryset(
self.request, queryset, self
)
return self.get_response_for_queryset(queryset)
@drf.decorators.action(
detail=True,
methods=["get"],
@@ -1304,83 +1065,6 @@ class DocumentViewSet(
{"id": str(duplicated_document.id)}, status=status.HTTP_201_CREATED
)
def _search_simple(self, request, text):
"""
Returns a queryset filtered by the content of the document title
"""
# As in the 'list' view, we get a prefiltered queryset (deleted docs are excluded)
queryset = self.get_queryset()
filterset = DocumentFilter({"title": text}, queryset=queryset)
if not filterset.is_valid():
raise drf.exceptions.ValidationError(filterset.errors)
queryset = filterset.filter_queryset(queryset)
return self.get_response_for_queryset(
queryset.order_by("-updated_at"),
context={
"request": request,
},
)
def _search_fulltext(self, indexer, request, params):
"""
Returns a queryset from the results of the fulltext search of Find
"""
access_token = request.session.get("oidc_access_token")
user = request.user
text = params.validated_data["q"]
queryset = models.Document.objects.all()
# Retrieve the documents ids from Find.
results = indexer.search(
text=text,
token=access_token,
visited=get_visited_document_ids_of(queryset, user),
)
docs_by_uuid = {str(d.pk): d for d in queryset.filter(pk__in=results)}
ordered_docs = [docs_by_uuid[id] for id in results]
page = self.paginate_queryset(ordered_docs)
serializer = self.get_serializer(
page if page else ordered_docs,
many=True,
context={
"request": request,
},
)
return self.get_paginated_response(serializer.data)
@drf.decorators.action(detail=False, methods=["get"], url_path="search")
@method_decorator(refresh_oidc_access_token)
def search(self, request, *args, **kwargs):
"""
Returns a DRF response containing the filtered, annotated and ordered document list.
Applies filtering based on request parameter 'q' from `SearchDocumentSerializer`.
Depending on the configuration, it can be:
- A fulltext search through the opensearch indexation app "find" if the backend is
enabled (see SEARCH_INDEXER_CLASS)
- A filter on the model field 'title'.
The ordering is always by the most recent first.
"""
params = serializers.SearchDocumentSerializer(data=request.query_params)
params.is_valid(raise_exception=True)
indexer = get_document_indexer()
if indexer:
return self._search_fulltext(indexer, request, params=params)
# The indexer is not configured, so we fall back on a simple icontains filter on the
# model field 'title'.
return self._search_simple(request, text=params.validated_data["q"])
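As a hedged usage sketch of the action above (the host, API prefix and session cookie are placeholders; only the "search" url_path and the "q" parameter come from the code):

import requests

# Illustrative call to the search action; when no indexer is configured the
# endpoint falls back to a simple icontains filter on the document title.
response = requests.get(
    "https://docs.example.com/api/v1.0/documents/search/",
    params={"q": "meeting notes"},
    cookies={"sessionid": "<session-cookie>"},
    timeout=10,
)
response.raise_for_status()
print(response.json())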
@drf.decorators.action(detail=True, methods=["get"], url_path="versions")
def versions_list(self, request, *args, **kwargs):
"""
@@ -1819,101 +1503,6 @@ class DocumentViewSet(
return drf.response.Response(response, status=drf.status.HTTP_200_OK)
def _reject_invalid_ips(self, ips):
"""
Check that the given IP addresses are safe from SSRF attacks.
Raises:
drf.exceptions.ValidationError: If the IP is unsafe
"""
for ip in ips:
# Block loopback addresses (check before private,
# as 127.0.0.1 might be considered private)
if ip.is_loopback:
raise drf.exceptions.ValidationError(
"Access to loopback addresses is not allowed"
)
# Block link-local addresses (169.254.0.0/16) - check before private
if ip.is_link_local:
raise drf.exceptions.ValidationError(
"Access to link-local addresses is not allowed"
)
# Block private IP ranges
if ip.is_private:
raise drf.exceptions.ValidationError(
"Access to private IP addresses is not allowed"
)
# Block multicast addresses
if ip.is_multicast:
raise drf.exceptions.ValidationError(
"Access to multicast addresses is not allowed"
)
# Block reserved addresses (including 0.0.0.0)
if ip.is_reserved:
raise drf.exceptions.ValidationError(
"Access to reserved IP addresses is not allowed"
)
def _validate_url_against_ssrf(self, url):
"""
Validate that a URL is safe from SSRF (Server-Side Request Forgery) attacks.
Blocks:
- localhost and its variations
- Private IP ranges (10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16)
- Link-local addresses (169.254.0.0/16)
- Loopback addresses
Raises:
drf.exceptions.ValidationError: If the URL is unsafe
"""
parsed = urlparse(url)
hostname = parsed.hostname
if not hostname:
raise drf.exceptions.ValidationError("Invalid hostname")
# Resolve hostname to IP address(es)
# Check all resolved IPs to prevent DNS rebinding attacks
try:
# Try to parse as IP address first (if hostname is already an IP)
try:
ip = ipaddress.ip_address(hostname)
resolved_ips = [ip]
except ValueError:
# Resolve hostname to IP addresses (supports both IPv4 and IPv6)
resolved_ips = []
try:
# Get all address info (IPv4 and IPv6)
addr_info = socket.getaddrinfo(hostname, None, socket.AF_UNSPEC)
for family, _, _, _, sockaddr in addr_info:
if family == socket.AF_INET:
# IPv4
ip = ipaddress.ip_address(sockaddr[0])
resolved_ips.append(ip)
elif family == socket.AF_INET6:
# IPv6
ip = ipaddress.ip_address(sockaddr[0])
resolved_ips.append(ip)
except (socket.gaierror, OSError) as e:
raise drf.exceptions.ValidationError(
f"Failed to resolve hostname: {str(e)}"
) from e
if not resolved_ips:
raise drf.exceptions.ValidationError(
"No IP addresses found for hostname"
) from None
except ValueError as e:
raise drf.exceptions.ValidationError(f"Invalid IP address: {str(e)}") from e
# Check all resolved IPs to ensure none are private/internal
self._reject_invalid_ips(resolved_ips)
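For illustration, a minimal standalone sketch of the checks `_reject_invalid_ips` performs, built only on the standard ipaddress module (the sample addresses are arbitrary and ValueError stands in for the DRF ValidationError used above):

import ipaddress

def reject_unsafe_ips(ips):
    """Hypothetical standalone version of the same loopback/link-local/private/multicast/reserved checks."""
    for ip in ips:
        if (ip.is_loopback or ip.is_link_local or ip.is_private
                or ip.is_multicast or ip.is_reserved):
            raise ValueError(f"Unsafe IP address: {ip}")

reject_unsafe_ips([ipaddress.ip_address("93.184.216.34")])  # public address, passes
# reject_unsafe_ips([ipaddress.ip_address("127.0.0.1")])    # loopback, would raise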
@drf.decorators.action(
detail=True,
methods=["get"],
@@ -1947,16 +1536,6 @@ class DocumentViewSet(
status=drf.status.HTTP_400_BAD_REQUEST,
)
# Validate URL against SSRF attacks
try:
self._validate_url_against_ssrf(url)
except drf.exceptions.ValidationError as e:
logger.error("Potential SSRF attack detected: %s", e)
return drf.response.Response(
{"detail": "Invalid URL used."},
status=drf.status.HTTP_400_BAD_REQUEST,
)
try:
response = requests.get(
url,
@@ -1965,15 +1544,13 @@ class DocumentViewSet(
"User-Agent": request.headers.get("User-Agent", ""),
"Accept": request.headers.get("Accept", ""),
},
allow_redirects=False,
timeout=10,
)
response.raise_for_status()
content_type = response.headers.get("Content-Type", "")
if not content_type.startswith("image/"):
return drf.response.Response(
{"detail": "Invalid URL used."}, status=status.HTTP_400_BAD_REQUEST
status=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE
)
# Use StreamingHttpResponse with the response's iter_content to properly stream the data
@@ -1991,7 +1568,7 @@ class DocumentViewSet(
except requests.RequestException as e:
logger.exception(e)
return drf.response.Response(
{"detail": "Invalid URL used."},
{"error": f"Failed to fetch resource from {url}"},
status=status.HTTP_400_BAD_REQUEST,
)
@@ -2026,14 +1603,14 @@ class DocumentViewSet(
if base64_content is not None:
# Convert using the y-provider service
try:
yprovider = Converter()
yprovider = YdocConverter()
result = yprovider.convert(
base64.b64decode(base64_content),
mime_types.YJS,
"application/vnd.yjs.doc",
{
"markdown": mime_types.MARKDOWN,
"html": mime_types.HTML,
"json": mime_types.JSON,
"markdown": "text/markdown",
"html": "text/html",
"json": "application/json",
}[content_format],
)
content = result
@@ -2254,6 +1831,64 @@ class DocumentAccessViewSet(
)
class TemplateViewSet(
drf.mixins.RetrieveModelMixin,
viewsets.GenericViewSet,
):
"""Template ViewSet"""
filter_backends = [drf.filters.OrderingFilter]
permission_classes = [
permissions.IsAuthenticatedOrSafe,
permissions.ResourceWithAccessPermission,
]
throttle_scope = "template"
ordering = ["-created_at"]
ordering_fields = ["created_at", "updated_at", "title"]
serializer_class = serializers.TemplateSerializer
queryset = models.Template.objects.all()
def get_queryset(self):
"""Custom queryset to get user related templates."""
queryset = super().get_queryset()
user = self.request.user
if not user.is_authenticated:
return queryset
user_roles_query = (
models.TemplateAccess.objects.filter(
db.Q(user=user) | db.Q(team__in=user.teams),
template_id=db.OuterRef("pk"),
)
.values("template")
.annotate(roles_array=ArrayAgg("role"))
.values("roles_array")
)
return queryset.annotate(user_roles=db.Subquery(user_roles_query)).distinct()
def list(self, request, *args, **kwargs):
"""Restrict templates returned by the list endpoint"""
queryset = self.filter_queryset(self.get_queryset())
user = self.request.user
if user.is_authenticated:
queryset = queryset.filter(
db.Q(accesses__user=user)
| db.Q(accesses__team__in=user.teams)
| db.Q(is_public=True)
)
else:
queryset = queryset.filter(is_public=True)
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(queryset, many=True)
return drf.response.Response(serializer.data)
class InvitationViewset(
drf.mixins.CreateModelMixin,
drf.mixins.ListModelMixin,
@@ -2459,17 +2094,12 @@ class ConfigView(drf.views.APIView):
"""
array_settings = [
"AI_FEATURE_ENABLED",
"API_USERS_SEARCH_QUERY_MIN_LENGTH",
"COLLABORATION_WS_URL",
"COLLABORATION_WS_NOT_CONNECTED_READY_ONLY",
"CONVERSION_FILE_EXTENSIONS_ALLOWED",
"CONVERSION_FILE_MAX_SIZE",
"CRISP_WEBSITE_ID",
"ENVIRONMENT",
"FRONTEND_CSS_URL",
"FRONTEND_HOMEPAGE_FEATURE_ENABLED",
"FRONTEND_JS_URL",
"FRONTEND_SILENT_LOGIN_ENABLED",
"FRONTEND_THEME",
"MEDIA_BASE_URL",
"POSTHOG_KEY",

View File

@@ -1,19 +1,11 @@
"""Impress Core application"""
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
# from django.apps import AppConfig
# from django.utils.translation import gettext_lazy as _
class CoreConfig(AppConfig):
"""Configuration class for the impress core app."""
# class CoreConfig(AppConfig):
# """Configuration class for the impress core app."""
name = "core"
app_label = "core"
verbose_name = _("Impress core application")
def ready(self):
"""
Import signals when the app is ready.
"""
# pylint: disable=import-outside-toplevel, unused-import
from . import signals # noqa: PLC0415
# name = "core"
# app_label = "core"
# verbose_name = _("impress core application")

View File

@@ -6,7 +6,6 @@ import os
from django.conf import settings
from django.core.exceptions import SuspiciousOperation
from lasuite.marketing.tasks import create_or_update_contact
from lasuite.oidc_login.backends import (
OIDCAuthenticationBackend as LaSuiteOIDCAuthenticationBackend,
)
@@ -58,22 +57,3 @@ class OIDCAuthenticationBackend(LaSuiteOIDCAuthenticationBackend):
return self.UserModel.objects.get_user_by_sub_or_email(sub, email)
except DuplicateEmailError as err:
raise SuspiciousOperation(err.message) from err
def post_get_or_create_user(self, user, claims, is_new_user):
"""
Post-processing after user creation or retrieval.
Args:
user (User): The user instance.
claims (dict): The claims dictionary.
is_new_user (bool): Indicates if the user was newly created.
Returns:
- None
"""
if is_new_user and settings.SIGNUP_NEW_USER_TO_MARKETING_EMAIL:
create_or_update_contact.delay(
email=user.email, attributes={"DOCS_SOURCE": ["SIGNIN"]}
)

View File

@@ -53,6 +53,15 @@ class UserFactory(factory.django.DjangoModelFactory):
if create and (extracted is True):
UserDocumentAccessFactory(user=self, role="owner")
@factory.post_generation
def with_owned_template(self, create, extracted, **kwargs):
"""
Create a template for which the user is owner to check
that there is no interference
"""
if create and (extracted is True):
UserTemplateAccessFactory(user=self, role="owner")
class ParentNodeFactory(factory.declarations.ParameteredAttribute):
"""Custom factory attribute for setting the parent node."""
@@ -193,6 +202,50 @@ class DocumentAskForAccessFactory(factory.django.DjangoModelFactory):
role = factory.fuzzy.FuzzyChoice([r[0] for r in models.RoleChoices.choices])
class TemplateFactory(factory.django.DjangoModelFactory):
"""A factory to create templates"""
class Meta:
model = models.Template
django_get_or_create = ("title",)
skip_postgeneration_save = True
title = factory.Sequence(lambda n: f"template{n}")
is_public = factory.Faker("boolean")
@factory.post_generation
def users(self, create, extracted, **kwargs):
"""Add users to template from a given list of users with or without roles."""
if create and extracted:
for item in extracted:
if isinstance(item, models.User):
UserTemplateAccessFactory(template=self, user=item)
else:
UserTemplateAccessFactory(template=self, user=item[0], role=item[1])
class UserTemplateAccessFactory(factory.django.DjangoModelFactory):
"""Create fake template user accesses for testing."""
class Meta:
model = models.TemplateAccess
template = factory.SubFactory(TemplateFactory)
user = factory.SubFactory(UserFactory)
role = factory.fuzzy.FuzzyChoice([r[0] for r in models.RoleChoices.choices])
class TeamTemplateAccessFactory(factory.django.DjangoModelFactory):
"""Create fake template team accesses for testing."""
class Meta:
model = models.TemplateAccess
template = factory.SubFactory(TemplateFactory)
team = factory.Sequence(lambda n: f"team{n}")
role = factory.fuzzy.FuzzyChoice([r[0] for r in models.RoleChoices.choices])
class InvitationFactory(factory.django.DjangoModelFactory):
"""A factory to create invitations for a user"""

View File

@@ -1,52 +0,0 @@
"""
Handle search setup that needs to be done at bootstrap time.
"""
import logging
import time
from django.core.management.base import BaseCommand, CommandError
from core.services.search_indexers import get_document_indexer
logger = logging.getLogger("docs.search.bootstrap_search")
class Command(BaseCommand):
"""Index all documents to remote search service"""
help = __doc__
def add_arguments(self, parser):
"""Add argument to require forcing execution when not in debug mode."""
parser.add_argument(
"--batch-size",
action="store",
dest="batch_size",
type=int,
default=50,
help="Indexation query batch size",
)
def handle(self, *args, **options):
"""Launch and log search index generation."""
indexer = get_document_indexer()
if not indexer:
raise CommandError("The indexer is not enabled or properly configured.")
logger.info("Starting to regenerate Find index...")
start = time.perf_counter()
batch_size = options["batch_size"]
try:
count = indexer.index(batch_size=batch_size)
except Exception as err:
raise CommandError("Unable to regenerate index") from err
duration = time.perf_counter() - start
logger.info(
"Search index regenerated from %d document(s) in %.2f seconds.",
count,
duration,
)

View File

@@ -1,37 +0,0 @@
# Generated by Django 5.2.8 on 2025-11-20 09:56
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("core", "0026_comments"),
]
operations = [
migrations.RunSQL(
sql="""
CREATE OR REPLACE FUNCTION public.immutable_unaccent(regdictionary, text)
RETURNS text
LANGUAGE c IMMUTABLE PARALLEL SAFE STRICT AS
'$libdir/unaccent', 'unaccent_dict';
CREATE OR REPLACE FUNCTION public.f_unaccent(text)
RETURNS text
LANGUAGE sql IMMUTABLE PARALLEL SAFE STRICT
RETURN public.immutable_unaccent(regdictionary 'public.unaccent', $1);
CREATE INDEX IF NOT EXISTS user_email_unaccent_trgm_idx
ON impress_user
USING gin (f_unaccent(email) gin_trgm_ops);
CREATE INDEX IF NOT EXISTS user_full_name_unaccent_trgm_idx
ON impress_user
USING gin (f_unaccent(full_name) gin_trgm_ops);
""",
reverse_sql="""
DROP INDEX IF EXISTS user_email_unaccent_trgm_idx;
DROP INDEX IF EXISTS user_full_name_unaccent_trgm_idx;
""",
),
]

View File

@@ -1,26 +0,0 @@
# Generated by Django 5.2.9 on 2026-01-09 14:18
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("core", "0027_auto_20251120_0956"),
]
operations = [
migrations.RemoveField(
model_name="templateaccess",
name="template",
),
migrations.RemoveField(
model_name="templateaccess",
name="user",
),
migrations.DeleteModel(
name="Template",
),
migrations.DeleteModel(
name="TemplateAccess",
),
]

View File

@@ -1,178 +0,0 @@
# Generated by Django 5.2.11 on 2026-02-10 15:47
import uuid
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("core", "0028_remove_templateaccess_template_and_more"),
]
operations = [
migrations.CreateModel(
name="UserReconciliationCsvImport",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
help_text="primary key for the record as UUID",
primary_key=True,
serialize=False,
verbose_name="id",
),
),
(
"created_at",
models.DateTimeField(
auto_now_add=True,
help_text="date and time at which a record was created",
verbose_name="created on",
),
),
(
"updated_at",
models.DateTimeField(
auto_now=True,
help_text="date and time at which a record was last updated",
verbose_name="updated on",
),
),
(
"file",
models.FileField(upload_to="imports/", verbose_name="CSV file"),
),
(
"status",
models.CharField(
choices=[
("pending", "Pending"),
("running", "Running"),
("done", "Done"),
("error", "Error"),
],
default="pending",
max_length=20,
),
),
("logs", models.TextField(blank=True)),
],
options={
"verbose_name": "user reconciliation CSV import",
"verbose_name_plural": "user reconciliation CSV imports",
"db_table": "impress_user_reconciliation_csv_import",
},
),
migrations.CreateModel(
name="UserReconciliation",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
help_text="primary key for the record as UUID",
primary_key=True,
serialize=False,
verbose_name="id",
),
),
(
"created_at",
models.DateTimeField(
auto_now_add=True,
help_text="date and time at which a record was created",
verbose_name="created on",
),
),
(
"updated_at",
models.DateTimeField(
auto_now=True,
help_text="date and time at which a record was last updated",
verbose_name="updated on",
),
),
(
"active_email",
models.EmailField(
max_length=254, verbose_name="Active email address"
),
),
(
"inactive_email",
models.EmailField(
max_length=254, verbose_name="Email address to deactivate"
),
),
("active_email_checked", models.BooleanField(default=False)),
("inactive_email_checked", models.BooleanField(default=False)),
(
"active_email_confirmation_id",
models.UUIDField(
default=uuid.uuid4, editable=False, null=True, unique=True
),
),
(
"inactive_email_confirmation_id",
models.UUIDField(
default=uuid.uuid4, editable=False, null=True, unique=True
),
),
(
"source_unique_id",
models.CharField(
blank=True,
max_length=100,
null=True,
verbose_name="Unique ID in the source file",
),
),
(
"status",
models.CharField(
choices=[
("pending", "Pending"),
("ready", "Ready"),
("done", "Done"),
("error", "Error"),
],
default="pending",
max_length=20,
),
),
("logs", models.TextField(blank=True)),
(
"active_user",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="active_user",
to=settings.AUTH_USER_MODEL,
),
),
(
"inactive_user",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="inactive_user",
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"verbose_name": "user reconciliation",
"verbose_name_plural": "user reconciliations",
"db_table": "impress_user_reconciliation",
"ordering": ["-created_at"],
},
),
]

View File

@@ -1,7 +1,6 @@
"""
Declare and configure the models for the impress core application
"""
# pylint: disable=too-many-lines
import hashlib
@@ -15,6 +14,7 @@ from django.contrib.auth import models as auth_models
from django.contrib.auth.base_user import AbstractBaseUser
from django.contrib.postgres.fields import ArrayField
from django.contrib.sites.models import Site
from django.core import mail
from django.core.cache import cache
from django.core.files.base import ContentFile
from django.core.files.storage import default_storage
@@ -32,14 +32,14 @@ from rest_framework.exceptions import ValidationError
from timezone_field import TimeZoneField
from treebeard.mp_tree import MP_Node, MP_NodeManager, MP_NodeQuerySet
from core.choices import (
from .choices import (
PRIVILEGED_ROLES,
LinkReachChoices,
LinkRoleChoices,
RoleChoices,
get_equivalent_link_definition,
)
from core.validators import sub_validator
from .validators import sub_validator
logger = getLogger(__name__)
@@ -118,11 +118,11 @@ class UserManager(auth_models.UserManager):
if settings.OIDC_FALLBACK_TO_EMAIL_FOR_IDENTIFICATION:
try:
return self.get(email__iexact=email)
return self.get(email=email)
except self.model.DoesNotExist:
pass
elif (
self.filter(email__iexact=email).exists()
self.filter(email=email).exists()
and not settings.OIDC_ALLOW_DUPLICATE_EMAILS
):
raise DuplicateEmailError(
@@ -250,37 +250,11 @@ class User(AbstractBaseUser, BaseModel, auth_models.PermissionsMixin):
valid_invitations.delete()
def send_email(self, subject, context=None, language=None):
"""Generate and send email to the user from a template."""
emails = [self.email]
context = context or {}
domain = settings.EMAIL_URL_APP or Site.objects.get_current().domain
language = language or get_language()
context.update(
{
"brandname": settings.EMAIL_BRAND_NAME,
"domain": domain,
"logo_img": settings.EMAIL_LOGO_IMG,
}
)
with override(language):
msg_html = render_to_string("mail/html/template.html", context)
msg_plain = render_to_string("mail/text/template.txt", context)
subject = str(subject) # Force translation
try:
send_mail(
subject.capitalize(),
msg_plain,
settings.EMAIL_FROM,
emails,
html_message=msg_html,
fail_silently=False,
)
except smtplib.SMTPException as exception:
logger.error("invitation to %s was not sent: %s", emails, exception)
def email_user(self, subject, message, from_email=None, **kwargs):
"""Email this user."""
if not self.email:
raise ValueError("User has no email address.")
mail.send_mail(subject, message, from_email, [self.email], **kwargs)
@cached_property
def teams(self):
@@ -291,417 +265,6 @@ class User(AbstractBaseUser, BaseModel, auth_models.PermissionsMixin):
return []
class UserReconciliation(BaseModel):
"""Model to run batch jobs to replace an active user by another one"""
active_email = models.EmailField(_("Active email address"))
inactive_email = models.EmailField(_("Email address to deactivate"))
active_email_checked = models.BooleanField(default=False)
inactive_email_checked = models.BooleanField(default=False)
active_user = models.ForeignKey(
User,
on_delete=models.CASCADE,
null=True,
blank=True,
related_name="active_user",
)
inactive_user = models.ForeignKey(
User,
on_delete=models.CASCADE,
null=True,
blank=True,
related_name="inactive_user",
)
active_email_confirmation_id = models.UUIDField(
default=uuid.uuid4, unique=True, editable=False, null=True
)
inactive_email_confirmation_id = models.UUIDField(
default=uuid.uuid4, unique=True, editable=False, null=True
)
source_unique_id = models.CharField(
max_length=100,
blank=True,
null=True,
verbose_name=_("Unique ID in the source file"),
)
status = models.CharField(
max_length=20,
choices=[
("pending", _("Pending")),
("ready", _("Ready")),
("done", _("Done")),
("error", _("Error")),
],
default="pending",
)
logs = models.TextField(blank=True)
class Meta:
db_table = "impress_user_reconciliation"
verbose_name = _("user reconciliation")
verbose_name_plural = _("user reconciliations")
ordering = ["-created_at"]
def __str__(self):
return f"Reconciliation from {self.inactive_email} to {self.active_email}"
def save(self, *args, **kwargs):
"""
For pending queries, identify the actual users and send validation emails
"""
if self.status == "pending":
self.active_user = User.objects.filter(email=self.active_email).first()
self.inactive_user = User.objects.filter(email=self.inactive_email).first()
if self.active_user and self.inactive_user:
if not self.active_email_checked:
self.send_reconciliation_confirm_email(
self.active_user, "active", self.active_email_confirmation_id
)
if not self.inactive_email_checked:
self.send_reconciliation_confirm_email(
self.inactive_user,
"inactive",
self.inactive_email_confirmation_id,
)
self.status = "ready"
else:
self.status = "error"
self.logs = "Error: Both active and inactive users need to exist."
super().save(*args, **kwargs)
@transaction.atomic
def process_reconciliation_request(self):
"""
Process the reconciliation request as a transaction.
- Transfer document accesses from inactive to active user, updating roles as needed.
- Transfer document favorites from inactive to active user.
- Transfer link traces from inactive to active user.
- Transfer comment-related content from inactive to active user
(threads, comments and reactions)
- Activate the active user and deactivate the inactive user.
- Update the reconciliation entry itself.
"""
# Prepare the data to perform the reconciliation on
updated_accesses, removed_accesses = (
self.prepare_documentaccess_reconciliation()
)
updated_linktraces, removed_linktraces = self.prepare_linktrace_reconciliation()
update_favorites, removed_favorites = (
self.prepare_document_favorite_reconciliation()
)
updated_threads = self.prepare_thread_reconciliation()
updated_comments = self.prepare_comment_reconciliation()
updated_reactions, removed_reactions = self.prepare_reaction_reconciliation()
self.active_user.is_active = True
self.inactive_user.is_active = False
# Actually perform the bulk operations
DocumentAccess.objects.bulk_update(updated_accesses, ["user", "role"])
if removed_accesses:
ids_to_delete = [entry.id for entry in removed_accesses]
DocumentAccess.objects.filter(id__in=ids_to_delete).delete()
DocumentFavorite.objects.bulk_update(update_favorites, ["user"])
if removed_favorites:
ids_to_delete = [entry.id for entry in removed_favorites]
DocumentFavorite.objects.filter(id__in=ids_to_delete).delete()
LinkTrace.objects.bulk_update(updated_linktraces, ["user"])
if removed_linktraces:
ids_to_delete = [entry.id for entry in removed_linktraces]
LinkTrace.objects.filter(id__in=ids_to_delete).delete()
Thread.objects.bulk_update(updated_threads, ["creator"])
Comment.objects.bulk_update(updated_comments, ["user"])
# pylint: disable=C0103
ReactionThroughModel = Reaction.users.through
reactions_to_create = []
for updated_reaction in updated_reactions:
reactions_to_create.append(
ReactionThroughModel(
user_id=self.active_user.pk, reaction_id=updated_reaction.pk
)
)
if reactions_to_create:
ReactionThroughModel.objects.bulk_create(reactions_to_create)
if removed_reactions:
ids_to_delete = [entry.id for entry in removed_reactions]
ReactionThroughModel.objects.filter(
reaction_id__in=ids_to_delete, user_id=self.inactive_user.pk
).delete()
User.objects.bulk_update([self.active_user, self.inactive_user], ["is_active"])
# Wrap up the reconciliation entry
self.logs += f"""Requested update for {len(updated_accesses)} DocumentAccess items
and deletion for {len(removed_accesses)} DocumentAccess items.\n"""
self.status = "done"
self.save()
self.send_reconciliation_done_email()
def prepare_documentaccess_reconciliation(self):
"""
Prepare the reconciliation by transferring document accesses from the inactive user
to the active user.
"""
updated_accesses = []
removed_accesses = []
inactive_accesses = DocumentAccess.objects.filter(user=self.inactive_user)
# Check documents where the active user already has access
inactive_accesses_documents = inactive_accesses.values_list(
"document", flat=True
)
existing_accesses = DocumentAccess.objects.filter(user=self.active_user).filter(
document__in=inactive_accesses_documents
)
existing_roles_per_doc = dict(existing_accesses.values_list("document", "role"))
for entry in inactive_accesses:
if entry.document_id in existing_roles_per_doc:
# Update role if needed
existing_role = existing_roles_per_doc[entry.document_id]
max_role = RoleChoices.max(entry.role, existing_role)
if existing_role != max_role:
existing_access = existing_accesses.get(document=entry.document)
existing_access.role = max_role
updated_accesses.append(existing_access)
removed_accesses.append(entry)
else:
entry.user = self.active_user
updated_accesses.append(entry)
return updated_accesses, removed_accesses
def prepare_document_favorite_reconciliation(self):
"""
Prepare the reconciliation by transferring document favorites from the inactive user
to the active user.
"""
updated_favorites = []
removed_favorites = []
existing_favorites = DocumentFavorite.objects.filter(user=self.active_user)
existing_favorite_doc_ids = set(
existing_favorites.values_list("document_id", flat=True)
)
inactive_favorites = DocumentFavorite.objects.filter(user=self.inactive_user)
for entry in inactive_favorites:
if entry.document_id in existing_favorite_doc_ids:
removed_favorites.append(entry)
else:
entry.user = self.active_user
updated_favorites.append(entry)
return updated_favorites, removed_favorites
def prepare_linktrace_reconciliation(self):
"""
Prepare the reconciliation by transferring link traces from the inactive user
to the active user.
"""
updated_linktraces = []
removed_linktraces = []
existing_linktraces = LinkTrace.objects.filter(user=self.active_user)
inactive_linktraces = LinkTrace.objects.filter(user=self.inactive_user)
for entry in inactive_linktraces:
if existing_linktraces.filter(document=entry.document).exists():
removed_linktraces.append(entry)
else:
entry.user = self.active_user
updated_linktraces.append(entry)
return updated_linktraces, removed_linktraces
def prepare_thread_reconciliation(self):
"""
Prepare the reconciliation by transferring threads from the inactive user
to the active user.
"""
updated_threads = []
inactive_threads = Thread.objects.filter(creator=self.inactive_user)
for entry in inactive_threads:
entry.creator = self.active_user
updated_threads.append(entry)
return updated_threads
def prepare_comment_reconciliation(self):
"""
Prepare the reconciliation by transferring comments from the inactive user
to the active user.
"""
updated_comments = []
inactive_comments = Comment.objects.filter(user=self.inactive_user)
for entry in inactive_comments:
entry.user = self.active_user
updated_comments.append(entry)
return updated_comments
def prepare_reaction_reconciliation(self):
"""
Prepare the reconciliation by creating missing reactions for the active user
(i.e., the ones that exist for the inactive user but not the active user)
and then deleting all reactions of the inactive user.
"""
inactive_reactions = Reaction.objects.filter(users=self.inactive_user)
updated_reactions = inactive_reactions.exclude(users=self.active_user)
return updated_reactions, inactive_reactions
def send_reconciliation_confirm_email(
self, user, user_type, confirmation_id, language=None
):
"""Method allowing to send confirmation email for reconciliation requests."""
language = language or get_language()
domain = settings.EMAIL_URL_APP or Site.objects.get_current().domain
message = _(
"""You have requested a reconciliation of your user accounts on Docs.
To confirm that you are the one who initiated the request
and that this email belongs to you:"""
)
with override(language):
subject = _("Confirm by clicking the link to start the reconciliation")
context = {
"title": subject,
"message": message,
"link": f"{domain}/user-reconciliations/{user_type}/{confirmation_id}/",
"link_label": str(_("Click here")),
"button_label": str(_("Confirm")),
}
user.send_email(subject, context, language)
def send_reconciliation_done_email(self, language=None):
"""Method allowing to send done email for reconciliation requests."""
language = language or get_language()
domain = settings.EMAIL_URL_APP or Site.objects.get_current().domain
message = _(
"""Your reconciliation request has been processed.
New documents are likely associated with your account:"""
)
with override(language):
subject = _("Your accounts have been merged")
context = {
"title": subject,
"message": message,
"link": f"{domain}/",
"link_label": str(_("Click here to see")),
"button_label": str(_("See my documents")),
}
self.active_user.send_email(subject, context, language)
class UserReconciliationCsvImport(BaseModel):
"""Model to import reconciliations requests from an external source
(eg, )"""
file = models.FileField(upload_to="imports/", verbose_name=_("CSV file"))
status = models.CharField(
max_length=20,
choices=[
("pending", _("Pending")),
("running", _("Running")),
("done", _("Done")),
("error", _("Error")),
],
default="pending",
)
logs = models.TextField(blank=True)
class Meta:
db_table = "impress_user_reconciliation_csv_import"
verbose_name = _("user reconciliation CSV import")
verbose_name_plural = _("user reconciliation CSV imports")
def __str__(self):
return f"User reconciliation CSV import {self.id}"
def send_email(self, subject, emails, context=None, language=None):
"""Generate and send email to the user from a template."""
context = context or {}
domain = settings.EMAIL_URL_APP or Site.objects.get_current().domain
language = language or get_language()
context.update(
{
"brandname": settings.EMAIL_BRAND_NAME,
"domain": domain,
"logo_img": settings.EMAIL_LOGO_IMG,
}
)
with override(language):
msg_html = render_to_string("mail/html/template.html", context)
msg_plain = render_to_string("mail/text/template.txt", context)
subject = str(subject) # Force translation
try:
send_mail(
subject.capitalize(),
msg_plain,
settings.EMAIL_FROM,
emails,
html_message=msg_html,
fail_silently=False,
)
except smtplib.SMTPException as exception:
logger.error("invitation to %s was not sent: %s", emails, exception)
def send_reconciliation_error_email(
self, recipient_email, other_email, language=None
):
"""Method allowing to send email for reconciliation requests with errors."""
language = language or get_language()
emails = [recipient_email]
message = _(
"""Your request for reconciliation was unsuccessful.
Reconciliation failed for the following email addresses:
{recipient_email}, {other_email}.
Please check for typos.
You can submit another request with the valid email addresses."""
).format(recipient_email=recipient_email, other_email=other_email)
with override(language):
subject = _("Reconciliation of your Docs accounts not completed")
context = {
"title": subject,
"message": message,
"link": settings.USER_RECONCILIATION_FORM_URL,
"link_label": str(_("Click here")),
"button_label": str(_("Make a new request")),
}
self.send_email(subject, emails, context, language)
class BaseAccess(BaseModel):
"""Base model for accesses to handle resources."""
@@ -869,35 +432,32 @@ class Document(MP_Node, BaseModel):
def save(self, *args, **kwargs):
"""Write content to object storage only if _content has changed."""
super().save(*args, **kwargs)
if self._content:
self.save_content(self._content)
file_key = self.file_key
bytes_content = self._content.encode("utf-8")
def save_content(self, content):
"""Save content to object storage."""
file_key = self.file_key
bytes_content = content.encode("utf-8")
# Attempt to directly check if the object exists using the storage client.
try:
response = default_storage.connection.meta.client.head_object(
Bucket=default_storage.bucket_name, Key=file_key
)
except ClientError as excpt:
# If the error is a 404, the object doesn't exist, so we should create it.
if excpt.response["Error"]["Code"] == "404":
has_changed = True
# Attempt to directly check if the object exists using the storage client.
try:
response = default_storage.connection.meta.client.head_object(
Bucket=default_storage.bucket_name, Key=file_key
)
except ClientError as excpt:
# If the error is a 404, the object doesn't exist, so we should create it.
if excpt.response["Error"]["Code"] == "404":
has_changed = True
else:
raise
else:
raise
else:
# Compare the existing ETag with the MD5 hash of the new content.
has_changed = (
response["ETag"].strip('"') != hashlib.md5(bytes_content).hexdigest() # noqa: S324
)
# Compare the existing ETag with the MD5 hash of the new content.
has_changed = (
response["ETag"].strip('"')
!= hashlib.md5(bytes_content).hexdigest() # noqa: S324
)
if has_changed:
content_file = ContentFile(bytes_content)
default_storage.save(file_key, content_file)
if has_changed:
content_file = ContentFile(bytes_content)
default_storage.save(file_key, content_file)
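A small sketch of the comparison performed above: for non-multipart uploads the S3 ETag is the hex MD5 of the object body, so hashing the new content and comparing it with the stored ETag detects unchanged documents (the content below is arbitrary):

import hashlib

bytes_content = "updated document content".encode("utf-8")
# Simulate the ETag the storage backend would return for an identical previous upload.
previous_etag = f'"{hashlib.md5(bytes_content).hexdigest()}"'  # noqa: S324

has_changed = previous_etag.strip('"') != hashlib.md5(bytes_content).hexdigest()  # noqa: S324
print(has_changed)  # False: identical content, nothing to re-upload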
def is_leaf(self):
"""
@@ -1253,16 +813,15 @@ class Document(MP_Node, BaseModel):
def send_email(self, subject, emails, context=None, language=None):
"""Generate and send email from a template."""
context = context or {}
domain = settings.EMAIL_URL_APP or Site.objects.get_current().domain
domain = Site.objects.get_current().domain
language = language or get_language()
context.update(
{
"brandname": settings.EMAIL_BRAND_NAME,
"document": self,
"domain": domain,
"link": f"{domain}/docs/{self.id}/?utm_source=docssharelink&utm_campaign={self.id}",
"link_label": self.title or str(_("Untitled Document")),
"button_label": _("Open"),
"link": f"{domain}/docs/{self.id}/",
"document_title": self.title or str(_("Untitled Document")),
"logo_img": settings.EMAIL_LOGO_IMG,
}
)
@@ -1344,8 +903,7 @@ class Document(MP_Node, BaseModel):
# Mark all descendants as soft deleted
self.get_descendants().filter(ancestors_deleted_at__isnull=True).update(
ancestors_deleted_at=self.ancestors_deleted_at,
updated_at=self.updated_at,
ancestors_deleted_at=self.ancestors_deleted_at
)
@transaction.atomic
@@ -1866,6 +1424,163 @@ class Reaction(BaseModel):
return f"Reaction {self.emoji} on comment {self.comment.id}"
class Template(BaseModel):
"""HTML and CSS code used for formatting the print around the MarkDown body."""
title = models.CharField(_("title"), max_length=255)
description = models.TextField(_("description"), blank=True)
code = models.TextField(_("code"), blank=True)
css = models.TextField(_("css"), blank=True)
is_public = models.BooleanField(
_("public"),
default=False,
help_text=_("Whether this template is public for anyone to use."),
)
class Meta:
db_table = "impress_template"
ordering = ("title",)
verbose_name = _("Template")
verbose_name_plural = _("Templates")
def __str__(self):
return self.title
def get_role(self, user):
"""Return the roles a user has on a resource as an iterable."""
if not user.is_authenticated:
return None
try:
roles = self.user_roles or []
except AttributeError:
try:
roles = self.accesses.filter(
models.Q(user=user) | models.Q(team__in=user.teams),
).values_list("role", flat=True)
except (models.ObjectDoesNotExist, IndexError):
roles = []
return RoleChoices.max(*roles)
def get_abilities(self, user):
"""
Compute and return abilities for a given user on the template.
"""
role = self.get_role(user)
is_owner_or_admin = role in PRIVILEGED_ROLES
can_get = self.is_public or bool(role)
can_update = is_owner_or_admin or role == RoleChoices.EDITOR
return {
"destroy": role == RoleChoices.OWNER,
"generate_document": can_get,
"accesses_manage": is_owner_or_admin,
"update": can_update,
"partial_update": can_update,
"retrieve": can_get,
}
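A hedged sketch of consuming the abilities dictionary above, using the template factories defined later in this diff (the core.factories import path and the assumption that EDITOR is not part of PRIVILEGED_ROLES are mine):

from core import factories, models

user = factories.UserFactory()
template = factories.TemplateFactory(users=[(user, models.RoleChoices.EDITOR)])

abilities = template.get_abilities(user)
assert abilities["update"] is True          # editors may update
assert abilities["retrieve"] is True
assert abilities["destroy"] is False        # only owners may destroy
assert abilities["accesses_manage"] is False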
class TemplateAccess(BaseAccess):
"""Relation model to give access to a template for a user or a team with a role."""
template = models.ForeignKey(
Template,
on_delete=models.CASCADE,
related_name="accesses",
)
class Meta:
db_table = "impress_template_access"
ordering = ("-created_at",)
verbose_name = _("Template/user relation")
verbose_name_plural = _("Template/user relations")
constraints = [
models.UniqueConstraint(
fields=["user", "template"],
condition=models.Q(user__isnull=False), # Exclude null users
name="unique_template_user",
violation_error_message=_("This user is already in this template."),
),
models.UniqueConstraint(
fields=["team", "template"],
condition=models.Q(team__gt=""), # Exclude empty string teams
name="unique_template_team",
violation_error_message=_("This team is already in this template."),
),
models.CheckConstraint(
condition=models.Q(user__isnull=False, team="")
| models.Q(user__isnull=True, team__gt=""),
name="check_template_access_either_user_or_team",
violation_error_message=_("Either user or team must be set, not both."),
),
]
def __str__(self):
return f"{self.user!s} is {self.role:s} in template {self.template!s}"
def get_role(self, user):
"""
Get the role a user has on a resource.
"""
if not user.is_authenticated:
return None
try:
roles = self.user_roles or []
except AttributeError:
teams = user.teams
try:
roles = self.template.accesses.filter(
models.Q(user=user) | models.Q(team__in=teams),
).values_list("role", flat=True)
except (Template.DoesNotExist, IndexError):
roles = []
return RoleChoices.max(*roles)
def get_abilities(self, user):
"""
Compute and return abilities for a given user on the template access.
"""
role = self.get_role(user)
is_owner_or_admin = role in PRIVILEGED_ROLES
if self.role == RoleChoices.OWNER:
can_delete = (role == RoleChoices.OWNER) and self.template.accesses.filter(
role=RoleChoices.OWNER
).count() > 1
set_role_to = (
[RoleChoices.ADMIN, RoleChoices.EDITOR, RoleChoices.READER]
if can_delete
else []
)
else:
can_delete = is_owner_or_admin
set_role_to = []
if role == RoleChoices.OWNER:
set_role_to.append(RoleChoices.OWNER)
if is_owner_or_admin:
set_role_to.extend(
[RoleChoices.ADMIN, RoleChoices.EDITOR, RoleChoices.READER]
)
# Remove the current role as we don't want to propose it as an option
try:
set_role_to.remove(self.role)
except ValueError:
pass
return {
"destroy": can_delete,
"update": bool(set_role_to),
"partial_update": bool(set_role_to),
"retrieve": bool(role),
"set_role_to": set_role_to,
}
class Invitation(BaseModel):
"""User invitation to a document."""
@@ -1905,7 +1620,7 @@ class Invitation(BaseModel):
# Check if an identity already exists for the provided email
if (
User.objects.filter(email__iexact=self.email).exists()
User.objects.filter(email=self.email).exists()
and not settings.OIDC_ALLOW_DUPLICATE_EMAILS
):
raise ValidationError(

View File

@@ -3,14 +3,10 @@
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from openai import OpenAI
from core import enums
if settings.LANGFUSE_PUBLIC_KEY:
from langfuse.openai import OpenAI
else:
from openai import OpenAI
AI_ACTIONS = {
"prompt": (
"Answer the prompt using markdown formatting for structure and emphasis. "

View File

@@ -1,17 +1,11 @@
"""Y-Provider API services."""
import logging
import typing
from base64 import b64encode
from django.conf import settings
import requests
from core.services import mime_types
logger = logging.getLogger(__name__)
class ConversionError(Exception):
"""Base exception for conversion-related errors."""
@@ -25,81 +19,8 @@ class ServiceUnavailableError(ConversionError):
"""Raised when the conversion service is unavailable."""
class ConverterProtocol(typing.Protocol):
"""Protocol for converter classes."""
def convert(self, data, content_type, accept):
"""Convert content from one format to another."""
class Converter:
"""Orchestrates conversion between different formats using specialized converters."""
docspec: ConverterProtocol
ydoc: ConverterProtocol
def __init__(self):
self.docspec = DocSpecConverter()
self.ydoc = YdocConverter()
def convert(self, data, content_type, accept):
"""Convert input into other formats using external microservices."""
if content_type == mime_types.DOCX and accept == mime_types.YJS:
blocknote_data = self.docspec.convert(
data, mime_types.DOCX, mime_types.BLOCKNOTE
)
return self.ydoc.convert(
blocknote_data, mime_types.BLOCKNOTE, mime_types.YJS
)
return self.ydoc.convert(data, content_type, accept)
class DocSpecConverter:
"""Service class for DocSpec conversion-related operations."""
def _request(self, url, data, content_type):
"""Make a request to the DocSpec API."""
response = requests.post(
url,
headers={"Accept": mime_types.BLOCKNOTE},
files={"file": ("document.docx", data, content_type)},
timeout=settings.CONVERSION_API_TIMEOUT,
verify=settings.CONVERSION_API_SECURE,
)
if not response.ok:
logger.error(
"DocSpec API error: url=%s, status=%d, response=%s",
url,
response.status_code,
response.text[:200] if response.text else "empty",
)
response.raise_for_status()
return response
def convert(self, data, content_type, accept):
"""Convert a Document to BlockNote."""
if not data:
raise ValidationError("Input data cannot be empty")
if content_type != mime_types.DOCX or accept != mime_types.BLOCKNOTE:
raise ValidationError(
f"Conversion from {content_type} to {accept} is not supported."
)
try:
return self._request(settings.DOCSPEC_API_URL, data, content_type).content
except requests.RequestException as err:
logger.exception("DocSpec service error: url=%s", settings.DOCSPEC_API_URL)
raise ServiceUnavailableError(
"Failed to connect to DocSpec conversion service",
) from err
class YdocConverter:
"""Service class for YDoc conversion-related operations."""
"""Service class for conversion-related operations."""
@property
def auth_header(self):
@@ -120,34 +41,32 @@ class YdocConverter:
timeout=settings.CONVERSION_API_TIMEOUT,
verify=settings.CONVERSION_API_SECURE,
)
if not response.ok:
logger.error(
"Y-Provider API error: url=%s, status=%d, response=%s",
url,
response.status_code,
response.text[:200] if response.text else "empty",
)
response.raise_for_status()
return response
def convert(self, data, content_type=mime_types.MARKDOWN, accept=mime_types.YJS):
def convert(
self, text, content_type="text/markdown", accept="application/vnd.yjs.doc"
):
"""Convert a Markdown text into our internal format using an external microservice."""
if not data:
raise ValidationError("Input data cannot be empty")
if not text:
raise ValidationError("Input text cannot be empty")
url = f"{settings.Y_PROVIDER_API_BASE_URL}{settings.CONVERSION_API_ENDPOINT}/"
try:
response = self._request(url, data, content_type, accept)
if accept == mime_types.YJS:
response = self._request(
f"{settings.Y_PROVIDER_API_BASE_URL}{settings.CONVERSION_API_ENDPOINT}/",
text,
content_type,
accept,
)
if accept == "application/vnd.yjs.doc":
return b64encode(response.content).decode("utf-8")
if accept in {mime_types.MARKDOWN, "text/html"}:
if accept in {"text/markdown", "text/html"}:
return response.text
if accept == mime_types.JSON:
if accept == "application/json":
return response.json()
raise ValidationError("Unsupported format")
except requests.RequestException as err:
logger.exception("Y-Provider service error: url=%s", url)
raise ServiceUnavailableError(
f"Failed to connect to YDoc conversion service {content_type}, {accept}",
"Failed to connect to conversion service",
) from err
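A minimal usage sketch matching the new converter signature above (the import path is an assumption; the CONVERSION_API settings must point at a running y-provider instance):

# Sketch only: module path assumed to be core.services.converter_services.
from core.services.converter_services import ConversionError, YdocConverter

converter = YdocConverter()
try:
    # Markdown in, base64-encoded Yjs document out (the default accept value).
    ydoc_b64 = converter.convert("# Hello\n\nSome **markdown** content.")
except ConversionError as err:
    print(f"Conversion failed: {err}")
else:
    print(ydoc_b64[:32], "...")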

View File

@@ -1,8 +0,0 @@
"""MIME type constants for document conversion."""
BLOCKNOTE = "application/vnd.blocknote+json"
YJS = "application/vnd.yjs.doc"
MARKDOWN = "text/markdown"
JSON = "application/json"
DOCX = "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
HTML = "text/html"

View File

@@ -1,298 +0,0 @@
"""Document search index management utilities and indexers"""
import logging
from abc import ABC, abstractmethod
from collections import defaultdict
from functools import cache
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import ImproperlyConfigured
from django.db.models import Subquery
from django.utils.module_loading import import_string
import requests
from core import models, utils
logger = logging.getLogger(__name__)
@cache
def get_document_indexer():
"""Returns an instance of indexer service if enabled and properly configured."""
classpath = settings.SEARCH_INDEXER_CLASS
# For this use case an empty indexer class is not an issue but a feature.
if not classpath:
logger.info("Document indexer is not configured (see SEARCH_INDEXER_CLASS)")
return None
try:
indexer_class = import_string(settings.SEARCH_INDEXER_CLASS)
return indexer_class()
except ImportError as err:
logger.error("SEARCH_INDEXER_CLASS setting is not valid : %s", err)
except ImproperlyConfigured as err:
logger.error("Document indexer is not properly configured : %s", err)
return None
def get_batch_accesses_by_users_and_teams(paths):
"""
Get accesses related to a list of document paths,
grouped by users and teams, including all ancestor paths.
"""
ancestor_map = utils.get_ancestor_to_descendants_map(
paths, steplen=models.Document.steplen
)
ancestor_paths = list(ancestor_map.keys())
access_qs = models.DocumentAccess.objects.filter(
document__path__in=ancestor_paths
).values("document__path", "user__sub", "team")
access_by_document_path = defaultdict(lambda: {"users": set(), "teams": set()})
for access in access_qs:
ancestor_path = access["document__path"]
user_sub = access["user__sub"]
team = access["team"]
for descendant_path in ancestor_map.get(ancestor_path, []):
if user_sub:
access_by_document_path[descendant_path]["users"].add(str(user_sub))
if team:
access_by_document_path[descendant_path]["teams"].add(team)
return dict(access_by_document_path)
def get_visited_document_ids_of(queryset, user):
"""
Returns the ids of the documents that have a linktrace for the user and are NOT owned by them.
It will be used to limit the opensearch responses to the public documents already
"visited" by the user.
"""
if isinstance(user, AnonymousUser):
return []
qs = models.LinkTrace.objects.filter(user=user)
docs = (
queryset.exclude(accesses__user=user)
.filter(
deleted_at__isnull=True,
ancestors_deleted_at__isnull=True,
)
.filter(pk__in=Subquery(qs.values("document_id")))
.order_by("pk")
.distinct("pk")
)
return [str(id) for id in docs.values_list("pk", flat=True)]
class BaseDocumentIndexer(ABC):
"""
Base class for document indexers.
Handles batching and access resolution. Subclasses must implement both
`serialize_document()` and `push()` to define backend-specific behavior.
"""
def __init__(self):
"""
Initialize the indexer.
"""
self.batch_size = settings.SEARCH_INDEXER_BATCH_SIZE
self.indexer_url = settings.SEARCH_INDEXER_URL
self.indexer_secret = settings.SEARCH_INDEXER_SECRET
self.search_url = settings.SEARCH_INDEXER_QUERY_URL
self.search_limit = settings.SEARCH_INDEXER_QUERY_LIMIT
if not self.indexer_url:
raise ImproperlyConfigured(
"SEARCH_INDEXER_URL must be set in Django settings."
)
if not self.indexer_secret:
raise ImproperlyConfigured(
"SEARCH_INDEXER_SECRET must be set in Django settings."
)
if not self.search_url:
raise ImproperlyConfigured(
"SEARCH_INDEXER_QUERY_URL must be set in Django settings."
)
def index(self, queryset=None, batch_size=None):
"""
Fetch documents in batches, serialize them, and push to the search backend.
Args:
queryset (optional): Document queryset
Defaults to all documents without filter.
batch_size (int, optional): Number of documents per batch.
Defaults to settings.SEARCH_INDEXER_BATCH_SIZE.
"""
last_id = 0
count = 0
queryset = queryset or models.Document.objects.all()
batch_size = batch_size or self.batch_size
while True:
documents_batch = list(
queryset.filter(
id__gt=last_id,
).order_by("id")[:batch_size]
)
if not documents_batch:
break
doc_paths = [doc.path for doc in documents_batch]
last_id = documents_batch[-1].id
accesses_by_document_path = get_batch_accesses_by_users_and_teams(doc_paths)
serialized_batch = [
self.serialize_document(document, accesses_by_document_path)
for document in documents_batch
if document.content or document.title
]
if serialized_batch:
self.push(serialized_batch)
count += len(serialized_batch)
return count
@abstractmethod
def serialize_document(self, document, accesses):
"""
Convert a Document instance to a JSON-serializable format for indexing.
Must be implemented by subclasses.
"""
@abstractmethod
def push(self, data):
"""
Push a batch of serialized documents to the backend.
Must be implemented by subclasses.
"""
# pylint: disable-next=too-many-arguments,too-many-positional-arguments
def search(self, text, token, visited=(), nb_results=None):
"""
Search for documents in Find app.
Ensure the same default ordering as the "Docs" list: -updated_at
Returns ids of the documents
Args:
text (str): Text search content.
token (str): OIDC Authentication token.
visited (list, optional):
List of ids of active public documents with LinkTrace
Defaults to an empty tuple.
nb_results (int, optional):
The number of results to return.
Defaults to 50 if not specified.
"""
nb_results = nb_results or self.search_limit
response = self.search_query(
data={
"q": text,
"visited": visited,
"services": ["docs"],
"nb_results": nb_results,
"order_by": "updated_at",
"order_direction": "desc",
},
token=token,
)
return [d["_id"] for d in response]
@abstractmethod
def search_query(self, data, token) -> dict:
"""
Retrieve documents from the Find app API.
Must be implemented by subclasses.
"""
class SearchIndexer(BaseDocumentIndexer):
"""
Document indexer that pushes documents to La Suite Find app.
"""
def serialize_document(self, document, accesses):
"""
Convert a Document to the JSON format expected by La Suite Find.
Args:
document (Document): The document instance.
accesses (dict): Mapping of document ID to user/team access.
Returns:
dict: A JSON-serializable dictionary.
"""
doc_path = document.path
doc_content = document.content
text_content = utils.base64_yjs_to_text(doc_content) if doc_content else ""
return {
"id": str(document.id),
"title": document.title or "",
"content": text_content,
"depth": document.depth,
"path": document.path,
"numchild": document.numchild,
"created_at": document.created_at.isoformat(),
"updated_at": document.updated_at.isoformat(),
"users": list(accesses.get(doc_path, {}).get("users", set())),
"groups": list(accesses.get(doc_path, {}).get("teams", set())),
"reach": document.computed_link_reach,
"size": len(text_content.encode("utf-8")),
"is_active": not bool(document.ancestors_deleted_at),
}
def search_query(self, data, token) -> requests.Response:
"""
Retrieve documents from the Find app API.
Args:
data (dict): search data
token (str): OIDC token
Returns:
dict: A JSON-serializable dictionary.
"""
response = requests.post(
self.search_url,
json=data,
headers={"Authorization": f"Bearer {token}"},
timeout=10,
)
response.raise_for_status()
return response.json()
def push(self, data):
"""
Push a batch of documents to the Find backend.
Args:
data (list): List of document dictionaries.
"""
response = requests.post(
self.indexer_url,
json=data,
headers={"Authorization": f"Bearer {self.indexer_secret}"},
timeout=10,
)
response.raise_for_status()
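A hedged sketch of driving the indexer above from application code, mirroring what the bootstrap_search management command earlier in this diff does:

from core.services.search_indexers import get_document_indexer

indexer = get_document_indexer()
if indexer is None:
    # SEARCH_INDEXER_CLASS is unset or misconfigured: indexation is disabled.
    print("indexer disabled")
else:
    count = indexer.index(batch_size=50)
    print(f"indexed {count} document(s)")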

View File

@@ -1,51 +0,0 @@
"""
Declare and configure the signals for the impress core application
"""
from functools import partial
from django.core.cache import cache
from django.db import transaction
from django.db.models import signals
from django.dispatch import receiver
from core import models
from core.tasks.search import trigger_batch_document_indexer
from core.utils import get_users_sharing_documents_with_cache_key
@receiver(signals.post_save, sender=models.Document)
def document_post_save(sender, instance, **kwargs): # pylint: disable=unused-argument
"""
Asynchronous call to the document indexer at the end of the transaction.
Note: within the transaction the content can still be empty and serialization
could fail, hence the on_commit deferral.
"""
transaction.on_commit(partial(trigger_batch_document_indexer, instance))
@receiver(signals.post_save, sender=models.DocumentAccess)
def document_access_post_save(sender, instance, created, **kwargs): # pylint: disable=unused-argument
"""
Asynchronous call to the document indexer at the end of the transaction.
Clear cache for the affected user.
"""
if not created:
transaction.on_commit(
partial(trigger_batch_document_indexer, instance.document)
)
# Invalidate cache for the user
if instance.user:
cache_key = get_users_sharing_documents_with_cache_key(instance.user)
cache.delete(cache_key)
@receiver(signals.post_delete, sender=models.DocumentAccess)
def document_access_post_delete(sender, instance, **kwargs): # pylint: disable=unused-argument
"""
Clear cache for the affected user when document access is deleted.
"""
if instance.user:
cache_key = get_users_sharing_documents_with_cache_key(instance.user)
cache.delete(cache_key)

View File

@@ -1,95 +0,0 @@
"""Trigger document indexation using celery task."""
from logging import getLogger
from django.conf import settings
from django.core.cache import cache
from django.db.models import Q
from django_redis.cache import RedisCache
from core import models
from core.services.search_indexers import (
get_document_indexer,
)
from impress.celery_app import app
logger = getLogger(__file__)
@app.task
def document_indexer_task(document_id):
"""Celery Task : Sends indexation query for a document."""
indexer = get_document_indexer()
if indexer:
logger.info("Start document %s indexation", document_id)
indexer.index(models.Document.objects.filter(pk=document_id))
def batch_indexer_throttle_acquire(timeout: int = 0, atomic: bool = True):
"""
Enable the task throttle flag for a delay.
Uses redis locks if available to ensure atomic changes
"""
key = "document-batch-indexer-throttle"
# Redis is used as cache database (not in tests). Use the lock feature here
# to ensure atomicity of changes to the throttle flag.
if isinstance(cache, RedisCache) and atomic:
with cache.locks(key):
return batch_indexer_throttle_acquire(timeout, atomic=False)
# Use add() here :
# - set the flag and returns true if not exist
# - do nothing and return false if exist
return cache.add(key, 1, timeout=timeout)
@app.task
def batch_document_indexer_task(timestamp):
"""Celery Task : Sends indexation query for a batch of documents."""
indexer = get_document_indexer()
if indexer:
queryset = models.Document.objects.filter(
Q(updated_at__gte=timestamp)
| Q(deleted_at__gte=timestamp)
| Q(ancestors_deleted_at__gte=timestamp)
)
count = indexer.index(queryset)
logger.info("Indexed %d documents", count)
def trigger_batch_document_indexer(item):
"""
Trigger the indexation task, debounced by the delay set in the SEARCH_INDEXER_COUNTDOWN setting.
Args:
item (Document): The document instance that triggered the indexation.
"""
countdown = int(settings.SEARCH_INDEXER_COUNTDOWN)
# Do not create a task if indexation is disabled
if not settings.SEARCH_INDEXER_CLASS:
return
if countdown > 0:
# While the countdown is running, only the first call acquires the throttle
# flag, so the batch indexation task is scheduled only once per window.
if batch_indexer_throttle_acquire(timeout=countdown):
logger.info(
"Add task for batch document indexation from updated_at=%s in %d seconds",
item.updated_at.isoformat(),
countdown,
)
batch_document_indexer_task.apply_async(
args=[item.updated_at], countdown=countdown
)
else:
logger.info("Skip task for batch document %s indexation", item.pk)
else:
document_indexer_task.apply(args=[item.pk])
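The countdown branch above is a debounce: batch_indexer_throttle_acquire() returns True only for the first call inside the window, so a single batch task is scheduled. A minimal sketch of the underlying cache behaviour (the key name is copied from the code above; the 5-second timeout is illustrative):
# Sketch of the throttle flag semantics relied on above (Django cache.add()).
from django.core.cache import cache
key = "document-batch-indexer-throttle"
cache.delete(key)
assert cache.add(key, 1, timeout=5) is True   # first caller sets the flag and schedules the task
assert cache.add(key, 1, timeout=5) is False  # later callers within the window are skipped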

View File

@@ -1,135 +0,0 @@
"""Processing tasks for user reconciliation CSV imports."""
import csv
import traceback
import uuid
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.db import IntegrityError
from botocore.exceptions import ClientError
from core.models import UserReconciliation, UserReconciliationCsvImport
from impress.celery_app import app
def _process_row(row, job, counters):
"""Process a single row from the CSV file."""
source_unique_id = row["id"].strip()
# Skip entries if they already exist with this source_unique_id
if UserReconciliation.objects.filter(source_unique_id=source_unique_id).exists():
counters["already_processed_source_ids"] += 1
return counters
active_email_checked = row.get("active_email_checked", "0") == "1"
inactive_email_checked = row.get("inactive_email_checked", "0") == "1"
active_email = row["active_email"]
inactive_emails = row["inactive_email"].split("|")
try:
validate_email(active_email)
except ValidationError:
job.send_reconciliation_error_email(
recipient_email=inactive_emails[0], other_email=active_email
)
job.logs += f"Invalid active email address on row {source_unique_id}."
counters["rows_with_errors"] += 1
return counters
for inactive_email in inactive_emails:
try:
validate_email(inactive_email)
except (ValidationError, ValueError):
job.send_reconciliation_error_email(
recipient_email=active_email, other_email=inactive_email
)
job.logs += f"Invalid inactive email address on row {source_unique_id}.\n"
counters["rows_with_errors"] += 1
continue
if inactive_email == active_email:
job.send_reconciliation_error_email(
recipient_email=active_email, other_email=inactive_email
)
job.logs += (
f"Error on row {source_unique_id}: "
f"{active_email} set as both active and inactive email.\n"
)
counters["rows_with_errors"] += 1
continue
_rec_entry = UserReconciliation.objects.create(
active_email=active_email,
inactive_email=inactive_email,
active_email_checked=active_email_checked,
inactive_email_checked=inactive_email_checked,
active_email_confirmation_id=uuid.uuid4(),
inactive_email_confirmation_id=uuid.uuid4(),
source_unique_id=source_unique_id,
status="pending",
)
counters["rec_entries_created"] += 1
return counters
@app.task
def user_reconciliation_csv_import_job(job_id):
"""Process a UserReconciliationCsvImport job.
Creates UserReconciliation entries from the CSV file.
Performs some sanity checks on the data:
- active_email and inactive_email must be valid email addresses
- active_email and inactive_email cannot be the same
Rows with errors are logged in the job logs and skipped, but do not cause
the entire job to fail or prevent the next rows from being processed.
"""
# Imports the CSV file, breaks it into UserReconciliation items
job = UserReconciliationCsvImport.objects.get(id=job_id)
job.status = "running"
job.save()
counters = {
"rec_entries_created": 0,
"rows_with_errors": 0,
"already_processed_source_ids": 0,
}
try:
with job.file.open(mode="r") as f:
reader = csv.DictReader(f)
if not {"active_email", "inactive_email", "id"}.issubset(reader.fieldnames):
raise KeyError(
"CSV is missing mandatory columns: active_email, inactive_email, id"
)
for row in reader:
counters = _process_row(row, job, counters)
job.status = "done"
job.logs += (
f"Import completed successfully. {reader.line_num} rows processed."
f" {counters['rec_entries_created']} reconciliation entries created."
f" {counters['already_processed_source_ids']} rows were already processed."
f" {counters['rows_with_errors']} rows had errors."
)
except (
csv.Error,
KeyError,
ValidationError,
ValueError,
IntegrityError,
OSError,
ClientError,
) as e:
# Catch expected I/O/CSV/model errors and record traceback in logs for debugging
job.status = "error"
job.logs += f"{e!s}\n{traceback.format_exc()}"
finally:
job.save()
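The job expects a CSV exposing at least the active_email, inactive_email and id columns, with multiple inactive addresses separated by "|"; a small sketch of that parsing, using made-up addresses:
# Hedged sketch of the CSV shape the import job expects (emails are made up).
import csv
import io
sample = io.StringIO(
    "active_email,inactive_email,active_email_checked,inactive_email_checked,status,id\n"
    '"user.a@example.com","user.b@example.com|user.c@example.com",0,0,pending,1\n'
)
reader = csv.DictReader(sample)
assert {"active_email", "inactive_email", "id"}.issubset(reader.fieldnames)
for row in reader:
    print(row["id"], row["active_email"], row["inactive_email"].split("|"))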

View File

@@ -0,0 +1,14 @@
<!DOCTYPE html>
<html>
<head>
<title>Generate Document</title>
</head>
<body>
<h2>Generate Document</h2>
<form method="post" enctype="multipart/form-data">
{% csrf_token %}
{{ form.as_p }}
<button type="submit">Generate PDF</button>
</form>
</body>
</html>

View File

@@ -1,4 +1,4 @@
"""Custom template tags for the core application of Docs."""
"""Custom template tags for the core application of People."""
import base64

View File

@@ -2,9 +2,9 @@
import random
import re
from unittest import mock
from django.core.exceptions import SuspiciousOperation
from django.test.utils import override_settings
import pytest
import responses
@@ -12,10 +12,7 @@ from cryptography.fernet import Fernet
from lasuite.oidc_login.backends import get_oidc_refresh_token
from core import models
from core.authentication.backends import (
OIDCAuthenticationBackend,
create_or_update_contact,
)
from core.authentication.backends import OIDCAuthenticationBackend
from core.factories import UserFactory
pytestmark = pytest.mark.django_db
@@ -68,30 +65,6 @@ def test_authentication_getter_existing_user_via_email(
assert user == db_user
def test_authentication_getter_existing_user_via_email_case_insensitive(
django_assert_num_queries, monkeypatch
):
"""
If an existing user doesn't match the sub but matches the email with different case,
the user should be returned (case-insensitive email matching).
"""
klass = OIDCAuthenticationBackend()
db_user = UserFactory(email="john.doe@example.com")
def get_userinfo_mocked(*args):
return {"sub": "123", "email": "JOHN.DOE@EXAMPLE.COM"}
monkeypatch.setattr(OIDCAuthenticationBackend, "get_userinfo", get_userinfo_mocked)
with django_assert_num_queries(4): # user by sub, user by mail, update sub
user = klass.get_or_create_user(
access_token="test-token", id_token=None, payload=None
)
assert user == db_user
def test_authentication_getter_email_none(monkeypatch):
"""
If no user is found with the sub and no email is provided, a new user should be created.
@@ -181,39 +154,6 @@ def test_authentication_getter_existing_user_no_fallback_to_email_no_duplicate(
assert models.User.objects.count() == 1
def test_authentication_getter_existing_user_no_fallback_to_email_no_duplicate_case_insensitive(
settings, monkeypatch
):
"""
When the "OIDC_FALLBACK_TO_EMAIL_FOR_IDENTIFICATION" setting is set to False,
the system should detect duplicate emails even with different case.
"""
klass = OIDCAuthenticationBackend()
_db_user = UserFactory(email="john.doe@example.com")
# Set the setting to False
settings.OIDC_FALLBACK_TO_EMAIL_FOR_IDENTIFICATION = False
settings.OIDC_ALLOW_DUPLICATE_EMAILS = False
def get_userinfo_mocked(*args):
return {"sub": "123", "email": "JOHN.DOE@EXAMPLE.COM"}
monkeypatch.setattr(OIDCAuthenticationBackend, "get_userinfo", get_userinfo_mocked)
with pytest.raises(
SuspiciousOperation,
match=(
"We couldn't find a user with this sub but the email is already associated "
"with a registered user."
),
):
klass.get_or_create_user(access_token="test-token", id_token=None, payload=None)
# Since the sub doesn't match, it should not create a new user
assert models.User.objects.count() == 1
def test_authentication_getter_existing_user_with_email(
django_assert_num_queries, monkeypatch
):
@@ -379,6 +319,85 @@ def test_authentication_getter_new_user_with_email(monkeypatch):
assert models.User.objects.count() == 1
@override_settings(OIDC_OP_USER_ENDPOINT="http://oidc.endpoint.test/userinfo")
@responses.activate
def test_authentication_get_userinfo_json_response():
"""Test get_userinfo method with a JSON response."""
responses.add(
responses.GET,
re.compile(r".*/userinfo"),
json={
"first_name": "John",
"last_name": "Doe",
"email": "john.doe@example.com",
},
status=200,
)
oidc_backend = OIDCAuthenticationBackend()
result = oidc_backend.get_userinfo("fake_access_token", None, None)
assert result["first_name"] == "John"
assert result["last_name"] == "Doe"
assert result["email"] == "john.doe@example.com"
@override_settings(OIDC_OP_USER_ENDPOINT="http://oidc.endpoint.test/userinfo")
@responses.activate
def test_authentication_get_userinfo_token_response(monkeypatch, settings):
"""Test get_userinfo method with a token response."""
settings.OIDC_RP_SIGN_ALGO = "HS256" # disable JWKS URL call
responses.add(
responses.GET,
re.compile(r".*/userinfo"),
body="fake.jwt.token",
status=200,
content_type="application/jwt",
)
def mock_verify_token(self, token): # pylint: disable=unused-argument
return {
"first_name": "Jane",
"last_name": "Doe",
"email": "jane.doe@example.com",
}
monkeypatch.setattr(OIDCAuthenticationBackend, "verify_token", mock_verify_token)
oidc_backend = OIDCAuthenticationBackend()
result = oidc_backend.get_userinfo("fake_access_token", None, None)
assert result["first_name"] == "Jane"
assert result["last_name"] == "Doe"
assert result["email"] == "jane.doe@example.com"
@override_settings(OIDC_OP_USER_ENDPOINT="http://oidc.endpoint.test/userinfo")
@responses.activate
def test_authentication_get_userinfo_invalid_response(settings):
"""
Test get_userinfo method with an invalid JWT response that
causes verify_token to raise an error.
"""
settings.OIDC_RP_SIGN_ALGO = "HS256" # disable JWKS URL call
responses.add(
responses.GET,
re.compile(r".*/userinfo"),
body="fake.jwt.token",
status=200,
content_type="application/jwt",
)
oidc_backend = OIDCAuthenticationBackend()
with pytest.raises(
SuspiciousOperation,
match="User info response was not valid JWT",
):
oidc_backend.get_userinfo("fake_access_token", None, None)
def test_authentication_getter_existing_disabled_user_via_sub(
django_assert_num_queries, monkeypatch
):
@@ -490,79 +509,3 @@ def test_authentication_session_tokens(
assert user is not None
assert request.session["oidc_access_token"] == "test-access-token"
assert get_oidc_refresh_token(request.session) == "test-refresh-token"
def test_authentication_post_get_or_create_user_new_user_to_marketing_email(settings):
"""
New user and SIGNUP_NEW_USER_TO_MARKETING_EMAIL enabled should create a contact
in the marketing backend.
"""
user = UserFactory()
settings.SIGNUP_NEW_USER_TO_MARKETING_EMAIL = True
klass = OIDCAuthenticationBackend()
with mock.patch.object(
create_or_update_contact, "delay"
) as mock_create_or_update_contact:
klass.post_get_or_create_user(user, {}, True)
mock_create_or_update_contact.assert_called_once_with(
email=user.email, attributes={"DOCS_SOURCE": ["SIGNIN"]}
)
def test_authentication_post_get_or_create_user_new_user_to_marketing_email_disabled(
settings,
):
"""
New user and SIGNUP_NEW_USER_TO_MARKETING_EMAIL disabled should not create a contact
in the marketing backend.
"""
user = UserFactory()
settings.SIGNUP_NEW_USER_TO_MARKETING_EMAIL = False
klass = OIDCAuthenticationBackend()
with mock.patch.object(
create_or_update_contact, "delay"
) as mock_create_or_update_contact:
klass.post_get_or_create_user(user, {}, True)
mock_create_or_update_contact.assert_not_called()
def test_authentication_post_get_or_create_user_existing_user_to_marketing_email(
settings,
):
"""
Existing user and SIGNUP_NEW_USER_TO_MARKETING_EMAIL enabled should not create a contact
in the marketing backend.
"""
user = UserFactory()
settings.SIGNUP_NEW_USER_TO_MARKETING_EMAIL = True
klass = OIDCAuthenticationBackend()
with mock.patch.object(
create_or_update_contact, "delay"
) as mock_create_or_update_contact:
klass.post_get_or_create_user(user, {}, False)
mock_create_or_update_contact.assert_not_called()
def test_authentication_post_get_or_create_user_existing_user_to_marketing_email_disabled(
settings,
):
"""
Existing user and SIGNUP_NEW_USER_TO_MARKETING_EMAIL disabled should not create a contact
in the marketing backend.
"""
user = UserFactory()
settings.SIGNUP_NEW_USER_TO_MARKETING_EMAIL = False
klass = OIDCAuthenticationBackend()
with mock.patch.object(
create_or_update_contact, "delay"
) as mock_create_or_update_contact:
klass.post_get_or_create_user(user, {}, False)
mock_create_or_update_contact.assert_not_called()

View File

@@ -1,65 +0,0 @@
"""
Unit test for `index` command.
"""
from operator import itemgetter
from unittest import mock
from django.core.management import CommandError, call_command
from django.db import transaction
import pytest
from core import factories
from core.services.search_indexers import SearchIndexer
@pytest.mark.django_db
@pytest.mark.usefixtures("indexer_settings")
def test_index():
"""Test the command `index` that run the Find app indexer for all the available documents."""
user = factories.UserFactory()
indexer = SearchIndexer()
with transaction.atomic():
doc = factories.DocumentFactory()
empty_doc = factories.DocumentFactory(title=None, content="")
no_title_doc = factories.DocumentFactory(title=None)
factories.UserDocumentAccessFactory(document=doc, user=user)
factories.UserDocumentAccessFactory(document=empty_doc, user=user)
factories.UserDocumentAccessFactory(document=no_title_doc, user=user)
accesses = {
str(doc.path): {"users": [user.sub]},
str(empty_doc.path): {"users": [user.sub]},
str(no_title_doc.path): {"users": [user.sub]},
}
with mock.patch.object(SearchIndexer, "push") as mock_push:
call_command("index")
push_call_args = [call.args[0] for call in mock_push.call_args_list]
# called once but with a batch of docs
mock_push.assert_called_once()
assert sorted(push_call_args[0], key=itemgetter("id")) == sorted(
[
indexer.serialize_document(doc, accesses),
indexer.serialize_document(no_title_doc, accesses),
],
key=itemgetter("id"),
)
@pytest.mark.django_db
@pytest.mark.usefixtures("indexer_settings")
def test_index_improperly_configured(indexer_settings):
"""The command should raise an exception if the indexer is not configured"""
indexer_settings.SEARCH_INDEXER_CLASS = None
with pytest.raises(CommandError) as err:
call_command("index")
assert str(err.value) == "The indexer is not enabled or properly configured."

View File

@@ -24,30 +24,3 @@ def mock_user_teams():
"core.models.User.teams", new_callable=mock.PropertyMock
) as mock_teams:
yield mock_teams
@pytest.fixture(name="indexer_settings")
def indexer_settings_fixture(settings):
"""
Set up valid settings for the document indexer and clear the indexer cache.
"""
# pylint: disable-next=import-outside-toplevel
from core.services.search_indexers import ( # noqa: PLC0415
get_document_indexer,
)
get_document_indexer.cache_clear()
settings.SEARCH_INDEXER_CLASS = "core.services.search_indexers.SearchIndexer"
settings.SEARCH_INDEXER_SECRET = "ThisIsAKeyForTest"
settings.SEARCH_INDEXER_URL = "http://localhost:8081/api/v1.0/documents/index/"
settings.SEARCH_INDEXER_QUERY_URL = (
"http://localhost:8081/api/v1.0/documents/search/"
)
settings.SEARCH_INDEXER_COUNTDOWN = 1
yield settings
# clear cache to prevent issues with other tests
get_document_indexer.cache_clear()
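Tests opt into this configuration by requesting the fixture by name, as the indexer tests above do; a minimal usage sketch (the test body is illustrative):
# Hedged usage sketch of the indexer_settings fixture defined above.
import pytest
@pytest.mark.django_db
@pytest.mark.usefixtures("indexer_settings")
def test_with_indexer_enabled():
    # SEARCH_INDEXER_* settings from the fixture are active within this test.
    ...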

View File

@@ -1,6 +0,0 @@
active_email,inactive_email,active_email_checked,inactive_email_checked,status,id
"user.test40@example.com","user.test41@example.com",0,0,pending,1
"user.test42@example.com","user.test43@example.com",0,1,pending,2
"user.test44@example.com","user.test45@example.com",1,0,pending,3
"user.test46@example.com","user.test47@example.com",1,1,pending,4
"user.test48@example.com","user.test49@example.com",1,1,pending,5

View File

@@ -1,2 +0,0 @@
active_email,inactive_email,active_email_checked,inactive_email_checked,status,id
"user.test40@example.com",,0,0,pending,40

View File

@@ -1,5 +0,0 @@
merge_accept,active_email,inactive_email,status,id
true,user.test10@example.com,user.test11@example.com|user.test12@example.com,pending,10
true,user.test30@example.com,user.test31@example.com|user.test32@example.com|user.test33@example.com|user.test34@example.com|user.test35@example.com,pending,11
true,user.test20@example.com,user.test21@example.com,pending,12
true,user.test22@example.com,user.test23@example.com,pending,13

View File

@@ -1,2 +0,0 @@
merge_accept,active_email,inactive_email,status,id
true,user.test20@example.com,user.test20@example.com,pending,20

View File

@@ -1,6 +0,0 @@
active_email,inactive_email,active_email_checked,inactive_email_checked,status
"user.test40@example.com","user.test41@example.com",0,0,pending
"user.test42@example.com","user.test43@example.com",0,1,pending
"user.test44@example.com","user.test45@example.com",1,0,pending
"user.test46@example.com","user.test47@example.com",1,1,pending
"user.test48@example.com","user.test49@example.com",1,1,pending

View File

@@ -596,38 +596,6 @@ def test_api_document_invitations_create_cannot_invite_existing_users():
}
def test_api_item_invitations_create_cannot_invite_existing_users_case_insensitive():
"""
It should not be possible to invite already existing users, even with different email case.
"""
user = factories.UserFactory()
document = factories.DocumentFactory(users=[(user, "owner")])
existing_user = factories.UserFactory()
# Build an invitation to the email of an existing identity with different case
invitation_values = {
"email": existing_user.email.upper(),
"role": random.choice(models.RoleChoices.values),
}
client = APIClient()
client.force_login(user)
response = client.post(
f"/api/v1.0/documents/{document.id!s}/invitations/",
invitation_values,
format="json",
)
assert response.status_code == 400
assert response.json() == {
"email": ["This email is already associated to a registered user."]
}
def test_api_document_invitations_create_lower_email():
"""
No matter the case, the email should be converted to lowercase.

View File

@@ -1,427 +0,0 @@
"""
Tests for Documents API endpoint in impress's core app: all
The 'all' endpoint returns ALL documents (including descendants) that the user has access to.
This is different from the 'list' endpoint which only returns top-level documents.
"""
from datetime import timedelta
from unittest import mock
from django.utils import timezone
import pytest
from rest_framework.test import APIClient
from core import factories, models
pytestmark = pytest.mark.django_db
@pytest.mark.parametrize("role", models.LinkRoleChoices.values)
@pytest.mark.parametrize("reach", models.LinkReachChoices.values)
def test_api_documents_all_anonymous(reach, role):
"""
Anonymous users should not be able to list any documents via the all endpoint
whatever the link reach and link role.
"""
parent = factories.DocumentFactory(link_reach=reach, link_role=role)
factories.DocumentFactory(parent=parent, link_reach=reach, link_role=role)
response = APIClient().get("/api/v1.0/documents/all/")
assert response.status_code == 200
results = response.json()["results"]
assert len(results) == 0
def test_api_documents_all_authenticated_with_children():
"""
Authenticated users should see all documents including children,
even though children don't have DocumentAccess records.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Create a document tree: parent -> child -> grandchild
parent = factories.DocumentFactory()
factories.UserDocumentAccessFactory(document=parent, user=user, role="owner")
child = factories.DocumentFactory(parent=parent)
grandchild = factories.DocumentFactory(parent=child)
# Verify setup
assert models.DocumentAccess.objects.filter(document=parent).count() == 1
assert models.DocumentAccess.objects.filter(document=child).count() == 0
assert models.DocumentAccess.objects.filter(document=grandchild).count() == 0
response = client.get("/api/v1.0/documents/all/")
assert response.status_code == 200
results = response.json()["results"]
# All three documents should be returned (parent + child + grandchild)
assert len(results) == 3
results_ids = {result["id"] for result in results}
assert results_ids == {str(parent.id), str(child.id), str(grandchild.id)}
depths = {result["depth"] for result in results}
assert depths == {1, 2, 3}
def test_api_documents_all_authenticated_multiple_trees():
"""
Users should see all accessible documents from multiple document trees.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Tree 1: User has access
tree1_parent = factories.DocumentFactory()
factories.UserDocumentAccessFactory(document=tree1_parent, user=user)
tree1_child = factories.DocumentFactory(parent=tree1_parent)
# Tree 2: User has access
tree2_parent = factories.DocumentFactory()
factories.UserDocumentAccessFactory(document=tree2_parent, user=user)
tree2_child1 = factories.DocumentFactory(parent=tree2_parent)
tree2_child2 = factories.DocumentFactory(parent=tree2_parent)
# Tree 3: User does NOT have access
tree3_parent = factories.DocumentFactory()
factories.DocumentFactory(parent=tree3_parent)
response = client.get("/api/v1.0/documents/all/")
assert response.status_code == 200
results = response.json()["results"]
# Should return 5 documents (tree1: 2, tree2: 3, tree3: 0)
assert len(results) == 5
results_ids = {result["id"] for result in results}
expected_ids = {
str(tree1_parent.id),
str(tree1_child.id),
str(tree2_parent.id),
str(tree2_child1.id),
str(tree2_child2.id),
}
assert results_ids == expected_ids
def test_api_documents_all_authenticated_explicit_access_to_parent_and_child():
"""
When a user has explicit DocumentAccess to both parent AND child,
both should appear in the 'all' endpoint results (unlike 'list' which deduplicates).
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Parent with explicit access
parent = factories.DocumentFactory()
factories.UserDocumentAccessFactory(document=parent, user=user)
# Child also has explicit access (e.g., shared separately)
child = factories.DocumentFactory(parent=parent)
factories.UserDocumentAccessFactory(document=child, user=user)
# Grandchild has no explicit access
grandchild = factories.DocumentFactory(parent=child)
# Verify setup
assert models.DocumentAccess.objects.filter(document=parent).count() == 1
assert models.DocumentAccess.objects.filter(document=child).count() == 1
assert models.DocumentAccess.objects.filter(document=grandchild).count() == 0
response = client.get("/api/v1.0/documents/all/")
assert response.status_code == 200
results = response.json()["results"]
# All three should appear
assert len(results) == 3
results_ids = {result["id"] for result in results}
assert results_ids == {str(parent.id), str(child.id), str(grandchild.id)}
# Each document should appear exactly once (no duplicates)
results_ids_list = [result["id"] for result in results]
assert len(results_ids_list) == len(set(results_ids_list)) # No duplicates
def test_api_documents_all_authenticated_via_team(mock_user_teams):
"""
Users should see all documents (including descendants) for documents accessed via teams.
"""
mock_user_teams.return_value = ["team1", "team2"]
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Document tree via team1
parent1 = factories.DocumentFactory()
factories.TeamDocumentAccessFactory(document=parent1, team="team1")
child1 = factories.DocumentFactory(parent=parent1)
# Document tree via team2
parent2 = factories.DocumentFactory()
factories.TeamDocumentAccessFactory(document=parent2, team="team2")
child2 = factories.DocumentFactory(parent=parent2)
# Document tree via unknown team
parent3 = factories.DocumentFactory()
factories.TeamDocumentAccessFactory(document=parent3, team="team3")
factories.DocumentFactory(parent=parent3)
response = client.get("/api/v1.0/documents/all/")
assert response.status_code == 200
results = response.json()["results"]
# Should return 4 documents (team1: 2, team2: 2, team3: 0)
assert len(results) == 4
results_ids = {result["id"] for result in results}
expected_ids = {
str(parent1.id),
str(child1.id),
str(parent2.id),
str(child2.id),
}
assert results_ids == expected_ids
def test_api_documents_all_authenticated_soft_deleted():
"""
Soft-deleted documents and their descendants should not be included.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Active tree
active_parent = factories.DocumentFactory()
factories.UserDocumentAccessFactory(document=active_parent, user=user)
active_child = factories.DocumentFactory(parent=active_parent)
# Soft-deleted tree
deleted_parent = factories.DocumentFactory()
factories.UserDocumentAccessFactory(document=deleted_parent, user=user)
_deleted_child = factories.DocumentFactory(parent=deleted_parent)
deleted_parent.soft_delete()
response = client.get("/api/v1.0/documents/all/")
assert response.status_code == 200
results = response.json()["results"]
# Should only return active documents
assert len(results) == 2
results_ids = {result["id"] for result in results}
assert results_ids == {str(active_parent.id), str(active_child.id)}
def test_api_documents_all_authenticated_permanently_deleted():
"""
Permanently deleted documents should not be included.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Active tree
active_parent = factories.DocumentFactory()
factories.UserDocumentAccessFactory(document=active_parent, user=user)
active_child = factories.DocumentFactory(parent=active_parent)
# Permanently deleted tree (deleted > 30 days ago)
deleted_parent = factories.DocumentFactory()
factories.UserDocumentAccessFactory(document=deleted_parent, user=user)
_deleted_child = factories.DocumentFactory(parent=deleted_parent)
forty_days_ago = timezone.now() - timedelta(days=40)
with mock.patch("django.utils.timezone.now", return_value=forty_days_ago):
deleted_parent.soft_delete()
response = client.get("/api/v1.0/documents/all/")
assert response.status_code == 200
results = response.json()["results"]
# Should only return active documents
assert len(results) == 2
results_ids = {result["id"] for result in results}
assert results_ids == {str(active_parent.id), str(active_child.id)}
def test_api_documents_all_authenticated_link_reach_restricted():
"""
Documents with link_reach=restricted accessed via LinkTrace should not appear
in the all endpoint results.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Document with direct access (should appear)
parent_with_access = factories.DocumentFactory()
factories.UserDocumentAccessFactory(document=parent_with_access, user=user)
child_with_access = factories.DocumentFactory(parent=parent_with_access)
# Document with only LinkTrace and restricted reach (should NOT appear)
parent_restricted = factories.DocumentFactory(
link_reach="restricted", link_traces=[user]
)
factories.DocumentFactory(parent=parent_restricted)
response = client.get("/api/v1.0/documents/all/")
assert response.status_code == 200
results = response.json()["results"]
# Only documents with direct access should appear
assert len(results) == 2
results_ids = {result["id"] for result in results}
assert results_ids == {str(parent_with_access.id), str(child_with_access.id)}
@pytest.mark.parametrize("reach", ["public", "authenticated"])
def test_api_documents_all_authenticated_link_reach_public_or_authenticated(reach):
"""
Documents with link_reach=public or authenticated accessed via LinkTrace
should appear with all their descendants.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Document accessed via LinkTrace with non-restricted reach
parent = factories.DocumentFactory(link_reach=reach, link_traces=[user])
child = factories.DocumentFactory(parent=parent)
grandchild = factories.DocumentFactory(parent=child)
response = client.get("/api/v1.0/documents/all/")
assert response.status_code == 200
results = response.json()["results"]
# All descendants should be included
assert len(results) == 3
results_ids = {result["id"] for result in results}
assert results_ids == {str(parent.id), str(child.id), str(grandchild.id)}
def test_api_documents_all_format():
"""Validate the format of documents as returned by the all endpoint."""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
document = factories.DocumentFactory()
access = factories.UserDocumentAccessFactory(document=document, user=user)
child = factories.DocumentFactory(parent=document)
response = client.get("/api/v1.0/documents/all/")
assert response.status_code == 200
content = response.json()
results = content.pop("results")
# Check pagination structure
assert content == {
"count": 2,
"next": None,
"previous": None,
}
# Verify parent document format
parent_result = [r for r in results if r["id"] == str(document.id)][0]
assert parent_result == {
"id": str(document.id),
"abilities": document.get_abilities(user),
"ancestors_link_reach": None,
"ancestors_link_role": None,
"computed_link_reach": document.computed_link_reach,
"computed_link_role": document.computed_link_role,
"created_at": document.created_at.isoformat().replace("+00:00", "Z"),
"creator": str(document.creator.id),
"deleted_at": None,
"depth": 1,
"excerpt": document.excerpt,
"is_favorite": False,
"link_reach": document.link_reach,
"link_role": document.link_role,
"nb_accesses_ancestors": 1,
"nb_accesses_direct": 1,
"numchild": 1,
"path": document.path,
"title": document.title,
"updated_at": document.updated_at.isoformat().replace("+00:00", "Z"),
"user_role": access.role,
}
# Verify child document format
child_result = [r for r in results if r["id"] == str(child.id)][0]
assert child_result["depth"] == 2
assert child_result["user_role"] == access.role # Inherited from parent
assert child_result["nb_accesses_direct"] == 0 # No direct access on child
def test_api_documents_all_distinct():
"""
A document should only appear once even if the user has multiple access paths to it.
"""
user = factories.UserFactory()
other_user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Document with multiple accesses for the same user
document = factories.DocumentFactory(users=[user, other_user])
child = factories.DocumentFactory(parent=document)
response = client.get("/api/v1.0/documents/all/")
assert response.status_code == 200
results = response.json()["results"]
# Should return 2 documents (parent + child), each appearing once
assert len(results) == 2
results_ids = [result["id"] for result in results]
assert results_ids.count(str(document.id)) == 1
assert results_ids.count(str(child.id)) == 1
def test_api_documents_all_comparison_with_list():
"""
The 'all' endpoint should return more documents than 'list' when there are children.
'list' returns only top-level documents, 'all' returns all descendants.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Create a document tree
parent = factories.DocumentFactory()
factories.UserDocumentAccessFactory(document=parent, user=user)
child = factories.DocumentFactory(parent=parent)
grandchild = factories.DocumentFactory(parent=child)
# Call list endpoint
list_response = client.get("/api/v1.0/documents/")
list_results = list_response.json()["results"]
# Call all endpoint
all_response = client.get("/api/v1.0/documents/all/")
all_results = all_response.json()["results"]
# list should return only parent
assert len(list_results) == 1
assert list_results[0]["id"] == str(parent.id)
# all should return parent + child + grandchild
assert len(all_results) == 3
all_ids = {result["id"] for result in all_results}
assert all_ids == {str(parent.id), str(child.id), str(grandchild.id)}

View File

@@ -1,8 +1,5 @@
"""Test on the CORS proxy API for documents."""
import socket
import unittest.mock
import pytest
import responses
from requests.exceptions import RequestException
@@ -13,17 +10,11 @@ from core import factories
pytestmark = pytest.mark.django_db
@unittest.mock.patch("core.api.viewsets.socket.getaddrinfo")
@responses.activate
def test_api_docs_cors_proxy_valid_url(mock_getaddrinfo):
def test_api_docs_cors_proxy_valid_url():
"""Test the CORS proxy API for documents with a valid URL."""
document = factories.DocumentFactory(link_reach="public")
# Mock DNS resolution to return a public IP address
mock_getaddrinfo.return_value = [
(socket.AF_INET, socket.SOCK_STREAM, 0, "", ("8.8.8.8", 0))
]
client = APIClient()
url_to_fetch = "https://external-url.com/assets/logo-gouv.png"
responses.get(url_to_fetch, body=b"", status=200, content_type="image/png")
@@ -65,17 +56,11 @@ def test_api_docs_cors_proxy_without_url_query_string():
assert response.json() == {"detail": "Missing 'url' query parameter"}
@unittest.mock.patch("core.api.viewsets.socket.getaddrinfo")
@responses.activate
def test_api_docs_cors_proxy_anonymous_document_not_public(mock_getaddrinfo):
def test_api_docs_cors_proxy_anonymous_document_not_public():
"""Test the CORS proxy API for documents with an anonymous user and a non-public document."""
document = factories.DocumentFactory(link_reach="authenticated")
# Mock DNS resolution to return a public IP address
mock_getaddrinfo.return_value = [
(socket.AF_INET, socket.SOCK_STREAM, 0, "", ("8.8.8.8", 0))
]
client = APIClient()
url_to_fetch = "https://external-url.com/assets/logo-gouv.png"
responses.get(url_to_fetch, body=b"", status=200, content_type="image/png")
@@ -88,22 +73,14 @@ def test_api_docs_cors_proxy_anonymous_document_not_public(mock_getaddrinfo):
}
@unittest.mock.patch("core.api.viewsets.socket.getaddrinfo")
@responses.activate
def test_api_docs_cors_proxy_authenticated_user_accessing_protected_doc(
mock_getaddrinfo,
):
def test_api_docs_cors_proxy_authenticated_user_accessing_protected_doc():
"""
Test the CORS proxy API for documents with an authenticated user accessing a protected
document.
"""
document = factories.DocumentFactory(link_reach="authenticated")
# Mock DNS resolution to return a public IP address
mock_getaddrinfo.return_value = [
(socket.AF_INET, socket.SOCK_STREAM, 0, "", ("8.8.8.8", 0))
]
user = factories.UserFactory()
client = APIClient()
@@ -138,22 +115,14 @@ def test_api_docs_cors_proxy_authenticated_user_accessing_protected_doc(
assert response.streaming_content
@unittest.mock.patch("core.api.viewsets.socket.getaddrinfo")
@responses.activate
def test_api_docs_cors_proxy_authenticated_not_accessing_restricted_doc(
mock_getaddrinfo,
):
def test_api_docs_cors_proxy_authenticated_not_accessing_restricted_doc():
"""
Test the CORS proxy API for documents with an authenticated user not accessing a restricted
document.
"""
document = factories.DocumentFactory(link_reach="restricted")
# Mock DNS resolution to return a public IP address
mock_getaddrinfo.return_value = [
(socket.AF_INET, socket.SOCK_STREAM, 0, "", ("8.8.8.8", 0))
]
user = factories.UserFactory()
client = APIClient()
@@ -169,72 +138,18 @@ def test_api_docs_cors_proxy_authenticated_not_accessing_restricted_doc(
}
@unittest.mock.patch("core.api.viewsets.socket.getaddrinfo")
@responses.activate
def test_api_docs_cors_proxy_unsupported_media_type(mock_getaddrinfo):
def test_api_docs_cors_proxy_unsupported_media_type():
"""Test the CORS proxy API for documents with an unsupported media type."""
document = factories.DocumentFactory(link_reach="public")
# Mock DNS resolution to return a public IP address
mock_getaddrinfo.return_value = [
(socket.AF_INET, socket.SOCK_STREAM, 0, "", ("8.8.8.8", 0))
]
client = APIClient()
url_to_fetch = "https://external-url.com/assets/index.html"
responses.get(url_to_fetch, body=b"", status=200, content_type="text/html")
response = client.get(
f"/api/v1.0/documents/{document.id!s}/cors-proxy/?url={url_to_fetch}"
)
assert response.status_code == 400
assert response.json() == {"detail": "Invalid URL used."}
@unittest.mock.patch("core.api.viewsets.socket.getaddrinfo")
@responses.activate
def test_api_docs_cors_proxy_redirect(mock_getaddrinfo):
"""Test the CORS proxy API for documents with a redirect."""
document = factories.DocumentFactory(link_reach="public")
# Mock DNS resolution to return a public IP address
mock_getaddrinfo.return_value = [
(socket.AF_INET, socket.SOCK_STREAM, 0, "", ("8.8.8.8", 0))
]
client = APIClient()
url_to_fetch = "https://external-url.com/assets/index.html"
responses.get(
url_to_fetch,
body=b"",
status=302,
headers={"Location": "https://external-url.com/other/assets/index.html"},
)
response = client.get(
f"/api/v1.0/documents/{document.id!s}/cors-proxy/?url={url_to_fetch}"
)
assert response.status_code == 400
assert response.json() == {"detail": "Invalid URL used."}
@unittest.mock.patch("core.api.viewsets.socket.getaddrinfo")
@responses.activate
def test_api_docs_cors_proxy_url_not_returning_200(mock_getaddrinfo):
"""Test the CORS proxy API for documents with a URL that does not return 200."""
document = factories.DocumentFactory(link_reach="public")
# Mock DNS resolution to return a public IP address
mock_getaddrinfo.return_value = [
(socket.AF_INET, socket.SOCK_STREAM, 0, "", ("8.8.8.8", 0))
]
client = APIClient()
url_to_fetch = "https://external-url.com/assets/index.html"
responses.get(url_to_fetch, body=b"", status=404)
response = client.get(
f"/api/v1.0/documents/{document.id!s}/cors-proxy/?url={url_to_fetch}"
)
assert response.status_code == 400
assert response.json() == {"detail": "Invalid URL used."}
assert response.status_code == 415
@pytest.mark.parametrize(
@@ -258,17 +173,11 @@ def test_api_docs_cors_proxy_invalid_url(url_to_fetch):
assert response.json() == ["Enter a valid URL."]
@unittest.mock.patch("core.api.viewsets.socket.getaddrinfo")
@responses.activate
def test_api_docs_cors_proxy_request_failed(mock_getaddrinfo):
def test_api_docs_cors_proxy_request_failed():
"""Test the CORS proxy API for documents with a request failed."""
document = factories.DocumentFactory(link_reach="public")
# Mock DNS resolution to return a public IP address
mock_getaddrinfo.return_value = [
(socket.AF_INET, socket.SOCK_STREAM, 0, "", ("8.8.8.8", 0))
]
client = APIClient()
url_to_fetch = "https://external-url.com/assets/index.html"
responses.get(url_to_fetch, body=RequestException("Connection refused"))
@@ -276,164 +185,6 @@ def test_api_docs_cors_proxy_request_failed(mock_getaddrinfo):
f"/api/v1.0/documents/{document.id!s}/cors-proxy/?url={url_to_fetch}"
)
assert response.status_code == 400
assert response.json() == {"detail": "Invalid URL used."}
@pytest.mark.parametrize(
"url_to_fetch",
[
"http://localhost/image.png",
"https://localhost/image.png",
"http://127.0.0.1/image.png",
"https://127.0.0.1/image.png",
"http://0.0.0.0/image.png",
"https://0.0.0.0/image.png",
"http://[::1]/image.png",
"https://[::1]/image.png",
"http://[0:0:0:0:0:0:0:1]/image.png",
"https://[0:0:0:0:0:0:0:1]/image.png",
],
)
def test_api_docs_cors_proxy_blocks_localhost(url_to_fetch):
"""Test that the CORS proxy API blocks localhost variations."""
document = factories.DocumentFactory(link_reach="public")
client = APIClient()
response = client.get(
f"/api/v1.0/documents/{document.id!s}/cors-proxy/?url={url_to_fetch}"
)
assert response.status_code == 400
assert response.json()["detail"] == "Invalid URL used."
@pytest.mark.parametrize(
"url_to_fetch",
[
"http://10.0.0.1/image.png",
"https://10.0.0.1/image.png",
"http://172.16.0.1/image.png",
"https://172.16.0.1/image.png",
"http://192.168.1.1/image.png",
"https://192.168.1.1/image.png",
"http://10.255.255.255/image.png",
"https://10.255.255.255/image.png",
"http://172.31.255.255/image.png",
"https://172.31.255.255/image.png",
"http://192.168.255.255/image.png",
"https://192.168.255.255/image.png",
],
)
def test_api_docs_cors_proxy_blocks_private_ips(url_to_fetch):
"""Test that the CORS proxy API blocks private IP addresses."""
document = factories.DocumentFactory(link_reach="public")
client = APIClient()
response = client.get(
f"/api/v1.0/documents/{document.id!s}/cors-proxy/?url={url_to_fetch}"
)
assert response.status_code == 400
assert response.json()["detail"] == "Invalid URL used."
@pytest.mark.parametrize(
"url_to_fetch",
[
"http://169.254.1.1/image.png",
"https://169.254.1.1/image.png",
"http://169.254.255.255/image.png",
"https://169.254.255.255/image.png",
],
)
def test_api_docs_cors_proxy_blocks_link_local(url_to_fetch):
"""Test that the CORS proxy API blocks link-local addresses."""
document = factories.DocumentFactory(link_reach="public")
client = APIClient()
response = client.get(
f"/api/v1.0/documents/{document.id!s}/cors-proxy/?url={url_to_fetch}"
)
assert response.status_code == 400
assert response.json()["detail"] == "Invalid URL used."
@unittest.mock.patch("core.api.viewsets.socket.getaddrinfo")
@responses.activate
def test_api_docs_cors_proxy_blocks_dns_rebinding_to_private_ip(mock_getaddrinfo):
"""Test that the CORS proxy API blocks DNS rebinding attacks to private IPs."""
document = factories.DocumentFactory(link_reach="public")
# Mock DNS resolution to return a private IP address
mock_getaddrinfo.return_value = [
(socket.AF_INET, socket.SOCK_STREAM, 0, "", ("192.168.1.1", 0))
]
client = APIClient()
url_to_fetch = "https://malicious-domain.com/image.png"
response = client.get(
f"/api/v1.0/documents/{document.id!s}/cors-proxy/?url={url_to_fetch}"
)
assert response.status_code == 400
assert response.json()["detail"] == "Invalid URL used."
mock_getaddrinfo.assert_called_once()
@unittest.mock.patch("core.api.viewsets.socket.getaddrinfo")
@responses.activate
def test_api_docs_cors_proxy_blocks_dns_rebinding_to_localhost(mock_getaddrinfo):
"""Test that the CORS proxy API blocks DNS rebinding attacks to localhost."""
document = factories.DocumentFactory(link_reach="public")
# Mock DNS resolution to return localhost
mock_getaddrinfo.return_value = [
(socket.AF_INET, socket.SOCK_STREAM, 0, "", ("127.0.0.1", 0))
]
client = APIClient()
url_to_fetch = "https://malicious-domain.com/image.png"
response = client.get(
f"/api/v1.0/documents/{document.id!s}/cors-proxy/?url={url_to_fetch}"
)
assert response.status_code == 400
assert response.json()["detail"] == "Invalid URL used."
mock_getaddrinfo.assert_called_once()
@unittest.mock.patch("core.api.viewsets.socket.getaddrinfo")
def test_api_docs_cors_proxy_handles_dns_resolution_failure(mock_getaddrinfo):
"""Test that the CORS proxy API handles DNS resolution failures gracefully."""
document = factories.DocumentFactory(link_reach="public")
# Mock DNS resolution to fail
mock_getaddrinfo.side_effect = socket.gaierror("Name or service not known")
client = APIClient()
url_to_fetch = "https://nonexistent-domain-12345.com/image.png"
response = client.get(
f"/api/v1.0/documents/{document.id!s}/cors-proxy/?url={url_to_fetch}"
)
assert response.status_code == 400
assert response.json()["detail"] == "Invalid URL used."
mock_getaddrinfo.assert_called_once()
@unittest.mock.patch("core.api.viewsets.socket.getaddrinfo")
def test_api_docs_cors_proxy_blocks_multiple_resolved_ips_if_any_private(
mock_getaddrinfo,
):
"""Test that the CORS proxy API blocks if any resolved IP is private."""
document = factories.DocumentFactory(link_reach="public")
# Mock DNS resolution to return both public and private IPs
mock_getaddrinfo.return_value = [
(socket.AF_INET, socket.SOCK_STREAM, 0, "", ("8.8.8.8", 0)),
(socket.AF_INET, socket.SOCK_STREAM, 0, "", ("192.168.1.1", 0)),
]
client = APIClient()
url_to_fetch = "https://example.com/image.png"
response = client.get(
f"/api/v1.0/documents/{document.id!s}/cors-proxy/?url={url_to_fetch}"
)
assert response.status_code == 400
assert response.json()["detail"] == "Invalid URL used."
mock_getaddrinfo.assert_called_once()
assert response.json() == {
"error": "Failed to fetch resource from https://external-url.com/assets/index.html"
}

View File

@@ -16,7 +16,6 @@ from rest_framework.test import APIClient
from core import factories
from core.api.serializers import ServerCreateDocumentSerializer
from core.models import Document, Invitation, User
from core.services import mime_types
from core.services.converter_services import ConversionError, YdocConverter
pytestmark = pytest.mark.django_db
@@ -192,9 +191,7 @@ def test_api_documents_create_for_owner_existing(mock_convert_md):
assert response.status_code == 201
mock_convert_md.assert_called_once_with(
"Document content", mime_types.MARKDOWN, mime_types.YJS
)
mock_convert_md.assert_called_once_with("Document content")
document = Document.objects.get()
assert response.json() == {"id": str(document.id)}
@@ -239,9 +236,7 @@ def test_api_documents_create_for_owner_new_user(mock_convert_md):
assert response.status_code == 201
mock_convert_md.assert_called_once_with(
"Document content", mime_types.MARKDOWN, mime_types.YJS
)
mock_convert_md.assert_called_once_with("Document content")
document = Document.objects.get()
assert response.json() == {"id": str(document.id)}
@@ -302,9 +297,7 @@ def test_api_documents_create_for_owner_existing_user_email_no_sub_with_fallback
assert response.status_code == 201
mock_convert_md.assert_called_once_with(
"Document content", mime_types.MARKDOWN, mime_types.YJS
)
mock_convert_md.assert_called_once_with("Document content")
document = Document.objects.get()
assert response.json() == {"id": str(document.id)}
@@ -400,9 +393,7 @@ def test_api_documents_create_for_owner_new_user_no_sub_no_fallback_allow_duplic
HTTP_AUTHORIZATION="Bearer DummyToken",
)
assert response.status_code == 201
mock_convert_md.assert_called_once_with(
"Document content", mime_types.MARKDOWN, mime_types.YJS
)
mock_convert_md.assert_called_once_with("Document content")
document = Document.objects.get()
assert response.json() == {"id": str(document.id)}
@@ -483,9 +474,7 @@ def test_api_documents_create_for_owner_with_default_language(
)
assert response.status_code == 201
mock_convert_md.assert_called_once_with(
"Document content", mime_types.MARKDOWN, mime_types.YJS
)
mock_convert_md.assert_called_once_with("Document content")
assert mock_send.call_args[0][3] == "de-de"
@@ -512,9 +501,7 @@ def test_api_documents_create_for_owner_with_custom_language(mock_convert_md):
assert response.status_code == 201
mock_convert_md.assert_called_once_with(
"Document content", mime_types.MARKDOWN, mime_types.YJS
)
mock_convert_md.assert_called_once_with("Document content")
assert len(mail.outbox) == 1
email = mail.outbox[0]
@@ -550,9 +537,7 @@ def test_api_documents_create_for_owner_with_custom_subject_and_message(
assert response.status_code == 201
mock_convert_md.assert_called_once_with(
"Document content", mime_types.MARKDOWN, mime_types.YJS
)
mock_convert_md.assert_called_once_with("Document content")
assert len(mail.outbox) == 1
email = mail.outbox[0]
@@ -586,9 +571,7 @@ def test_api_documents_create_for_owner_with_converter_exception(
format="json",
HTTP_AUTHORIZATION="Bearer DummyToken",
)
mock_convert_md.assert_called_once_with(
"Document content", mime_types.MARKDOWN, mime_types.YJS
)
mock_convert_md.assert_called_once_with("Document content")
assert response.status_code == 400
assert response.json() == {"content": ["Could not convert content"]}

View File

@@ -1,413 +0,0 @@
"""
Tests for Documents API endpoint in impress's core app: create with file upload
"""
from base64 import b64decode, binascii
from io import BytesIO
from unittest.mock import patch
import pytest
from rest_framework.test import APIClient
from core import factories
from core.models import Document
from core.services import mime_types
from core.services.converter_services import (
ConversionError,
ServiceUnavailableError,
)
pytestmark = pytest.mark.django_db
def test_api_documents_create_with_file_anonymous():
"""Anonymous users should not be allowed to create documents with file upload."""
# Create a fake DOCX file
file_content = b"fake docx content"
file = BytesIO(file_content)
file.name = "test_document.docx"
response = APIClient().post(
"/api/v1.0/documents/",
{
"file": file,
},
format="multipart",
)
assert response.status_code == 401
assert not Document.objects.exists()
@patch("core.services.converter_services.Converter.convert")
def test_api_documents_create_with_docx_file_success(mock_convert):
"""
Authenticated users should be able to create documents by uploading a DOCX file.
The file should be converted to YJS format and the title should be set from filename.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Mock the conversion
converted_yjs = "base64encodedyjscontent"
mock_convert.return_value = converted_yjs
# Create a fake DOCX file
file_content = b"fake docx content"
file = BytesIO(file_content)
file.name = "My Important Document.docx"
response = client.post(
"/api/v1.0/documents/",
{
"file": file,
},
format="multipart",
)
assert response.status_code == 201
document = Document.objects.get()
assert document.title == "My Important Document.docx"
assert document.content == converted_yjs
assert document.accesses.filter(role="owner", user=user).exists()
# Verify the converter was called correctly
mock_convert.assert_called_once_with(
file_content,
content_type=mime_types.DOCX,
accept=mime_types.YJS,
)
@patch("core.services.converter_services.Converter.convert")
def test_api_documents_create_with_markdown_file_success(mock_convert):
"""
Authenticated users should be able to create documents by uploading a Markdown file.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Mock the conversion
converted_yjs = "base64encodedyjscontent"
mock_convert.return_value = converted_yjs
# Create a fake Markdown file
file_content = b"# Test Document\n\nThis is a test."
file = BytesIO(file_content)
file.name = "readme.md"
response = client.post(
"/api/v1.0/documents/",
{
"file": file,
},
format="multipart",
)
assert response.status_code == 201
document = Document.objects.get()
assert document.title == "readme.md"
assert document.content == converted_yjs
assert document.accesses.filter(role="owner", user=user).exists()
# Verify the converter was called correctly
mock_convert.assert_called_once_with(
file_content,
content_type=mime_types.MARKDOWN,
accept=mime_types.YJS,
)
@patch("core.services.converter_services.Converter.convert")
def test_api_documents_create_with_file_and_explicit_title(mock_convert):
"""
When both file and title are provided, the filename should override the title.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Mock the conversion
converted_yjs = "base64encodedyjscontent"
mock_convert.return_value = converted_yjs
# Create a fake DOCX file
file_content = b"fake docx content"
file = BytesIO(file_content)
file.name = "Uploaded Document.docx"
response = client.post(
"/api/v1.0/documents/",
{
"file": file,
"title": "This should be overridden",
},
format="multipart",
)
assert response.status_code == 201
document = Document.objects.get()
# The filename should take precedence
assert document.title == "Uploaded Document.docx"
def test_api_documents_create_with_empty_file():
"""
Creating a document with an empty file should fail with a validation error.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Create an empty file
file = BytesIO(b"")
file.name = "empty.docx"
response = client.post(
"/api/v1.0/documents/",
{
"file": file,
},
format="multipart",
)
assert response.status_code == 400
assert response.json() == {"file": ["The submitted file is empty."]}
assert not Document.objects.exists()
@patch("core.services.converter_services.Converter.convert")
def test_api_documents_create_with_file_conversion_error(mock_convert):
"""
When conversion fails, the API should return a 400 error with appropriate message.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Mock the conversion to raise an error
mock_convert.side_effect = ConversionError("Failed to convert document")
# Create a fake DOCX file
file_content = b"fake invalid docx content"
file = BytesIO(file_content)
file.name = "corrupted.docx"
response = client.post(
"/api/v1.0/documents/",
{
"file": file,
},
format="multipart",
)
assert response.status_code == 400
assert response.json() == {"file": ["Could not convert file content"]}
assert not Document.objects.exists()
@patch("core.services.converter_services.Converter.convert")
def test_api_documents_create_with_file_service_unavailable(mock_convert):
"""
When the conversion service is unavailable, appropriate error should be returned.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Mock the conversion to raise ServiceUnavailableError
mock_convert.side_effect = ServiceUnavailableError(
"Failed to connect to conversion service"
)
# Create a fake DOCX file
file_content = b"fake docx content"
file = BytesIO(file_content)
file.name = "document.docx"
response = client.post(
"/api/v1.0/documents/",
{
"file": file,
},
format="multipart",
)
assert response.status_code == 400
assert response.json() == {"file": ["Could not convert file content"]}
assert not Document.objects.exists()
def test_api_documents_create_without_file_still_works():
"""
Creating a document without a file should still work as before (backward compatibility).
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
response = client.post(
"/api/v1.0/documents/",
{
"title": "Regular document without file",
},
format="json",
)
assert response.status_code == 201
document = Document.objects.get()
assert document.title == "Regular document without file"
assert document.content is None
assert document.accesses.filter(role="owner", user=user).exists()
@patch("core.services.converter_services.Converter.convert")
def test_api_documents_create_with_file_null_value(mock_convert):
"""
Passing file=null should be treated as no file upload.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
response = client.post(
"/api/v1.0/documents/",
{
"title": "Document with null file",
"file": None,
},
format="json",
)
assert response.status_code == 201
document = Document.objects.get()
assert document.title == "Document with null file"
# Converter should not have been called
mock_convert.assert_not_called()
@patch("core.services.converter_services.Converter.convert")
def test_api_documents_create_with_file_preserves_content_format(mock_convert):
"""
Verify that the converted content is stored correctly in the document.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Mock the conversion with realistic base64-encoded YJS data
converted_yjs = "AQMEBQYHCAkKCwwNDg8QERITFBUWFxgZGhscHR4fICA="
mock_convert.return_value = converted_yjs
# Create a fake DOCX file
file_content = b"fake docx with complex formatting"
file = BytesIO(file_content)
file.name = "complex_document.docx"
response = client.post(
"/api/v1.0/documents/",
{
"file": file,
},
format="multipart",
)
assert response.status_code == 201
document = Document.objects.get()
# Verify the content is stored as returned by the converter
assert document.content == converted_yjs
# Verify it's valid base64 (can be decoded)
try:
b64decode(converted_yjs)
except binascii.Error:
pytest.fail("Content should be valid base64-encoded data")
@patch("core.services.converter_services.Converter.convert")
def test_api_documents_create_with_file_unicode_filename(mock_convert):
"""
Test that Unicode characters in filenames are handled correctly.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
# Mock the conversion
converted_yjs = "base64encodedyjscontent"
mock_convert.return_value = converted_yjs
# Create a file with Unicode characters in the name
file_content = b"fake docx content"
file = BytesIO(file_content)
file.name = "文档-télécharger-документ.docx"
response = client.post(
"/api/v1.0/documents/",
{
"file": file,
},
format="multipart",
)
assert response.status_code == 201
document = Document.objects.get()
assert document.title == "文档-télécharger-документ.docx"
def test_api_documents_create_with_file_max_size_exceeded(settings):
"""
Uploading a file larger than the maximum size configured in settings should be rejected.
"""
settings.CONVERSION_FILE_MAX_SIZE = 1 # 1 byte for test
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
file = BytesIO(b"a" * (10))
file.name = "test.docx"
response = client.post(
"/api/v1.0/documents/",
{
"file": file,
},
format="multipart",
)
assert response.status_code == 400
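# Note: the 1-byte limit set above is presumably formatted in MB by the error message, hence "0 MB"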
assert response.json() == {"file": ["File size exceeds the maximum limit of 0 MB."]}
def test_api_documents_create_with_file_extension_not_allowed(settings):
"""
Uploading a file whose extension is not in the allowed list should be rejected.
"""
settings.CONVERSION_FILE_EXTENSIONS_ALLOWED = [".docx"]
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
file = BytesIO(b"fake docx content")
file.name = "test.md"
response = client.post(
"/api/v1.0/documents/",
{
"file": file,
},
format="multipart",
)
assert response.status_code == 400
assert response.json() == {
"file": [
"File extension .md is not allowed. Allowed extensions are: ['.docx']."
]
}

View File

@@ -83,34 +83,3 @@ def test_api_document_favorite_list_authenticated_with_favorite():
}
],
}
def test_api_document_favorite_list_with_favorite_children():
"""Authenticated users should receive their favorite documents, including children."""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
root = factories.DocumentFactory(creator=user, users=[user])
children = factories.DocumentFactory.create_batch(
2, parent=root, favorited_by=[user]
)
access = factories.UserDocumentAccessFactory(
user=user, role=models.RoleChoices.READER, document__favorited_by=[user]
)
other_root = factories.DocumentFactory(creator=user, users=[user])
factories.DocumentFactory.create_batch(2, parent=other_root)
response = client.get("/api/v1.0/documents/favorite_list/")
assert response.status_code == 200
assert response.json()["count"] == 3
content = response.json()["results"]
assert content[0]["id"] == str(children[0].id)
assert content[1]["id"] == str(children[1].id)
assert content[2]["id"] == str(access.document.id)

View File

@@ -1,425 +0,0 @@
"""
Tests for Documents API endpoint in impress's core app: list
"""
import random
from json import loads as json_loads
from django.test import RequestFactory
import pytest
import responses
from faker import Faker
from rest_framework.test import APIClient
from core import factories, models
from core.services.search_indexers import get_document_indexer
fake = Faker()
pytestmark = pytest.mark.django_db
def build_search_url(**kwargs):
"""Build absolute uri for search endpoint with ORDERED query arguments"""
return (
RequestFactory()
.get("/api/v1.0/documents/search/", dict(sorted(kwargs.items())))
.build_absolute_uri()
)
@pytest.mark.parametrize("role", models.LinkRoleChoices.values)
@pytest.mark.parametrize("reach", models.LinkReachChoices.values)
@responses.activate
def test_api_documents_search_anonymous(reach, role, indexer_settings):
"""
Anonymous users should not get any search results, whatever the document's
link reach and link role.
"""
indexer_settings.SEARCH_INDEXER_QUERY_URL = "http://find/api/v1.0/search"
factories.DocumentFactory(link_reach=reach, link_role=role)
# Find response
responses.add(
responses.POST,
"http://find/api/v1.0/search",
json=[],
status=200,
)
response = APIClient().get("/api/v1.0/documents/search/", data={"q": "alpha"})
assert response.status_code == 200
assert response.json() == {
"count": 0,
"next": None,
"previous": None,
"results": [],
}
def test_api_documents_search_endpoint_is_none(indexer_settings):
"""
SEARCH_INDEXER_QUERY_URL is missing, so the indexer is not properly configured.
The view should fall back to filtering on the title.
"""
indexer_settings.SEARCH_INDEXER_QUERY_URL = None
assert get_document_indexer() is None
user = factories.UserFactory()
document = factories.DocumentFactory(title="alpha")
access = factories.UserDocumentAccessFactory(document=document, user=user)
client = APIClient()
client.force_login(user)
response = client.get("/api/v1.0/documents/search/", data={"q": "alpha"})
assert response.status_code == 200
content = response.json()
results = content.pop("results")
assert content == {
"count": 1,
"next": None,
"previous": None,
}
assert len(results) == 1
assert results[0] == {
"id": str(document.id),
"abilities": document.get_abilities(user),
"ancestors_link_reach": None,
"ancestors_link_role": None,
"computed_link_reach": document.computed_link_reach,
"computed_link_role": document.computed_link_role,
"created_at": document.created_at.isoformat().replace("+00:00", "Z"),
"creator": str(document.creator.id),
"depth": 1,
"excerpt": document.excerpt,
"link_reach": document.link_reach,
"link_role": document.link_role,
"nb_accesses_ancestors": 1,
"nb_accesses_direct": 1,
"numchild": 0,
"path": document.path,
"title": document.title,
"updated_at": document.updated_at.isoformat().replace("+00:00", "Z"),
"deleted_at": None,
"user_role": access.role,
}
@responses.activate
def test_api_documents_search_invalid_params(indexer_settings):
"""Validate the format of documents as returned by the search view."""
indexer_settings.SEARCH_INDEXER_QUERY_URL = "http://find/api/v1.0/search"
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
response = client.get("/api/v1.0/documents/search/")
assert response.status_code == 400
assert response.json() == {"q": ["This field is required."]}
response = client.get("/api/v1.0/documents/search/", data={"q": " "})
assert response.status_code == 400
assert response.json() == {"q": ["This field may not be blank."]}
response = client.get(
"/api/v1.0/documents/search/", data={"q": "any", "page": "NaN"}
)
assert response.status_code == 400
assert response.json() == {"page": ["A valid integer is required."]}
@responses.activate
def test_api_documents_search_format(indexer_settings):
"""Validate the format of documents as returned by the search view."""
indexer_settings.SEARCH_INDEXER_QUERY_URL = "http://find/api/v1.0/search"
assert get_document_indexer() is not None
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
user_a, user_b, user_c = factories.UserFactory.create_batch(3)
document = factories.DocumentFactory(
title="alpha",
users=(user_a, user_c),
link_traces=(user, user_b),
)
access = factories.UserDocumentAccessFactory(document=document, user=user)
# Find response
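# The mocked service returns only matching document ids; the view is expected to serialize the full documents from the database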
responses.add(
responses.POST,
"http://find/api/v1.0/search",
json=[
{"_id": str(document.pk)},
],
status=200,
)
response = client.get("/api/v1.0/documents/search/", data={"q": "alpha"})
assert response.status_code == 200
content = response.json()
results = content.pop("results")
assert content == {
"count": 1,
"next": None,
"previous": None,
}
assert len(results) == 1
assert results[0] == {
"id": str(document.id),
"abilities": document.get_abilities(user),
"ancestors_link_reach": None,
"ancestors_link_role": None,
"computed_link_reach": document.computed_link_reach,
"computed_link_role": document.computed_link_role,
"created_at": document.created_at.isoformat().replace("+00:00", "Z"),
"creator": str(document.creator.id),
"depth": 1,
"excerpt": document.excerpt,
"link_reach": document.link_reach,
"link_role": document.link_role,
"nb_accesses_ancestors": 3,
"nb_accesses_direct": 3,
"numchild": 0,
"path": document.path,
"title": document.title,
"updated_at": document.updated_at.isoformat().replace("+00:00", "Z"),
"deleted_at": None,
"user_role": access.role,
}
@responses.activate
@pytest.mark.parametrize(
"pagination, status, expected",
(
(
{"page": 1, "page_size": 10},
200,
{
"count": 10,
"previous": None,
"next": None,
"range": (0, None),
},
),
(
{},
200,
{
"count": 10,
"previous": None,
"next": None,
"range": (0, None),
"api_page_size": 21, # default page_size is 20
},
),
(
{"page": 2, "page_size": 10},
404,
{},
),
(
{"page": 1, "page_size": 5},
200,
{
"count": 10,
"previous": None,
"next": {"page": 2, "page_size": 5},
"range": (0, 5),
},
),
(
{"page": 2, "page_size": 5},
200,
{
"count": 10,
"previous": {"page_size": 5},
"next": None,
"range": (5, None),
},
),
({"page": 3, "page_size": 5}, 404, {}),
),
)
def test_api_documents_search_pagination(
indexer_settings, pagination, status, expected
):
"""Documents should be ordered by descending "score" by default"""
indexer_settings.SEARCH_INDEXER_QUERY_URL = "http://find/api/v1.0/search"
assert get_document_indexer() is not None
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
docs = factories.DocumentFactory.create_batch(10, title="alpha", users=[user])
docs_by_uuid = {str(doc.pk): doc for doc in docs}
api_results = [{"_id": id} for id in docs_by_uuid.keys()]
# reorder randomly to simulate score ordering
random.shuffle(api_results)
# Find response
# pylint: disable-next=assignment-from-none
api_search = responses.add(
responses.POST,
"http://find/api/v1.0/search",
json=api_results,
status=200,
)
response = client.get(
"/api/v1.0/documents/search/",
data={
"q": "alpha",
**pagination,
},
)
assert response.status_code == status
if response.status_code < 300:
previous_url = (
build_search_url(q="alpha", **expected["previous"])
if expected["previous"]
else None
)
next_url = (
build_search_url(q="alpha", **expected["next"])
if expected["next"]
else None
)
start, end = expected["range"]
content = response.json()
assert content["count"] == expected["count"]
assert content["previous"] == previous_url
assert content["next"] == next_url
results = content.pop("results")
# The find api results ordering by score is kept
assert [r["id"] for r in results] == [r["_id"] for r in api_results[start:end]]
# Check the query parameters.
assert api_search.call_count == 1
assert api_search.calls[0].response.status_code == 200
assert json_loads(api_search.calls[0].request.body) == {
"q": "alpha",
"visited": [],
"services": ["docs"],
"nb_results": 50,
"order_by": "updated_at",
"order_direction": "desc",
}
@responses.activate
@pytest.mark.parametrize(
"pagination, status, expected",
(
(
{"page": 1, "page_size": 10},
200,
{"count": 10, "previous": None, "next": None, "range": (0, None)},
),
(
{},
200,
{"count": 10, "previous": None, "next": None, "range": (0, None)},
),
(
{"page": 2, "page_size": 10},
404,
{},
),
(
{"page": 1, "page_size": 5},
200,
{
"count": 10,
"previous": None,
"next": {"page": 2, "page_size": 5},
"range": (0, 5),
},
),
(
{"page": 2, "page_size": 5},
200,
{
"count": 10,
"previous": {"page_size": 5},
"next": None,
"range": (5, None),
},
),
({"page": 3, "page_size": 5}, 404, {}),
),
)
def test_api_documents_search_pagination_endpoint_is_none(
indexer_settings, pagination, status, expected
):
"""Documents should be ordered by descending "-updated_at" by default"""
indexer_settings.SEARCH_INDEXER_QUERY_URL = None
assert get_document_indexer() is None
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
factories.DocumentFactory.create_batch(10, title="alpha", users=[user])
response = client.get(
"/api/v1.0/documents/search/",
data={
"q": "alpha",
**pagination,
},
)
assert response.status_code == status
if response.status_code < 300:
previous_url = (
build_search_url(q="alpha", **expected["previous"])
if expected["previous"]
else None
)
next_url = (
build_search_url(q="alpha", **expected["next"])
if expected["next"]
else None
)
queryset = models.Document.objects.order_by("-updated_at")
start, end = expected["range"]
expected_results = [str(d.pk) for d in queryset[start:end]]
content = response.json()
assert content["count"] == expected["count"]
assert content["previous"] == previous_url
assert content["next"] == next_url
results = content.pop("results")
assert [r["id"] for r in results] == expected_results

View File

@@ -0,0 +1,46 @@
"""
Tests for Templates API endpoint in impress's core app: create
"""
import pytest
from rest_framework.test import APIClient
from core import factories
from core.models import Template
pytestmark = pytest.mark.django_db
def test_api_templates_create_anonymous():
"""Anonymous users should not be allowed to create templates."""
response = APIClient().post(
"/api/v1.0/templates/",
{
"title": "my template",
},
)
assert response.status_code == 401
assert not Template.objects.exists()
def test_api_templates_create_authenticated():
"""
Creating templates through the API is not implemented: authenticated users should receive
a 405 response and no template should be created.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
response = client.post(
"/api/v1.0/templates/",
{
"title": "my template",
},
format="json",
)
assert response.status_code == 405
assert not Template.objects.exists()

View File

@@ -0,0 +1,45 @@
"""
Tests for Templates API endpoint in impress's core app: delete
"""
import random
import pytest
from rest_framework.test import APIClient
from core import factories, models
pytestmark = pytest.mark.django_db
def test_api_templates_delete_anonymous():
"""Anonymous users should not be allowed to destroy a template."""
template = factories.TemplateFactory()
response = APIClient().delete(
f"/api/v1.0/templates/{template.id!s}/",
)
assert response.status_code == 401
assert models.Template.objects.count() == 1
def test_api_templates_delete_not_implemented():
"""
Deleting templates is not implemented: even users who own the template should receive
a 405 response.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
is_public = random.choice([True, False])
template = factories.TemplateFactory(is_public=is_public, users=[(user, "owner")])
response = client.delete(
f"/api/v1.0/templates/{template.id!s}/",
)
assert response.status_code == 405
assert models.Template.objects.count() == 1

View File

@@ -0,0 +1,237 @@
"""
Tests for Templates API endpoint in impress's core app: list
"""
from unittest import mock
import pytest
from rest_framework.pagination import PageNumberPagination
from rest_framework.test import APIClient
from core import factories
pytestmark = pytest.mark.django_db
def test_api_templates_list_anonymous():
"""Anonymous users should only be able to list public templates."""
factories.TemplateFactory.create_batch(2, is_public=False)
public_templates = factories.TemplateFactory.create_batch(2, is_public=True)
expected_ids = {str(template.id) for template in public_templates}
response = APIClient().get("/api/v1.0/templates/")
assert response.status_code == 200
results = response.json()["results"]
assert len(results) == 2
results_id = {result["id"] for result in results}
assert expected_ids == results_id
def test_api_templates_list_authenticated_direct():
"""
Authenticated users should be able to list templates they are a direct
owner/administrator/member of or that are public.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
related_templates = [
access.template
for access in factories.UserTemplateAccessFactory.create_batch(5, user=user)
]
public_templates = factories.TemplateFactory.create_batch(2, is_public=True)
factories.TemplateFactory.create_batch(2, is_public=False)
expected_ids = {
str(template.id) for template in related_templates + public_templates
}
response = client.get(
"/api/v1.0/templates/",
)
assert response.status_code == 200
results = response.json()["results"]
assert len(results) == 7
results_id = {result["id"] for result in results}
assert expected_ids == results_id
def test_api_templates_list_authenticated_via_team(mock_user_teams):
"""
Authenticated users should be able to list templates they are an
owner/administrator/member of via a team, or that are public.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
mock_user_teams.return_value = ["team1", "team2", "unknown"]
templates_team1 = [
access.template
for access in factories.TeamTemplateAccessFactory.create_batch(2, team="team1")
]
templates_team2 = [
access.template
for access in factories.TeamTemplateAccessFactory.create_batch(3, team="team2")
]
public_templates = factories.TemplateFactory.create_batch(2, is_public=True)
factories.TemplateFactory.create_batch(2, is_public=False)
expected_ids = {
str(template.id)
for template in templates_team1 + templates_team2 + public_templates
}
response = client.get("/api/v1.0/templates/")
assert response.status_code == 200
results = response.json()["results"]
assert len(results) == 7
results_id = {result["id"] for result in results}
assert expected_ids == results_id
@mock.patch.object(PageNumberPagination, "get_page_size", return_value=2)
def test_api_templates_list_pagination(
_mock_page_size,
):
"""Pagination should work as expected."""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
template_ids = [
str(access.template_id)
for access in factories.UserTemplateAccessFactory.create_batch(3, user=user)
]
# Get page 1
response = client.get(
"/api/v1.0/templates/",
)
assert response.status_code == 200
content = response.json()
assert content["count"] == 3
assert content["next"] == "http://testserver/api/v1.0/templates/?page=2"
assert content["previous"] is None
assert len(content["results"]) == 2
for item in content["results"]:
template_ids.remove(item["id"])
# Get page 2
response = client.get(
"/api/v1.0/templates/?page=2",
)
assert response.status_code == 200
content = response.json()
assert content["count"] == 3
assert content["next"] is None
assert content["previous"] == "http://testserver/api/v1.0/templates/"
assert len(content["results"]) == 1
template_ids.remove(content["results"][0]["id"])
assert template_ids == []
def test_api_templates_list_authenticated_distinct():
"""A template with several related users should only be listed once."""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
other_user = factories.UserFactory()
template = factories.TemplateFactory(users=[user, other_user], is_public=True)
response = client.get(
"/api/v1.0/templates/",
)
assert response.status_code == 200
content = response.json()
assert len(content["results"]) == 1
assert content["results"][0]["id"] == str(template.id)
def test_api_templates_list_order_default():
"""The templates list should be sorted by 'created_at' in descending order by default."""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
template_ids = [
str(access.template.id)
for access in factories.UserTemplateAccessFactory.create_batch(5, user=user)
]
response = client.get(
"/api/v1.0/templates/",
)
assert response.status_code == 200
response_data = response.json()
response_template_ids = [template["id"] for template in response_data["results"]]
template_ids.reverse()
assert response_template_ids == template_ids, (
"created_at values are not sorted from newest to oldest"
)
def test_api_templates_list_order_param():
"""
The templates list is sorted by 'created_at' in ascending order when setting
the "ordering" query parameter.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
templates_ids = [
str(access.template.id)
for access in factories.UserTemplateAccessFactory.create_batch(5, user=user)
]
response = client.get(
"/api/v1.0/templates/?ordering=created_at",
)
assert response.status_code == 200
response_data = response.json()
response_template_ids = [template["id"] for template in response_data["results"]]
assert response_template_ids == templates_ids, (
"created_at values are not sorted from oldest to newest"
)
def test_api_template_throttling(settings):
"""Test api template throttling."""
current_rate = settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["template"]
settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["template"] = "2/minute"
client = APIClient()
for _i in range(2):
response = client.get("/api/v1.0/templates/")
assert response.status_code == 200
with mock.patch("core.api.throttling.capture_message") as mock_capture_message:
response = client.get("/api/v1.0/templates/")
assert response.status_code == 429
mock_capture_message.assert_called_once_with(
"Rate limit exceeded for scope template", "warning"
)
settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["template"] = current_rate

View File

@@ -0,0 +1,522 @@
"""
Tests for Templates API endpoint in impress's core app: retrieve
"""
import pytest
from rest_framework.test import APIClient
from core import factories
pytestmark = pytest.mark.django_db
def test_api_templates_retrieve_anonymous_public():
"""Anonymous users should be allowed to retrieve public templates."""
template = factories.TemplateFactory(is_public=True)
response = APIClient().get(f"/api/v1.0/templates/{template.id!s}/")
assert response.status_code == 200
assert response.json() == {
"id": str(template.id),
"abilities": {
"destroy": False,
"generate_document": True,
"accesses_manage": False,
"partial_update": False,
"retrieve": True,
"update": False,
},
"accesses": [],
"title": template.title,
"is_public": True,
"code": template.code,
"css": template.css,
}
def test_api_templates_retrieve_anonymous_not_public():
"""Anonymous users should not be able to retrieve a template that is not public."""
template = factories.TemplateFactory(is_public=False)
response = APIClient().get(f"/api/v1.0/templates/{template.id!s}/")
assert response.status_code == 401
assert response.json() == {
"detail": "Authentication credentials were not provided."
}
def test_api_templates_retrieve_authenticated_unrelated_public():
"""
Authenticated users should be able to retrieve a public template to which they are
not related.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
template = factories.TemplateFactory(is_public=True)
response = client.get(
f"/api/v1.0/templates/{template.id!s}/",
)
assert response.status_code == 200
assert response.json() == {
"id": str(template.id),
"abilities": {
"destroy": False,
"generate_document": True,
"accesses_manage": False,
"partial_update": False,
"retrieve": True,
"update": False,
},
"accesses": [],
"title": template.title,
"is_public": True,
"code": template.code,
"css": template.css,
}
def test_api_templates_retrieve_authenticated_unrelated_not_public():
"""
Authenticated users should not be allowed to retrieve a template that is not public and
to which they are not related.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
template = factories.TemplateFactory(is_public=False)
response = client.get(
f"/api/v1.0/templates/{template.id!s}/",
)
assert response.status_code == 403
assert response.json() == {
"detail": "You do not have permission to perform this action."
}
def test_api_templates_retrieve_authenticated_related_direct():
"""
Authenticated users should be allowed to retrieve a template to which they
are directly related whatever the role.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
template = factories.TemplateFactory()
access1 = factories.UserTemplateAccessFactory(template=template, user=user)
access2 = factories.UserTemplateAccessFactory(template=template)
response = client.get(
f"/api/v1.0/templates/{template.id!s}/",
)
assert response.status_code == 200
content = response.json()
assert sorted(content.pop("accesses"), key=lambda x: x["user"]) == sorted(
[
{
"id": str(access1.id),
"user": str(user.id),
"team": "",
"role": access1.role,
"abilities": access1.get_abilities(user),
},
{
"id": str(access2.id),
"user": str(access2.user.id),
"team": "",
"role": access2.role,
"abilities": access2.get_abilities(user),
},
],
key=lambda x: x["user"],
)
assert response.json() == {
"id": str(template.id),
"title": template.title,
"abilities": template.get_abilities(user),
"is_public": template.is_public,
"code": template.code,
"css": template.css,
}
def test_api_templates_retrieve_authenticated_related_team_none(mock_user_teams):
"""
Authenticated users should not be able to retrieve a template that is only shared with
teams to which they do not belong.
"""
mock_user_teams.return_value = []
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
template = factories.TemplateFactory(is_public=False)
factories.TeamTemplateAccessFactory(
template=template, team="readers", role="reader"
)
factories.TeamTemplateAccessFactory(
template=template, team="editors", role="editor"
)
factories.TeamTemplateAccessFactory(
template=template, team="administrators", role="administrator"
)
factories.TeamTemplateAccessFactory(template=template, team="owners", role="owner")
factories.TeamTemplateAccessFactory(template=template)
factories.TeamTemplateAccessFactory()
response = client.get(f"/api/v1.0/templates/{template.id!s}/")
assert response.status_code == 403
assert response.json() == {
"detail": "You do not have permission to perform this action."
}
@pytest.mark.parametrize(
"teams",
[
["readers"],
["unknown", "readers"],
["editors"],
["unknown", "editors"],
],
)
def test_api_templates_retrieve_authenticated_related_team_readers_or_editors(
teams, mock_user_teams
):
"""
Authenticated users who are related to a template as reader or editor via a team
should be allowed to retrieve it and see all its accesses.
"""
mock_user_teams.return_value = teams
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
template = factories.TemplateFactory(is_public=False)
access_reader = factories.TeamTemplateAccessFactory(
template=template, team="readers", role="reader"
)
access_editor = factories.TeamTemplateAccessFactory(
template=template, team="editors", role="editor"
)
access_administrator = factories.TeamTemplateAccessFactory(
template=template, team="administrators", role="administrator"
)
access_owner = factories.TeamTemplateAccessFactory(
template=template, team="owners", role="owner"
)
other_access = factories.TeamTemplateAccessFactory(template=template)
factories.TeamTemplateAccessFactory()
response = client.get(f"/api/v1.0/templates/{template.id!s}/")
assert response.status_code == 200
content = response.json()
expected_abilities = {
"destroy": False,
"retrieve": True,
"set_role_to": [],
"update": False,
"partial_update": False,
}
assert sorted(content.pop("accesses"), key=lambda x: x["id"]) == sorted(
[
{
"id": str(access_reader.id),
"user": None,
"team": "readers",
"role": access_reader.role,
"abilities": expected_abilities,
},
{
"id": str(access_editor.id),
"user": None,
"team": "editors",
"role": access_editor.role,
"abilities": expected_abilities,
},
{
"id": str(access_administrator.id),
"user": None,
"team": "administrators",
"role": access_administrator.role,
"abilities": expected_abilities,
},
{
"id": str(access_owner.id),
"user": None,
"team": "owners",
"role": access_owner.role,
"abilities": expected_abilities,
},
{
"id": str(other_access.id),
"user": None,
"team": other_access.team,
"role": other_access.role,
"abilities": expected_abilities,
},
],
key=lambda x: x["id"],
)
assert response.json() == {
"id": str(template.id),
"title": template.title,
"abilities": template.get_abilities(user),
"is_public": False,
"code": template.code,
"css": template.css,
}
@pytest.mark.parametrize(
"teams",
[
["administrators"],
["members", "administrators"],
["unknown", "administrators"],
],
)
def test_api_templates_retrieve_authenticated_related_team_administrators(
teams, mock_user_teams
):
"""
Authenticated users who are related to a template as administrator via a team
should be allowed to retrieve it and see all its accesses.
"""
mock_user_teams.return_value = teams
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
template = factories.TemplateFactory(is_public=False)
access_reader = factories.TeamTemplateAccessFactory(
template=template, team="readers", role="reader"
)
access_editor = factories.TeamTemplateAccessFactory(
template=template, team="editors", role="editor"
)
access_administrator = factories.TeamTemplateAccessFactory(
template=template, team="administrators", role="administrator"
)
access_owner = factories.TeamTemplateAccessFactory(
template=template, team="owners", role="owner"
)
other_access = factories.TeamTemplateAccessFactory(template=template)
factories.TeamTemplateAccessFactory()
response = client.get(f"/api/v1.0/templates/{template.id!s}/")
assert response.status_code == 200
content = response.json()
assert sorted(content.pop("accesses"), key=lambda x: x["id"]) == sorted(
[
{
"id": str(access_reader.id),
"user": None,
"team": "readers",
"role": "reader",
"abilities": {
"destroy": True,
"retrieve": True,
"set_role_to": ["administrator", "editor"],
"update": True,
"partial_update": True,
},
},
{
"id": str(access_editor.id),
"user": None,
"team": "editors",
"role": "editor",
"abilities": {
"destroy": True,
"retrieve": True,
"set_role_to": ["administrator", "reader"],
"update": True,
"partial_update": True,
},
},
{
"id": str(access_administrator.id),
"user": None,
"team": "administrators",
"role": "administrator",
"abilities": {
"destroy": True,
"retrieve": True,
"set_role_to": ["editor", "reader"],
"update": True,
"partial_update": True,
},
},
{
"id": str(access_owner.id),
"user": None,
"team": "owners",
"role": "owner",
"abilities": {
"destroy": False,
"retrieve": True,
"set_role_to": [],
"update": False,
"partial_update": False,
},
},
{
"id": str(other_access.id),
"user": None,
"team": other_access.team,
"role": other_access.role,
"abilities": other_access.get_abilities(user),
},
],
key=lambda x: x["id"],
)
assert response.json() == {
"id": str(template.id),
"title": template.title,
"abilities": template.get_abilities(user),
"is_public": False,
"code": template.code,
"css": template.css,
}
@pytest.mark.parametrize(
"teams",
[
["owners"],
["owners", "administrators"],
["members", "administrators", "owners"],
["unknown", "owners"],
],
)
def test_api_templates_retrieve_authenticated_related_team_owners(
teams, mock_user_teams
):
"""
Authenticated users who are related to a template as owner via a team
should be allowed to retrieve it and see all its accesses.
"""
mock_user_teams.return_value = teams
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
template = factories.TemplateFactory(is_public=False)
access_reader = factories.TeamTemplateAccessFactory(
template=template, team="readers", role="reader"
)
access_editor = factories.TeamTemplateAccessFactory(
template=template, team="editors", role="editor"
)
access_administrator = factories.TeamTemplateAccessFactory(
template=template, team="administrators", role="administrator"
)
access_owner = factories.TeamTemplateAccessFactory(
template=template, team="owners", role="owner"
)
other_access = factories.TeamTemplateAccessFactory(template=template)
factories.TeamTemplateAccessFactory()
response = client.get(f"/api/v1.0/templates/{template.id!s}/")
assert response.status_code == 200
content = response.json()
assert sorted(content.pop("accesses"), key=lambda x: x["id"]) == sorted(
[
{
"id": str(access_reader.id),
"user": None,
"team": "readers",
"role": "reader",
"abilities": {
"destroy": True,
"retrieve": True,
"set_role_to": ["owner", "administrator", "editor"],
"update": True,
"partial_update": True,
},
},
{
"id": str(access_editor.id),
"user": None,
"team": "editors",
"role": "editor",
"abilities": {
"destroy": True,
"retrieve": True,
"set_role_to": ["owner", "administrator", "reader"],
"update": True,
"partial_update": True,
},
},
{
"id": str(access_administrator.id),
"user": None,
"team": "administrators",
"role": "administrator",
"abilities": {
"destroy": True,
"retrieve": True,
"set_role_to": ["owner", "editor", "reader"],
"update": True,
"partial_update": True,
},
},
{
"id": str(access_owner.id),
"user": None,
"team": "owners",
"role": "owner",
"abilities": {
# editable only if another access has the owner role besides the user's team...
"destroy": other_access.role == "owner",
"retrieve": True,
"set_role_to": ["administrator", "editor", "reader"]
if other_access.role == "owner"
else [],
"update": other_access.role == "owner",
"partial_update": other_access.role == "owner",
},
},
{
"id": str(other_access.id),
"user": None,
"team": other_access.team,
"role": other_access.role,
"abilities": other_access.get_abilities(user),
},
],
key=lambda x: x["id"],
)
assert response.json() == {
"id": str(template.id),
"title": template.title,
"abilities": template.get_abilities(user),
"is_public": False,
"code": template.code,
"css": template.css,
}

View File

@@ -0,0 +1,54 @@
"""
Tests for Templates API endpoint in impress's core app: update
"""
import pytest
from rest_framework.test import APIClient
from core import factories
from core.api import serializers
pytestmark = pytest.mark.django_db
def test_api_templates_update_anonymous():
"""Anonymous users should not be allowed to update a template."""
template = factories.TemplateFactory()
new_template_values = serializers.TemplateSerializer(
instance=factories.TemplateFactory()
).data
response = APIClient().put(
f"/api/v1.0/templates/{template.id!s}/",
new_template_values,
format="json",
)
assert response.status_code == 401
def test_api_templates_update_not_implemented():
"""
Authenticated users should not be allowed to update a template.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
template = factories.TemplateFactory(users=[(user, "owner")])
new_template_values = serializers.TemplateSerializer(
instance=factories.TemplateFactory()
).data
response = client.put(
f"/api/v1.0/templates/{template.id!s}/", new_template_values, format="json"
)
assert response.status_code == 405
response = client.patch(
f"/api/v1.0/templates/{template.id!s}/", new_template_values, format="json"
)
assert response.status_code == 405

View File

@@ -20,12 +20,10 @@ pytestmark = pytest.mark.django_db
@override_settings(
AI_FEATURE_ENABLED=False,
API_USERS_SEARCH_QUERY_MIN_LENGTH=6,
COLLABORATION_WS_URL="http://testcollab/",
COLLABORATION_WS_NOT_CONNECTED_READY_ONLY=True,
CRISP_WEBSITE_ID="123",
FRONTEND_CSS_URL="http://testcss/",
FRONTEND_JS_URL="http://testjs/",
FRONTEND_THEME="test-theme",
MEDIA_BASE_URL="http://testserver/",
POSTHOG_KEY={"id": "132456", "host": "https://eu.i.posthog-test.com"},
@@ -45,17 +43,12 @@ def test_api_config(is_authenticated):
assert response.status_code == HTTP_200_OK
assert response.json() == {
"AI_FEATURE_ENABLED": False,
"API_USERS_SEARCH_QUERY_MIN_LENGTH": 6,
"COLLABORATION_WS_URL": "http://testcollab/",
"COLLABORATION_WS_NOT_CONNECTED_READY_ONLY": True,
"CONVERSION_FILE_EXTENSIONS_ALLOWED": [".docx", ".md"],
"CONVERSION_FILE_MAX_SIZE": 20971520,
"CRISP_WEBSITE_ID": "123",
"ENVIRONMENT": "test",
"FRONTEND_CSS_URL": "http://testcss/",
"FRONTEND_HOMEPAGE_FEATURE_ENABLED": True,
"FRONTEND_JS_URL": "http://testjs/",
"FRONTEND_SILENT_LOGIN_ENABLED": False,
"FRONTEND_THEME": "test-theme",
"LANGUAGES": [
["en-us", "English"],

View File

@@ -1,107 +0,0 @@
"""
Test DocumentThrottle for regular throttling and y-provider bypass.
"""
import pytest
from rest_framework.test import APIClient
from core import factories
pytestmark = pytest.mark.django_db
def test_api_throttling_document_throttle_regular_requests(settings):
"""Test that regular requests are throttled normally."""
current_rate = settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["document"]
settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["document"] = "3/minute"
settings.Y_PROVIDER_API_KEY = "test-y-provider-key"
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
document = factories.DocumentFactory()
factories.UserDocumentAccessFactory(document=document, user=user)
# Make 3 requests without the y-provider key
for _i in range(3):
response = client.get(
f"/api/v1.0/documents/{document.id!s}/",
)
assert response.status_code == 200
# 4th request should be throttled
response = client.get(
f"/api/v1.0/documents/{document.id!s}/",
)
assert response.status_code == 429
# A request with the y-provider key should NOT be throttled
response = client.get(
f"/api/v1.0/documents/{document.id!s}/",
HTTP_X_Y_PROVIDER_KEY="test-y-provider-key",
)
assert response.status_code == 200
# Restore original rate
settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["document"] = current_rate
def test_api_throttling_document_throttle_y_provider_exempted(settings):
"""Test that y-provider requests are exempted from throttling."""
current_rate = settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["document"]
settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["document"] = "3/minute"
settings.Y_PROVIDER_API_KEY = "test-y-provider-key"
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
document = factories.DocumentFactory()
factories.UserDocumentAccessFactory(document=document, user=user)
# Make many requests with the y-provider API key
for _i in range(10):
response = client.get(
f"/api/v1.0/documents/{document.id!s}/",
HTTP_X_Y_PROVIDER_KEY="test-y-provider-key",
)
assert response.status_code == 200
# Restore original rate
settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["document"] = current_rate
def test_api_throttling_document_throttle_invalid_token(settings):
"""Test that requests with invalid tokens are throttled."""
current_rate = settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["document"]
settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["document"] = "3/minute"
settings.Y_PROVIDER_API_KEY = "test-y-provider-key"
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
document = factories.DocumentFactory()
factories.UserDocumentAccessFactory(document=document, user=user)
# Make 3 requests with an invalid token
for _i in range(3):
response = client.get(
f"/api/v1.0/documents/{document.id!s}/",
HTTP_X_Y_PROVIDER_KEY="invalid-token",
)
assert response.status_code == 200
# 4th request should be throttled
response = client.get(
f"/api/v1.0/documents/{document.id!s}/",
HTTP_X_Y_PROVIDER_KEY="invalid-token",
)
assert response.status_code == 429
# Restore original rate
settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["document"] = current_rate

View File

@@ -1,85 +0,0 @@
"""
Unit tests for the ReconciliationConfirmView API view.
"""
import uuid
from django.conf import settings
import pytest
from rest_framework.test import APIClient
from core import factories, models
pytestmark = pytest.mark.django_db
def test_reconciliation_confirm_view_sets_active_checked():
"""GETting the active confirmation endpoint should set active_email_checked."""
user = factories.UserFactory(email="user.confirm1@example.com")
other = factories.UserFactory(email="user.confirm2@example.com")
rec = models.UserReconciliation.objects.create(
active_email=user.email,
inactive_email=other.email,
active_user=user,
inactive_user=other,
active_email_checked=False,
inactive_email_checked=False,
status="ready",
)
client = APIClient()
conf_id = rec.active_email_confirmation_id
url = f"/api/{settings.API_VERSION}/user-reconciliations/active/{conf_id}/"
resp = client.get(url)
assert resp.status_code == 200
assert resp.json() == {"detail": "Confirmation received"}
rec.refresh_from_db()
assert rec.active_email_checked is True
def test_reconciliation_confirm_view_sets_inactive_checked():
"""GETting the inactive confirmation endpoint should set inactive_email_checked."""
user = factories.UserFactory(email="user.confirm3@example.com")
other = factories.UserFactory(email="user.confirm4@example.com")
rec = models.UserReconciliation.objects.create(
active_email=user.email,
inactive_email=other.email,
active_user=user,
inactive_user=other,
active_email_checked=False,
inactive_email_checked=False,
status="ready",
)
client = APIClient()
conf_id = rec.inactive_email_confirmation_id
url = f"/api/{settings.API_VERSION}/user-reconciliations/inactive/{conf_id}/"
resp = client.get(url)
assert resp.status_code == 200
assert resp.json() == {"detail": "Confirmation received"}
rec.refresh_from_db()
assert rec.inactive_email_checked is True
def test_reconciliation_confirm_view_invalid_user_type_returns_400():
"""GETting with an invalid user_type should return 400."""
client = APIClient()
# Use a valid uuid format but invalid user_type
url = f"/api/{settings.API_VERSION}/user-reconciliations/other/{uuid.uuid4()}/"
resp = client.get(url)
assert resp.status_code == 400
assert resp.json() == {"detail": "Invalid user_type"}
def test_reconciliation_confirm_view_not_found_returns_404():
"""GETting with a non-existing confirmation_id should return 404."""
client = APIClient()
url = f"/api/{settings.API_VERSION}/user-reconciliations/active/{uuid.uuid4()}/"
resp = client.get(url)
assert resp.status_code == 404
assert resp.json() == {"detail": "Reconciliation entry not found"}

View File

@@ -2,8 +2,6 @@
Test users API endpoints in the impress core app.
"""
from django.utils import timezone
import pytest
from rest_framework.test import APIClient
@@ -78,207 +76,12 @@ def test_api_users_list_query_email():
assert user_ids == []
def test_api_users_list_query_email_with_internationalized_domain_names():
"""
Authenticated users should be able to list users and filter by email.
It should work even if the email address contains an internationalized domain name.
"""
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
jean = factories.UserFactory(email="jean.martin@éducation.fr")
marie = factories.UserFactory(email="marie.durand@education.fr")
kurokawa = factories.UserFactory(email="contact@黒川.日本")
response = client.get("/api/v1.0/users/?q=jean.martin@education.fr")
assert response.status_code == 200
user_ids = [user["id"] for user in response.json()]
assert user_ids == [str(jean.id)]
response = client.get("/api/v1.0/users/?q=jean.martin@éducation.fr")
assert response.status_code == 200
user_ids = [user["id"] for user in response.json()]
assert user_ids == [str(jean.id)]
response = client.get("/api/v1.0/users/?q=marie.durand@education.fr")
assert response.status_code == 200
user_ids = [user["id"] for user in response.json()]
assert user_ids == [str(marie.id)]
response = client.get("/api/v1.0/users/?q=marie.durand@éducation.fr")
assert response.status_code == 200
user_ids = [user["id"] for user in response.json()]
assert user_ids == [str(marie.id)]
response = client.get("/api/v1.0/users/?q=contact@黒川.日本")
assert response.status_code == 200
user_ids = [user["id"] for user in response.json()]
assert user_ids == [str(kurokawa.id)]
def test_api_users_list_query_full_name():
"""
Authenticated users should be able to list users and filter by full name.
Only results with a trigram similarity greater than 0.2 to the query should be returned.
"""
user = factories.UserFactory(email="user@example.com")
client = APIClient()
client.force_login(user)
dave = factories.UserFactory(email="contact@example.com", full_name="David Bowman")
response = client.get(
"/api/v1.0/users/?q=David",
)
assert response.status_code == 200
user_ids = [user["id"] for user in response.json()]
assert user_ids == [str(dave.id)]
response = client.get("/api/v1.0/users/?q=Bowman")
assert response.status_code == 200
user_ids = [user["id"] for user in response.json()]
assert user_ids == [str(dave.id)]
response = client.get("/api/v1.0/users/?q=bowman")
assert response.status_code == 200
user_ids = [user["id"] for user in response.json()]
assert user_ids == [str(dave.id)]
response = client.get("/api/v1.0/users/?q=BOWMAN")
assert response.status_code == 200
user_ids = [user["id"] for user in response.json()]
assert user_ids == [str(dave.id)]
response = client.get("/api/v1.0/users/?q=BoWmAn")
assert response.status_code == 200
user_ids = [user["id"] for user in response.json()]
assert user_ids == [str(dave.id)]
response = client.get("/api/v1.0/users/?q=Bovin")
assert response.status_code == 200
user_ids = [user["id"] for user in response.json()]
assert user_ids == []
def test_api_users_list_query_accented_full_name():
"""
Authenticated users should be able to list users and filter by full name with accents.
Only results with a trigram similarity greater than 0.2 to the query should be returned.
"""
user = factories.UserFactory(email="user@example.com")
client = APIClient()
client.force_login(user)
fred = factories.UserFactory(
email="contact@example.com", full_name="Frédérique Lefèvre"
)
response = client.get("/api/v1.0/users/?q=Frédérique")
assert response.status_code == 200
user_ids = [user["id"] for user in response.json()]
assert user_ids == [str(fred.id)]
response = client.get("/api/v1.0/users/?q=Frederique")
assert response.status_code == 200
user_ids = [user["id"] for user in response.json()]
assert user_ids == [str(fred.id)]
response = client.get("/api/v1.0/users/?q=Lefèvre")
assert response.status_code == 200
user_ids = [user["id"] for user in response.json()]
assert user_ids == [str(fred.id)]
response = client.get("/api/v1.0/users/?q=Lefevre")
assert response.status_code == 200
user_ids = [user["id"] for user in response.json()]
assert user_ids == [str(fred.id)]
response = client.get("/api/v1.0/users/?q=François Lorfebvre")
assert response.status_code == 200
users = [user["full_name"] for user in response.json()]
assert users == []
def test_api_users_list_sorted_by_closest_match():
"""
Authenticated users should be able to list users and the results should be
sorted by closest match to the query.
Sorting criteria are:
- Shared documents with the user (most recent first)
- Same full email domain (example.gouv.fr)
Addresses that match neither criterion should be excluded from the results.
Case in point: the logged-in user has recently shared documents
with pierre.dupont@beta.gouv.fr and less recently with pierre.durand@impots.gouv.fr.
Other users named Pierre also exist:
- pierre.thomas@example.com
- pierre.petit@anct.gouv.fr
- pierre.robert@culture.gouv.fr
The search results should be ordered as follows:
# Shared with first
- pierre.dupont@beta.gouv.fr # Most recent first
- pierre.durand@impots.gouv.fr
# Same full domain second
- pierre.petit@anct.gouv.fr
"""
user = factories.UserFactory(
email="martin.bernard@anct.gouv.fr", full_name="Martin Bernard"
)
client = APIClient()
client.force_login(user)
pierre_1 = factories.UserFactory(email="pierre.dupont@beta.gouv.fr")
pierre_2 = factories.UserFactory(email="pierre.durand@impots.gouv.fr")
_pierre_3 = factories.UserFactory(email="pierre.thomas@example.com")
pierre_4 = factories.UserFactory(email="pierre.petit@anct.gouv.fr")
_pierre_5 = factories.UserFactory(email="pierre.robert@culture.gouv.fr")
document_1 = factories.DocumentFactory(creator=user)
document_2 = factories.DocumentFactory(creator=user)
factories.UserDocumentAccessFactory(user=user, document=document_1)
factories.UserDocumentAccessFactory(user=user, document=document_2)
now = timezone.now()
last_week = now - timezone.timedelta(days=7)
last_month = now - timezone.timedelta(days=30)
# The factory cannot set the created_at directly, so we force it after creation
p1_d1 = factories.UserDocumentAccessFactory(user=pierre_1, document=document_1)
p1_d1.created_at = last_week
p1_d1.save()
p2_d2 = factories.UserDocumentAccessFactory(user=pierre_2, document=document_2)
p2_d2.created_at = last_month
p2_d2.save()
response = client.get("/api/v1.0/users/?q=Pierre")
assert response.status_code == 200
user_ids = [user["email"] for user in response.json()]
assert user_ids == [
str(pierre_1.email),
str(pierre_2.email),
str(pierre_4.email),
]
def test_api_users_list_limit(settings):
"""
Authenticated users should be able to list users and the number of results
should be limited to API_USERS_LIST_LIMIT (by default 5).
should be limited to 10.
"""
user = factories.UserFactory(email="user@example.com")
user = factories.UserFactory()
client = APIClient()
client.force_login(user)
@@ -381,16 +184,28 @@ def test_api_users_list_query_email_exclude_doc_user():
def test_api_users_list_query_short_queries():
"""
If API_USERS_SEARCH_QUERY_MIN_LENGTH is not set, the default minimum length should be 3.
Queries shorter than 5 characters should return an empty result set.
"""
user = factories.UserFactory(email="paul@example.com", full_name="Paul")
user = factories.UserFactory(email="paul@example.com")
client = APIClient()
client.force_login(user)
factories.UserFactory(email="john.doe@example.com", full_name="John Doe")
factories.UserFactory(email="john.lennon@example.com", full_name="John Lennon")
factories.UserFactory(email="john.doe@example.com")
factories.UserFactory(email="john.lennon@example.com")
response = client.get("/api/v1.0/users/?q=joh")
response = client.get("/api/v1.0/users/?q=jo")
assert response.status_code == 400
assert response.json() == {
"q": ["Ensure this value has at least 5 characters (it has 2)."]
}
response = client.get("/api/v1.0/users/?q=john")
assert response.status_code == 400
assert response.json() == {
"q": ["Ensure this value has at least 5 characters (it has 4)."]
}
response = client.get("/api/v1.0/users/?q=john.")
assert response.status_code == 200
assert len(response.json()) == 2
@@ -416,7 +231,7 @@ def test_api_users_list_query_long_queries():
def test_api_users_list_query_inactive():
"""Inactive users should not be listed."""
user = factories.UserFactory(email="user@example.com")
user = factories.UserFactory()
client = APIClient()
client.force_login(user)

View File

@@ -1021,51 +1021,7 @@ def test_models_documents__email_invitation__success():
f"Test Sender (sender@example.com) invited you with the role &quot;editor&quot; "
f"on the following document: {document.title}" in email_content
)
assert (
f"docs/{document.id}/?utm_source=docssharelink&amp;utm_campaign={document.id}"
in email_content
)
@pytest.mark.parametrize(
"email_url_app",
[
"https://test-example.com", # Test with EMAIL_URL_APP set
None, # Test fallback to Site domain
],
)
def test_models_documents__email_invitation__url_app_param(email_url_app):
"""
Test that email invitation uses EMAIL_URL_APP when set, or falls back to Site domain.
"""
with override_settings(EMAIL_URL_APP=email_url_app):
document = factories.DocumentFactory()
sender = factories.UserFactory(
full_name="Test Sender", email="sender@example.com"
)
document.send_invitation_email(
"guest@example.com", models.RoleChoices.EDITOR, sender, "en"
)
# pylint: disable-next=no-member
email = mail.outbox[0]
email_content = " ".join(email.body.split())
# Determine expected domain
if email_url_app:
expected_url = (
f"https://test-example.com/docs/{document.id}/"
f"?utm_source=docssharelink&amp;utm_campaign={document.id}"
)
assert expected_url in email_content
else:
# Default Site domain is example.com
expected_url = (
f"example.com/docs/{document.id}/"
f"?utm_source=docssharelink&amp;utm_campaign={document.id}"
)
assert expected_url in email_content
assert f"docs/{document.id}/" in email_content
def test_models_documents__email_invitation__success_empty_title():
@@ -1096,10 +1052,7 @@ def test_models_documents__email_invitation__success_empty_title():
"Test Sender (sender@example.com) invited you with the role &quot;editor&quot; "
"on the following document: Untitled Document" in email_content
)
assert (
f"docs/{document.id}/?utm_source=docssharelink&amp;utm_campaign={document.id}"
in email_content
)
assert f"docs/{document.id}/" in email_content
def test_models_documents__email_invitation__success_fr():
@@ -1134,10 +1087,7 @@ def test_models_documents__email_invitation__success_fr():
f"Test Sender2 (sender2@example.com) vous a invité avec le rôle &quot;propriétaire&quot; "
f"sur le document suivant : {document.title}" in email_content
)
assert (
f"docs/{document.id}/?utm_source=docssharelink&amp;utm_campaign={document.id}"
in email_content
)
assert f"docs/{document.id}/" in email_content
@mock.patch(
@@ -1443,7 +1393,7 @@ def test_models_documents_restore_complex(django_assert_num_queries):
assert child2.ancestors_deleted_at == document.deleted_at
# Restore the item
with django_assert_num_queries(14):
with django_assert_num_queries(13):
document.restore()
document.refresh_from_db()
child1.refresh_from_db()

View File

@@ -1,441 +0,0 @@
"""
Unit tests for the Document model
"""
# pylint: disable=too-many-lines
from operator import itemgetter
from unittest import mock
from django.core.cache import cache
from django.db import transaction
import pytest
from core import factories, models
from core.services.search_indexers import SearchIndexer
pytestmark = pytest.mark.django_db
def reset_batch_indexer_throttle():
"""Reset throttle flag"""
cache.delete("document-batch-indexer-throttle")
@pytest.fixture(autouse=True)
def reset_throttle():
"""Reset throttle flag before each test"""
reset_batch_indexer_throttle()
yield
reset_batch_indexer_throttle()
@mock.patch.object(SearchIndexer, "push")
@pytest.mark.usefixtures("indexer_settings")
@pytest.mark.django_db(transaction=True)
def test_models_documents_post_save_indexer(mock_push):
"""Test indexation task on document creation"""
with transaction.atomic():
doc1, doc2, doc3 = factories.DocumentFactory.create_batch(3)
accesses = {}
data = [call.args[0] for call in mock_push.call_args_list]
indexer = SearchIndexer()
assert len(data) == 1
# One call
assert sorted(data[0], key=itemgetter("id")) == sorted(
[
indexer.serialize_document(doc1, accesses),
indexer.serialize_document(doc2, accesses),
indexer.serialize_document(doc3, accesses),
],
key=itemgetter("id"),
)
# The throttle counters should be reset
assert cache.get("document-batch-indexer-throttle") == 1
@pytest.mark.django_db(transaction=True)
def test_models_documents_post_save_indexer_no_batches(indexer_settings):
"""Test indexation task on doculment creation, no throttle"""
indexer_settings.SEARCH_INDEXER_COUNTDOWN = 0
with mock.patch.object(SearchIndexer, "push") as mock_push:
with transaction.atomic():
doc1, doc2, doc3 = factories.DocumentFactory.create_batch(3)
accesses = {}
data = [call.args[0] for call in mock_push.call_args_list]
indexer = SearchIndexer()
# 3 calls
assert len(data) == 3
# one document per call
assert [len(d) for d in data] == [1] * 3
# all documents are indexed
assert sorted([d[0] for d in data], key=itemgetter("id")) == sorted(
[
indexer.serialize_document(doc1, accesses),
indexer.serialize_document(doc2, accesses),
indexer.serialize_document(doc3, accesses),
],
key=itemgetter("id"),
)
# The throttle counters should be reset
assert cache.get("file-batch-indexer-throttle") is None
@mock.patch.object(SearchIndexer, "push")
@pytest.mark.django_db(transaction=True)
def test_models_documents_post_save_indexer_not_configured(mock_push, indexer_settings):
"""Task should not start an indexation when disabled"""
indexer_settings.SEARCH_INDEXER_CLASS = None
user = factories.UserFactory()
with transaction.atomic():
doc = factories.DocumentFactory()
factories.UserDocumentAccessFactory(document=doc, user=user)
mock_push.assert_not_called()
@mock.patch.object(SearchIndexer, "push")
@pytest.mark.django_db(transaction=True)
def test_models_documents_post_save_indexer_wrongly_configured(
mock_push, indexer_settings
):
"""Task should not start an indexation when disabled"""
indexer_settings.SEARCH_INDEXER_URL = None
user = factories.UserFactory()
with transaction.atomic():
doc = factories.DocumentFactory()
factories.UserDocumentAccessFactory(document=doc, user=user)
mock_push.assert_not_called()
@mock.patch.object(SearchIndexer, "push")
@pytest.mark.usefixtures("indexer_settings")
@pytest.mark.django_db(transaction=True)
def test_models_documents_post_save_indexer_with_accesses(mock_push):
"""Test indexation task on document creation"""
user = factories.UserFactory()
with transaction.atomic():
doc1, doc2, doc3 = factories.DocumentFactory.create_batch(3)
factories.UserDocumentAccessFactory(document=doc1, user=user)
factories.UserDocumentAccessFactory(document=doc2, user=user)
factories.UserDocumentAccessFactory(document=doc3, user=user)
accesses = {
str(doc1.path): {"users": [user.sub]},
str(doc2.path): {"users": [user.sub]},
str(doc3.path): {"users": [user.sub]},
}
data = [call.args[0] for call in mock_push.call_args_list]
indexer = SearchIndexer()
assert len(data) == 1
assert sorted(data[0], key=itemgetter("id")) == sorted(
[
indexer.serialize_document(doc1, accesses),
indexer.serialize_document(doc2, accesses),
indexer.serialize_document(doc3, accesses),
],
key=itemgetter("id"),
)
@mock.patch.object(SearchIndexer, "push")
@pytest.mark.usefixtures("indexer_settings")
@pytest.mark.django_db(transaction=True)
def test_models_documents_post_save_indexer_deleted(mock_push):
"""Indexation task on deleted or ancestor_deleted documents"""
user = factories.UserFactory()
with transaction.atomic():
doc = factories.DocumentFactory(
link_reach=models.LinkReachChoices.AUTHENTICATED
)
main_doc = factories.DocumentFactory(
link_reach=models.LinkReachChoices.AUTHENTICATED
)
child_doc = factories.DocumentFactory(
parent=main_doc,
link_reach=models.LinkReachChoices.AUTHENTICATED,
)
factories.UserDocumentAccessFactory(document=doc, user=user)
factories.UserDocumentAccessFactory(document=main_doc, user=user)
factories.UserDocumentAccessFactory(document=child_doc, user=user)
# Manually reset the throttle flag here or the next indexation will be ignored for 1 second
reset_batch_indexer_throttle()
with transaction.atomic():
main_doc_deleted = models.Document.objects.get(pk=main_doc.pk)
main_doc_deleted.soft_delete()
child_doc_deleted = models.Document.objects.get(pk=child_doc.pk)
main_doc_deleted.refresh_from_db()
child_doc_deleted.refresh_from_db()
assert main_doc_deleted.deleted_at is not None
assert main_doc_deleted.ancestors_deleted_at is not None
assert child_doc_deleted.deleted_at is None
assert child_doc_deleted.ancestors_deleted_at is not None
accesses = {
str(doc.path): {"users": [user.sub]},
str(main_doc_deleted.path): {"users": [user.sub]},
str(child_doc_deleted.path): {"users": [user.sub]},
}
data = [call.args[0] for call in mock_push.call_args_list]
indexer = SearchIndexer()
assert len(data) == 2
# First indexation on document creation
assert sorted(data[0], key=itemgetter("id")) == sorted(
[
indexer.serialize_document(doc, accesses),
indexer.serialize_document(main_doc, accesses),
indexer.serialize_document(child_doc, accesses),
],
key=itemgetter("id"),
)
# Even deleted items are re-indexed: only their status will be updated in the future
assert sorted(data[1], key=itemgetter("id")) == sorted(
[
indexer.serialize_document(main_doc_deleted, accesses), # soft_delete()
indexer.serialize_document(child_doc_deleted, accesses),
],
key=itemgetter("id"),
)
@pytest.mark.django_db(transaction=True)
@pytest.mark.usefixtures("indexer_settings")
def test_models_documents_indexer_hard_deleted():
"""Indexation task on hard deleted document"""
user = factories.UserFactory()
with transaction.atomic():
doc = factories.DocumentFactory(
link_reach=models.LinkReachChoices.AUTHENTICATED
)
factories.UserDocumentAccessFactory(document=doc, user=user)
# Call task on deleted document.
with mock.patch.object(SearchIndexer, "push") as mock_push:
doc.delete()
# Hard deleted documents are not re-indexed.
mock_push.assert_not_called()
@mock.patch.object(SearchIndexer, "push")
@pytest.mark.usefixtures("indexer_settings")
@pytest.mark.django_db(transaction=True)
def test_models_documents_post_save_indexer_restored(mock_push):
"""Restart indexation task on restored documents"""
user = factories.UserFactory()
with transaction.atomic():
doc = factories.DocumentFactory(
link_reach=models.LinkReachChoices.AUTHENTICATED
)
doc_deleted = factories.DocumentFactory(
link_reach=models.LinkReachChoices.AUTHENTICATED
)
doc_ancestor_deleted = factories.DocumentFactory(
parent=doc_deleted,
link_reach=models.LinkReachChoices.AUTHENTICATED,
)
factories.UserDocumentAccessFactory(document=doc, user=user)
factories.UserDocumentAccessFactory(document=doc_deleted, user=user)
factories.UserDocumentAccessFactory(document=doc_ancestor_deleted, user=user)
doc_deleted.soft_delete()
doc_deleted.refresh_from_db()
doc_ancestor_deleted.refresh_from_db()
assert doc_deleted.deleted_at is not None
assert doc_deleted.ancestors_deleted_at is not None
assert doc_ancestor_deleted.deleted_at is None
assert doc_ancestor_deleted.ancestors_deleted_at is not None
# Manually reset the throttle flag here or the next indexation will be ignored for 1 second
reset_batch_indexer_throttle()
with transaction.atomic():
doc_restored = models.Document.objects.get(pk=doc_deleted.pk)
doc_restored.restore()
doc_ancestor_restored = models.Document.objects.get(pk=doc_ancestor_deleted.pk)
assert doc_restored.deleted_at is None
assert doc_restored.ancestors_deleted_at is None
assert doc_ancestor_restored.deleted_at is None
assert doc_ancestor_restored.ancestors_deleted_at is None
accesses = {
str(doc.path): {"users": [user.sub]},
str(doc_deleted.path): {"users": [user.sub]},
str(doc_ancestor_deleted.path): {"users": [user.sub]},
}
data = [call.args[0] for call in mock_push.call_args_list]
indexer = SearchIndexer()
# All docs are re-indexed
assert len(data) == 2
# First indexation on items creation & soft delete (in the same transaction)
assert sorted(data[0], key=itemgetter("id")) == sorted(
[
indexer.serialize_document(doc, accesses),
indexer.serialize_document(doc_deleted, accesses),
indexer.serialize_document(doc_ancestor_deleted, accesses),
],
key=itemgetter("id"),
)
# Restored items are re-indexed: only their status will be updated in the future
assert sorted(data[1], key=itemgetter("id")) == sorted(
[
indexer.serialize_document(doc_restored, accesses), # restore()
indexer.serialize_document(doc_ancestor_restored, accesses),
],
key=itemgetter("id"),
)
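# A minimal sketch of the batch throttle the tests below rely on (an assumption
# about the post_save wiring, not code from this file): while a batch push is
# pending, a cache flag keyed on "document-batch-indexer-throttle" suppresses
# further pushes, roughly:
#
#     if cache.get("document-batch-indexer-throttle"):
#         return  # a batch is already scheduled, skip this push
#     cache.set("document-batch-indexer-throttle", 1, timeout=countdown)
#
# and reset_batch_indexer_throttle(), used throughout this file, presumably
# just deletes that cache key.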
@pytest.mark.django_db(transaction=True)
@pytest.mark.usefixtures("indexer_settings")
def test_models_documents_post_save_indexer_throttle():
"""Test indexation task skipping on document update"""
indexer = SearchIndexer()
user = factories.UserFactory()
with mock.patch.object(SearchIndexer, "push"):
with transaction.atomic():
docs = factories.DocumentFactory.create_batch(5, users=(user,))
accesses = {str(item.path): {"users": [user.sub]} for item in docs}
with mock.patch.object(SearchIndexer, "push") as mock_push:
# Simulate 1 running task
cache.set("document-batch-indexer-throttle", 1)
# save doc to trigger the indexer, but nothing should be done since
# the flag is up
with transaction.atomic():
docs[0].save()
docs[2].save()
docs[3].save()
assert [call.args[0] for call in mock_push.call_args_list] == []
with mock.patch.object(SearchIndexer, "push") as mock_push:
# No waiting task
cache.delete("document-batch-indexer-throttle")
with transaction.atomic():
docs[0].save()
docs[2].save()
docs[3].save()
data = [call.args[0] for call in mock_push.call_args_list]
# One call
assert len(data) == 1
assert sorted(data[0], key=itemgetter("id")) == sorted(
[
indexer.serialize_document(docs[0], accesses),
indexer.serialize_document(docs[2], accesses),
indexer.serialize_document(docs[3], accesses),
],
key=itemgetter("id"),
)
@pytest.mark.django_db(transaction=True)
@pytest.mark.usefixtures("indexer_settings")
def test_models_documents_access_post_save_indexer():
"""Test indexation task on DocumentAccess update"""
users = factories.UserFactory.create_batch(3)
with mock.patch.object(SearchIndexer, "push"):
with transaction.atomic():
doc = factories.DocumentFactory(users=users)
doc_accesses = models.DocumentAccess.objects.filter(document=doc).order_by(
"user__sub"
)
reset_batch_indexer_throttle()
with mock.patch.object(SearchIndexer, "push") as mock_push:
with transaction.atomic():
for doc_access in doc_accesses:
doc_access.save()
data = [call.args[0] for call in mock_push.call_args_list]
# One call
assert len(data) == 1
assert [d["id"] for d in data[0]] == [str(doc.pk)]
@pytest.mark.django_db(transaction=True)
def test_models_items_access_post_save_indexer_no_throttle(indexer_settings):
"""Test indexation task on ItemAccess update, no throttle"""
indexer_settings.SEARCH_INDEXER_COUNTDOWN = 0
users = factories.UserFactory.create_batch(3)
with transaction.atomic():
doc = factories.DocumentFactory(users=users)
doc_accesses = models.DocumentAccess.objects.filter(document=doc).order_by(
"user__sub"
)
reset_batch_indexer_throttle()
with mock.patch.object(SearchIndexer, "push") as mock_push:
with transaction.atomic():
for doc_access in doc_accesses:
doc_access.save()
data = [call.args[0] for call in mock_push.call_args_list]
# 3 calls
assert len(data) == 3
# one document per call
assert [len(d) for d in data] == [1] * 3
# the same document is indexed 3 times
assert [d[0]["id"] for d in data] == [str(doc.pk)] * 3

View File

@@ -0,0 +1,419 @@
"""
Unit tests for the TemplateAccess model
"""
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import ValidationError
import pytest
from core import factories
pytestmark = pytest.mark.django_db
def test_models_template_accesses_str():
"""
The str representation should include user email, template title and role.
"""
user = factories.UserFactory(email="david.bowman@example.com")
access = factories.UserTemplateAccessFactory(
role="reader",
user=user,
template__title="admins",
)
assert str(access) == "david.bowman@example.com is reader in template admins"
def test_models_template_accesses_unique_user():
"""Template accesses should be unique for a given couple of user and template."""
access = factories.UserTemplateAccessFactory()
with pytest.raises(
ValidationError,
match="This user is already in this template.",
):
factories.UserTemplateAccessFactory(user=access.user, template=access.template)
def test_models_template_accesses_several_empty_teams():
"""A template can have several template accesses with an empty team."""
access = factories.UserTemplateAccessFactory()
factories.UserTemplateAccessFactory(template=access.template)
def test_models_template_accesses_unique_team():
"""Template accesses should be unique for a given couple of team and template."""
access = factories.TeamTemplateAccessFactory()
with pytest.raises(
ValidationError,
match="This team is already in this template.",
):
factories.TeamTemplateAccessFactory(team=access.team, template=access.template)
def test_models_template_accesses_several_null_users():
"""A template can have several template accesses with a null user."""
access = factories.TeamTemplateAccessFactory()
factories.TeamTemplateAccessFactory(template=access.template)
def test_models_template_accesses_user_and_team_set():
"""User and team can't both be set on a template access."""
with pytest.raises(
ValidationError,
match="Either user or team must be set, not both.",
):
factories.UserTemplateAccessFactory(team="my-team")
def test_models_template_accesses_user_and_team_empty():
"""User and team can't both be empty on a template access."""
with pytest.raises(
ValidationError,
match="Either user or team must be set, not both.",
):
factories.UserTemplateAccessFactory(user=None)
# get_abilities
def test_models_template_access_get_abilities_anonymous():
"""Check abilities returned for an anonymous user."""
access = factories.UserTemplateAccessFactory()
abilities = access.get_abilities(AnonymousUser())
assert abilities == {
"destroy": False,
"retrieve": False,
"update": False,
"partial_update": False,
"set_role_to": [],
}
def test_models_template_access_get_abilities_authenticated():
"""Check abilities returned for an authenticated user."""
access = factories.UserTemplateAccessFactory()
user = factories.UserFactory()
abilities = access.get_abilities(user)
assert abilities == {
"destroy": False,
"retrieve": False,
"update": False,
"partial_update": False,
"set_role_to": [],
}
# - for owner
def test_models_template_access_get_abilities_for_owner_of_self_allowed():
"""
Check abilities of self access for the owner of a template when
there is more than one owner left.
"""
access = factories.UserTemplateAccessFactory(role="owner")
factories.UserTemplateAccessFactory(template=access.template, role="owner")
abilities = access.get_abilities(access.user)
assert abilities == {
"destroy": True,
"retrieve": True,
"update": True,
"partial_update": True,
"set_role_to": ["administrator", "editor", "reader"],
}
def test_models_template_access_get_abilities_for_owner_of_self_last():
"""
Check abilities of self access for the owner of a template when there is only one owner left.
"""
access = factories.UserTemplateAccessFactory(role="owner")
abilities = access.get_abilities(access.user)
assert abilities == {
"destroy": False,
"retrieve": True,
"update": False,
"partial_update": False,
"set_role_to": [],
}
def test_models_template_access_get_abilities_for_owner_of_owner():
"""Check abilities of owner access for the owner of a template."""
access = factories.UserTemplateAccessFactory(role="owner")
factories.UserTemplateAccessFactory(template=access.template) # another one
user = factories.UserTemplateAccessFactory(
template=access.template, role="owner"
).user
abilities = access.get_abilities(user)
assert abilities == {
"destroy": True,
"retrieve": True,
"update": True,
"partial_update": True,
"set_role_to": ["administrator", "editor", "reader"],
}
def test_models_template_access_get_abilities_for_owner_of_administrator():
"""Check abilities of administrator access for the owner of a template."""
access = factories.UserTemplateAccessFactory(role="administrator")
factories.UserTemplateAccessFactory(template=access.template) # another one
user = factories.UserTemplateAccessFactory(
template=access.template, role="owner"
).user
abilities = access.get_abilities(user)
assert abilities == {
"destroy": True,
"retrieve": True,
"update": True,
"partial_update": True,
"set_role_to": ["owner", "editor", "reader"],
}
def test_models_template_access_get_abilities_for_owner_of_editor():
"""Check abilities of editor access for the owner of a template."""
access = factories.UserTemplateAccessFactory(role="editor")
factories.UserTemplateAccessFactory(template=access.template) # another one
user = factories.UserTemplateAccessFactory(
template=access.template, role="owner"
).user
abilities = access.get_abilities(user)
assert abilities == {
"destroy": True,
"retrieve": True,
"update": True,
"partial_update": True,
"set_role_to": ["owner", "administrator", "reader"],
}
def test_models_template_access_get_abilities_for_owner_of_reader():
"""Check abilities of reader access for the owner of a template."""
access = factories.UserTemplateAccessFactory(role="reader")
factories.UserTemplateAccessFactory(template=access.template) # another one
user = factories.UserTemplateAccessFactory(
template=access.template, role="owner"
).user
abilities = access.get_abilities(user)
assert abilities == {
"destroy": True,
"retrieve": True,
"update": True,
"partial_update": True,
"set_role_to": ["owner", "administrator", "editor"],
}
# - for administrator
def test_models_template_access_get_abilities_for_administrator_of_owner():
"""Check abilities of owner access for the administrator of a template."""
access = factories.UserTemplateAccessFactory(role="owner")
factories.UserTemplateAccessFactory(template=access.template) # another one
user = factories.UserTemplateAccessFactory(
template=access.template, role="administrator"
).user
abilities = access.get_abilities(user)
assert abilities == {
"destroy": False,
"retrieve": True,
"update": False,
"partial_update": False,
"set_role_to": [],
}
def test_models_template_access_get_abilities_for_administrator_of_administrator():
"""Check abilities of administrator access for the administrator of a template."""
access = factories.UserTemplateAccessFactory(role="administrator")
factories.UserTemplateAccessFactory(template=access.template) # another one
user = factories.UserTemplateAccessFactory(
template=access.template, role="administrator"
).user
abilities = access.get_abilities(user)
assert abilities == {
"destroy": True,
"retrieve": True,
"update": True,
"partial_update": True,
"set_role_to": ["editor", "reader"],
}
def test_models_template_access_get_abilities_for_administrator_of_editor():
"""Check abilities of editor access for the administrator of a template."""
access = factories.UserTemplateAccessFactory(role="editor")
factories.UserTemplateAccessFactory(template=access.template) # another one
user = factories.UserTemplateAccessFactory(
template=access.template, role="administrator"
).user
abilities = access.get_abilities(user)
assert abilities == {
"destroy": True,
"retrieve": True,
"update": True,
"partial_update": True,
"set_role_to": ["administrator", "reader"],
}
def test_models_template_access_get_abilities_for_administrator_of_reader():
"""Check abilities of reader access for the administrator of a template."""
access = factories.UserTemplateAccessFactory(role="reader")
factories.UserTemplateAccessFactory(template=access.template) # another one
user = factories.UserTemplateAccessFactory(
template=access.template, role="administrator"
).user
abilities = access.get_abilities(user)
assert abilities == {
"destroy": True,
"retrieve": True,
"update": True,
"partial_update": True,
"set_role_to": ["administrator", "editor"],
}
# - For editor
def test_models_template_access_get_abilities_for_editor_of_owner():
"""Check abilities of owner access for the editor of a template."""
access = factories.UserTemplateAccessFactory(role="owner")
factories.UserTemplateAccessFactory(template=access.template) # another one
user = factories.UserTemplateAccessFactory(
template=access.template, role="editor"
).user
abilities = access.get_abilities(user)
assert abilities == {
"destroy": False,
"retrieve": True,
"update": False,
"partial_update": False,
"set_role_to": [],
}
def test_models_template_access_get_abilities_for_editor_of_administrator():
"""Check abilities of administrator access for the editor of a template."""
access = factories.UserTemplateAccessFactory(role="administrator")
factories.UserTemplateAccessFactory(template=access.template) # another one
user = factories.UserTemplateAccessFactory(
template=access.template, role="editor"
).user
abilities = access.get_abilities(user)
assert abilities == {
"destroy": False,
"retrieve": True,
"update": False,
"partial_update": False,
"set_role_to": [],
}
def test_models_template_access_get_abilities_for_editor_of_editor_user(
django_assert_num_queries,
):
"""Check abilities of editor access for the editor of a template."""
access = factories.UserTemplateAccessFactory(role="editor")
factories.UserTemplateAccessFactory(template=access.template) # another one
user = factories.UserTemplateAccessFactory(
template=access.template, role="editor"
).user
with django_assert_num_queries(1):
abilities = access.get_abilities(user)
assert abilities == {
"destroy": False,
"retrieve": True,
"update": False,
"partial_update": False,
"set_role_to": [],
}
# - For reader
def test_models_template_access_get_abilities_for_reader_of_owner():
"""Check abilities of owner access for the reader of a template."""
access = factories.UserTemplateAccessFactory(role="owner")
factories.UserTemplateAccessFactory(template=access.template) # another one
user = factories.UserTemplateAccessFactory(
template=access.template, role="reader"
).user
abilities = access.get_abilities(user)
assert abilities == {
"destroy": False,
"retrieve": True,
"update": False,
"partial_update": False,
"set_role_to": [],
}
def test_models_template_access_get_abilities_for_reader_of_administrator():
"""Check abilities of administrator access for the reader of a template."""
access = factories.UserTemplateAccessFactory(role="administrator")
factories.UserTemplateAccessFactory(template=access.template) # another one
user = factories.UserTemplateAccessFactory(
template=access.template, role="reader"
).user
abilities = access.get_abilities(user)
assert abilities == {
"destroy": False,
"retrieve": True,
"update": False,
"partial_update": False,
"set_role_to": [],
}
def test_models_template_access_get_abilities_for_reader_of_reader_user(
django_assert_num_queries,
):
"""Check abilities of reader access for the reader of a template."""
access = factories.UserTemplateAccessFactory(role="reader")
factories.UserTemplateAccessFactory(template=access.template) # another one
user = factories.UserTemplateAccessFactory(
template=access.template, role="reader"
).user
with django_assert_num_queries(1):
abilities = access.get_abilities(user)
assert abilities == {
"destroy": False,
"retrieve": True,
"update": False,
"partial_update": False,
"set_role_to": [],
}
def test_models_template_access_get_abilities_preset_role(django_assert_num_queries):
"""No query is done if the role is preset, e.g., with a query annotation."""
access = factories.UserTemplateAccessFactory(role="reader")
user = factories.UserTemplateAccessFactory(
template=access.template, role="reader"
).user
access.user_roles = ["reader"]
with django_assert_num_queries(0):
abilities = access.get_abilities(user)
assert abilities == {
"destroy": False,
"retrieve": True,
"update": False,
"partial_update": False,
"set_role_to": [],
}
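# Summary of the matrix exercised above (inferred from the assertions, not from
# the model code): an owner can manage every access and set any other role; an
# administrator can manage non-owner accesses and assign roles up to
# administrator; editors and readers can only retrieve; and the last remaining
# owner cannot delete or downgrade their own access.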

View File

@@ -0,0 +1,187 @@
"""
Unit tests for the Template model
"""
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import ValidationError
import pytest
from core import factories, models
pytestmark = pytest.mark.django_db
def test_models_templates_str():
"""The str representation should be the title of the template."""
template = factories.TemplateFactory(title="admins")
assert str(template) == "admins"
def test_models_templates_id_unique():
"""The "id" field should be unique."""
template = factories.TemplateFactory()
with pytest.raises(ValidationError, match="Template with this Id already exists."):
factories.TemplateFactory(id=template.id)
def test_models_templates_title_null():
"""The "title" field should not be null."""
with pytest.raises(ValidationError, match="This field cannot be null."):
models.Template.objects.create(title=None)
def test_models_templates_title_empty():
"""The "title" field should not be empty."""
with pytest.raises(ValidationError, match="This field cannot be blank."):
models.Template.objects.create(title="")
def test_models_templates_title_max_length():
"""The "title" field should be 100 characters maximum."""
factories.TemplateFactory(title="a" * 255)
with pytest.raises(
ValidationError,
match=r"Ensure this value has at most 255 characters \(it has 256\)\.",
):
factories.TemplateFactory(title="a" * 256)
# get_abilities
def test_models_templates_get_abilities_anonymous_public():
"""Check abilities returned for an anonymous user if the template is public."""
template = factories.TemplateFactory(is_public=True)
abilities = template.get_abilities(AnonymousUser())
assert abilities == {
"destroy": False,
"retrieve": True,
"update": False,
"accesses_manage": False,
"partial_update": False,
"generate_document": True,
}
def test_models_templates_get_abilities_anonymous_not_public():
"""Check abilities returned for an anonymous user if the template is private."""
template = factories.TemplateFactory(is_public=False)
abilities = template.get_abilities(AnonymousUser())
assert abilities == {
"destroy": False,
"retrieve": False,
"update": False,
"accesses_manage": False,
"partial_update": False,
"generate_document": False,
}
def test_models_templates_get_abilities_authenticated_public():
"""Check abilities returned for an authenticated user if the user is public."""
template = factories.TemplateFactory(is_public=True)
abilities = template.get_abilities(factories.UserFactory())
assert abilities == {
"destroy": False,
"retrieve": True,
"update": False,
"accesses_manage": False,
"partial_update": False,
"generate_document": True,
}
def test_models_templates_get_abilities_authenticated_not_public():
"""Check abilities returned for an authenticated user if the template is private."""
template = factories.TemplateFactory(is_public=False)
abilities = template.get_abilities(factories.UserFactory())
assert abilities == {
"destroy": False,
"retrieve": False,
"update": False,
"accesses_manage": False,
"partial_update": False,
"generate_document": False,
}
def test_models_templates_get_abilities_owner():
"""Check abilities returned for the owner of a template."""
user = factories.UserFactory()
access = factories.UserTemplateAccessFactory(role="owner", user=user)
abilities = access.template.get_abilities(access.user)
assert abilities == {
"destroy": True,
"retrieve": True,
"update": True,
"accesses_manage": True,
"partial_update": True,
"generate_document": True,
}
def test_models_templates_get_abilities_administrator():
"""Check abilities returned for the administrator of a template."""
access = factories.UserTemplateAccessFactory(role="administrator")
abilities = access.template.get_abilities(access.user)
assert abilities == {
"destroy": False,
"retrieve": True,
"update": True,
"accesses_manage": True,
"partial_update": True,
"generate_document": True,
}
def test_models_templates_get_abilities_editor_user(django_assert_num_queries):
"""Check abilities returned for the editor of a template."""
access = factories.UserTemplateAccessFactory(role="editor")
with django_assert_num_queries(1):
abilities = access.template.get_abilities(access.user)
assert abilities == {
"destroy": False,
"retrieve": True,
"update": True,
"accesses_manage": False,
"partial_update": True,
"generate_document": True,
}
def test_models_templates_get_abilities_reader_user(django_assert_num_queries):
"""Check abilities returned for the reader of a template."""
access = factories.UserTemplateAccessFactory(role="reader")
with django_assert_num_queries(1):
abilities = access.template.get_abilities(access.user)
assert abilities == {
"destroy": False,
"retrieve": True,
"update": False,
"accesses_manage": False,
"partial_update": False,
"generate_document": True,
}
def test_models_templates_get_abilities_preset_role(django_assert_num_queries):
"""No query is done if the role is preset e.g. with query annotation."""
access = factories.UserTemplateAccessFactory(role="reader")
access.template.user_roles = ["reader"]
with django_assert_num_queries(0):
abilities = access.template.get_abilities(access.user)
assert abilities == {
"destroy": False,
"retrieve": True,
"update": False,
"accesses_manage": False,
"partial_update": False,
"generate_document": True,
}
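# Inferred from the assertions above: public templates can be retrieved and
# used to generate documents by anyone, while private ones require an access;
# only owners may destroy; owners and administrators manage accesses and
# update; editors may update but not manage accesses; readers may only
# retrieve and generate documents.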

View File

@@ -1,669 +0,0 @@
"""
Unit tests for the UserReconciliationCsvImport model
"""
import uuid
from pathlib import Path
from django.core import mail
from django.core.files.base import ContentFile
import pytest
from core import factories, models
from core.admin import process_reconciliation
from core.tasks.user_reconciliation import user_reconciliation_csv_import_job
pytestmark = pytest.mark.django_db
@pytest.fixture(name="import_example_csv_basic")
def fixture_import_example_csv_basic():
"""
Import an example CSV file for user reconciliation
and return the created import object.
"""
# Create users referenced in the CSV
for i in range(40, 50):
factories.UserFactory(email=f"user.test{i}@example.com")
example_csv_path = Path(__file__).parent / "data/example_reconciliation_basic.csv"
with open(example_csv_path, "rb") as f:
csv_file = ContentFile(f.read(), name="example_reconciliation_basic.csv")
csv_import = models.UserReconciliationCsvImport(file=csv_file)
csv_import.save()
return csv_import
@pytest.fixture(name="import_example_csv_grist_form")
def fixture_import_example_csv_grist_form():
"""
Import an example CSV file for user reconciliation
and return the created import object.
"""
# Create users referenced in the CSV
for i in range(10, 40):
factories.UserFactory(email=f"user.test{i}@example.com")
example_csv_path = (
Path(__file__).parent / "data/example_reconciliation_grist_form.csv"
)
with open(example_csv_path, "rb") as f:
csv_file = ContentFile(f.read(), name="example_reconciliation_grist_form.csv")
csv_import = models.UserReconciliationCsvImport(file=csv_file)
csv_import.save()
return csv_import
def test_user_reconciliation_csv_import_entry_is_created(import_example_csv_basic):
"""Test that a UserReconciliationCsvImport entry is created correctly."""
assert import_example_csv_basic.status == "pending"
assert import_example_csv_basic.file.name.endswith(
"example_reconciliation_basic.csv"
)
def test_user_reconciliation_csv_import_entry_is_created_grist_form(
import_example_csv_grist_form,
):
"""Test that a UserReconciliationCsvImport entry is created correctly."""
assert import_example_csv_grist_form.status == "pending"
assert import_example_csv_grist_form.file.name.endswith(
"example_reconciliation_grist_form.csv"
)
def test_incorrect_csv_format_handling():
"""Test that an incorrectly formatted CSV file is handled gracefully."""
example_csv_path = (
Path(__file__).parent / "data/example_reconciliation_missing_column.csv"
)
with open(example_csv_path, "rb") as f:
csv_file = ContentFile(
f.read(), name="example_reconciliation_missing_column.csv"
)
csv_import = models.UserReconciliationCsvImport(file=csv_file)
csv_import.save()
assert csv_import.status == "pending"
user_reconciliation_csv_import_job(csv_import.id)
csv_import.refresh_from_db()
assert (
"CSV is missing mandatory columns: active_email, inactive_email, id"
in csv_import.logs
)
assert csv_import.status == "error"
def test_incorrect_email_format_handling():
"""Test that an incorrectly formatted CSV file is handled gracefully."""
example_csv_path = Path(__file__).parent / "data/example_reconciliation_error.csv"
with open(example_csv_path, "rb") as f:
csv_file = ContentFile(f.read(), name="example_reconciliation_error.csv")
csv_import = models.UserReconciliationCsvImport(file=csv_file)
csv_import.save()
assert csv_import.status == "pending"
user_reconciliation_csv_import_job(csv_import.id)
csv_import.refresh_from_db()
assert "Invalid inactive email address on row 40" in csv_import.logs
assert csv_import.status == "done"
# pylint: disable-next=no-member
assert len(mail.outbox) == 1
# pylint: disable-next=no-member
email = mail.outbox[0]
assert email.to == ["user.test40@example.com"]
email_content = " ".join(email.body.split())
assert "Reconciliation of your Docs accounts not completed" in email_content
def test_incorrect_csv_data_handling_grist_form():
"""Test that a CSV file with incorrect data is handled gracefully."""
example_csv_path = (
Path(__file__).parent / "data/example_reconciliation_grist_form_error.csv"
)
with open(example_csv_path, "rb") as f:
csv_file = ContentFile(
f.read(), name="example_reconciliation_grist_form_error.csv"
)
csv_import = models.UserReconciliationCsvImport(file=csv_file)
csv_import.save()
assert csv_import.status == "pending"
user_reconciliation_csv_import_job(csv_import.id)
csv_import.refresh_from_db()
assert (
"user.test20@example.com set as both active and inactive email"
in csv_import.logs
)
assert csv_import.status == "done"
def test_job_creates_reconciliation_entries(import_example_csv_basic):
"""Test that the CSV import job creates UserReconciliation entries."""
assert import_example_csv_basic.status == "pending"
user_reconciliation_csv_import_job(import_example_csv_basic.id)
# Verify the job status changed
import_example_csv_basic.refresh_from_db()
assert import_example_csv_basic.status == "done"
assert "Import completed successfully." in import_example_csv_basic.logs
assert "6 rows processed." in import_example_csv_basic.logs
assert "5 reconciliation entries created." in import_example_csv_basic.logs
# Verify reconciliation entries were created
reconciliations = models.UserReconciliation.objects.all()
assert reconciliations.count() == 5
def test_job_does_not_create_duplicated_reconciliation_entries(
import_example_csv_basic,
):
"""Test that the CSV import job doesn't create UserReconciliation entries
for source unique IDs that have already been processed."""
_already_created_entry = models.UserReconciliation.objects.create(
active_email="user.test40@example.com",
inactive_email="user.test41@example.com",
active_email_checked=0,
inactive_email_checked=0,
status="pending",
source_unique_id=1,
)
assert import_example_csv_basic.status == "pending"
user_reconciliation_csv_import_job(import_example_csv_basic.id)
# Verify the job status changed
import_example_csv_basic.refresh_from_db()
assert import_example_csv_basic.status == "done"
assert "Import completed successfully." in import_example_csv_basic.logs
assert "6 rows processed." in import_example_csv_basic.logs
assert "4 reconciliation entries created." in import_example_csv_basic.logs
assert "1 rows were already processed." in import_example_csv_basic.logs
# Verify the correct number of reconciliation entries were created
reconciliations = models.UserReconciliation.objects.all()
assert reconciliations.count() == 5
def test_job_creates_reconciliation_entries_grist_form(import_example_csv_grist_form):
"""Test that the CSV import job creates UserReconciliation entries."""
assert import_example_csv_grist_form.status == "pending"
user_reconciliation_csv_import_job(import_example_csv_grist_form.id)
# Verify the job status changed
import_example_csv_grist_form.refresh_from_db()
assert "Import completed successfully" in import_example_csv_grist_form.logs
assert import_example_csv_grist_form.status == "done"
# Verify reconciliation entries were created
reconciliations = models.UserReconciliation.objects.all()
assert reconciliations.count() == 9
def test_csv_import_reconciliation_data_is_correct(import_example_csv_basic):
"""Test that the data in created UserReconciliation entries matches the CSV."""
user_reconciliation_csv_import_job(import_example_csv_basic.id)
reconciliations = models.UserReconciliation.objects.order_by("created_at")
first_entry = reconciliations.first()
assert first_entry.active_email == "user.test40@example.com"
assert first_entry.inactive_email == "user.test41@example.com"
assert first_entry.active_email_checked is False
assert first_entry.inactive_email_checked is False
for rec in reconciliations:
assert rec.status == "ready"
@pytest.fixture(name="user_reconciliation_users_and_docs")
def fixture_user_reconciliation_users_and_docs():
"""Fixture to create two users with overlapping document accesses
for reconciliation tests."""
user_1 = factories.UserFactory(email="user.test1@example.com")
user_2 = factories.UserFactory(email="user.test2@example.com")
# Create 10 distinct document accesses for each user
userdocs_u1 = [
factories.UserDocumentAccessFactory(user=user_1, role="editor")
for _ in range(10)
]
userdocs_u2 = [
factories.UserDocumentAccessFactory(user=user_2, role="editor")
for _ in range(10)
]
# Make the first 3 documents of each list shared with the other user
# with a lower role
for ud in userdocs_u1[0:3]:
factories.UserDocumentAccessFactory(
user=user_2, document=ud.document, role="reader"
)
for ud in userdocs_u2[0:3]:
factories.UserDocumentAccessFactory(
user=user_1, document=ud.document, role="reader"
)
# Make the next 3 documents of each list shared with the other user
# with a higher role
for ud in userdocs_u1[3:6]:
factories.UserDocumentAccessFactory(
user=user_2, document=ud.document, role="owner"
)
for ud in userdocs_u2[3:6]:
factories.UserDocumentAccessFactory(
user=user_1, document=ud.document, role="owner"
)
return (user_1, user_2, userdocs_u1, userdocs_u2)
def test_user_reconciliation_is_created(user_reconciliation_users_and_docs):
"""Test that a UserReconciliation entry can be created and saved."""
user_1, user_2, _userdocs_u1, _userdocs_u2 = user_reconciliation_users_and_docs
rec = models.UserReconciliation.objects.create(
active_email=user_1.email,
inactive_email=user_2.email,
active_email_checked=False,
inactive_email_checked=True,
active_email_confirmation_id=uuid.uuid4(),
inactive_email_confirmation_id=uuid.uuid4(),
status="pending",
)
rec.save()
assert rec.status == "ready"
def test_user_reconciliation_verification_emails_are_sent(
user_reconciliation_users_and_docs,
):
"""Test that both UserReconciliation verification emails are sent."""
user_1, user_2, _userdocs_u1, _userdocs_u2 = user_reconciliation_users_and_docs
rec = models.UserReconciliation.objects.create(
active_email=user_1.email,
inactive_email=user_2.email,
active_email_checked=False,
inactive_email_checked=False,
active_email_confirmation_id=uuid.uuid4(),
inactive_email_confirmation_id=uuid.uuid4(),
status="pending",
)
rec.save()
# pylint: disable-next=no-member
assert len(mail.outbox) == 2
# pylint: disable-next=no-member
email_1 = mail.outbox[0]
assert email_1.to == [user_1.email]
email_1_content = " ".join(email_1.body.split())
assert (
"You have requested a reconciliation of your user accounts on Docs."
in email_1_content
)
active_email_confirmation_id = rec.active_email_confirmation_id
inactive_email_confirmation_id = rec.inactive_email_confirmation_id
assert (
f"user-reconciliations/active/{active_email_confirmation_id}/"
in email_1_content
)
# pylint: disable-next=no-member
email_2 = mail.outbox[1]
assert email_2.to == [user_2.email]
email_2_content = " ".join(email_2.body.split())
assert (
"You have requested a reconciliation of your user accounts on Docs."
in email_2_content
)
assert (
f"user-reconciliations/inactive/{inactive_email_confirmation_id}/"
in email_2_content
)
def test_user_reconciliation_only_starts_if_checks_are_made(
user_reconciliation_users_and_docs,
):
"""Test that the admin action does not process entries
unless both email checks are confirmed.
"""
user_1, user_2, _userdocs_u1, _userdocs_u2 = user_reconciliation_users_and_docs
# Create a reconciliation entry where only one email has been checked
rec = models.UserReconciliation.objects.create(
active_email=user_1.email,
inactive_email=user_2.email,
active_email_checked=True,
inactive_email_checked=False,
status="pending",
)
rec.save()
# Capture counts before running admin action
accesses_before_active = models.DocumentAccess.objects.filter(user=user_1).count()
accesses_before_inactive = models.DocumentAccess.objects.filter(user=user_2).count()
users_active_before = (user_1.is_active, user_2.is_active)
# Call the admin action with the queryset containing our single rec
qs = models.UserReconciliation.objects.filter(id=rec.id)
process_reconciliation(None, None, qs)
# Reload from DB and assert nothing was processed (checks prevent processing)
rec.refresh_from_db()
user_1.refresh_from_db()
user_2.refresh_from_db()
assert rec.status == "ready"
assert (
models.DocumentAccess.objects.filter(user=user_1).count()
== accesses_before_active
)
assert (
models.DocumentAccess.objects.filter(user=user_2).count()
== accesses_before_inactive
)
assert (user_1.is_active, user_2.is_active) == users_active_before
def test_process_reconciliation_updates_accesses(
user_reconciliation_users_and_docs,
):
"""Test that accesses are consolidated on the active user."""
user_1, user_2, userdocs_u1, userdocs_u2 = user_reconciliation_users_and_docs
u1_2 = userdocs_u1[2]
u1_5 = userdocs_u1[5]
u2doc1 = userdocs_u2[1].document
u2doc5 = userdocs_u2[5].document
rec = models.UserReconciliation.objects.create(
active_email=user_1.email,
inactive_email=user_2.email,
active_user=user_1,
inactive_user=user_2,
active_email_checked=True,
inactive_email_checked=True,
status="ready",
)
qs = models.UserReconciliation.objects.filter(id=rec.id)
process_reconciliation(None, None, qs)
rec.refresh_from_db()
user_1.refresh_from_db()
user_2.refresh_from_db()
u1_2.refresh_from_db(
from_queryset=models.DocumentAccess.objects.select_for_update()
)
u1_5.refresh_from_db(
from_queryset=models.DocumentAccess.objects.select_for_update()
)
# After processing, inactive user should have no accesses
# and active user should have one access per union document
# with the highest role
assert rec.status == "done"
assert "Requested update for 10 DocumentAccess items" in rec.logs
assert "and deletion for 12 DocumentAccess items" in rec.logs
assert models.DocumentAccess.objects.filter(user=user_2).count() == 0
assert models.DocumentAccess.objects.filter(user=user_1).count() == 20
assert u1_2.role == "editor"
assert u1_5.role == "owner"
assert (
models.DocumentAccess.objects.filter(user=user_1, document=u2doc1).first().role
== "editor"
)
assert (
models.DocumentAccess.objects.filter(user=user_1, document=u2doc5).first().role
== "owner"
)
assert user_1.is_active is True
assert user_2.is_active is False
# pylint: disable-next=no-member
assert len(mail.outbox) == 1
# pylint: disable-next=no-member
email = mail.outbox[0]
assert email.to == [user_1.email]
email_content = " ".join(email.body.split())
assert "Your accounts have been merged" in email_content
def test_process_reconciliation_updates_linktraces(
user_reconciliation_users_and_docs,
):
"""Test that linktraces are consolidated on the active user."""
user_1, user_2, userdocs_u1, userdocs_u2 = user_reconciliation_users_and_docs
u1_2 = userdocs_u1[2]
u1_5 = userdocs_u1[5]
doc_both = u1_2.document
models.LinkTrace.objects.create(document=doc_both, user=user_1)
models.LinkTrace.objects.create(document=doc_both, user=user_2)
doc_inactive_only = userdocs_u2[4].document
models.LinkTrace.objects.create(
document=doc_inactive_only, user=user_2, is_masked=True
)
doc_active_only = userdocs_u1[4].document
models.LinkTrace.objects.create(document=doc_active_only, user=user_1)
rec = models.UserReconciliation.objects.create(
active_email=user_1.email,
inactive_email=user_2.email,
active_user=user_1,
inactive_user=user_2,
active_email_checked=True,
inactive_email_checked=True,
status="ready",
)
qs = models.UserReconciliation.objects.filter(id=rec.id)
process_reconciliation(None, None, qs)
rec.refresh_from_db()
user_1.refresh_from_db()
user_2.refresh_from_db()
u1_2.refresh_from_db(
from_queryset=models.DocumentAccess.objects.select_for_update()
)
u1_5.refresh_from_db(
from_queryset=models.DocumentAccess.objects.select_for_update()
)
# Inactive user should have no linktraces
assert models.LinkTrace.objects.filter(user=user_2).count() == 0
# doc_both should have a single LinkTrace owned by the active user
assert (
models.LinkTrace.objects.filter(user=user_1, document=doc_both).exists() is True
)
assert models.LinkTrace.objects.filter(user=user_1, document=doc_both).count() == 1
assert (
models.LinkTrace.objects.filter(user=user_2, document=doc_both).exists()
is False
)
# doc_inactive_only should now be linked to active user and preserve is_masked
lt = models.LinkTrace.objects.filter(
user=user_1, document=doc_inactive_only
).first()
assert lt is not None
assert lt.is_masked is True
# doc_active_only should still belong to active user
assert models.LinkTrace.objects.filter(
user=user_1, document=doc_active_only
).exists()
def test_process_reconciliation_updates_threads_comments_reactions(
user_reconciliation_users_and_docs,
):
"""Test that threads, comments and reactions are transferred/deduplicated
on reconciliation."""
user_1, user_2, _userdocs_u1, userdocs_u2 = user_reconciliation_users_and_docs
# Use a document from the inactive user's set
document = userdocs_u2[0].document
# Thread and comment created by inactive user -> should be moved to active
thread = factories.ThreadFactory(document=document, creator=user_2)
comment = factories.CommentFactory(thread=thread, user=user_2)
# Reaction where only inactive user reacted -> should be moved to active user
reaction_inactive_only = factories.ReactionFactory(comment=comment, users=[user_2])
# Reaction where both users reacted -> inactive user's participation should be removed
thread2 = factories.ThreadFactory(document=document, creator=user_1)
comment2 = factories.CommentFactory(thread=thread2, user=user_1)
reaction_both = factories.ReactionFactory(comment=comment2, users=[user_1, user_2])
# Reaction where only active user reacted -> unchanged
thread3 = factories.ThreadFactory(document=document, creator=user_1)
comment3 = factories.CommentFactory(thread=thread3, user=user_1)
reaction_active_only = factories.ReactionFactory(comment=comment3, users=[user_1])
rec = models.UserReconciliation.objects.create(
active_email=user_1.email,
inactive_email=user_2.email,
active_user=user_1,
inactive_user=user_2,
active_email_checked=True,
inactive_email_checked=True,
status="ready",
)
qs = models.UserReconciliation.objects.filter(id=rec.id)
process_reconciliation(None, None, qs)
# Refresh objects
thread.refresh_from_db()
comment.refresh_from_db()
reaction_inactive_only.refresh_from_db()
reaction_both.refresh_from_db()
reaction_active_only.refresh_from_db()
# Thread and comment creator should now be the active user
assert thread.creator == user_1
assert comment.user == user_1
# reaction_inactive_only: inactive user's participation should be removed and
# active user's participation added
reaction_inactive_only.refresh_from_db()
assert not reaction_inactive_only.users.filter(pk=user_2.pk).exists()
assert reaction_inactive_only.users.filter(pk=user_1.pk).exists()
# reaction_both: should end up with only active user's participation
assert reaction_both.users.filter(pk=user_2.pk).exists() is False
assert reaction_both.users.filter(pk=user_1.pk).exists() is True
# reaction_active_only should still have active user's participation
assert reaction_active_only.users.filter(pk=user_1.pk).exists()
def test_process_reconciliation_updates_favorites(
user_reconciliation_users_and_docs,
):
"""Test that favorites are consolidated on the active user."""
user_1, user_2, userdocs_u1, userdocs_u2 = user_reconciliation_users_and_docs
u1_2 = userdocs_u1[2]
u1_5 = userdocs_u1[5]
doc_both = u1_2.document
models.DocumentFavorite.objects.create(document=doc_both, user=user_1)
models.DocumentFavorite.objects.create(document=doc_both, user=user_2)
doc_inactive_only = userdocs_u2[4].document
models.DocumentFavorite.objects.create(document=doc_inactive_only, user=user_2)
doc_active_only = userdocs_u1[4].document
models.DocumentFavorite.objects.create(document=doc_active_only, user=user_1)
rec = models.UserReconciliation.objects.create(
active_email=user_1.email,
inactive_email=user_2.email,
active_user=user_1,
inactive_user=user_2,
active_email_checked=True,
inactive_email_checked=True,
status="ready",
)
qs = models.UserReconciliation.objects.filter(id=rec.id)
process_reconciliation(None, None, qs)
rec.refresh_from_db()
user_1.refresh_from_db()
user_2.refresh_from_db()
u1_2.refresh_from_db(
from_queryset=models.DocumentAccess.objects.select_for_update()
)
u1_5.refresh_from_db(
from_queryset=models.DocumentAccess.objects.select_for_update()
)
# Inactive user should have no document favorites
assert models.DocumentFavorite.objects.filter(user=user_2).count() == 0
# doc_both should have a single DocumentFavorite owned by the active user
assert (
models.DocumentFavorite.objects.filter(user=user_1, document=doc_both).exists()
is True
)
assert (
models.DocumentFavorite.objects.filter(user=user_1, document=doc_both).count()
== 1
)
assert (
models.DocumentFavorite.objects.filter(user=user_2, document=doc_both).exists()
is False
)
# doc_inactive_only should now be linked to active user
assert (
models.DocumentFavorite.objects.filter(
user=user_2, document=doc_inactive_only
).count()
== 0
)
assert models.DocumentFavorite.objects.filter(
user=user_1, document=doc_inactive_only
).exists()
# doc_active_only should still belong to active user
assert models.DocumentFavorite.objects.filter(
user=user_1, document=doc_active_only
).exists()

View File

@@ -2,6 +2,8 @@
Unit tests for the User model
"""
from unittest import mock
from django.core.exceptions import ValidationError
import pytest
@@ -24,6 +26,26 @@ def test_models_users_id_unique():
factories.UserFactory(id=user.id)
def test_models_users_send_mail_main_existing():
"""The "email_user' method should send mail to the user's email address."""
user = factories.UserFactory()
with mock.patch("django.core.mail.send_mail") as mock_send:
user.email_user("my subject", "my message")
mock_send.assert_called_once_with("my subject", "my message", None, [user.email])
def test_models_users_send_mail_main_missing():
"""The "email_user' method should fail if the user has no email address."""
user = factories.UserFactory(email=None)
with pytest.raises(ValueError) as excinfo:
user.email_user("my subject", "my message")
assert str(excinfo.value) == "User has no email address."
@pytest.mark.parametrize(
"sub,is_valid",
[

View File

@@ -1,93 +0,0 @@
"""Test Converter orchestration services."""
from unittest.mock import MagicMock, patch
from core.services import mime_types
from core.services.converter_services import Converter
@patch("core.services.converter_services.DocSpecConverter")
@patch("core.services.converter_services.YdocConverter")
def test_converter_docx_to_yjs_orchestration(mock_ydoc_class, mock_docspec_class):
"""Test that DOCX to YJS conversion uses both DocSpec and Ydoc converters."""
# Setup mocks
mock_docspec = MagicMock()
mock_ydoc = MagicMock()
mock_docspec_class.return_value = mock_docspec
mock_ydoc_class.return_value = mock_ydoc
# Mock the conversion chain: DOCX -> BlockNote -> YJS
blocknote_data = b'[{"type": "paragraph", "content": "test"}]'
yjs_data = "base64encodedyjs"
mock_docspec.convert.return_value = blocknote_data
mock_ydoc.convert.return_value = yjs_data
# Execute conversion
converter = Converter()
docx_data = b"fake docx data"
result = converter.convert(docx_data, mime_types.DOCX, mime_types.YJS)
# Verify the orchestration
mock_docspec.convert.assert_called_once_with(
docx_data, mime_types.DOCX, mime_types.BLOCKNOTE
)
mock_ydoc.convert.assert_called_once_with(
blocknote_data, mime_types.BLOCKNOTE, mime_types.YJS
)
assert result == yjs_data
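# A rough sketch of the routing this test pins down, inferred from the mocks
# rather than taken from the actual Converter implementation:
#
#     def convert(self, data, content_type, accept):
#         if content_type == mime_types.DOCX and accept == mime_types.YJS:
#             blocknote = self.docspec.convert(data, content_type, mime_types.BLOCKNOTE)
#             return self.ydoc.convert(blocknote, mime_types.BLOCKNOTE, accept)
#         return self.ydoc.convert(data, content_type, accept)
#
# self.docspec / self.ydoc are hypothetical attribute names standing in for
# the DocSpecConverter() and YdocConverter() instances patched above.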
@patch("core.services.converter_services.YdocConverter")
def test_converter_markdown_to_yjs_delegation(mock_ydoc_class):
"""Test that Markdown to YJS conversion is delegated to YdocConverter."""
mock_ydoc = MagicMock()
mock_ydoc_class.return_value = mock_ydoc
yjs_data = "base64encodedyjs"
mock_ydoc.convert.return_value = yjs_data
converter = Converter()
markdown_data = "# Test Document"
result = converter.convert(markdown_data, mime_types.MARKDOWN, mime_types.YJS)
mock_ydoc.convert.assert_called_once_with(
markdown_data, mime_types.MARKDOWN, mime_types.YJS
)
assert result == yjs_data
@patch("core.services.converter_services.YdocConverter")
def test_converter_yjs_to_html_delegation(mock_ydoc_class):
"""Test that YJS to HTML conversion is delegated to YdocConverter."""
mock_ydoc = MagicMock()
mock_ydoc_class.return_value = mock_ydoc
html_data = "<p>Test Document</p>"
mock_ydoc.convert.return_value = html_data
converter = Converter()
yjs_data = b"yjs binary data"
result = converter.convert(yjs_data, mime_types.YJS, mime_types.HTML)
mock_ydoc.convert.assert_called_once_with(yjs_data, mime_types.YJS, mime_types.HTML)
assert result == html_data
@patch("core.services.converter_services.YdocConverter")
def test_converter_blocknote_to_yjs_delegation(mock_ydoc_class):
"""Test that BlockNote to YJS conversion is delegated to YdocConverter."""
mock_ydoc = MagicMock()
mock_ydoc_class.return_value = mock_ydoc
yjs_data = "base64encodedyjs"
mock_ydoc.convert.return_value = yjs_data
converter = Converter()
blocknote_data = b'[{"type": "paragraph"}]'
result = converter.convert(blocknote_data, mime_types.BLOCKNOTE, mime_types.YJS)
mock_ydoc.convert.assert_called_once_with(
blocknote_data, mime_types.BLOCKNOTE, mime_types.YJS
)
assert result == yjs_data

View File

@@ -6,7 +6,6 @@ from unittest.mock import MagicMock, patch
import pytest
import requests
from core.services import mime_types
from core.services.converter_services import (
ServiceUnavailableError,
ValidationError,
@@ -22,9 +21,9 @@ def test_auth_header(settings):
def test_convert_empty_text():
"""Should raise ValidationError when data is empty."""
"""Should raise ValidationError when text is empty."""
converter = YdocConverter()
with pytest.raises(ValidationError, match="Input data cannot be empty"):
with pytest.raises(ValidationError, match="Input text cannot be empty"):
converter.convert("")
@@ -37,7 +36,7 @@ def test_convert_service_unavailable(mock_post):
with pytest.raises(
ServiceUnavailableError,
match="Failed to connect to YDoc conversion service",
match="Failed to connect to conversion service",
):
converter.convert("test text")
@@ -53,7 +52,7 @@ def test_convert_http_error(mock_post):
with pytest.raises(
ServiceUnavailableError,
match="Failed to connect to YDoc conversion service",
match="Failed to connect to conversion service",
):
converter.convert("test text")
@@ -84,8 +83,8 @@ def test_convert_full_integration(mock_post, settings):
data="test markdown",
headers={
"Authorization": "Bearer test-key",
"Content-Type": mime_types.MARKDOWN,
"Accept": mime_types.YJS,
"Content-Type": "text/markdown",
"Accept": "application/vnd.yjs.doc",
},
timeout=5,
verify=False,
@@ -109,7 +108,9 @@ def test_convert_full_integration_with_specific_headers(mock_post, settings):
mock_response.raise_for_status.return_value = None
mock_post.return_value = mock_response
result = converter.convert(b"test_content", mime_types.YJS, mime_types.MARKDOWN)
result = converter.convert(
b"test_content", "application/vnd.yjs.doc", "text/markdown"
)
assert result == expected_response
mock_post.assert_called_once_with(
@@ -117,8 +118,8 @@ def test_convert_full_integration_with_specific_headers(mock_post, settings):
data=b"test_content",
headers={
"Authorization": "Bearer test-key",
"Content-Type": mime_types.YJS,
"Accept": mime_types.MARKDOWN,
"Content-Type": "application/vnd.yjs.doc",
"Accept": "text/markdown",
},
timeout=5,
verify=False,
@@ -134,7 +135,7 @@ def test_convert_timeout(mock_post):
with pytest.raises(
ServiceUnavailableError,
match="Failed to connect to YDoc conversion service",
match="Failed to connect to conversion service",
):
converter.convert("test text")
@@ -143,5 +144,5 @@ def test_convert_none_input():
"""Should raise ValidationError when input is None."""
converter = YdocConverter()
with pytest.raises(ValidationError, match="Input data cannot be empty"):
with pytest.raises(ValidationError, match="Input text cannot be empty"):
converter.convert(None)

View File

@@ -1,117 +0,0 @@
"""Test DocSpec converter services."""
from unittest.mock import MagicMock, patch
import pytest
import requests
from core.services import mime_types
from core.services.converter_services import (
DocSpecConverter,
ServiceUnavailableError,
ValidationError,
)
def test_docspec_convert_empty_data():
"""Should raise ValidationError when data is empty."""
converter = DocSpecConverter()
with pytest.raises(ValidationError, match="Input data cannot be empty"):
converter.convert("", mime_types.DOCX, mime_types.BLOCKNOTE)
def test_docspec_convert_none_input():
"""Should raise ValidationError when input is None."""
converter = DocSpecConverter()
with pytest.raises(ValidationError, match="Input data cannot be empty"):
converter.convert(None, mime_types.DOCX, mime_types.BLOCKNOTE)
def test_docspec_convert_unsupported_content_type():
"""Should raise ValidationError when content type is not DOCX."""
converter = DocSpecConverter()
with pytest.raises(
ValidationError, match="Conversion from text/plain to .* is not supported"
):
converter.convert(b"test data", "text/plain", mime_types.BLOCKNOTE)
def test_docspec_convert_unsupported_accept():
"""Should raise ValidationError when accept type is not BLOCKNOTE."""
converter = DocSpecConverter()
with pytest.raises(
ValidationError,
match=f"Conversion from {mime_types.DOCX} to {mime_types.YJS} is not supported",
):
converter.convert(b"test data", mime_types.DOCX, mime_types.YJS)
@patch("requests.post")
def test_docspec_convert_service_unavailable(mock_post):
"""Should raise ServiceUnavailableError when service is unavailable."""
converter = DocSpecConverter()
mock_post.side_effect = requests.RequestException("Connection error")
with pytest.raises(
ServiceUnavailableError,
match="Failed to connect to DocSpec conversion service",
):
converter.convert(b"test data", mime_types.DOCX, mime_types.BLOCKNOTE)
@patch("requests.post")
def test_docspec_convert_http_error(mock_post):
"""Should raise ServiceUnavailableError when HTTP error occurs."""
converter = DocSpecConverter()
mock_response = MagicMock()
mock_response.raise_for_status.side_effect = requests.HTTPError("HTTP Error")
mock_post.return_value = mock_response
with pytest.raises(
ServiceUnavailableError,
match="Failed to connect to DocSpec conversion service",
):
converter.convert(b"test data", mime_types.DOCX, mime_types.BLOCKNOTE)
@patch("requests.post")
def test_docspec_convert_timeout(mock_post):
"""Should raise ServiceUnavailableError when request times out."""
converter = DocSpecConverter()
mock_post.side_effect = requests.Timeout("Request timed out")
with pytest.raises(
ServiceUnavailableError,
match="Failed to connect to DocSpec conversion service",
):
converter.convert(b"test data", mime_types.DOCX, mime_types.BLOCKNOTE)
@patch("requests.post")
def test_docspec_convert_success(mock_post, settings):
"""Test successful DOCX to BlockNote conversion."""
settings.DOCSPEC_API_URL = "http://docspec.test/convert"
settings.CONVERSION_API_TIMEOUT = 5
settings.CONVERSION_API_SECURE = False
converter = DocSpecConverter()
expected_content = b'[{"type": "paragraph", "content": "test"}]'
mock_response = MagicMock()
mock_response.content = expected_content
mock_response.raise_for_status.return_value = None
mock_post.return_value = mock_response
docx_data = b"fake docx binary data"
result = converter.convert(docx_data, mime_types.DOCX, mime_types.BLOCKNOTE)
assert result == expected_content
# Verify the request was made correctly
mock_post.assert_called_once_with(
"http://docspec.test/convert",
headers={"Accept": mime_types.BLOCKNOTE},
files={"file": ("document.docx", docx_data, mime_types.DOCX)},
timeout=5,
verify=False,
)

View File

@@ -1,635 +0,0 @@
"""Tests for Documents search indexers"""
from functools import partial
from json import dumps as json_dumps
from unittest.mock import patch
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import ImproperlyConfigured
from django.utils.module_loading import import_string
import pytest
import responses
from requests import HTTPError
from core import factories, models, utils
from core.services.search_indexers import (
BaseDocumentIndexer,
SearchIndexer,
get_document_indexer,
get_visited_document_ids_of,
)
pytestmark = pytest.mark.django_db
class FakeDocumentIndexer(BaseDocumentIndexer):
"""Fake indexer for test purpose"""
def serialize_document(self, document, accesses):
return {}
def push(self, data):
pass
def search_query(self, data, token):
return {}
def test_services_search_indexer_class_invalid(indexer_settings):
"""
Should return None if SEARCH_INDEXER_CLASS cannot be imported.
"""
indexer_settings.SEARCH_INDEXER_CLASS = "unknown.Unknown"
assert get_document_indexer() is None
def test_services_search_indexer_class(indexer_settings):
"""
Import indexer class defined in setting SEARCH_INDEXER_CLASS.
"""
indexer_settings.SEARCH_INDEXER_CLASS = (
"core.tests.test_services_search_indexers.FakeDocumentIndexer"
)
assert isinstance(
get_document_indexer(),
import_string("core.tests.test_services_search_indexers.FakeDocumentIndexer"),
)
def test_services_search_indexer_is_configured(indexer_settings):
"""
Should return an indexer only when the indexer class and other configuration settings
are valid.
"""
indexer_settings.SEARCH_INDEXER_CLASS = None
# None
get_document_indexer.cache_clear()
assert not get_document_indexer()
# Empty
indexer_settings.SEARCH_INDEXER_CLASS = ""
get_document_indexer.cache_clear()
assert not get_document_indexer()
# Valid class
indexer_settings.SEARCH_INDEXER_CLASS = (
"core.services.search_indexers.SearchIndexer"
)
get_document_indexer.cache_clear()
assert get_document_indexer() is not None
indexer_settings.SEARCH_INDEXER_URL = ""
# Invalid url
get_document_indexer.cache_clear()
assert not get_document_indexer()
def test_services_search_indexer_url_is_none(indexer_settings):
"""
Indexer should raise ImproperlyConfigured if SEARCH_INDEXER_URL is None.
"""
indexer_settings.SEARCH_INDEXER_URL = None
with pytest.raises(ImproperlyConfigured) as exc_info:
SearchIndexer()
assert "SEARCH_INDEXER_URL must be set in Django settings." in str(exc_info.value)
def test_services_search_indexer_url_is_empty(indexer_settings):
"""
Indexer should raise ImproperlyConfigured if SEARCH_INDEXER_URL is an empty string.
"""
indexer_settings.SEARCH_INDEXER_URL = ""
with pytest.raises(ImproperlyConfigured) as exc_info:
SearchIndexer()
assert "SEARCH_INDEXER_URL must be set in Django settings." in str(exc_info.value)
def test_services_search_indexer_secret_is_none(indexer_settings):
"""
Indexer should raise ImproperlyConfigured if SEARCH_INDEXER_SECRET is None.
"""
indexer_settings.SEARCH_INDEXER_SECRET = None
with pytest.raises(ImproperlyConfigured) as exc_info:
SearchIndexer()
assert "SEARCH_INDEXER_SECRET must be set in Django settings." in str(
exc_info.value
)
def test_services_search_indexer_secret_is_empty(indexer_settings):
"""
Indexer should raise ImproperlyConfigured if SEARCH_INDEXER_SECRET is an empty string.
"""
indexer_settings.SEARCH_INDEXER_SECRET = ""
with pytest.raises(ImproperlyConfigured) as exc_info:
SearchIndexer()
assert "SEARCH_INDEXER_SECRET must be set in Django settings." in str(
exc_info.value
)
def test_services_search_endpoint_is_none(indexer_settings):
"""
Indexer should raise ImproperlyConfigured if SEARCH_INDEXER_QUERY_URL is None.
"""
indexer_settings.SEARCH_INDEXER_QUERY_URL = None
with pytest.raises(ImproperlyConfigured) as exc_info:
SearchIndexer()
assert "SEARCH_INDEXER_QUERY_URL must be set in Django settings." in str(
exc_info.value
)
def test_services_search_endpoint_is_empty(indexer_settings):
"""
Indexer should raise ImproperlyConfigured if SEARCH_INDEXER_QUERY_URL is empty.
"""
indexer_settings.SEARCH_INDEXER_QUERY_URL = ""
with pytest.raises(ImproperlyConfigured) as exc_info:
SearchIndexer()
assert "SEARCH_INDEXER_QUERY_URL must be set in Django settings." in str(
exc_info.value
)
@pytest.mark.usefixtures("indexer_settings")
def test_services_search_indexers_serialize_document_returns_expected_json():
"""
It should serialize documents with correct metadata and access control.
"""
user_a, user_b = factories.UserFactory.create_batch(2)
document = factories.DocumentFactory()
factories.DocumentFactory(parent=document)
factories.UserDocumentAccessFactory(document=document, user=user_a)
factories.UserDocumentAccessFactory(document=document, user=user_b)
factories.TeamDocumentAccessFactory(document=document, team="team1")
factories.TeamDocumentAccessFactory(document=document, team="team2")
accesses = {
document.path: {
"users": {str(user_a.sub), str(user_b.sub)},
"teams": {"team1", "team2"},
}
}
indexer = SearchIndexer()
result = indexer.serialize_document(document, accesses)
assert set(result.pop("users")) == {str(user_a.sub), str(user_b.sub)}
assert set(result.pop("groups")) == {"team1", "team2"}
assert result == {
"id": str(document.id),
"title": document.title,
"depth": 1,
"path": document.path,
"numchild": 1,
"content": utils.base64_yjs_to_text(document.content),
"created_at": document.created_at.isoformat(),
"updated_at": document.updated_at.isoformat(),
"reach": document.link_reach,
"size": 13,
"is_active": True,
}
@pytest.mark.usefixtures("indexer_settings")
def test_services_search_indexers_serialize_document_deleted():
"""Deleted documents are marked as just in the serialized json."""
parent = factories.DocumentFactory()
document = factories.DocumentFactory(parent=parent)
parent.soft_delete()
document.refresh_from_db()
indexer = SearchIndexer()
result = indexer.serialize_document(document, {})
assert result["is_active"] is False
@pytest.mark.usefixtures("indexer_settings")
def test_services_search_indexers_serialize_document_empty():
"""Empty documents returns empty content in the serialized json."""
document = factories.DocumentFactory(content="", title=None)
indexer = SearchIndexer()
result = indexer.serialize_document(document, {})
assert result["content"] == ""
assert result["title"] == ""
@responses.activate
def test_services_search_indexers_index_errors(indexer_settings):
"""
Document indexing should raise HTTPError when the Find API responds with an HTTP error.
"""
factories.DocumentFactory()
indexer_settings.SEARCH_INDEXER_URL = "http://app-find/api/v1.0/documents/index/"
responses.add(
responses.POST,
"http://app-find/api/v1.0/documents/index/",
status=401,
body=json_dumps({"message": "Authentication failed."}),
)
with pytest.raises(HTTPError):
SearchIndexer().index()
@patch.object(SearchIndexer, "push")
def test_services_search_indexers_batches_pass_only_batch_accesses(
mock_push, indexer_settings
):
"""
Documents indexing should be processed in batches,
and only the access data relevant to each batch should be used.
"""
indexer_settings.SEARCH_INDEXER_BATCH_SIZE = 2
documents = factories.DocumentFactory.create_batch(5)
# Attach a single user access to each document
expected_user_subs = {}
for document in documents:
access = factories.UserDocumentAccessFactory(document=document)
expected_user_subs[str(document.id)] = str(access.user.sub)
assert SearchIndexer().index() == 5
# Should be 3 batches: 2 + 2 + 1
assert mock_push.call_count == 3
seen_doc_ids = set()
for call in mock_push.call_args_list:
batch = call.args[0]
assert isinstance(batch, list)
for doc_json in batch:
doc_id = doc_json["id"]
seen_doc_ids.add(doc_id)
# Only one user expected per document
assert doc_json["users"] == [expected_user_subs[doc_id]]
assert doc_json["groups"] == []
# Make sure all 5 documents were indexed
assert seen_doc_ids == {str(d.id) for d in documents}
@patch.object(SearchIndexer, "push")
@pytest.mark.usefixtures("indexer_settings")
def test_services_search_indexers_batch_size_argument(mock_push):
"""
Documents indexing should be processed in batches;
the batch_size argument overrides SEARCH_INDEXER_BATCH_SIZE.
"""
documents = factories.DocumentFactory.create_batch(5)
# Attach a single user access to each document
expected_user_subs = {}
for document in documents:
access = factories.UserDocumentAccessFactory(document=document)
expected_user_subs[str(document.id)] = str(access.user.sub)
assert SearchIndexer().index(batch_size=2) == 5
# Should be 3 batches: 2 + 2 + 1
assert mock_push.call_count == 3
seen_doc_ids = set()
for call in mock_push.call_args_list:
batch = call.args[0]
assert isinstance(batch, list)
for doc_json in batch:
doc_id = doc_json["id"]
seen_doc_ids.add(doc_id)
# Only one user expected per document
assert doc_json["users"] == [expected_user_subs[doc_id]]
assert doc_json["groups"] == []
# Make sure all 5 documents were indexed
assert seen_doc_ids == {str(d.id) for d in documents}
@patch.object(SearchIndexer, "push")
@pytest.mark.usefixtures("indexer_settings")
def test_services_search_indexers_ignore_empty_documents(mock_push):
"""
Documents with both an empty title and empty content should be skipped during indexing.
"""
document = factories.DocumentFactory()
factories.DocumentFactory(content="", title="")
empty_title = factories.DocumentFactory(title="")
empty_content = factories.DocumentFactory(content="")
assert SearchIndexer().index() == 3
assert mock_push.call_count == 1
# Make sure only non-empty documents are indexed
results = {doc["id"] for doc in mock_push.call_args[0][0]}
assert results == {
str(d.id)
for d in (
document,
empty_content,
empty_title,
)
}
@patch.object(SearchIndexer, "push")
def test_services_search_indexers_skip_empty_batches(mock_push, indexer_settings):
"""
Indexing batches that contain only empty documents should be skipped.
"""
indexer_settings.SEARCH_INDEXER_BATCH_SIZE = 2
document = factories.DocumentFactory()
# Only empty docs
factories.DocumentFactory.create_batch(5, content="", title="")
assert SearchIndexer().index() == 1
assert mock_push.call_count == 1
results = [doc["id"] for doc in mock_push.call_args[0][0]]
assert results == [str(document.id)]
@patch.object(SearchIndexer, "push")
@pytest.mark.usefixtures("indexer_settings")
def test_services_search_indexers_ancestors_link_reach(mock_push):
"""Document accesses and reach should take into account ancestors link reaches."""
great_grand_parent = factories.DocumentFactory(link_reach="restricted")
grand_parent = factories.DocumentFactory(
parent=great_grand_parent, link_reach="authenticated"
)
parent = factories.DocumentFactory(parent=grand_parent, link_reach="public")
document = factories.DocumentFactory(parent=parent, link_reach="restricted")
assert SearchIndexer().index() == 4
results = {doc["id"]: doc for doc in mock_push.call_args[0][0]}
assert len(results) == 4
assert results[str(great_grand_parent.id)]["reach"] == "restricted"
assert results[str(grand_parent.id)]["reach"] == "authenticated"
assert results[str(parent.id)]["reach"] == "public"
assert results[str(document.id)]["reach"] == "public"
@patch.object(SearchIndexer, "push")
@pytest.mark.usefixtures("indexer_settings")
def test_services_search_indexers_ancestors_users(mock_push):
"""Document accesses and reach should include users from ancestors."""
user_gp, user_p, user_d = factories.UserFactory.create_batch(3)
grand_parent = factories.DocumentFactory(users=[user_gp])
parent = factories.DocumentFactory(parent=grand_parent, users=[user_p])
document = factories.DocumentFactory(parent=parent, users=[user_d])
assert SearchIndexer().index() == 3
results = {doc["id"]: doc for doc in mock_push.call_args[0][0]}
assert len(results) == 3
assert results[str(grand_parent.id)]["users"] == [str(user_gp.sub)]
assert set(results[str(parent.id)]["users"]) == {str(user_gp.sub), str(user_p.sub)}
assert set(results[str(document.id)]["users"]) == {
str(user_gp.sub),
str(user_p.sub),
str(user_d.sub),
}
@patch.object(SearchIndexer, "push")
@pytest.mark.usefixtures("indexer_settings")
def test_services_search_indexers_ancestors_teams(mock_push):
"""Document accesses and reach should include teams from ancestors."""
grand_parent = factories.DocumentFactory(teams=["team_gp"])
parent = factories.DocumentFactory(parent=grand_parent, teams=["team_p"])
document = factories.DocumentFactory(parent=parent, teams=["team_d"])
assert SearchIndexer().index() == 3
results = {doc["id"]: doc for doc in mock_push.call_args[0][0]}
assert len(results) == 3
assert results[str(grand_parent.id)]["groups"] == ["team_gp"]
assert set(results[str(parent.id)]["groups"]) == {"team_gp", "team_p"}
assert set(results[str(document.id)]["groups"]) == {"team_gp", "team_p", "team_d"}
@patch("requests.post")
def test_push_uses_correct_url_and_data(mock_post, indexer_settings):
"""
push() should call requests.post with the correct URL from settings,
a timeout of 10 seconds, and the data as JSON.
"""
indexer_settings.SEARCH_INDEXER_URL = "http://example.com/index"
indexer = SearchIndexer()
sample_data = [{"id": "123", "title": "Test"}]
mock_response = mock_post.return_value
mock_response.raise_for_status.return_value = None # No error
indexer.push(sample_data)
mock_post.assert_called_once()
args, kwargs = mock_post.call_args
assert args[0] == indexer_settings.SEARCH_INDEXER_URL
assert kwargs.get("json") == sample_data
assert kwargs.get("timeout") == 10
def test_get_visited_document_ids_of():
"""
get_visited_document_ids_of() returns the ids of the documents viewed
by the user but for which the user has no specific access configured (e.g. public documents).
"""
user = factories.UserFactory()
other = factories.UserFactory()
anonymous = AnonymousUser()
queryset = models.Document.objects.all()
assert not get_visited_document_ids_of(queryset, anonymous)
assert not get_visited_document_ids_of(queryset, user)
doc1, doc2, _ = factories.DocumentFactory.create_batch(3)
create_link = partial(models.LinkTrace.objects.create, user=user, is_masked=False)
create_link(document=doc1)
create_link(document=doc2)
# The third document is not visited
assert sorted(get_visited_document_ids_of(queryset, user)) == sorted(
[str(doc1.pk), str(doc2.pk)]
)
factories.UserDocumentAccessFactory(user=other, document=doc1)
factories.UserDocumentAccessFactory(user=user, document=doc2)
# The second document now has an access for the user
assert get_visited_document_ids_of(queryset, user) == [str(doc1.pk)]
@pytest.mark.usefixtures("indexer_settings")
def test_get_visited_document_ids_of_deleted():
"""
get_visited_document_ids_of() returns the ids of the documents viewed
by the user if they are not deleted.
"""
user = factories.UserFactory()
anonymous = AnonymousUser()
queryset = models.Document.objects.all()
assert not get_visited_document_ids_of(queryset, anonymous)
assert not get_visited_document_ids_of(queryset, user)
doc = factories.DocumentFactory()
doc_deleted = factories.DocumentFactory()
doc_ancestor_deleted = factories.DocumentFactory(parent=doc_deleted)
create_link = partial(models.LinkTrace.objects.create, user=user, is_masked=False)
create_link(document=doc)
create_link(document=doc_deleted)
create_link(document=doc_ancestor_deleted)
# All three documents are visited
assert sorted(get_visited_document_ids_of(queryset, user)) == sorted(
[str(doc.pk), str(doc_deleted.pk), str(doc_ancestor_deleted.pk)]
)
doc_deleted.soft_delete()
# Only the first document is not deleted
assert get_visited_document_ids_of(queryset, user) == [str(doc.pk)]
@responses.activate
def test_services_search_indexers_search_errors(indexer_settings):
"""
Document search should raise HTTPError when the Find API responds with an HTTP error.
"""
factories.DocumentFactory()
indexer_settings.SEARCH_INDEXER_QUERY_URL = (
"http://app-find/api/v1.0/documents/search/"
)
responses.add(
responses.POST,
"http://app-find/api/v1.0/documents/search/",
status=401,
body=json_dumps({"message": "Authentication failed."}),
)
with pytest.raises(HTTPError):
SearchIndexer().search("alpha", token="mytoken")
@patch("requests.post")
def test_services_search_indexers_search(mock_post, indexer_settings):
"""
search() should call requests.post to SEARCH_INDEXER_QUERY_URL with the
document ids from linktraces.
"""
user = factories.UserFactory()
indexer = SearchIndexer()
mock_response = mock_post.return_value
mock_response.raise_for_status.return_value = None # No error
doc1, doc2, _ = factories.DocumentFactory.create_batch(3)
create_link = partial(models.LinkTrace.objects.create, user=user, is_masked=False)
create_link(document=doc1)
create_link(document=doc2)
visited = get_visited_document_ids_of(models.Document.objects.all(), user)
indexer.search("alpha", visited=visited, token="mytoken")
args, kwargs = mock_post.call_args
assert args[0] == indexer_settings.SEARCH_INDEXER_QUERY_URL
query_data = kwargs.get("json")
assert query_data["q"] == "alpha"
assert sorted(query_data["visited"]) == sorted([str(doc1.pk), str(doc2.pk)])
assert query_data["services"] == ["docs"]
assert query_data["nb_results"] == 50
assert query_data["order_by"] == "updated_at"
assert query_data["order_direction"] == "desc"
assert kwargs.get("headers") == {"Authorization": "Bearer mytoken"}
assert kwargs.get("timeout") == 10
@patch("requests.post")
def test_services_search_indexers_search_nb_results(mock_post, indexer_settings):
"""
Find API call should have nb_results == SEARCH_INDEXER_QUERY_LIMIT
or the given nb_results argument.
"""
indexer_settings.SEARCH_INDEXER_QUERY_LIMIT = 25
user = factories.UserFactory()
indexer = SearchIndexer()
mock_response = mock_post.return_value
mock_response.raise_for_status.return_value = None # No error
doc1, doc2, _ = factories.DocumentFactory.create_batch(3)
create_link = partial(models.LinkTrace.objects.create, user=user, is_masked=False)
create_link(document=doc1)
create_link(document=doc2)
visited = get_visited_document_ids_of(models.Document.objects.all(), user)
indexer.search("alpha", visited=visited, token="mytoken")
args, kwargs = mock_post.call_args
assert args[0] == indexer_settings.SEARCH_INDEXER_QUERY_URL
assert kwargs.get("json")["nb_results"] == 25
# The argument overrides the setting value
indexer.search("alpha", visited=visited, token="mytoken", nb_results=109)
args, kwargs = mock_post.call_args
assert args[0] == indexer_settings.SEARCH_INDEXER_QUERY_URL
assert kwargs.get("json")["nb_results"] == 109

View File

@@ -3,14 +3,9 @@
import base64
import uuid
from django.core.cache import cache
import pycrdt
import pytest
from core import factories, utils
pytestmark = pytest.mark.django_db
from core import utils
# This base64 string is an example of what is saved in the database.
# This base64 is generated from the blocknote editor, it contains
@@ -80,128 +75,3 @@ def test_utils_extract_attachments():
base64_string = base64.b64encode(update).decode("utf-8")
# image_key2 is missing the "/media/" part and shouldn't get extracted
assert utils.extract_attachments(base64_string) == [image_key1, image_key3]
def test_utils_get_ancestor_to_descendants_map_single_path():
"""Test ancestor mapping of a single path."""
paths = ["000100020005"]
result = utils.get_ancestor_to_descendants_map(paths, steplen=4)
assert result == {
"0001": {"000100020005"},
"00010002": {"000100020005"},
"000100020005": {"000100020005"},
}
def test_utils_get_ancestor_to_descendants_map_multiple_paths():
"""Test ancestor mapping of multiple paths with shared prefixes."""
paths = ["000100020005", "00010003"]
result = utils.get_ancestor_to_descendants_map(paths, steplen=4)
assert result == {
"0001": {"000100020005", "00010003"},
"00010002": {"000100020005"},
"000100020005": {"000100020005"},
"00010003": {"00010003"},
}
def test_utils_users_sharing_documents_with_cache_miss():
"""Test cache miss: should query database and cache result."""
user1 = factories.UserFactory()
user2 = factories.UserFactory()
user3 = factories.UserFactory()
doc1 = factories.DocumentFactory()
doc2 = factories.DocumentFactory()
factories.UserDocumentAccessFactory(user=user1, document=doc1)
factories.UserDocumentAccessFactory(user=user2, document=doc1)
factories.UserDocumentAccessFactory(user=user3, document=doc2)
cache_key = utils.get_users_sharing_documents_with_cache_key(user1)
cache.delete(cache_key)
result = utils.users_sharing_documents_with(user1)
assert user2.id in result
cached_data = cache.get(cache_key)
assert cached_data == result
def test_utils_users_sharing_documents_with_cache_hit():
"""Test cache hit: should return cached data without querying database."""
user1 = factories.UserFactory()
user2 = factories.UserFactory()
doc1 = factories.DocumentFactory()
factories.UserDocumentAccessFactory(user=user1, document=doc1)
factories.UserDocumentAccessFactory(user=user2, document=doc1)
cache_key = utils.get_users_sharing_documents_with_cache_key(user1)
test_cached_data = {user2.id: "2025-02-10"}
cache.set(cache_key, test_cached_data, 86400)
result = utils.users_sharing_documents_with(user1)
assert result == test_cached_data
def test_utils_users_sharing_documents_with_cache_invalidation_on_create():
"""Test that cache is invalidated when a DocumentAccess is created."""
# Create test data
user1 = factories.UserFactory()
user2 = factories.UserFactory()
doc1 = factories.DocumentFactory()
# Pre-populate cache
cache_key = utils.get_users_sharing_documents_with_cache_key(user1)
cache.set(cache_key, {}, 86400)
# Verify cache exists
assert cache.get(cache_key) is not None
# Create new DocumentAccess
factories.UserDocumentAccessFactory(user=user2, document=doc1)
# Cache for user1 should still exist (the access was created for user2 only).
# If an access is created for user1 themselves, the cache should be cleared.
cache.set(cache_key, {"test": "data"}, 86400)
factories.UserDocumentAccessFactory(user=user1, document=doc1)
# Cache for user1 should be invalidated (cleared)
assert cache.get(cache_key) is None
def test_utils_users_sharing_documents_with_cache_invalidation_on_delete():
"""Test that cache is invalidated when a DocumentAccess is deleted."""
user1 = factories.UserFactory()
user2 = factories.UserFactory()
doc1 = factories.DocumentFactory()
doc_access = factories.UserDocumentAccessFactory(user=user1, document=doc1)
cache_key = utils.get_users_sharing_documents_with_cache_key(user1)
cache.set(cache_key, {user2.id: "2025-02-10"}, 86400)
assert cache.get(cache_key) is not None
doc_access.delete()
assert cache.get(cache_key) is None
def test_utils_users_sharing_documents_with_empty_result():
"""Test when user is not sharing any documents."""
user1 = factories.UserFactory()
cache_key = utils.get_users_sharing_documents_with_cache_key(user1)
cache.delete(cache_key)
result = utils.users_sharing_documents_with(user1)
assert result == {}
cached_data = cache.get(cache_key)
assert cached_data == {}

View File

@@ -1,62 +0,0 @@
"""Tests for utils.users_sharing_documents_with function."""
from django.utils import timezone
import pytest
from core import factories, utils
pytestmark = pytest.mark.django_db
def test_utils_users_sharing_documents_with():
"""Test users_sharing_documents_with function."""
user = factories.UserFactory(
email="martin.bernard@anct.gouv.fr", full_name="Martin Bernard"
)
pierre_1 = factories.UserFactory(
email="pierre.dupont@beta.gouv.fr", full_name="Pierre Dupont"
)
pierre_2 = factories.UserFactory(
email="pierre.durand@impots.gouv.fr", full_name="Pierre Durand"
)
now = timezone.now()
yesterday = now - timezone.timedelta(days=1)
last_week = now - timezone.timedelta(days=7)
last_month = now - timezone.timedelta(days=30)
document_1 = factories.DocumentFactory(creator=user)
document_2 = factories.DocumentFactory(creator=user)
document_3 = factories.DocumentFactory(creator=user)
factories.UserDocumentAccessFactory(user=user, document=document_1)
factories.UserDocumentAccessFactory(user=user, document=document_2)
factories.UserDocumentAccessFactory(user=user, document=document_3)
# The factory cannot set the created_at directly, so we force it after creation
doc_1_pierre_1 = factories.UserDocumentAccessFactory(
user=pierre_1, document=document_1, created_at=last_week
)
doc_1_pierre_1.created_at = last_week
doc_1_pierre_1.save()
doc_2_pierre_2 = factories.UserDocumentAccessFactory(
user=pierre_2, document=document_2
)
doc_2_pierre_2.created_at = last_month
doc_2_pierre_2.save()
doc_3_pierre_2 = factories.UserDocumentAccessFactory(
user=pierre_2, document=document_3
)
doc_3_pierre_2.created_at = yesterday
doc_3_pierre_2.save()
shared_map = utils.users_sharing_documents_with(user)
assert shared_map == {
pierre_1.id: last_week,
pierre_2.id: yesterday,
}

View File

@@ -10,6 +10,7 @@ from core.api import viewsets
# - Main endpoints
router = DefaultRouter()
router.register("templates", viewsets.TemplateViewSet, basename="templates")
router.register("documents", viewsets.DocumentViewSet, basename="documents")
router.register("users", viewsets.UserViewSet, basename="users")
@@ -59,10 +60,6 @@ urlpatterns = [
r"^documents/(?P<resource_id>[0-9a-z-]*)/threads/(?P<thread_id>[0-9a-z-]*)/",
include(thread_related_router.urls),
),
path(
"user-reconciliations/<str:user_type>/<uuid:confirmation_id>/",
viewsets.ReconciliationConfirmView.as_view(),
),
]
),
),

View File

@@ -1,42 +1,12 @@
"""Utils for the core app."""
import base64
import logging
import re
import time
from collections import defaultdict
from django.core.cache import cache
from django.db import models as db
from django.db.models import Subquery
import pycrdt
from bs4 import BeautifulSoup
from core import enums, models
logger = logging.getLogger(__name__)
def get_ancestor_to_descendants_map(paths, steplen):
"""
Given a list of document paths, return a mapping of ancestor_path -> set of descendant_paths.
Each path is assumed to use materialized path format with fixed-length segments.
Args:
paths (list of str): List of full document paths.
steplen (int): Length of each path segment.
Returns:
dict[str, set[str]]: Mapping from ancestor path to its descendant paths (including itself).
"""
ancestor_map = defaultdict(set)
for path in paths:
for i in range(steplen, len(path) + 1, steplen):
ancestor = path[:i]
ancestor_map[ancestor].add(path)
return ancestor_map
from core import enums
def filter_descendants(paths, root_paths, skip_sorting=False):
@@ -104,46 +74,3 @@ def extract_attachments(content):
xml_content = base64_yjs_to_xml(content)
return re.findall(enums.MEDIA_STORAGE_URL_EXTRACT, xml_content)
def get_users_sharing_documents_with_cache_key(user):
"""Generate a unique cache key for each user."""
return f"users_sharing_documents_with_{user.id}"
def users_sharing_documents_with(user):
"""
Returns a map of users sharing documents with the given user,
sorted by last shared date.
"""
start_time = time.time()
cache_key = get_users_sharing_documents_with_cache_key(user)
cached_result = cache.get(cache_key)
if cached_result is not None:
elapsed = time.time() - start_time
logger.info(
"users_sharing_documents_with cache hit for user %s (took %.3fs)",
user.id,
elapsed,
)
return cached_result
user_docs_qs = models.DocumentAccess.objects.filter(user=user).values_list(
"document_id", flat=True
)
shared_qs = (
models.DocumentAccess.objects.filter(document_id__in=Subquery(user_docs_qs))
.exclude(user=user)
.values("user")
.annotate(last_shared=db.Max("created_at"))
)
result = {item["user"]: item["last_shared"] for item in shared_qs}
cache.set(cache_key, result, 86400) # Cache for 1 day
elapsed = time.time() - start_time
logger.info(
"users_sharing_documents_with cache miss for user %s (took %.3fs)",
user.id,
elapsed,
)
return result
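A minimal usage sketch of the helper removed above, assuming the caller already has a User instance at hand (the variable name is illustrative):

    # Sketch only: returns {user_id: last_shared_datetime} for users sharing documents
    # with the given user, cached for one day per user.
    shared_map = users_sharing_documents_with(some_user)
    recent_collaborators = sorted(shared_map, key=shared_map.get, reverse=True)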

View File

@@ -0,0 +1,2 @@
<img width="200" src="http://localhost:3000/assets/logo-gouv.png" />
<br/>

View File

View File

@@ -1,27 +1,23 @@
# ruff: noqa: S311, S106
"""create_demo management command"""
import base64
import logging
import math
import random
import time
from collections import defaultdict
from uuid import uuid4
from django import db
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import pycrdt
from faker import Faker
from core import models
from demo import defaults
languages = [x for (x, y) in settings.LANGUAGES]
fake = Faker(languages)
fake = Faker()
logger = logging.getLogger("impress.commands.demo.create_demo")
@@ -31,16 +27,6 @@ def random_true_with_probability(probability):
return random.random() < probability
def get_ydoc_for_text(text):
"""Return a ydoc from plain text for demo purposes."""
ydoc = pycrdt.Doc()
paragraph = pycrdt.XmlElement("p", {}, [pycrdt.XmlText(text)])
fragment = pycrdt.XmlFragment([paragraph])
ydoc["document-store"] = fragment
update = ydoc.get_update()
return base64.b64encode(update).decode("utf-8")
class BulkQueue:
"""A utility class to create Django model instances in bulk by just pushing to a queue."""
@@ -62,7 +48,7 @@ class BulkQueue:
self.queue[objects[0]._meta.model.__name__] = [] # noqa: SLF001
def push(self, obj):
"""Add a model instance to queue so that it gets created in bulk."""
"""Add a model instance to queue to that it gets created in bulk."""
objects = self.queue[obj._meta.model.__name__] # noqa: SLF001
objects.append(obj)
if len(objects) > self.BATCH_SIZE:
@@ -141,7 +127,7 @@ def create_demo(stdout):
is_staff=False,
short_name=first_name,
full_name=f"{first_name:s} {random.choice(last_names):s}",
language=random.choice(languages),
language=random.choice(settings.LANGUAGES)[0],
)
)
queue.flush()
@@ -153,19 +139,17 @@ def create_demo(stdout):
# pylint: disable=protected-access
key = models.Document._int2str(i) # noqa: SLF001
padding = models.Document.alphabet[0] * (models.Document.steplen - len(key))
title = fake.sentence(nb_words=4)
document = models.Document(
id=uuid4(),
depth=1,
path=f"{padding}{key}",
creator_id=random.choice(users_ids),
title=title,
link_reach=models.LinkReachChoices.AUTHENTICATED
if random_true_with_probability(0.5)
else random.choice(models.LinkReachChoices.values),
queue.push(
models.Document(
depth=1,
path=f"{padding}{key}",
creator_id=random.choice(users_ids),
title=fake.sentence(nb_words=4),
link_reach=models.LinkReachChoices.AUTHENTICATED
if random_true_with_probability(0.5)
else random.choice(models.LinkReachChoices.values),
)
)
document.save_content(get_ydoc_for_text(f"Content for {title:s}"))
queue.push(document)
queue.flush()
@@ -195,7 +179,8 @@ def create_demo(stdout):
is_superuser=False,
is_active=True,
is_staff=False,
language=dev_user["language"] or random.choice(languages),
language=dev_user["language"]
or random.choice(settings.LANGUAGES)[0],
)
)
@@ -216,6 +201,29 @@ def create_demo(stdout):
queue.flush()
with Timeit(stdout, "Creating Template"):
with open(
file="demo/data/template/code.txt", mode="r", encoding="utf-8"
) as text_file:
code_data = text_file.read()
with open(
file="demo/data/template/css.txt", mode="r", encoding="utf-8"
) as text_file:
css_data = text_file.read()
queue.push(
models.Template(
id="baca9e2a-59fb-42ef-b5c6-6f6b05637111",
title="Demo Template",
description="This is the demo template",
code=code_data,
css=css_data,
is_public=True,
)
)
queue.flush()
class Command(BaseCommand):
"""A management command to create a demo database."""

View File

@@ -25,6 +25,7 @@ def test_commands_create_demo():
"""The create_demo management command should create objects as expected."""
call_command("create_demo")
assert models.Template.objects.count() == 1
assert models.User.objects.count() >= 10
assert models.Document.objects.count() >= 10
assert models.DocumentAccess.objects.count() > 10

View File

@@ -3,8 +3,8 @@
"default": {
"logo": {
"src": "/assets/icon-docs.svg",
"width": "54px",
"alt": "Docs Logo",
"style": { "width": "54px", "height": "auto" },
"withTitle": true
},
"externalLinks": [
@@ -125,38 +125,5 @@
}
}
}
},
"home": {
"with-proconnect": false,
"icon-banner": {
"src": "/assets/icon-docs.svg",
"style": {
"width": "64px",
"height": "auto"
},
"alt": ""
}
},
"header": {
"logo": {},
"icon": {
"src": "/assets/icon-docs.svg",
"style": {
"width": "32px",
"height": "auto"
},
"alt": "",
"withTitle": true
}
},
"favicon": {
"light": {
"href": "/assets/favicon-light.png",
"type": "image/png"
},
"dark": {
"href": "/assets/favicon-dark.png",
"type": "image/png"
}
}
}

Some files were not shown because too many files have changed in this diff.