mirror of
https://github.com/suitenumerique/docs.git
synced 2026-05-06 23:22:15 +02:00
Compare commits
47 Commits
buildpack
...
feat/no-we
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b533b93169 | ||
|
|
c3f81c2b62 | ||
|
|
c7261cf507 | ||
|
|
e504f43611 | ||
|
|
3ad6d0ea12 | ||
|
|
9e8a7b3502 | ||
|
|
05db9c8e51 | ||
|
|
7ed33019c2 | ||
|
|
a99c813421 | ||
|
|
a83902a0d4 | ||
|
|
080f855083 | ||
|
|
90d94f6b7a | ||
|
|
f97ab51c8e | ||
|
|
ba4f90a607 | ||
|
|
6c16e081de | ||
|
|
56a945983e | ||
|
|
4fbbead405 | ||
|
|
9a212400a0 | ||
|
|
f07fcd4c0d | ||
|
|
4fc49d5cb2 | ||
|
|
0fd16b4371 | ||
|
|
fbb2799050 | ||
|
|
afbb4b29dc | ||
|
|
db63ebd0c8 | ||
|
|
c5f018e03e | ||
|
|
1c93fbc007 | ||
|
|
d811e3c2fc | ||
|
|
fe5fda5d73 | ||
|
|
bf66265125 | ||
|
|
ce329142dc | ||
|
|
f8cff43dac | ||
|
|
f5b2c27bd8 | ||
|
|
62433ef7f1 | ||
|
|
bc0824d110 | ||
|
|
fa653c6776 | ||
|
|
d12f942d29 | ||
|
|
62f85e7d24 | ||
|
|
65cc088a17 | ||
|
|
94e99784f3 | ||
|
|
fa83955a77 | ||
|
|
5962f7aae1 | ||
|
|
dc06315566 | ||
|
|
f4ad26a8fa | ||
|
|
d952815932 | ||
|
|
cde64ed80a | ||
|
|
cfd88d0469 | ||
|
|
5e45fec296 |
@@ -34,3 +34,4 @@ db.sqlite3
|
||||
|
||||
# Frontend
|
||||
node_modules
|
||||
.next
|
||||
|
||||
23
.gitattributes
vendored
Normal file
23
.gitattributes
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
# Set the default behavior for all files
|
||||
* text=auto eol=lf
|
||||
|
||||
# Binary files (should not be modified)
|
||||
*.png binary
|
||||
*.jpg binary
|
||||
*.jpeg binary
|
||||
*.gif binary
|
||||
*.ico binary
|
||||
*.mov binary
|
||||
*.mp4 binary
|
||||
*.mp3 binary
|
||||
*.flv binary
|
||||
*.fla binary
|
||||
*.swf binary
|
||||
*.gz binary
|
||||
*.zip binary
|
||||
*.7z binary
|
||||
*.ttf binary
|
||||
*.woff binary
|
||||
*.woff2 binary
|
||||
*.eot binary
|
||||
*.pdf binary
|
||||
19
.github/PULL_REQUEST_TEMPLATE.md
vendored
19
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -1,11 +1,22 @@
|
||||
## Purpose
|
||||
|
||||
Description...
|
||||
Describe the purpose of this pull request.
|
||||
|
||||
|
||||
## Proposal
|
||||
|
||||
Description...
|
||||
- [ ] item 1...
|
||||
- [ ] item 2...
|
||||
|
||||
- [ ] item 1...
|
||||
- [ ] item 2...
|
||||
## External contributions
|
||||
|
||||
Thank you for your contribution! 🎉
|
||||
|
||||
Please ensure the following items are checked before submitting your pull request:
|
||||
- [ ] I have read and followed the [contributing guidelines](https://github.com/suitenumerique/docs/blob/main/CONTRIBUTING.md)
|
||||
- [ ] I have read and agreed to the [Code of Conduct](https://github.com/suitenumerique/docs/blob/main/CODE_OF_CONDUCT.md)
|
||||
- [ ] I have signed off my commits with `git commit --signoff` (DCO compliance)
|
||||
- [ ] I have signed my commits with my SSH or GPG key (`git commit -S`)
|
||||
- [ ] My commit messages follow the required format: `<gitmoji>(type) title description`
|
||||
- [ ] I have added a changelog entry under `## [Unreleased]` section (if noticeable change)
|
||||
- [ ] I have added corresponding tests for new features or bug fixes (if applicable)
|
||||
2
.github/workflows/crowdin_download.yml
vendored
2
.github/workflows/crowdin_download.yml
vendored
@@ -10,7 +10,7 @@ jobs:
|
||||
install-dependencies:
|
||||
uses: ./.github/workflows/dependencies.yml
|
||||
with:
|
||||
node_version: '20.x'
|
||||
node_version: '22.x'
|
||||
with-front-dependencies-installation: true
|
||||
|
||||
synchronize-with-crowdin:
|
||||
|
||||
2
.github/workflows/crowdin_upload.yml
vendored
2
.github/workflows/crowdin_upload.yml
vendored
@@ -10,7 +10,7 @@ jobs:
|
||||
install-dependencies:
|
||||
uses: ./.github/workflows/dependencies.yml
|
||||
with:
|
||||
node_version: '20.x'
|
||||
node_version: '22.x'
|
||||
with-front-dependencies-installation: true
|
||||
with-build_mails: true
|
||||
|
||||
|
||||
2
.github/workflows/dependencies.yml
vendored
2
.github/workflows/dependencies.yml
vendored
@@ -5,7 +5,7 @@ on:
|
||||
inputs:
|
||||
node_version:
|
||||
required: false
|
||||
default: '20.x'
|
||||
default: '22.x'
|
||||
type: string
|
||||
with-front-dependencies-installation:
|
||||
type: boolean
|
||||
|
||||
14
.github/workflows/impress-frontend.yml
vendored
14
.github/workflows/impress-frontend.yml
vendored
@@ -13,7 +13,7 @@ jobs:
|
||||
install-dependencies:
|
||||
uses: ./.github/workflows/dependencies.yml
|
||||
with:
|
||||
node_version: '20.x'
|
||||
node_version: '22.x'
|
||||
with-front-dependencies-installation: true
|
||||
|
||||
test-front:
|
||||
@@ -26,7 +26,7 @@ jobs:
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20.x"
|
||||
node-version: "22.x"
|
||||
|
||||
- name: Restore the frontend cache
|
||||
uses: actions/cache@v4
|
||||
@@ -48,7 +48,7 @@ jobs:
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20.x"
|
||||
node-version: "22.x"
|
||||
- name: Restore the frontend cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
@@ -70,7 +70,7 @@ jobs:
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20.x"
|
||||
node-version: "22.x"
|
||||
|
||||
- name: Restore the frontend cache
|
||||
uses: actions/cache@v4
|
||||
@@ -86,7 +86,7 @@ jobs:
|
||||
run: cd src/frontend/apps/e2e && yarn install --frozen-lockfile && yarn install-playwright chromium
|
||||
|
||||
- name: Start Docker services
|
||||
run: make bootstrap FLUSH_ARGS='--no-input' cache=
|
||||
run: make bootstrap-e2e FLUSH_ARGS='--no-input'
|
||||
|
||||
- name: Run e2e tests
|
||||
run: cd src/frontend/ && yarn e2e:test --project='chromium'
|
||||
@@ -109,7 +109,7 @@ jobs:
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20.x"
|
||||
node-version: "22.x"
|
||||
|
||||
- name: Restore the frontend cache
|
||||
uses: actions/cache@v4
|
||||
@@ -125,7 +125,7 @@ jobs:
|
||||
run: cd src/frontend/apps/e2e && yarn install --frozen-lockfile && yarn install-playwright firefox webkit chromium
|
||||
|
||||
- name: Start Docker services
|
||||
run: make bootstrap FLUSH_ARGS='--no-input' cache=
|
||||
run: make bootstrap-e2e FLUSH_ARGS='--no-input'
|
||||
|
||||
- name: Run e2e tests
|
||||
run: cd src/frontend/ && yarn e2e:test --project=firefox --project=webkit
|
||||
|
||||
3
.github/workflows/impress.yml
vendored
3
.github/workflows/impress.yml
vendored
@@ -123,6 +123,9 @@ jobs:
|
||||
# needed because the postgres container does not provide a healthcheck
|
||||
options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
|
||||
|
||||
redis:
|
||||
image: redis:5
|
||||
|
||||
env:
|
||||
DJANGO_CONFIGURATION: Test
|
||||
DJANGO_SETTINGS_MODULE: impress.settings
|
||||
|
||||
22
CHANGELOG.md
22
CHANGELOG.md
@@ -8,6 +8,28 @@ and this project adheres to
|
||||
|
||||
## [Unreleased]
|
||||
|
||||
### Added
|
||||
|
||||
- ✨(frontend) add customization for translations #857
|
||||
- 📝(project) add troubleshoot doc #1066
|
||||
- 📝(project) add system-requirement doc #1066
|
||||
- 🔧(front) configure x-frame-options to DENY in nginx conf #1084
|
||||
|
||||
### Changed
|
||||
|
||||
- 📌(yjs) stop pinning node to minor version on yjs docker image #1005
|
||||
- 🧑💻(docker) add .next to .dockerignore #1055
|
||||
- 🧑💻(docker) handle frontend development images with docker compose #1033
|
||||
- 🧑💻(docker) add y-provider config to development environment #1057
|
||||
|
||||
### Fixed
|
||||
|
||||
-🐛(frontend) table of content disappearing #982
|
||||
-🐛(frontend) fix multiple EmojiPicker #1012
|
||||
-🐛(frontend) fix meta title #1017
|
||||
-🔧(git) set LF line endings for all text files #1032
|
||||
-📝(docs) minor fixes to docs/env.md
|
||||
|
||||
## [3.3.0] - 2025-05-06
|
||||
|
||||
### Added
|
||||
|
||||
61
Makefile
61
Makefile
@@ -39,6 +39,7 @@ DOCKER_UID = $(shell id -u)
|
||||
DOCKER_GID = $(shell id -g)
|
||||
DOCKER_USER = $(DOCKER_UID):$(DOCKER_GID)
|
||||
COMPOSE = DOCKER_USER=$(DOCKER_USER) docker compose
|
||||
COMPOSE_E2E = DOCKER_USER=$(DOCKER_USER) docker compose -f compose.yml -f compose-e2e.yml
|
||||
COMPOSE_EXEC = $(COMPOSE) exec
|
||||
COMPOSE_EXEC_APP = $(COMPOSE_EXEC) app-dev
|
||||
COMPOSE_RUN = $(COMPOSE) run --rm
|
||||
@@ -74,22 +75,39 @@ create-env-files: \
|
||||
env.d/development/kc_postgresql
|
||||
.PHONY: create-env-files
|
||||
|
||||
bootstrap: ## Prepare Docker images for the project
|
||||
bootstrap: \
|
||||
pre-bootstrap: \
|
||||
data/media \
|
||||
data/static \
|
||||
create-env-files \
|
||||
build \
|
||||
create-env-files
|
||||
.PHONY: pre-bootstrap
|
||||
|
||||
post-bootstrap: \
|
||||
migrate \
|
||||
demo \
|
||||
back-i18n-compile \
|
||||
mails-install \
|
||||
mails-build \
|
||||
mails-build
|
||||
.PHONY: post-bootstrap
|
||||
|
||||
|
||||
bootstrap: ## Prepare Docker development images for the project
|
||||
bootstrap: \
|
||||
pre-bootstrap \
|
||||
build \
|
||||
post-bootstrap \
|
||||
run
|
||||
.PHONY: bootstrap
|
||||
|
||||
bootstrap-e2e: ## Prepare Docker production images to be used for e2e tests
|
||||
bootstrap-e2e: \
|
||||
pre-bootstrap \
|
||||
build-e2e \
|
||||
post-bootstrap \
|
||||
run-e2e
|
||||
.PHONY: bootstrap-e2e
|
||||
|
||||
# -- Docker/compose
|
||||
build: cache ?= --no-cache
|
||||
build: cache ?=
|
||||
build: ## build the project containers
|
||||
@$(MAKE) build-backend cache=$(cache)
|
||||
@$(MAKE) build-yjs-provider cache=$(cache)
|
||||
@@ -103,16 +121,23 @@ build-backend: ## build the app-dev container
|
||||
|
||||
build-yjs-provider: cache ?=
|
||||
build-yjs-provider: ## build the y-provider container
|
||||
@$(COMPOSE) build y-provider $(cache)
|
||||
@$(COMPOSE) build y-provider-development $(cache)
|
||||
.PHONY: build-yjs-provider
|
||||
|
||||
build-frontend: cache ?=
|
||||
build-frontend: ## build the frontend container
|
||||
@$(COMPOSE) build frontend $(cache)
|
||||
@$(COMPOSE) build frontend-development $(cache)
|
||||
.PHONY: build-frontend
|
||||
|
||||
build-e2e: cache ?=
|
||||
build-e2e: ## build the e2e container
|
||||
@$(MAKE) build-backend cache=$(cache)
|
||||
@$(COMPOSE_E2E) build frontend $(cache)
|
||||
@$(COMPOSE_E2E) build y-provider $(cache)
|
||||
.PHONY: build-e2e
|
||||
|
||||
down: ## stop and remove containers, networks, images, and volumes
|
||||
@$(COMPOSE) down
|
||||
@$(COMPOSE_E2E) down
|
||||
.PHONY: down
|
||||
|
||||
logs: ## display app-dev logs (follow mode)
|
||||
@@ -121,22 +146,30 @@ logs: ## display app-dev logs (follow mode)
|
||||
|
||||
run-backend: ## Start only the backend application and all needed services
|
||||
@$(COMPOSE) up --force-recreate -d celery-dev
|
||||
@$(COMPOSE) up --force-recreate -d y-provider
|
||||
@$(COMPOSE) up --force-recreate -d y-provider-development
|
||||
@$(COMPOSE) up --force-recreate -d nginx
|
||||
.PHONY: run-backend
|
||||
|
||||
run: ## start the wsgi (production) and development server
|
||||
run:
|
||||
@$(MAKE) run-backend
|
||||
@$(COMPOSE) up --force-recreate -d frontend
|
||||
@$(COMPOSE) up --force-recreate -d frontend-development
|
||||
.PHONY: run
|
||||
|
||||
run-e2e: ## start the e2e server
|
||||
run-e2e:
|
||||
@$(MAKE) run-backend
|
||||
@$(COMPOSE_E2E) stop y-provider-development
|
||||
@$(COMPOSE_E2E) up --force-recreate -d frontend
|
||||
@$(COMPOSE_E2E) up --force-recreate -d y-provider
|
||||
.PHONY: run-e2e
|
||||
|
||||
status: ## an alias for "docker compose ps"
|
||||
@$(COMPOSE) ps
|
||||
@$(COMPOSE_E2E) ps
|
||||
.PHONY: status
|
||||
|
||||
stop: ## stop the development server using Docker
|
||||
@$(COMPOSE) stop
|
||||
@$(COMPOSE_E2E) stop
|
||||
.PHONY: stop
|
||||
|
||||
# -- Backend
|
||||
@@ -315,7 +348,7 @@ frontend-lint: ## run the frontend linter
|
||||
.PHONY: frontend-lint
|
||||
|
||||
run-frontend-development: ## Run the frontend in development mode
|
||||
@$(COMPOSE) stop frontend
|
||||
@$(COMPOSE) stop frontend frontend-development
|
||||
cd $(PATH_FRONT_IMPRESS) && yarn dev
|
||||
.PHONY: run-frontend-development
|
||||
|
||||
|
||||
2
Procfile
2
Procfile
@@ -1,2 +0,0 @@
|
||||
web: bin/buildpack_start.sh
|
||||
postdeploy: python manage.py migrate
|
||||
@@ -57,7 +57,7 @@ Available methods: Helm chart, Nix package
|
||||
|
||||
In the works: Docker Compose, YunoHost
|
||||
|
||||
⚠️ For some advanced features (ex: Export as PDF) Docs relies on XL packages from BlockNote. These are licenced under AGPL-3.0 and are not MIT compatible. You can perfectly use Docs without these packages by setting the environment variable `PUBLISH_AS_MIT` to true. That way you'll build an image of the application without the features that are not MIT compatible. Read the [environment variables documentation](/docs/docs/env.md) for more information.
|
||||
⚠️ For some advanced features (ex: Export as PDF) Docs relies on XL packages from BlockNote. These are licenced under AGPL-3.0 and are not MIT compatible. You can perfectly use Docs without these packages by setting the environment variable `PUBLISH_AS_MIT` to true. That way you'll build an image of the application without the features that are not MIT compatible. Read the [environment variables documentation](/docs/env.md) for more information.
|
||||
|
||||
## Getting started 🔧
|
||||
|
||||
@@ -93,11 +93,11 @@ The easiest way to start working on the project is to use [GNU Make](https://www
|
||||
$ make bootstrap FLUSH_ARGS='--no-input'
|
||||
```
|
||||
|
||||
This command builds the `app` container, installs dependencies, performs database migrations and compiles translations. It's a good idea to use this command each time you are pulling code from the project repository to avoid dependency-related or migration-related issues.
|
||||
This command builds the `app-dev` and `frontend-dev` containers, installs dependencies, performs database migrations and compiles translations. It's a good idea to use this command each time you are pulling code from the project repository to avoid dependency-related or migration-related issues.
|
||||
|
||||
Your Docker services should now be up and running 🎉
|
||||
|
||||
You can access to the project by going to <http://localhost:3000>.
|
||||
You can access the project by going to <http://localhost:3000>.
|
||||
|
||||
You will be prompted to log in. The default credentials are:
|
||||
|
||||
@@ -106,7 +106,7 @@ username: impress
|
||||
password: impress
|
||||
```
|
||||
|
||||
📝 Note that if you need to run them afterwards, you can use the eponym Make rule:
|
||||
📝 Note that if you need to run them afterwards, you can use the eponymous Make rule:
|
||||
|
||||
```shellscript
|
||||
$ make run
|
||||
|
||||
@@ -18,7 +18,7 @@ the following command inside your docker container:
|
||||
|
||||
## [3.3.0] - 2025-05-22
|
||||
|
||||
⚠️ For some advanced features (ex: Export as PDF) Docs relies on XL packages from BlockNote. These are licenced under AGPL-3.0 and are not MIT compatible. You can perfectly use Docs without these packages by setting the environment variable `PUBLISH_AS_MIT` to true. That way you'll build an image of the application without the features that are not MIT compatible. Read the [environment variables documentation](/docs/docs/env.md) for more information.
|
||||
⚠️ For some advanced features (ex: Export as PDF) Docs relies on XL packages from BlockNote. These are licenced under AGPL-3.0 and are not MIT compatible. You can perfectly use Docs without these packages by setting the environment variable `PUBLISH_AS_MIT` to true. That way you'll build an image of the application without the features that are not MIT compatible. Read the [environment variables documentation](/docs/env.md) for more information.
|
||||
|
||||
The footer is now configurable from a customization file. To override the default one, you can
|
||||
use the `THEME_CUSTOMIZATION_FILE_PATH` environment variable to point to your customization file.
|
||||
|
||||
@@ -6,7 +6,7 @@ REPO_DIR="$(cd "$( dirname "${BASH_SOURCE[0]}" )/.." && pwd)"
|
||||
UNSET_USER=0
|
||||
|
||||
TERRAFORM_DIRECTORY="./env.d/terraform"
|
||||
COMPOSE_FILE="${REPO_DIR}/docker-compose.yml"
|
||||
COMPOSE_FILE="${REPO_DIR}/compose.yml"
|
||||
|
||||
|
||||
# _set_user: set (or unset) default user id used to run docker commands
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -o errexit # always exit on error
|
||||
set -o pipefail # don't ignore exit codes when piping output
|
||||
|
||||
echo "-----> Running post-compile script"
|
||||
|
||||
rm -rf docker docs env.d gitlint src/frontend/apps/e2e
|
||||
rm -rf src/frontend/apps
|
||||
rm -rf src/frontend/packages
|
||||
|
||||
# Remove some of the larger packages required by the frontend only
|
||||
rm -rf src/frontend/node_modules/@next src/frontend/node_modules/next src/frontend/node_modules/react-icons src/frontend/node_modules/@gouvfr-lasuite
|
||||
|
||||
# du -ch | sort -rh | head -n 100
|
||||
@@ -1,15 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -o errexit # always exit on error
|
||||
set -o pipefail # don't ignore exit codes when piping output
|
||||
|
||||
echo "-----> Running post-frontend script"
|
||||
|
||||
# Move the frontend build to the nginx root and clean up
|
||||
mkdir -p build/
|
||||
mv src/frontend/apps/impress/out build/frontend-out
|
||||
|
||||
mv src/backend/* ./
|
||||
mv src/nginx/* ./
|
||||
|
||||
echo "3.13" > .python-version
|
||||
@@ -1,18 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Start the Django backend server
|
||||
gunicorn -b :8000 impress.wsgi:application --log-file - &
|
||||
|
||||
# Start the Y provider service
|
||||
cd src/frontend/servers/y-provider && PORT=4444 ../../.scalingo/node/bin/node dist/start-server.js &
|
||||
|
||||
# Start the Nginx server
|
||||
bin/run &
|
||||
|
||||
# if the current shell is killed, also terminate all its children
|
||||
trap "pkill SIGTERM -P $$" SIGTERM
|
||||
|
||||
# wait for a single child to finish,
|
||||
wait -n
|
||||
# then kill all the other tasks
|
||||
pkill -P $$
|
||||
28
compose-e2e.yml
Normal file
28
compose-e2e.yml
Normal file
@@ -0,0 +1,28 @@
|
||||
services:
|
||||
|
||||
frontend:
|
||||
user: "${DOCKER_USER:-1000}"
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./src/frontend/Dockerfile
|
||||
target: frontend-production
|
||||
args:
|
||||
API_ORIGIN: "http://localhost:8071"
|
||||
PUBLISH_AS_MIT: "false"
|
||||
SW_DEACTIVATED: "true"
|
||||
image: impress:frontend-production
|
||||
ports:
|
||||
- "3000:3000"
|
||||
|
||||
y-provider:
|
||||
user: ${DOCKER_USER:-1000}
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./src/frontend/servers/y-provider/Dockerfile
|
||||
target: y-provider
|
||||
image: impress:y-provider-production
|
||||
restart: unless-stopped
|
||||
env_file:
|
||||
- env.d/development/common
|
||||
ports:
|
||||
- "4444:4444"
|
||||
@@ -98,40 +98,6 @@ services:
|
||||
depends_on:
|
||||
- app-dev
|
||||
|
||||
app:
|
||||
build:
|
||||
context: .
|
||||
target: backend-production
|
||||
args:
|
||||
DOCKER_USER: ${DOCKER_USER:-1000}
|
||||
user: ${DOCKER_USER:-1000}
|
||||
image: impress:backend-production
|
||||
environment:
|
||||
- DJANGO_CONFIGURATION=Demo
|
||||
env_file:
|
||||
- env.d/development/common
|
||||
- env.d/development/postgresql
|
||||
depends_on:
|
||||
postgresql:
|
||||
condition: service_healthy
|
||||
restart: true
|
||||
redis:
|
||||
condition: service_started
|
||||
minio:
|
||||
condition: service_started
|
||||
|
||||
celery:
|
||||
user: ${DOCKER_USER:-1000}
|
||||
image: impress:backend-production
|
||||
command: ["celery", "-A", "impress.celery_app", "worker", "-l", "INFO"]
|
||||
environment:
|
||||
- DJANGO_CONFIGURATION=Demo
|
||||
env_file:
|
||||
- env.d/development/common
|
||||
- env.d/development/postgresql
|
||||
depends_on:
|
||||
- app
|
||||
|
||||
nginx:
|
||||
image: nginx:1.25
|
||||
ports:
|
||||
@@ -141,23 +107,25 @@ services:
|
||||
depends_on:
|
||||
app-dev:
|
||||
condition: service_started
|
||||
y-provider:
|
||||
condition: service_started
|
||||
keycloak:
|
||||
condition: service_healthy
|
||||
restart: true
|
||||
|
||||
frontend:
|
||||
frontend-development:
|
||||
user: "${DOCKER_USER:-1000}"
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./src/frontend/Dockerfile
|
||||
target: frontend-production
|
||||
target: impress-dev
|
||||
args:
|
||||
API_ORIGIN: "http://localhost:8071"
|
||||
PUBLISH_AS_MIT: "false"
|
||||
SW_DEACTIVATED: "true"
|
||||
image: impress:frontend-development
|
||||
volumes:
|
||||
- ./src/frontend:/home/frontend
|
||||
- /home/frontend/node_modules
|
||||
- /home/frontend/apps/impress/node_modules
|
||||
ports:
|
||||
- "3000:3000"
|
||||
|
||||
@@ -171,24 +139,29 @@ services:
|
||||
working_dir: /app
|
||||
|
||||
node:
|
||||
image: node:18
|
||||
image: node:22
|
||||
user: "${DOCKER_USER:-1000}"
|
||||
environment:
|
||||
HOME: /tmp
|
||||
volumes:
|
||||
- ".:/app"
|
||||
|
||||
y-provider:
|
||||
y-provider-development:
|
||||
user: ${DOCKER_USER:-1000}
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./src/frontend/servers/y-provider/Dockerfile
|
||||
target: y-provider
|
||||
target: y-provider-development
|
||||
image: impress:y-provider-development
|
||||
restart: unless-stopped
|
||||
env_file:
|
||||
- env.d/development/common
|
||||
ports:
|
||||
- "4444:4444"
|
||||
volumes:
|
||||
- ./src/frontend/:/home/frontend
|
||||
- /home/frontend/node_modules
|
||||
- /home/frontend/servers/y-provider/node_modules
|
||||
|
||||
kc_postgresql:
|
||||
image: postgres:14.3
|
||||
@@ -60,7 +60,7 @@
|
||||
},
|
||||
{
|
||||
"username": "user-e2e-chromium",
|
||||
"email": "user@chromium.e2e",
|
||||
"email": "user@chromium.test",
|
||||
"firstName": "E2E",
|
||||
"lastName": "Chromium",
|
||||
"enabled": true,
|
||||
@@ -74,7 +74,7 @@
|
||||
},
|
||||
{
|
||||
"username": "user-e2e-webkit",
|
||||
"email": "user@webkit.e2e",
|
||||
"email": "user@webkit.test",
|
||||
"firstName": "E2E",
|
||||
"lastName": "Webkit",
|
||||
"enabled": true,
|
||||
@@ -88,7 +88,7 @@
|
||||
},
|
||||
{
|
||||
"username": "user-e2e-firefox",
|
||||
"email": "user@firefox.e2e",
|
||||
"email": "user@firefox.test",
|
||||
"firstName": "E2E",
|
||||
"lastName": "Firefox",
|
||||
"enabled": true,
|
||||
|
||||
192
docs/env.md
192
docs/env.md
@@ -6,102 +6,102 @@ Here we describe all environment variables that can be set for the docs applicat
|
||||
|
||||
These are the environment variables you can set for the `impress-backend` container.
|
||||
|
||||
| Option | Description | default |
|
||||
| ----------------------------------------------- | --------------------------------------------------------------------------------------------- | ------------------------------------------------------- |
|
||||
| DJANGO_ALLOWED_HOSTS | allowed hosts | [] |
|
||||
| DJANGO_SECRET_KEY | secret key | |
|
||||
| DJANGO_SERVER_TO_SERVER_API_TOKENS | | [] |
|
||||
| DB_ENGINE | engine to use for database connections | django.db.backends.postgresql_psycopg2 |
|
||||
| DB_NAME | name of the database | impress |
|
||||
| DB_USER | user to authenticate with | dinum |
|
||||
| DB_PASSWORD | password to authenticate with | pass |
|
||||
| DB_HOST | host of the database | localhost |
|
||||
| DB_PORT | port of the database | 5432 |
|
||||
| MEDIA_BASE_URL | | |
|
||||
| STORAGES_STATICFILES_BACKEND | | whitenoise.storage.CompressedManifestStaticFilesStorage |
|
||||
| AWS_S3_ENDPOINT_URL | S3 endpoint | |
|
||||
| AWS_S3_ACCESS_KEY_ID | access id for s3 endpoint | |
|
||||
| AWS_S3_SECRET_ACCESS_KEY | access key for s3 endpoint | |
|
||||
| AWS_S3_REGION_NAME | region name for s3 endpoint | |
|
||||
| AWS_STORAGE_BUCKET_NAME | bucket name for s3 endpoint | impress-media-storage |
|
||||
| DOCUMENT_IMAGE_MAX_SIZE | maximum size of document in bytes | 10485760 |
|
||||
| LANGUAGE_CODE | default language | en-us |
|
||||
| API_USERS_LIST_THROTTLE_RATE_SUSTAINED | throttle rate for api | 180/hour |
|
||||
| API_USERS_LIST_THROTTLE_RATE_BURST | throttle rate for api on burst | 30/minute |
|
||||
| SPECTACULAR_SETTINGS_ENABLE_DJANGO_DEPLOY_CHECK | | false |
|
||||
| TRASHBIN_CUTOFF_DAYS | trashbin cutoff | 30 |
|
||||
| DJANGO_EMAIL_BACKEND | email backend library | django.core.mail.backends.smtp.EmailBackend |
|
||||
| DJANGO_EMAIL_BRAND_NAME | brand name for email | |
|
||||
| DJANGO_EMAIL_HOST | host name of email | |
|
||||
| DJANGO_EMAIL_HOST_USER | user to authenticate with on the email host | |
|
||||
| DJANGO_EMAIL_HOST_PASSWORD | password to authenticate with on the email host | |
|
||||
| DJANGO_EMAIL_LOGO_IMG | logo for the email | |
|
||||
| DJANGO_EMAIL_PORT | port used to connect to email host | |
|
||||
| DJANGO_EMAIL_USE_TLS | use tls for email host connection | false |
|
||||
| DJANGO_EMAIL_USE_SSL | use sstl for email host connection | false |
|
||||
| DJANGO_EMAIL_FROM | email address used as sender | from@example.com |
|
||||
| DJANGO_CORS_ALLOW_ALL_ORIGINS | allow all CORS origins | true |
|
||||
| DJANGO_CORS_ALLOWED_ORIGINS | list of origins allowed for CORS | [] |
|
||||
| DJANGO_CORS_ALLOWED_ORIGIN_REGEXES | list of origins allowed for CORS using regulair expressions | [] |
|
||||
| SENTRY_DSN | sentry host | |
|
||||
| COLLABORATION_API_URL | collaboration api host | |
|
||||
| COLLABORATION_SERVER_SECRET | collaboration api secret | |
|
||||
| COLLABORATION_WS_URL | collaboration websocket url | |
|
||||
| COLLABORATION_WS_NOT_CONNECTED_READY_ONLY | Users not connected to the collaboration server cannot edit | false |
|
||||
| FRONTEND_CSS_URL | To add a external css file to the app | |
|
||||
| FRONTEND_HOMEPAGE_FEATURE_ENABLED | frontend feature flag to display the homepage | false |
|
||||
| FRONTEND_THEME | frontend theme to use | |
|
||||
| POSTHOG_KEY | posthog key for analytics | |
|
||||
| CRISP_WEBSITE_ID | crisp website id for support | |
|
||||
| DJANGO_CELERY_BROKER_URL | celery broker url | redis://redis:6379/0 |
|
||||
| DJANGO_CELERY_BROKER_TRANSPORT_OPTIONS | celery broker transport options | {} |
|
||||
| SESSION_COOKIE_AGE | duration of the cookie session | 60*60*12 |
|
||||
| OIDC_CREATE_USER | create used on OIDC | false |
|
||||
| OIDC_RP_SIGN_ALGO | verification algorithm used OIDC tokens | RS256 |
|
||||
| OIDC_RP_CLIENT_ID | client id used for OIDC | impress |
|
||||
| OIDC_RP_CLIENT_SECRET | client secret used for OIDC | |
|
||||
| OIDC_OP_JWKS_ENDPOINT | JWKS endpoint for OIDC | |
|
||||
| OIDC_OP_AUTHORIZATION_ENDPOINT | Authorization endpoint for OIDC | |
|
||||
| OIDC_OP_TOKEN_ENDPOINT | Token endpoint for OIDC | |
|
||||
| OIDC_OP_USER_ENDPOINT | User endpoint for OIDC | |
|
||||
| OIDC_OP_LOGOUT_ENDPOINT | Logout endpoint for OIDC | |
|
||||
| OIDC_AUTH_REQUEST_EXTRA_PARAMS | OIDC extra auth parameters | {} |
|
||||
| OIDC_RP_SCOPES | scopes requested for OIDC | openid email |
|
||||
| LOGIN_REDIRECT_URL | login redirect url | |
|
||||
| LOGIN_REDIRECT_URL_FAILURE | login redirect url on failure | |
|
||||
| LOGOUT_REDIRECT_URL | logout redirect url | |
|
||||
| OIDC_USE_NONCE | use nonce for OIDC | true |
|
||||
| OIDC_REDIRECT_REQUIRE_HTTPS | Require https for OIDC redirect url | false |
|
||||
| OIDC_REDIRECT_ALLOWED_HOSTS | Allowed hosts for OIDC redirect url | [] |
|
||||
| OIDC_STORE_ID_TOKEN | Store OIDC token | true |
|
||||
| OIDC_FALLBACK_TO_EMAIL_FOR_IDENTIFICATION | faillback to email for identification | true |
|
||||
| OIDC_ALLOW_DUPLICATE_EMAILS | Allow duplicate emails | false |
|
||||
| USER_OIDC_ESSENTIAL_CLAIMS | essential claims in OIDC token | [] |
|
||||
| OIDC_USERINFO_FULLNAME_FIELDS | OIDC token claims to create full name | ["first_name", "last_name"] |
|
||||
| OIDC_USERINFO_SHORTNAME_FIELD | OIDC token claims to create shortname | first_name |
|
||||
| ALLOW_LOGOUT_GET_METHOD | Allow get logout method | true |
|
||||
| AI_API_KEY | AI key to be used for AI Base url | |
|
||||
| AI_BASE_URL | OpenAI compatible AI base url | |
|
||||
| AI_MODEL | AI Model to use | |
|
||||
| AI_ALLOW_REACH_FROM | Users that can use AI must be this level. options are "public", "authenticated", "restricted" | authenticated |
|
||||
| AI_FEATURE_ENABLED | Enable AI options | false |
|
||||
| Y_PROVIDER_API_KEY | Y provider API key | |
|
||||
| Y_PROVIDER_API_BASE_URL | Y Provider url | |
|
||||
| CONVERSION_API_ENDPOINT | Conversion API endpoint | convert-markdown |
|
||||
| CONVERSION_API_CONTENT_FIELD | Conversion api content field | content |
|
||||
| CONVERSION_API_TIMEOUT | Conversion api timeout | 30 |
|
||||
| CONVERSION_API_SECURE | Require secure conversion api | false |
|
||||
| LOGGING_LEVEL_LOGGERS_ROOT | default logging level. options are "DEBUG", "INFO", "WARN", "ERROR", "CRITICAL" | INFO |
|
||||
| LOGGING_LEVEL_LOGGERS_APP | application logging level. options are "DEBUG", "INFO", "WARN", "ERROR", "CRITICAL" | INFO |
|
||||
| API_USERS_LIST_LIMIT | Limit on API users | 5 |
|
||||
| DJANGO_CSRF_TRUSTED_ORIGINS | CSRF trusted origins | [] |
|
||||
| REDIS_URL | cache url | redis://redis:6379/1 |
|
||||
| CACHES_DEFAULT_TIMEOUT | cache default timeout | 30 |
|
||||
| CACHES_KEY_PREFIX | The prefix used to every cache keys. | docs |
|
||||
| MALWARE_DETECTION_BACKEND | The malware detection backend use from the django-lasuite package | lasuite.malware_detection.backends.dummy.DummyBackend |
|
||||
| MALWARE_DETECTION_PARAMETERS | A dict containing all the parameters to initiate the malware detection backend | {"callback_path": "core.malware_detection.malware_detection_callback",} |
|
||||
| THEME_CUSTOMIZATION_FILE_PATH | full path to the file customizing the theme. An example is provided in src/backend/impress/configuration/theme/default.json | BASE_DIR/impress/configuration/theme/default.json |
|
||||
| THEME_CUSTOMIZATION_CACHE_TIMEOUT | Cache duration for the customization settings | 86400 |
|
||||
| Option | Description | default |
|
||||
|-------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------|
|
||||
| AI_ALLOW_REACH_FROM | Users that can use AI must be this level. options are "public", "authenticated", "restricted" | authenticated |
|
||||
| AI_API_KEY | AI key to be used for AI Base url | |
|
||||
| AI_BASE_URL | OpenAI compatible AI base url | |
|
||||
| AI_FEATURE_ENABLED | Enable AI options | false |
|
||||
| AI_MODEL | AI Model to use | |
|
||||
| ALLOW_LOGOUT_GET_METHOD | Allow get logout method | true |
|
||||
| API_USERS_LIST_LIMIT | Limit on API users | 5 |
|
||||
| API_USERS_LIST_THROTTLE_RATE_BURST | Throttle rate for api on burst | 30/minute |
|
||||
| API_USERS_LIST_THROTTLE_RATE_SUSTAINED | Throttle rate for api | 180/hour |
|
||||
| AWS_S3_ACCESS_KEY_ID | Access id for s3 endpoint | |
|
||||
| AWS_S3_ENDPOINT_URL | S3 endpoint | |
|
||||
| AWS_S3_REGION_NAME | Region name for s3 endpoint | |
|
||||
| AWS_S3_SECRET_ACCESS_KEY | Access key for s3 endpoint | |
|
||||
| AWS_STORAGE_BUCKET_NAME | Bucket name for s3 endpoint | impress-media-storage |
|
||||
| CACHES_DEFAULT_TIMEOUT | Cache default timeout | 30 |
|
||||
| CACHES_KEY_PREFIX                                 | The prefix applied to every cache key.                                                                                        | docs                                                                      |
|
||||
| COLLABORATION_API_URL | Collaboration api host | |
|
||||
| COLLABORATION_SERVER_SECRET | Collaboration api secret | |
|
||||
| COLLABORATION_WS_NOT_CONNECTED_READY_ONLY | Users not connected to the collaboration server cannot edit | false |
|
||||
| COLLABORATION_WS_URL | Collaboration websocket url | |
|
||||
| CONVERSION_API_CONTENT_FIELD | Conversion api content field | content |
|
||||
| CONVERSION_API_ENDPOINT | Conversion API endpoint | convert-markdown |
|
||||
| CONVERSION_API_SECURE | Require secure conversion api | false |
|
||||
| CONVERSION_API_TIMEOUT | Conversion api timeout | 30 |
|
||||
| CRISP_WEBSITE_ID | Crisp website id for support | |
|
||||
| DB_ENGINE | Engine to use for database connections | django.db.backends.postgresql_psycopg2 |
|
||||
| DB_HOST | Host of the database | localhost |
|
||||
| DB_NAME | Name of the database | impress |
|
||||
| DB_PASSWORD | Password to authenticate with | pass |
|
||||
| DB_PORT | Port of the database | 5432 |
|
||||
| DB_USER | User to authenticate with | dinum |
|
||||
| DJANGO_ALLOWED_HOSTS | Allowed hosts | [] |
|
||||
| DJANGO_CELERY_BROKER_TRANSPORT_OPTIONS | Celery broker transport options | {} |
|
||||
| DJANGO_CELERY_BROKER_URL | Celery broker url | redis://redis:6379/0 |
|
||||
| DJANGO_CORS_ALLOW_ALL_ORIGINS | Allow all CORS origins | false |
|
||||
| DJANGO_CORS_ALLOWED_ORIGIN_REGEXES              | List of origins allowed for CORS using regular expressions                                                                  | []                                                                      |
|
||||
| DJANGO_CORS_ALLOWED_ORIGINS | List of origins allowed for CORS | [] |
|
||||
| DJANGO_CSRF_TRUSTED_ORIGINS | CSRF trusted origins | [] |
|
||||
| DJANGO_EMAIL_BACKEND | Email backend library | django.core.mail.backends.smtp.EmailBackend |
|
||||
| DJANGO_EMAIL_BRAND_NAME | Brand name for email | |
|
||||
| DJANGO_EMAIL_FROM | Email address used as sender | from@example.com |
|
||||
| DJANGO_EMAIL_HOST | Hostname of email | |
|
||||
| DJANGO_EMAIL_HOST_PASSWORD | Password to authenticate with on the email host | |
|
||||
| DJANGO_EMAIL_HOST_USER | User to authenticate with on the email host | |
|
||||
| DJANGO_EMAIL_LOGO_IMG | Logo for the email | |
|
||||
| DJANGO_EMAIL_PORT | Port used to connect to email host | |
|
||||
| DJANGO_EMAIL_USE_SSL | Use ssl for email host connection | false |
|
||||
| DJANGO_EMAIL_USE_TLS | Use tls for email host connection | false |
|
||||
| DJANGO_SECRET_KEY | Secret key | |
|
||||
| DJANGO_SERVER_TO_SERVER_API_TOKENS | | [] |
|
||||
| DOCUMENT_IMAGE_MAX_SIZE | Maximum size of document in bytes | 10485760 |
|
||||
| FRONTEND_CSS_URL                                | To add an external CSS file to the app                                                                                      |                                                                         |
|
||||
| FRONTEND_HOMEPAGE_FEATURE_ENABLED | Frontend feature flag to display the homepage | false |
|
||||
| FRONTEND_THEME | Frontend theme to use | |
|
||||
| LANGUAGE_CODE | Default language | en-us |
|
||||
| LOGGING_LEVEL_LOGGERS_APP | Application logging level. options are "DEBUG", "INFO", "WARN", "ERROR", "CRITICAL" | INFO |
|
||||
| LOGGING_LEVEL_LOGGERS_ROOT | Default logging level. options are "DEBUG", "INFO", "WARN", "ERROR", "CRITICAL" | INFO |
|
||||
| LOGIN_REDIRECT_URL | Login redirect url | |
|
||||
| LOGIN_REDIRECT_URL_FAILURE | Login redirect url on failure | |
|
||||
| LOGOUT_REDIRECT_URL | Logout redirect url | |
|
||||
| MALWARE_DETECTION_BACKEND | The malware detection backend use from the django-lasuite package | lasuite.malware_detection.backends.dummy.DummyBackend |
|
||||
| MALWARE_DETECTION_PARAMETERS | A dict containing all the parameters to initiate the malware detection backend | {"callback_path": "core.malware_detection.malware_detection_callback",} |
|
||||
| MEDIA_BASE_URL | | |
|
||||
| OIDC_ALLOW_DUPLICATE_EMAILS | Allow duplicate emails | false |
|
||||
| OIDC_AUTH_REQUEST_EXTRA_PARAMS | OIDC extra auth parameters | {} |
|
||||
| OIDC_CREATE_USER                                | Create user on OIDC                                                                                                         | false                                                                   |
|
||||
| OIDC_FALLBACK_TO_EMAIL_FOR_IDENTIFICATION | Fallback to email for identification | true |
|
||||
| OIDC_OP_AUTHORIZATION_ENDPOINT | Authorization endpoint for OIDC | |
|
||||
| OIDC_OP_JWKS_ENDPOINT | JWKS endpoint for OIDC | |
|
||||
| OIDC_OP_LOGOUT_ENDPOINT | Logout endpoint for OIDC | |
|
||||
| OIDC_OP_TOKEN_ENDPOINT | Token endpoint for OIDC | |
|
||||
| OIDC_OP_USER_ENDPOINT | User endpoint for OIDC | |
|
||||
| OIDC_REDIRECT_ALLOWED_HOSTS | Allowed hosts for OIDC redirect url | [] |
|
||||
| OIDC_REDIRECT_REQUIRE_HTTPS | Require https for OIDC redirect url | false |
|
||||
| OIDC_RP_CLIENT_ID | Client id used for OIDC | impress |
|
||||
| OIDC_RP_CLIENT_SECRET | Client secret used for OIDC | |
|
||||
| OIDC_RP_SCOPES | Scopes requested for OIDC | openid email |
|
||||
| OIDC_RP_SIGN_ALGO | verification algorithm used OIDC tokens | RS256 |
|
||||
| OIDC_STORE_ID_TOKEN | Store OIDC token | true |
|
||||
| OIDC_USE_NONCE | Use nonce for OIDC | true |
|
||||
| OIDC_USERINFO_FULLNAME_FIELDS | OIDC token claims to create full name | ["first_name", "last_name"] |
|
||||
| OIDC_USERINFO_SHORTNAME_FIELD | OIDC token claims to create shortname | first_name |
|
||||
| POSTHOG_KEY | Posthog key for analytics | |
|
||||
| REDIS_URL | Cache url | redis://redis:6379/1 |
|
||||
| SENTRY_DSN | Sentry host | |
|
||||
| SESSION_COOKIE_AGE | duration of the cookie session | 60*60*12 |
|
||||
| SPECTACULAR_SETTINGS_ENABLE_DJANGO_DEPLOY_CHECK | | false |
|
||||
| STORAGES_STATICFILES_BACKEND | | whitenoise.storage.CompressedManifestStaticFilesStorage |
|
||||
| THEME_CUSTOMIZATION_CACHE_TIMEOUT | Cache duration for the customization settings | 86400 |
|
||||
| THEME_CUSTOMIZATION_FILE_PATH | Full path to the file customizing the theme. An example is provided in src/backend/impress/configuration/theme/default.json | BASE_DIR/impress/configuration/theme/default.json |
|
||||
| TRASHBIN_CUTOFF_DAYS | Trashbin cutoff | 30 |
|
||||
| USER_OIDC_ESSENTIAL_CLAIMS | Essential claims in OIDC token | [] |
|
||||
| Y_PROVIDER_API_BASE_URL | Y Provider url | |
|
||||
| Y_PROVIDER_API_KEY | Y provider API key | |
|
||||
|
||||
|
||||
## impress-frontend image
|
||||
|
||||
@@ -91,7 +91,7 @@ extraDeploy:
|
||||
},
|
||||
{
|
||||
"username": "user-e2e-chromium",
|
||||
"email": "user@chromium.e2e",
|
||||
"email": "user@chromium.test",
|
||||
"firstName": "E2E",
|
||||
"lastName": "Chromium",
|
||||
"enabled": "true",
|
||||
@@ -105,7 +105,7 @@ extraDeploy:
|
||||
},
|
||||
{
|
||||
"username": "user-e2e-webkit",
|
||||
"email": "user@webkit.e2e",
|
||||
"email": "user@webkit.test",
|
||||
"firstName": "E2E",
|
||||
"lastName": "Webkit",
|
||||
"enabled": "true",
|
||||
@@ -119,7 +119,7 @@ extraDeploy:
|
||||
},
|
||||
{
|
||||
"username": "user-e2e-firefox",
|
||||
"email": "user@firefox.e2e",
|
||||
"email": "user@firefox.test",
|
||||
"firstName": "E2E",
|
||||
"lastName": "Firefox",
|
||||
"enabled": "true",
|
||||
|
||||
110
docs/system-requirements.md
Normal file
110
docs/system-requirements.md
Normal file
@@ -0,0 +1,110 @@
|
||||
# La Suite Docs – System & Requirements (2025-06)
|
||||
|
||||
## 1. Quick-Reference Matrix (single VM / laptop)
|
||||
|
||||
| Scenario | RAM | vCPU | SSD | Notes |
|
||||
| ------------------------- | ----- | ---- | ------- | ------------------------- |
|
||||
| **Solo dev** | 8 GB | 4 | 15 GB | Hot-reload + one IDE |
|
||||
| **Team QA** | 16 GB | 6 | 30 GB | Runs integration tests |
|
||||
| **Prod ≤ 100 live users** | 32 GB | 8 + | 50 GB + | Scale linearly above this |
|
||||
|
||||
Memory is the first bottleneck; CPU matters only when Celery or the Next.js build is saturated.
|
||||
|
||||
> **Note:** Memory consumption varies by operating system. Windows tends to be more memory-hungry than Linux, so consider adding 10-20% extra RAM when running on Windows compared to Linux-based systems.
|
||||
|
||||
## 2. Development Environment Memory Requirements
|
||||
|
||||
| Service | Typical use | Rationale / source |
|
||||
| ------------------------ | ----------------------------- | --------------------------------------------------------------------------------------- |
|
||||
| PostgreSQL | **1 – 2 GB** | `shared_buffers` starting point ≈ 25% RAM ([postgresql.org][1]) |
|
||||
| Keycloak | **≈ 1.3 GB** | 70% of limit for heap + ~300 MB non-heap ([keycloak.org][2]) |
|
||||
| Redis | **≤ 256 MB** | Empty instance ≈ 3 MB; budget 256 MB to allow small datasets ([stackoverflow.com][3]) |
|
||||
| MinIO | **2 GB (dev) / 32 GB (prod)**| Pre-allocates 1–2 GiB; docs recommend 32 GB per host for ≤ 100 Ti storage ([min.io][4]) |
|
||||
| Django API (+ Celery) | **0.8 – 1.5 GB** | Empirical in-house metrics |
|
||||
| Next.js frontend | **0.5 – 1 GB** | Dev build chain |
|
||||
| Y-Provider (y-websocket) | **< 200 MB** | Large 40 MB YDoc called “big” in community thread ([discuss.yjs.dev][5]) |
|
||||
| Nginx | **< 100 MB** | Static reverse-proxy footprint |
|
||||
|
||||
[1]: https://www.postgresql.org/docs/9.1/runtime-config-resource.html "PostgreSQL: Documentation: 9.1: Resource Consumption"
|
||||
[2]: https://www.keycloak.org/high-availability/concepts-memory-and-cpu-sizing "Concepts for sizing CPU and memory resources - Keycloak"
|
||||
[3]: https://stackoverflow.com/questions/45233052/memory-footprint-for-redis-empty-instance "Memory footprint for Redis empty instance - Stack Overflow"
|
||||
[4]: https://min.io/docs/minio/kubernetes/upstream/operations/checklists/hardware.html "Hardware Checklist — MinIO Object Storage for Kubernetes"
|
||||
[5]: https://discuss.yjs.dev/t/understanding-memory-requirements-for-production-usage/198 "Understanding memory requirements for production usage - Yjs Community"
|
||||
|
||||
> **Rule of thumb:** add 2 GB for OS/overhead, then sum only the rows you actually run.
|
||||
|
||||
## 3. Production Environment Memory Requirements
|
||||
|
||||
Production deployments differ significantly from development environments. The table below shows typical memory usage for production services:
|
||||
|
||||
| Service | Typical use | Rationale / notes |
|
||||
| ------------------------ | ----------------------------- | --------------------------------------------------------------------------------------- |
|
||||
| PostgreSQL | **2 – 8 GB** | Higher `shared_buffers` and connection pooling for concurrent users |
|
||||
| OIDC Provider (optional) | **Variable** | Any OIDC-compatible provider (Keycloak, Auth0, Azure AD, etc.) - external or self-hosted |
|
||||
| Redis | **256 MB – 2 GB** | Session storage and caching; scales with active user sessions |
|
||||
| Object Storage (optional)| **External or self-hosted** | Can use AWS S3, Azure Blob, Google Cloud Storage, or self-hosted MinIO |
|
||||
| Django API (+ Celery) | **1 – 3 GB** | Production workloads with background tasks and higher concurrency |
|
||||
| Static Files (Nginx) | **< 200 MB** | Serves Next.js build output and static assets; no development overhead |
|
||||
| Y-Provider (y-websocket) | **200 MB – 1 GB** | Scales with concurrent document editing sessions |
|
||||
| Nginx (Load Balancer) | **< 200 MB** | Reverse proxy, SSL termination, static file serving |
|
||||
|
||||
### Production Architecture Notes
|
||||
|
||||
- **Frontend**: Uses pre-built Next.js static assets served by Nginx (no Node.js runtime needed)
|
||||
- **Authentication**: Any OIDC-compatible provider can be used instead of self-hosted Keycloak
|
||||
- **Object Storage**: External services (S3, Azure Blob) or self-hosted solutions (MinIO) are both viable
|
||||
- **Database**: Consider PostgreSQL clustering or managed database services for high availability
|
||||
- **Scaling**: Horizontal scaling is recommended for Django API and Y-Provider services
|
||||
|
||||
### Minimal Production Setup (Core Services Only)
|
||||
|
||||
| Service | Memory | Notes |
|
||||
| ------------------------ | --------- | --------------------------------------- |
|
||||
| PostgreSQL | **2 GB** | Core database |
|
||||
| Django API (+ Celery) | **1.5 GB**| Backend services |
|
||||
| Y-Provider | **200 MB**| Real-time collaboration |
|
||||
| Nginx | **100 MB**| Static files + reverse proxy |
|
||||
| Redis | **256 MB**| Session storage |
|
||||
| **Total (without auth/storage)** | **≈ 4 GB** | External OIDC + object storage assumed |
|
||||
|
||||
## 4. Recommended Software Versions
|
||||
|
||||
| Tool | Minimum |
|
||||
| ----------------------- | ------- |
|
||||
| Docker Engine / Desktop | 24.0 |
|
||||
| Docker Compose | v2 |
|
||||
| Git | 2.40 |
|
||||
| **Node.js** | 22+ |
|
||||
| **Python** | 3.13+ |
|
||||
| GNU Make | 4.4 |
|
||||
| Kind | 0.22 |
|
||||
| Helm | 3.14 |
|
||||
| kubectl | 1.29 |
|
||||
| mkcert | 1.4 |
|
||||
|
||||
|
||||
## 5. Ports (dev defaults)
|
||||
|
||||
| Port | Service |
|
||||
| --------- | --------------------- |
|
||||
| 3000 | Next.js |
|
||||
| 8071 | Django |
|
||||
| 4444 | Y-Provider |
|
||||
| 8080 | Keycloak |
|
||||
| 8083 | Nginx proxy |
|
||||
| 9000/9001 | MinIO |
|
||||
| 15432 | PostgreSQL (main) |
|
||||
| 5433 | PostgreSQL (Keycloak) |
|
||||
| 1081 | MailCatcher |
|
||||
|
||||
## 6. Sizing Guidelines
|
||||
|
||||
**RAM** – start at 8 GB dev / 16 GB staging / 32 GB prod. Postgres and Keycloak are the first to OOM; scale them first.
|
||||
|
||||
> **OS considerations:** Windows systems typically require 10-20% more RAM than Linux due to higher OS overhead. Docker Desktop on Windows also uses additional memory compared to native Linux Docker.
|
||||
|
||||
**CPU** – budget one vCPU per busy container until Celery or Next.js builds saturate.
|
||||
|
||||
**Disk** – SSD; add 10 GB extra for the Docker layer cache.
|
||||
|
||||
**MinIO** – for demos, mount a local folder instead of running MinIO to save 2 GB+ of RAM.
|
||||
@@ -53,4 +53,18 @@ Below is a visual example of a configured footer ⬇️:
|
||||
|
||||

|
||||
|
||||
----
|
||||
|
||||
# **Custom Translations** 📝
|
||||
|
||||
The translations can be partially overridden from the theme customization file.
|
||||
|
||||
### Settings 🔧
|
||||
|
||||
```shellscript
|
||||
THEME_CUSTOMIZATION_FILE_PATH=<path>
|
||||
```
|
||||
|
||||
### Example of JSON
|
||||
|
||||
The json must follow some rules: https://github.com/suitenumerique/docs/blob/main/src/helm/env.d/dev/configuration/theme/demo.json
|
||||
194
docs/troubleshoot.md
Normal file
194
docs/troubleshoot.md
Normal file
@@ -0,0 +1,194 @@
|
||||
# Troubleshooting Guide
|
||||
|
||||
## Line Ending Issues on Windows (LF/CRLF)
|
||||
|
||||
### Problem Description
|
||||
|
||||
This project uses **LF (Line Feed: `\n`) line endings** exclusively. Windows users may encounter issues because:
|
||||
|
||||
- **Windows** defaults to CRLF (Carriage Return + Line Feed: `\r\n`) for line endings
|
||||
- **This project** uses LF line endings for consistency across all platforms
|
||||
- **Git** may automatically convert line endings, causing conflicts or build failures
|
||||
|
||||
### Common Symptoms
|
||||
|
||||
- Git shows files as modified even when no changes were made
|
||||
- Error messages like "warning: LF will be replaced by CRLF"
|
||||
- Build failures or linting errors due to line ending mismatches
|
||||
|
||||
### Solutions for Windows Users
|
||||
|
||||
#### Configure Git to Preserve LF (Recommended)
|
||||
|
||||
Configure Git to NOT convert line endings and preserve LF:
|
||||
|
||||
```bash
|
||||
git config core.autocrlf false
|
||||
git config core.eol lf
|
||||
```
|
||||
|
||||
This tells Git to:
|
||||
- Never convert line endings automatically
|
||||
- Always use LF for line endings in working directory
|
||||
|
||||
|
||||
#### Fix Existing Repository with Wrong Line Endings
|
||||
|
||||
If you already have CRLF line endings in your local repository, the **best approach** is to configure Git properly and clone the project again:
|
||||
|
||||
1. **Configure Git first**:
|
||||
```bash
|
||||
git config --global core.autocrlf false
|
||||
git config --global core.eol lf
|
||||
```
|
||||
|
||||
2. **Clone the project fresh** (recommended):
|
||||
```bash
|
||||
# Navigate to parent directory
|
||||
cd ..
|
||||
|
||||
# Remove current repository (backup your changes first!)
|
||||
rm -rf docs
|
||||
|
||||
# Clone again with correct line endings
|
||||
git clone git@github.com:suitenumerique/docs.git
|
||||
```
|
||||
|
||||
**Alternative**: If you have uncommitted changes and cannot re-clone:
|
||||
|
||||
1. **Backup your changes**:
|
||||
```bash
|
||||
git add .
|
||||
git commit -m "Save changes before fixing line endings"
|
||||
```
|
||||
|
||||
2. **Remove all files from Git's index**:
|
||||
```bash
|
||||
git rm --cached -r .
|
||||
```
|
||||
|
||||
3. **Reset Git configuration** (if not done globally):
|
||||
```bash
|
||||
git config core.autocrlf false
|
||||
git config core.eol lf
|
||||
```
|
||||
|
||||
4. **Re-add all files** (Git will use LF line endings):
|
||||
```bash
|
||||
git add .
|
||||
```
|
||||
|
||||
5. **Commit the changes**:
|
||||
```bash
|
||||
git commit -m "✏️(project) Fix line endings to LF"
|
||||
```
|
||||
|
||||
## Minio Permission Issues on Windows
|
||||
|
||||
### Problem Description
|
||||
|
||||
On Windows, you may encounter permission-related errors when running Minio in development mode with Docker Compose. This typically happens because:
|
||||
|
||||
- **Windows file permissions** don't map well to Unix-style user IDs used in Docker containers
|
||||
- **Docker Desktop** may have issues with user mapping when using the `DOCKER_USER` environment variable
|
||||
- **Minio container** fails to start or access volumes due to permission conflicts
|
||||
|
||||
### Common Symptoms
|
||||
|
||||
- Minio container fails to start with permission denied errors
|
||||
- Error messages related to file system permissions in Minio logs
|
||||
- Unable to create or access buckets in the development environment
|
||||
- Docker Compose showing Minio service as unhealthy or exited
|
||||
|
||||
### Solution for Windows Users
|
||||
|
||||
If you encounter Minio permission issues on Windows, you can temporarily disable user mapping for the Minio service:
|
||||
|
||||
1. **Open the `compose.yml` file**
|
||||
|
||||
2. **Comment out the user directive** in the `minio` service section:
|
||||
```yaml
|
||||
minio:
|
||||
# user: ${DOCKER_USER:-1000} # Comment this line on Windows if permission issues occur
|
||||
image: minio/minio
|
||||
environment:
|
||||
- MINIO_ROOT_USER=impress
|
||||
- MINIO_ROOT_PASSWORD=password
|
||||
# ... rest of the configuration
|
||||
```
|
||||
|
||||
3. **Restart the services**:
|
||||
```bash
|
||||
make run
|
||||
```
|
||||
|
||||
### Why This Works
|
||||
|
||||
- Commenting out the `user` directive allows the Minio container to run with its default user
|
||||
- This bypasses Windows-specific permission mapping issues
|
||||
- The container will have the necessary permissions to access and manage the mounted volumes
|
||||
|
||||
### Note
|
||||
|
||||
This is a **development-only workaround**. In production environments, proper user mapping and security considerations should be maintained according to your deployment requirements.
|
||||
|
||||
## Frontend File Watching Issues on Windows
|
||||
|
||||
### Problem Description
|
||||
|
||||
Windows users may experience issues with file watching in the frontend-development container. This typically happens because:
|
||||
|
||||
- **Docker on Windows** has known limitations with file change detection
|
||||
- **Node.js file watchers** may not detect changes properly on Windows filesystem
|
||||
- **Hot reloading** fails to trigger when files are modified
|
||||
|
||||
### Common Symptoms
|
||||
|
||||
- Changes to frontend code aren't detected automatically
|
||||
- Hot module replacement doesn't work as expected
|
||||
- Need to manually restart the frontend container after code changes
|
||||
- Console shows no reaction when saving files
|
||||
|
||||
### Solution: Enable WATCHPACK_POLLING
|
||||
|
||||
Add the `WATCHPACK_POLLING=true` environment variable to the frontend-development service in your local environment:
|
||||
|
||||
1. **Modify the `compose.yml` file** by adding the environment variable to the frontend-development service:
|
||||
|
||||
```yaml
|
||||
frontend-development:
|
||||
user: "${DOCKER_USER:-1000}"
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./src/frontend/Dockerfile
|
||||
target: impress-dev
|
||||
args:
|
||||
API_ORIGIN: "http://localhost:8071"
|
||||
PUBLISH_AS_MIT: "false"
|
||||
SW_DEACTIVATED: "true"
|
||||
image: impress:frontend-development
|
||||
environment:
|
||||
- WATCHPACK_POLLING=true # Add this line for Windows users
|
||||
volumes:
|
||||
- ./src/frontend:/home/frontend
|
||||
- /home/frontend/node_modules
|
||||
- /home/frontend/apps/impress/node_modules
|
||||
ports:
|
||||
- "3000:3000"
|
||||
```
|
||||
|
||||
2. **Restart your containers**:
|
||||
```bash
|
||||
make run
|
||||
```
|
||||
|
||||
### Why This Works
|
||||
|
||||
- `WATCHPACK_POLLING=true` forces the file watcher to use polling instead of filesystem events
|
||||
- Polling periodically checks for file changes rather than relying on OS-level file events
|
||||
- This is more reliable on Windows but slightly increases CPU usage
|
||||
- Changes to your frontend code should now be detected properly, enabling hot reloading
|
||||
|
||||
### Note
|
||||
|
||||
This setting is primarily needed for Windows users. Linux and macOS users typically don't need this setting as file watching works correctly by default on those platforms.
|
||||
@@ -61,3 +61,6 @@ COLLABORATION_BACKEND_BASE_URL=http://app-dev:8000
|
||||
COLLABORATION_SERVER_ORIGIN=http://localhost:3000
|
||||
COLLABORATION_SERVER_SECRET=my-secret
|
||||
COLLABORATION_WS_URL=ws://localhost:4444/collaboration/ws/
|
||||
|
||||
Y_PROVIDER_API_BASE_URL=http://y-provider:4444/api/
|
||||
Y_PROVIDER_API_KEY=yprovider-api-key
|
||||
|
||||
@@ -2,5 +2,3 @@
|
||||
BURST_THROTTLE_RATES="200/minute"
|
||||
DJANGO_SERVER_TO_SERVER_API_TOKENS=test-e2e
|
||||
SUSTAINED_THROTTLE_RATES="200/hour"
|
||||
Y_PROVIDER_API_KEY=yprovider-api-key
|
||||
Y_PROVIDER_API_BASE_URL=http://y-provider:4444/api/
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"extends": ["github>numerique-gouv/renovate-configuration"],
|
||||
"dependencyDashboard": true,
|
||||
"labels": ["dependencies", "noChangeLog"],
|
||||
"labels": ["dependencies", "noChangeLog", "automated"],
|
||||
"packageRules": [
|
||||
{
|
||||
"enabled": false,
|
||||
@@ -9,12 +9,6 @@
|
||||
"matchManagers": ["pep621"],
|
||||
"matchPackageNames": []
|
||||
},
|
||||
{
|
||||
"groupName": "allowed django versions",
|
||||
"matchManagers": ["pep621"],
|
||||
"matchPackageNames": ["Django"],
|
||||
"allowedVersions": "<5.2"
|
||||
},
|
||||
{
|
||||
"groupName": "allowed redis versions",
|
||||
"matchManagers": ["pep621"],
|
||||
|
||||
@@ -10,7 +10,7 @@ from django.conf import settings
|
||||
from django.contrib.postgres.aggregates import ArrayAgg
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.contrib.postgres.search import TrigramSimilarity
|
||||
from django.core.cache import cache
|
||||
from django.core.cache import cache, caches
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.files.storage import default_storage
|
||||
from django.db import connection, transaction
|
||||
@@ -404,7 +404,7 @@ class DocumentViewSet(
|
||||
|
||||
Example:
|
||||
- Ascending: GET /api/v1.0/documents/?ordering=created_at
|
||||
- Desceding: GET /api/v1.0/documents/?ordering=-title
|
||||
- Descending: GET /api/v1.0/documents/?ordering=-title
|
||||
|
||||
### Filtering:
|
||||
- `is_creator_me=true`: Returns documents created by the current user.
|
||||
@@ -631,6 +631,33 @@ class DocumentViewSet(
|
||||
"""Override to implement a soft delete instead of dumping the record in database."""
|
||||
instance.soft_delete()
|
||||
|
||||
def perform_update(self, serializer):
|
||||
"""Check rules about collaboration."""
|
||||
|
||||
shared_cache = caches["shared"]
|
||||
cache_key = f"docs:state:{serializer.instance.id}"
|
||||
doc_state = shared_cache.get(cache_key, enums.DEFAULT_DOCS_STATE.copy())
|
||||
|
||||
session_key = self.request.session.session_key
|
||||
|
||||
if doc_state["wsUsers"] and not session_key in doc_state["wsUsers"]:
|
||||
raise drf.exceptions.PermissionDenied(
|
||||
"You are not allowed to edit this document."
|
||||
)
|
||||
|
||||
if doc_state["httpUser"] and doc_state["httpUser"] != session_key:
|
||||
raise drf.exceptions.PermissionDenied(
|
||||
"You are not allowed to edit this document."
|
||||
)
|
||||
|
||||
if doc_state["httpUser"] is None:
|
||||
doc_state["httpUser"] = session_key
|
||||
shared_cache.set(cache_key, doc_state)
|
||||
|
||||
shared_cache.touch(cache_key)
|
||||
|
||||
return super().perform_update(serializer)
|
||||
|
||||
@drf.decorators.action(
|
||||
detail=False,
|
||||
methods=["get"],
|
||||
|
||||
10
src/backend/core/cache.py
Normal file
10
src/backend/core/cache.py
Normal file
@@ -0,0 +1,10 @@
|
||||
"""Cache utilities"""
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def shared_key_func(key: str, key_prefix: str, version: int = 1) -> str:
|
||||
"""
|
||||
Compute key for shared cache. In order to be compatible with other systems,
|
||||
only the key is used.
|
||||
"""
|
||||
return key
|
||||
@@ -22,6 +22,11 @@ MEDIA_STORAGE_URL_EXTRACT = re.compile(
|
||||
f"{settings.MEDIA_URL:s}({UUID_REGEX}/{ATTACHMENTS_FOLDER}/{UUID_REGEX}{FILE_EXT_REGEX})"
|
||||
)
|
||||
|
||||
DEFAULT_DOCS_STATE = {
|
||||
"httpUser": None,
|
||||
"wsUsers": [],
|
||||
}
|
||||
|
||||
|
||||
# In Django's code base, `LANGUAGES` is set by default with all supported languages.
|
||||
# We can use it for the choice of languages which should not be limited to the few languages
|
||||
|
||||
@@ -35,6 +35,8 @@ class UserFactory(factory.django.DjangoModelFactory):
|
||||
|
||||
class Meta:
|
||||
model = models.User
|
||||
# Skip postgeneration save, no save is made in the postgeneration methods.
|
||||
skip_postgeneration_save = True
|
||||
|
||||
sub = factory.Sequence(lambda n: f"user{n!s}")
|
||||
email = factory.Faker("email")
|
||||
|
||||
21
src/backend/core/middleware.py
Normal file
21
src/backend/core/middleware.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Force session creation for all requests."""
|
||||
|
||||
|
||||
class ForceSessionMiddleware:
|
||||
"""
|
||||
Force session creation for unauthenticated users.
|
||||
Must be used after Authentication middleware.
|
||||
"""
|
||||
|
||||
def __init__(self, get_response):
|
||||
"""Initialize the middleware."""
|
||||
self.get_response = get_response
|
||||
|
||||
def __call__(self, request):
|
||||
"""Force session creation for unauthenticated users."""
|
||||
|
||||
if not request.user.is_authenticated and request.session.session_key is None:
|
||||
request.session.save()
|
||||
|
||||
response = self.get_response(request)
|
||||
return response
|
||||
@@ -504,7 +504,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AddConstraint(
|
||||
model_name="documentaccess",
|
||||
constraint=models.CheckConstraint(
|
||||
check=models.Q(
|
||||
condition=models.Q(
|
||||
models.Q(("team", ""), ("user__isnull", False)),
|
||||
models.Q(("team__gt", ""), ("user__isnull", True)),
|
||||
_connector="OR",
|
||||
@@ -540,7 +540,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AddConstraint(
|
||||
model_name="templateaccess",
|
||||
constraint=models.CheckConstraint(
|
||||
check=models.Q(
|
||||
condition=models.Q(
|
||||
models.Q(("team", ""), ("user__isnull", False)),
|
||||
models.Q(("team__gt", ""), ("user__isnull", True)),
|
||||
_connector="OR",
|
||||
|
||||
@@ -520,7 +520,7 @@ class Document(MP_Node, BaseModel):
|
||||
verbose_name_plural = _("Documents")
|
||||
constraints = [
|
||||
models.CheckConstraint(
|
||||
check=(
|
||||
condition=(
|
||||
models.Q(deleted_at__isnull=True)
|
||||
| models.Q(deleted_at=models.F("ancestors_deleted_at"))
|
||||
),
|
||||
@@ -747,7 +747,7 @@ class Document(MP_Node, BaseModel):
|
||||
for ancestor in ancestors_links:
|
||||
links_definitions[ancestor["link_reach"]].add(ancestor["link_role"])
|
||||
|
||||
return dict(links_definitions) # Convert defaultdict back to a normal dict
|
||||
return dict(links_definitions) # Convert default dict back to a normal dict
|
||||
|
||||
def compute_ancestors_links(self, user):
|
||||
"""
|
||||
@@ -1088,7 +1088,7 @@ class DocumentAccess(BaseAccess):
|
||||
violation_error_message=_("This team is already in this document."),
|
||||
),
|
||||
models.CheckConstraint(
|
||||
check=models.Q(user__isnull=False, team="")
|
||||
condition=models.Q(user__isnull=False, team="")
|
||||
| models.Q(user__isnull=True, team__gt=""),
|
||||
name="check_document_access_either_user_or_team",
|
||||
violation_error_message=_("Either user or team must be set, not both."),
|
||||
@@ -1236,7 +1236,7 @@ class TemplateAccess(BaseAccess):
|
||||
violation_error_message=_("This team is already in this template."),
|
||||
),
|
||||
models.CheckConstraint(
|
||||
check=models.Q(user__isnull=False, team="")
|
||||
condition=models.Q(user__isnull=False, team="")
|
||||
| models.Q(user__isnull=True, team__gt=""),
|
||||
name="check_template_access_either_user_or_team",
|
||||
violation_error_message=_("Either user or team must be set, not both."),
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from unittest import mock
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.core.cache import caches
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -14,7 +14,8 @@ VIA = [USER, TEAM]
|
||||
@pytest.fixture(autouse=True)
|
||||
def clear_cache():
|
||||
"""Fixture to clear the cache before each test."""
|
||||
cache.clear()
|
||||
for cache in caches.all():
|
||||
cache.clear()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
|
||||
@@ -47,7 +47,7 @@ def test_api_documents_update_new_attachment_keys_anonymous(django_assert_num_qu
|
||||
factories.DocumentFactory(attachments=[image_keys[3]], link_reach="restricted")
|
||||
expected_keys = {image_keys[i] for i in [0, 1]}
|
||||
|
||||
with django_assert_num_queries(9):
|
||||
with django_assert_num_queries(11):
|
||||
response = APIClient().put(
|
||||
f"/api/v1.0/documents/{document.id!s}/",
|
||||
{"content": get_ydoc_with_mages(image_keys)},
|
||||
@@ -60,7 +60,7 @@ def test_api_documents_update_new_attachment_keys_anonymous(django_assert_num_qu
|
||||
|
||||
# Check that the db query to check attachments readability for extracted
|
||||
# keys is not done if the content changes but no new keys are found
|
||||
with django_assert_num_queries(5):
|
||||
with django_assert_num_queries(7):
|
||||
response = APIClient().put(
|
||||
f"/api/v1.0/documents/{document.id!s}/",
|
||||
{"content": get_ydoc_with_mages(image_keys[:2])},
|
||||
@@ -98,7 +98,7 @@ def test_api_documents_update_new_attachment_keys_authenticated(
|
||||
factories.DocumentFactory(attachments=[image_keys[4]], users=[user])
|
||||
expected_keys = {image_keys[i] for i in [0, 1, 2, 4]}
|
||||
|
||||
with django_assert_num_queries(10):
|
||||
with django_assert_num_queries(12):
|
||||
response = client.put(
|
||||
f"/api/v1.0/documents/{document.id!s}/",
|
||||
{"content": get_ydoc_with_mages(image_keys)},
|
||||
@@ -111,7 +111,7 @@ def test_api_documents_update_new_attachment_keys_authenticated(
|
||||
|
||||
# Check that the db query to check attachments readability for extracted
|
||||
# keys is not done if the content changes but no new keys are found
|
||||
with django_assert_num_queries(6):
|
||||
with django_assert_num_queries(8):
|
||||
response = client.put(
|
||||
f"/api/v1.0/documents/{document.id!s}/",
|
||||
{"content": get_ydoc_with_mages(image_keys[:2])},
|
||||
|
||||
@@ -1064,7 +1064,7 @@ def test_models_documents_restore(django_assert_num_queries):
|
||||
assert document.deleted_at is not None
|
||||
assert document.ancestors_deleted_at == document.deleted_at
|
||||
|
||||
with django_assert_num_queries(8):
|
||||
with django_assert_num_queries(10):
|
||||
document.restore()
|
||||
document.refresh_from_db()
|
||||
assert document.deleted_at is None
|
||||
@@ -1107,7 +1107,7 @@ def test_models_documents_restore_complex(django_assert_num_queries):
|
||||
assert child2.ancestors_deleted_at == document.deleted_at
|
||||
|
||||
# Restore the item
|
||||
with django_assert_num_queries(11):
|
||||
with django_assert_num_queries(13):
|
||||
document.restore()
|
||||
document.refresh_from_db()
|
||||
child1.refresh_from_db()
|
||||
@@ -1157,7 +1157,7 @@ def test_models_documents_restore_complex_bis(django_assert_num_queries):
|
||||
|
||||
# Restoring the grand parent should not restore the document
|
||||
# as it was deleted before the grand parent
|
||||
with django_assert_num_queries(9):
|
||||
with django_assert_num_queries(11):
|
||||
grand_parent.restore()
|
||||
|
||||
grand_parent.refresh_from_db()
|
||||
|
||||
@@ -8,11 +8,11 @@ NB_OBJECTS = {
|
||||
|
||||
DEV_USERS = [
|
||||
{"username": "impress", "email": "impress@impress.world", "language": "en-us"},
|
||||
{"username": "user-e2e-webkit", "email": "user@webkit.e2e", "language": "en-us"},
|
||||
{"username": "user-e2e-firefox", "email": "user@firefox.e2e", "language": "en-us"},
|
||||
{"username": "user-e2e-webkit", "email": "user@webkit.test", "language": "en-us"},
|
||||
{"username": "user-e2e-firefox", "email": "user@firefox.test", "language": "en-us"},
|
||||
{
|
||||
"username": "user-e2e-chromium",
|
||||
"email": "user@chromium.e2e",
|
||||
"email": "user@chromium.test",
|
||||
"language": "en-us",
|
||||
},
|
||||
]
|
||||
|
||||
@@ -33,9 +33,9 @@ def test_commands_create_demo():
|
||||
# assert dev users have doc accesses
|
||||
user = models.User.objects.get(email="impress@impress.world")
|
||||
assert models.DocumentAccess.objects.filter(user=user).exists()
|
||||
user = models.User.objects.get(email="user@webkit.e2e")
|
||||
user = models.User.objects.get(email="user@webkit.test")
|
||||
assert models.DocumentAccess.objects.filter(user=user).exists()
|
||||
user = models.User.objects.get(email="user@firefox.e2e")
|
||||
user = models.User.objects.get(email="user@firefox.test")
|
||||
assert models.DocumentAccess.objects.filter(user=user).exists()
|
||||
user = models.User.objects.get(email="user@chromium.e2e")
|
||||
user = models.User.objects.get(email="user@chromium.test")
|
||||
assert models.DocumentAccess.objects.filter(user=user).exists()
|
||||
|
||||
@@ -9,6 +9,7 @@ https://docs.djangoproject.com/en/3.1/topics/settings/
|
||||
For the full list of settings and their values, see
|
||||
https://docs.djangoproject.com/en/3.1/ref/settings/
|
||||
"""
|
||||
# pylint: disable=too-many-lines
|
||||
|
||||
import os
|
||||
import tomllib
|
||||
@@ -16,7 +17,6 @@ from socket import gethostbyname, gethostname
|
||||
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
import dj_database_url
|
||||
import sentry_sdk
|
||||
from configurations import Configuration, values
|
||||
from sentry_sdk.integrations.django import DjangoIntegration
|
||||
@@ -75,9 +75,7 @@ class Base(Configuration):
|
||||
|
||||
# Database
|
||||
DATABASES = {
|
||||
"default": dj_database_url.config()
|
||||
if os.environ.get("DATABASE_URL")
|
||||
else {
|
||||
"default": {
|
||||
"ENGINE": values.Value(
|
||||
"django.db.backends.postgresql_psycopg2",
|
||||
environ_name="DB_ENGINE",
|
||||
@@ -286,6 +284,7 @@ class Base(Configuration):
|
||||
"django.middleware.common.CommonMiddleware",
|
||||
"django.middleware.csrf.CsrfViewMiddleware",
|
||||
"django.contrib.auth.middleware.AuthenticationMiddleware",
|
||||
"core.middleware.ForceSessionMiddleware",
|
||||
"django.contrib.messages.middleware.MessageMiddleware",
|
||||
"dockerflow.django.middleware.DockerflowMiddleware",
|
||||
]
|
||||
@@ -326,6 +325,24 @@ class Base(Configuration):
|
||||
# Cache
|
||||
CACHES = {
|
||||
"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"},
|
||||
"shared": {
|
||||
"BACKEND": "django_redis.cache.RedisCache",
|
||||
"LOCATION": values.Value(
|
||||
"redis://redis:6379/0",
|
||||
environ_name="REDIS_URL",
|
||||
environ_prefix=None,
|
||||
),
|
||||
"TIMEOUT": values.IntegerValue(
|
||||
120, # timeout in seconds
|
||||
environ_name="SHARED_CACHE_TIMEOUT",
|
||||
environ_prefix=None,
|
||||
),
|
||||
"OPTIONS": {
|
||||
"CLIENT_CLASS": "django_redis.client.DefaultClient",
|
||||
"SERIALIZER": "django_redis.serializers.json.JSONSerializer",
|
||||
},
|
||||
"KEY_FUNCTION": "core.cache.shared_key_func",
|
||||
},
|
||||
}
|
||||
|
||||
REST_FRAMEWORK = {
|
||||
@@ -399,7 +416,7 @@ class Base(Configuration):
|
||||
|
||||
# CORS
|
||||
CORS_ALLOW_CREDENTIALS = True
|
||||
CORS_ALLOW_ALL_ORIGINS = values.BooleanValue(True)
|
||||
CORS_ALLOW_ALL_ORIGINS = values.BooleanValue(False)
|
||||
CORS_ALLOWED_ORIGINS = values.ListValue([])
|
||||
CORS_ALLOWED_ORIGIN_REGEXES = values.ListValue([])
|
||||
|
||||
@@ -473,6 +490,7 @@ class Base(Configuration):
|
||||
SESSION_COOKIE_AGE = values.PositiveIntegerValue(
|
||||
default=60 * 60 * 12, environ_name="SESSION_COOKIE_AGE", environ_prefix=None
|
||||
)
|
||||
SESSION_COOKIE_NAME = "docs_sessionid"
|
||||
|
||||
# OIDC - Authorization Code Flow
|
||||
OIDC_CREATE_USER = values.BooleanValue(
|
||||
@@ -814,8 +832,6 @@ class Development(Base):
|
||||
CSRF_TRUSTED_ORIGINS = ["http://localhost:8072", "http://localhost:3000"]
|
||||
DEBUG = True
|
||||
|
||||
SESSION_COOKIE_NAME = "impress_sessionid"
|
||||
|
||||
USE_SWAGGER = True
|
||||
SESSION_CACHE_ALIAS = "session"
|
||||
CACHES = {
|
||||
@@ -825,7 +841,7 @@ class Development(Base):
|
||||
"session": {
|
||||
"BACKEND": "django_redis.cache.RedisCache",
|
||||
"LOCATION": values.Value(
|
||||
"redis://redis:6379/2",
|
||||
"redis://redis:6379/0",
|
||||
environ_name="REDIS_URL",
|
||||
environ_prefix=None,
|
||||
),
|
||||
@@ -838,6 +854,24 @@ class Development(Base):
|
||||
"CLIENT_CLASS": "django_redis.client.DefaultClient",
|
||||
},
|
||||
},
|
||||
"shared": {
|
||||
"BACKEND": "django_redis.cache.RedisCache",
|
||||
"LOCATION": values.Value(
|
||||
"redis://redis:6379/0",
|
||||
environ_name="REDIS_URL",
|
||||
environ_prefix=None,
|
||||
),
|
||||
"TIMEOUT": values.IntegerValue(
|
||||
120, # timeout in seconds
|
||||
environ_name="SHARED_CACHE_TIMEOUT",
|
||||
environ_prefix=None,
|
||||
),
|
||||
"OPTIONS": {
|
||||
"CLIENT_CLASS": "django_redis.client.DefaultClient",
|
||||
"SERIALIZER": "django_redis.serializers.json.JSONSerializer",
|
||||
},
|
||||
"KEY_FUNCTION": "core.cache.shared_key_func",
|
||||
},
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
@@ -852,6 +886,9 @@ class Test(Base):
|
||||
"django.contrib.auth.hashers.MD5PasswordHasher",
|
||||
]
|
||||
USE_SWAGGER = True
|
||||
# Static files are not used in the test environment
|
||||
# Tests are raising warnings because the /data/static directory does not exist
|
||||
STATIC_ROOT = None
|
||||
|
||||
CELERY_TASK_ALWAYS_EAGER = values.BooleanValue(True)
|
||||
|
||||
@@ -920,7 +957,7 @@ class Production(Base):
|
||||
"default": {
|
||||
"BACKEND": "django_redis.cache.RedisCache",
|
||||
"LOCATION": values.Value(
|
||||
"redis://redis:6379/1",
|
||||
"redis://redis:6379/0",
|
||||
environ_name="REDIS_URL",
|
||||
environ_prefix=None,
|
||||
),
|
||||
@@ -938,6 +975,24 @@ class Production(Base):
|
||||
environ_prefix=None,
|
||||
),
|
||||
},
|
||||
"shared": {
|
||||
"BACKEND": "django_redis.cache.RedisCache",
|
||||
"LOCATION": values.Value(
|
||||
"redis://redis:6379/0",
|
||||
environ_name="REDIS_URL",
|
||||
environ_prefix=None,
|
||||
),
|
||||
"TIMEOUT": values.IntegerValue(
|
||||
120, # timeout in seconds
|
||||
environ_name="SHARED_CACHE_TIMEOUT",
|
||||
environ_prefix=None,
|
||||
),
|
||||
"OPTIONS": {
|
||||
"CLIENT_CLASS": "django_redis.client.DefaultClient",
|
||||
"SERIALIZER": "django_redis.serializers.json.JSONSerializer",
|
||||
},
|
||||
"KEY_FUNCTION": "core.cache.shared_key_func",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -26,10 +26,9 @@ readme = "README.md"
|
||||
requires-python = ">=3.12"
|
||||
dependencies = [
|
||||
"beautifulsoup4==4.13.4",
|
||||
"boto3==1.38.18",
|
||||
"boto3==1.38.36",
|
||||
"Brotli==1.1.0",
|
||||
"celery[redis]==5.5.2",
|
||||
"dj-database-url==2.3.0",
|
||||
"celery[redis]==5.5.3",
|
||||
"django-configurations==2.5.1",
|
||||
"django-cors-headers==4.7.0",
|
||||
"django-countries==7.6.1",
|
||||
@@ -39,7 +38,7 @@ dependencies = [
|
||||
"django-redis==5.4.0",
|
||||
"django-storages[s3]==1.14.6",
|
||||
"django-timezone-field>=5.1",
|
||||
"django==5.1.9",
|
||||
"django==5.2.3",
|
||||
"django-treebeard==4.7.1",
|
||||
"djangorestframework==3.16.0",
|
||||
"drf_spectacular==0.28.0",
|
||||
@@ -47,19 +46,19 @@ dependencies = [
|
||||
"easy_thumbnails==2.10",
|
||||
"factory_boy==3.3.3",
|
||||
"gunicorn==23.0.0",
|
||||
"jsonschema==4.23.0",
|
||||
"jsonschema==4.24.0",
|
||||
"lxml==5.4.0",
|
||||
"markdown==3.8",
|
||||
"mozilla-django-oidc==4.0.1",
|
||||
"nested-multipart-parser==1.5.0",
|
||||
"openai==1.79.0",
|
||||
"openai==1.86.0",
|
||||
"psycopg[binary]==3.2.9",
|
||||
"pycrdt==0.12.19",
|
||||
"pycrdt==0.12.21",
|
||||
"PyJWT==2.10.1",
|
||||
"python-magic==0.4.27",
|
||||
"redis<6.0.0",
|
||||
"requests==2.32.3",
|
||||
"sentry-sdk==2.28.0",
|
||||
"requests==2.32.4",
|
||||
"sentry-sdk==2.30.0",
|
||||
"whitenoise==6.9.0",
|
||||
]
|
||||
|
||||
@@ -73,21 +72,21 @@ dependencies = [
|
||||
dev = [
|
||||
"django-extensions==4.1",
|
||||
"django-test-migrations==1.5.0",
|
||||
"drf-spectacular-sidecar==2025.5.1",
|
||||
"freezegun==1.5.1",
|
||||
"drf-spectacular-sidecar==2025.6.1",
|
||||
"freezegun==1.5.2",
|
||||
"ipdb==0.13.13",
|
||||
"ipython==9.2.0",
|
||||
"ipython==9.3.0",
|
||||
"pyfakefs==5.8.0",
|
||||
"pylint-django==2.6.1",
|
||||
"pylint==3.3.7",
|
||||
"pytest-cov==6.1.1",
|
||||
"pytest-cov==6.2.1",
|
||||
"pytest-django==4.11.1",
|
||||
"pytest==8.3.5",
|
||||
"pytest==8.4.0",
|
||||
"pytest-icdiff==0.9",
|
||||
"pytest-xdist==3.6.1",
|
||||
"pytest-xdist==3.7.0",
|
||||
"responses==0.25.7",
|
||||
"ruff==0.11.10",
|
||||
"types-requests==2.32.0.20250515",
|
||||
"ruff==0.11.13",
|
||||
"types-requests==2.32.4.20250611",
|
||||
]
|
||||
|
||||
[tool.setuptools]
|
||||
|
||||
@@ -119,9 +119,29 @@ test.describe('Config', () => {
|
||||
.first(),
|
||||
).toBeAttached();
|
||||
});
|
||||
|
||||
test('it checks theme_customization.translations config', async ({
|
||||
page,
|
||||
}) => {
|
||||
await overrideConfig(page, {
|
||||
theme_customization: {
|
||||
translations: {
|
||||
en: {
|
||||
translation: {
|
||||
Docs: 'MyCustomDocs',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
await page.goto('/');
|
||||
|
||||
await expect(page.getByText('MyCustomDocs')).toBeAttached();
|
||||
});
|
||||
});
|
||||
|
||||
test.describe('Config: Not loggued', () => {
|
||||
test.describe('Config: Not logged', () => {
|
||||
test.use({ storageState: { cookies: [], origins: [] } });
|
||||
|
||||
test('it checks the config api is called', async ({ page }) => {
|
||||
|
||||
@@ -31,7 +31,7 @@ test.describe('Doc Create', () => {
|
||||
});
|
||||
});
|
||||
|
||||
test.describe('Doc Create: Not loggued', () => {
|
||||
test.describe('Doc Create: Not logged', () => {
|
||||
test.use({ storageState: { cookies: [], origins: [] } });
|
||||
|
||||
test('it creates a doc server way', async ({
|
||||
@@ -44,8 +44,8 @@ test.describe('Doc Create: Not loggued', () => {
|
||||
const data = {
|
||||
title,
|
||||
content: markdown,
|
||||
sub: `user@${browserName}.e2e`,
|
||||
email: `user@${browserName}.e2e`,
|
||||
sub: `user@${browserName}.test`,
|
||||
email: `user@${browserName}.test`,
|
||||
};
|
||||
|
||||
const newDoc = await request.post(
|
||||
|
||||
@@ -95,7 +95,7 @@ test.describe('Doc Editor', () => {
|
||||
|
||||
const selectVisibility = page.getByLabel('Visibility', { exact: true });
|
||||
|
||||
// When the visibility is changed, the ws should closed the connection (backend signal)
|
||||
// When the visibility is changed, the ws should close the connection (backend signal)
|
||||
const wsClosePromise = webSocket.waitForEvent('close');
|
||||
|
||||
await selectVisibility.click();
|
||||
|
||||
@@ -270,7 +270,7 @@ test.describe('Doc Export', () => {
|
||||
});
|
||||
|
||||
/**
|
||||
* We cannot assert the line break is visible in the pdf but we can assert the
|
||||
* We cannot assert the line break is visible in the pdf, but we can assert the
|
||||
* line break is visible in the editor and that the pdf is generated.
|
||||
*/
|
||||
test('it exports the doc with divider', async ({ page, browserName }) => {
|
||||
|
||||
@@ -131,7 +131,7 @@ test.describe('Document list members', () => {
|
||||
const list = page.getByTestId('doc-share-quick-search');
|
||||
await expect(list).toBeVisible();
|
||||
const currentUser = list.getByTestId(
|
||||
`doc-share-member-row-user@${browserName}.e2e`,
|
||||
`doc-share-member-row-user@${browserName}.test`,
|
||||
);
|
||||
const currentUserRole = currentUser.getByLabel('doc-role-dropdown');
|
||||
await expect(currentUser).toBeVisible();
|
||||
@@ -175,7 +175,7 @@ test.describe('Document list members', () => {
|
||||
|
||||
const list = page.getByTestId('doc-share-quick-search');
|
||||
|
||||
const emailMyself = `user@${browserName}.e2e`;
|
||||
const emailMyself = `user@${browserName}.test`;
|
||||
const mySelf = list.getByTestId(`doc-share-member-row-${emailMyself}`);
|
||||
const mySelfMoreActions = mySelf.getByRole('button', {
|
||||
name: 'more_horiz',
|
||||
|
||||
@@ -96,7 +96,7 @@ test.describe('Doc Routing', () => {
|
||||
});
|
||||
});
|
||||
|
||||
test.describe('Doc Routing: Not loggued', () => {
|
||||
test.describe('Doc Routing: Not logged', () => {
|
||||
test.use({ storageState: { cookies: [], origins: [] } });
|
||||
|
||||
test('checks redirect to a doc after login', async ({
|
||||
|
||||
@@ -151,7 +151,7 @@ test.describe('Doc Visibility: Restricted', () => {
|
||||
});
|
||||
|
||||
const otherBrowser = browsersName.find((b) => b !== browserName);
|
||||
const username = `user@${otherBrowser}.e2e`;
|
||||
const username = `user@${otherBrowser}.test`;
|
||||
await inputSearch.fill(username);
|
||||
await page.getByRole('option', { name: username }).click();
|
||||
|
||||
@@ -295,7 +295,7 @@ test.describe('Doc Visibility: Public', () => {
|
||||
).toBeVisible();
|
||||
|
||||
await page.getByLabel('Visibility mode').click();
|
||||
await page.getByLabel('Edition').click();
|
||||
await page.getByLabel('Editing').click();
|
||||
|
||||
await expect(
|
||||
page.getByText('The document visibility has been updated.').first(),
|
||||
@@ -333,7 +333,7 @@ test.describe('Doc Visibility: Public', () => {
|
||||
test.describe('Doc Visibility: Authenticated', () => {
|
||||
test.use({ storageState: { cookies: [], origins: [] } });
|
||||
|
||||
test('A doc is not accessible when unauthentified.', async ({
|
||||
test('A doc is not accessible when unauthenticated.', async ({
|
||||
page,
|
||||
browserName,
|
||||
}) => {
|
||||
@@ -476,7 +476,7 @@ test.describe('Doc Visibility: Authenticated', () => {
|
||||
|
||||
const urlDoc = page.url();
|
||||
await page.getByLabel('Visibility mode').click();
|
||||
await page.getByLabel('Edition').click();
|
||||
await page.getByLabel('Editing').click();
|
||||
|
||||
await expect(
|
||||
page.getByText('The document visibility has been updated.').first(),
|
||||
|
||||
@@ -128,8 +128,16 @@ export async function waitForLanguageSwitch(
|
||||
lang: TestLanguageValue,
|
||||
) {
|
||||
const header = page.locator('header').first();
|
||||
await header.getByRole('button', { name: 'arrow_drop_down' }).click();
|
||||
const languagePicker = header.locator('.--docs--language-picker-text');
|
||||
const isAlreadyTargetLanguage = await languagePicker
|
||||
.innerText()
|
||||
.then((text) => text.toLowerCase().includes(lang.label.toLowerCase()));
|
||||
|
||||
if (isAlreadyTargetLanguage) {
|
||||
return;
|
||||
}
|
||||
|
||||
await languagePicker.click();
|
||||
const responsePromise = page.waitForResponse(
|
||||
(resp) =>
|
||||
resp.url().includes('/user') && resp.request().method() === 'PATCH',
|
||||
|
||||
@@ -7,10 +7,14 @@ server {
|
||||
|
||||
location / {
|
||||
try_files $uri index.html $uri/ =404;
|
||||
|
||||
add_header X-Frame-Options DENY always;
|
||||
}
|
||||
|
||||
location ~ "^/docs/[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}/?$" {
|
||||
try_files $uri /docs/[id]/index.html;
|
||||
|
||||
add_header X-Frame-Options DENY always;
|
||||
}
|
||||
|
||||
error_page 404 /404.html;
|
||||
|
||||
@@ -16,56 +16,56 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@ag-media/react-pdf-table": "2.0.3",
|
||||
"@blocknote/code-block": "0.30.1",
|
||||
"@blocknote/core": "0.30.1",
|
||||
"@blocknote/mantine": "0.30.1",
|
||||
"@blocknote/react": "0.30.1",
|
||||
"@blocknote/xl-docx-exporter": "0.30.1",
|
||||
"@blocknote/xl-pdf-exporter": "0.30.1",
|
||||
"@blocknote/code-block": "0.31.1",
|
||||
"@blocknote/core": "0.31.1",
|
||||
"@blocknote/mantine": "0.31.1",
|
||||
"@blocknote/react": "0.31.1",
|
||||
"@blocknote/xl-docx-exporter": "0.31.1",
|
||||
"@blocknote/xl-pdf-exporter": "0.31.1",
|
||||
"@emoji-mart/data": "1.2.1",
|
||||
"@emoji-mart/react": "1.1.1",
|
||||
"@fontsource/material-icons": "5.2.5",
|
||||
"@gouvfr-lasuite/integration": "1.0.3",
|
||||
"@gouvfr-lasuite/ui-kit": "0.6.0",
|
||||
"@gouvfr-lasuite/ui-kit": "0.7.0",
|
||||
"@hocuspocus/provider": "2.15.2",
|
||||
"@openfun/cunningham-react": "3.1.0",
|
||||
"@react-pdf/renderer": "4.3.0",
|
||||
"@sentry/nextjs": "9.19.0",
|
||||
"@tanstack/react-query": "5.76.1",
|
||||
"@sentry/nextjs": "9.26.0",
|
||||
"@tanstack/react-query": "5.80.5",
|
||||
"canvg": "4.0.3",
|
||||
"clsx": "2.1.1",
|
||||
"cmdk": "1.1.1",
|
||||
"crisp-sdk-web": "1.0.25",
|
||||
"docx": "9.5.0",
|
||||
"emoji-mart": "5.6.0",
|
||||
"i18next": "25.1.3",
|
||||
"i18next": "25.2.1",
|
||||
"i18next-browser-languagedetector": "8.1.0",
|
||||
"idb": "8.0.3",
|
||||
"lodash": "4.17.21",
|
||||
"luxon": "3.6.1",
|
||||
"next": "15.3.2",
|
||||
"posthog-js": "1.242.2",
|
||||
"next": "15.3.3",
|
||||
"posthog-js": "1.249.3",
|
||||
"react": "*",
|
||||
"react-aria-components": "1.8.0",
|
||||
"react-aria-components": "1.9.0",
|
||||
"react-dom": "*",
|
||||
"react-i18next": "15.5.1",
|
||||
"react-i18next": "15.5.2",
|
||||
"react-intersection-observer": "9.16.0",
|
||||
"react-select": "5.10.1",
|
||||
"styled-components": "6.1.18",
|
||||
"use-debounce": "10.0.4",
|
||||
"y-protocols": "1.0.6",
|
||||
"yjs": "*",
|
||||
"zustand": "5.0.4"
|
||||
"zustand": "5.0.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@svgr/webpack": "8.1.0",
|
||||
"@tanstack/react-query-devtools": "5.76.1",
|
||||
"@tanstack/react-query-devtools": "5.80.5",
|
||||
"@testing-library/dom": "10.4.0",
|
||||
"@testing-library/jest-dom": "6.6.3",
|
||||
"@testing-library/react": "16.3.0",
|
||||
"@testing-library/user-event": "14.6.1",
|
||||
"@types/jest": "29.5.14",
|
||||
"@types/lodash": "4.17.16",
|
||||
"@types/lodash": "4.17.17",
|
||||
"@types/luxon": "3.6.2",
|
||||
"@types/node": "*",
|
||||
"@types/react": "*",
|
||||
@@ -78,11 +78,11 @@
|
||||
"jest-environment-jsdom": "29.7.0",
|
||||
"node-fetch": "2.7.0",
|
||||
"prettier": "3.5.3",
|
||||
"stylelint": "16.19.1",
|
||||
"stylelint": "16.20.0",
|
||||
"stylelint-config-standard": "38.0.0",
|
||||
"stylelint-prettier": "5.0.3",
|
||||
"typescript": "*",
|
||||
"webpack": "5.99.8",
|
||||
"webpack": "5.99.9",
|
||||
"workbox-webpack-plugin": "7.1.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -29,7 +29,7 @@ describe('fetchAPI', () => {
|
||||
});
|
||||
});
|
||||
|
||||
it('check the versionning', () => {
|
||||
it('check the versioning', () => {
|
||||
fetchMock.mock('http://test.jest/api/v2.0/some/url', 200);
|
||||
|
||||
void fetchAPI('some/url', {}, '2.0');
|
||||
|
||||
48
src/frontend/apps/impress/src/core/api/useUserUpdate.ts
Normal file
48
src/frontend/apps/impress/src/core/api/useUserUpdate.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import {
|
||||
UseMutationResult,
|
||||
useMutation,
|
||||
useQueryClient,
|
||||
} from '@tanstack/react-query';
|
||||
|
||||
import { APIError, errorCauses, fetchAPI } from '@/api';
|
||||
import { User } from '@/features/auth/api/types';
|
||||
import { KEY_AUTH } from '@/features/auth/api/useAuthQuery';
|
||||
|
||||
type UserUpdateRequest = Partial<User>;
|
||||
|
||||
async function updateUser(userUpdateData: UserUpdateRequest): Promise<User> {
|
||||
const response = await fetchAPI(`users/${userUpdateData.id}/`, {
|
||||
method: 'PATCH',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(userUpdateData),
|
||||
});
|
||||
if (!response.ok) {
|
||||
throw new APIError(
|
||||
`Failed to update the user`,
|
||||
await errorCauses(response, userUpdateData),
|
||||
);
|
||||
}
|
||||
return response.json() as Promise<User>;
|
||||
}
|
||||
|
||||
export const useUserUpdate = (): UseMutationResult<
|
||||
User,
|
||||
APIError,
|
||||
UserUpdateRequest
|
||||
> => {
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
const mutationResult = useMutation<User, APIError, UserUpdateRequest>({
|
||||
mutationFn: updateUser,
|
||||
onSuccess: () => {
|
||||
void queryClient.invalidateQueries({ queryKey: [KEY_AUTH] });
|
||||
},
|
||||
onError: (error) => {
|
||||
console.error('Error updating user', error);
|
||||
},
|
||||
});
|
||||
|
||||
return mutationResult;
|
||||
};
|
||||
@@ -1,10 +1,15 @@
|
||||
import { Loader } from '@openfun/cunningham-react';
|
||||
import Head from 'next/head';
|
||||
import { PropsWithChildren, useEffect } from 'react';
|
||||
import { PropsWithChildren, useEffect, useRef } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
import { Box } from '@/components';
|
||||
import { useCunninghamTheme } from '@/cunningham';
|
||||
import { useLanguageSynchronizer } from '@/features/language/';
|
||||
import { useAuthQuery } from '@/features/auth';
|
||||
import {
|
||||
useCustomTranslations,
|
||||
useSynchronizedLanguage,
|
||||
} from '@/features/language';
|
||||
import { useAnalytics } from '@/libs';
|
||||
import { CrispProvider, PostHogAnalytic } from '@/services';
|
||||
import { useSentryStore } from '@/stores/useSentryStore';
|
||||
@@ -13,10 +18,35 @@ import { useConfig } from './api/useConfig';
|
||||
|
||||
export const ConfigProvider = ({ children }: PropsWithChildren) => {
|
||||
const { data: conf } = useConfig();
|
||||
const { data: user } = useAuthQuery();
|
||||
const { setSentry } = useSentryStore();
|
||||
const { setTheme } = useCunninghamTheme();
|
||||
const { changeLanguageSynchronized } = useSynchronizedLanguage();
|
||||
const { customizeTranslations } = useCustomTranslations();
|
||||
const { AnalyticsProvider } = useAnalytics();
|
||||
const { synchronizeLanguage } = useLanguageSynchronizer();
|
||||
const { i18n } = useTranslation();
|
||||
const languageSynchronized = useRef(false);
|
||||
|
||||
useEffect(() => {
|
||||
if (!user || languageSynchronized.current) {
|
||||
return;
|
||||
}
|
||||
|
||||
const targetLanguage =
|
||||
user?.language ?? i18n.resolvedLanguage ?? i18n.language;
|
||||
|
||||
void changeLanguageSynchronized(targetLanguage, user).then(() => {
|
||||
languageSynchronized.current = true;
|
||||
});
|
||||
}, [user, i18n.resolvedLanguage, i18n.language, changeLanguageSynchronized]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!conf?.theme_customization?.translations) {
|
||||
return;
|
||||
}
|
||||
|
||||
customizeTranslations(conf.theme_customization.translations);
|
||||
}, [conf?.theme_customization?.translations, customizeTranslations]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!conf?.SENTRY_DSN) {
|
||||
@@ -34,10 +64,6 @@ export const ConfigProvider = ({ children }: PropsWithChildren) => {
|
||||
setTheme(conf.FRONTEND_THEME);
|
||||
}, [conf?.FRONTEND_THEME, setTheme]);
|
||||
|
||||
useEffect(() => {
|
||||
void synchronizeLanguage();
|
||||
}, [synchronizeLanguage]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!conf?.POSTHOG_KEY) {
|
||||
return;
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { Resource } from 'i18next';
|
||||
|
||||
import { APIError, errorCauses, fetchAPI } from '@/api';
|
||||
import { Theme } from '@/cunningham/';
|
||||
@@ -7,9 +8,10 @@ import { PostHogConf } from '@/services';
|
||||
|
||||
interface ThemeCustomization {
|
||||
footer?: FooterType;
|
||||
translations?: Resource;
|
||||
}
|
||||
|
||||
interface ConfigResponse {
|
||||
export interface ConfigResponse {
|
||||
AI_FEATURE_ENABLED?: boolean;
|
||||
COLLABORATION_WS_URL?: string;
|
||||
COLLABORATION_WS_NOT_CONNECTED_READY_ONLY?: boolean;
|
||||
|
||||
@@ -11,5 +11,5 @@ export interface User {
|
||||
email: string;
|
||||
full_name: string;
|
||||
short_name: string;
|
||||
language: string;
|
||||
language?: string;
|
||||
}
|
||||
|
||||
@@ -178,7 +178,6 @@ export const BlockNoteEditorVersion = ({
|
||||
initialContent,
|
||||
}: BlockNoteEditorVersionProps) => {
|
||||
const readOnly = true;
|
||||
const { setEditor } = useEditorStore();
|
||||
const editor = useCreateBlockNote(
|
||||
{
|
||||
collaboration: {
|
||||
@@ -193,15 +192,6 @@ export const BlockNoteEditorVersion = ({
|
||||
},
|
||||
[initialContent],
|
||||
);
|
||||
useHeadings(editor);
|
||||
|
||||
useEffect(() => {
|
||||
setEditor(editor);
|
||||
|
||||
return () => {
|
||||
setEditor(undefined);
|
||||
};
|
||||
}, [setEditor, editor]);
|
||||
|
||||
return (
|
||||
<Box $css={cssEditor(readOnly)} className="--docs--editor-container">
|
||||
|
||||
@@ -5,7 +5,6 @@ import { css } from 'styled-components';
|
||||
import * as Y from 'yjs';
|
||||
|
||||
import { Box, Text, TextErrors } from '@/components';
|
||||
import { useCunninghamTheme } from '@/cunningham';
|
||||
import { DocHeader, DocVersionHeader } from '@/docs/doc-header/';
|
||||
import {
|
||||
Doc,
|
||||
@@ -26,9 +25,6 @@ interface DocEditorProps {
|
||||
export const DocEditor = ({ doc, versionId }: DocEditorProps) => {
|
||||
const { isDesktop } = useResponsiveStore();
|
||||
const isVersion = !!versionId && typeof versionId === 'string';
|
||||
|
||||
const { colorsTokens } = useCunninghamTheme();
|
||||
|
||||
const { provider } = useProviderStore();
|
||||
|
||||
if (!provider) {
|
||||
@@ -66,7 +62,6 @@ export const DocEditor = ({ doc, versionId }: DocEditorProps) => {
|
||||
</Box>
|
||||
|
||||
<Box
|
||||
$background={colorsTokens['primary-bg']}
|
||||
$direction="row"
|
||||
$width="100%"
|
||||
$css="overflow-x: clip; flex: 1;"
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import data from '@emoji-mart/data';
|
||||
import { EmojiMartData } from '@emoji-mart/data';
|
||||
import Picker from '@emoji-mart/react';
|
||||
import React from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
@@ -6,19 +6,15 @@ import { useTranslation } from 'react-i18next';
|
||||
import { Box } from '@/components';
|
||||
|
||||
interface EmojiPickerProps {
|
||||
emojiData: EmojiMartData;
|
||||
categories: string[];
|
||||
custom: {
|
||||
name: string;
|
||||
id: string;
|
||||
emojis: string[];
|
||||
}[];
|
||||
onClickOutside: () => void;
|
||||
onEmojiSelect: ({ native }: { native: string }) => void;
|
||||
}
|
||||
|
||||
export const EmojiPicker = ({
|
||||
emojiData,
|
||||
categories,
|
||||
custom,
|
||||
onClickOutside,
|
||||
onEmojiSelect,
|
||||
}: EmojiPickerProps) => {
|
||||
@@ -27,9 +23,8 @@ export const EmojiPicker = ({
|
||||
return (
|
||||
<Box $position="absolute" $zIndex={1000} $margin="2rem 0 0 0">
|
||||
<Picker
|
||||
data={emojiData}
|
||||
categories={categories}
|
||||
custom={custom}
|
||||
data={data}
|
||||
locale={i18n.resolvedLanguage}
|
||||
navPosition="none"
|
||||
onClickOutside={onClickOutside}
|
||||
|
||||
@@ -10,52 +10,7 @@ import { Box, BoxButton, Icon } from '@/components';
|
||||
import { DocsBlockNoteEditor } from '../../types';
|
||||
import { EmojiPicker } from '../EmojiPicker';
|
||||
|
||||
const calloutCustom = [
|
||||
{
|
||||
name: 'Callout',
|
||||
id: 'callout',
|
||||
emojis: [
|
||||
'bulb',
|
||||
'point_right',
|
||||
'point_up',
|
||||
'ok_hand',
|
||||
'key',
|
||||
'construction',
|
||||
'warning',
|
||||
'fire',
|
||||
'pushpin',
|
||||
'scissors',
|
||||
'question',
|
||||
'no_entry',
|
||||
'no_entry_sign',
|
||||
'alarm_clock',
|
||||
'phone',
|
||||
'rotating_light',
|
||||
'recycle',
|
||||
'white_check_mark',
|
||||
'lock',
|
||||
'paperclip',
|
||||
'book',
|
||||
'speaking_head_in_silhouette',
|
||||
'arrow_right',
|
||||
'loudspeaker',
|
||||
'hammer_and_wrench',
|
||||
'gear',
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
const calloutCategories = [
|
||||
'callout',
|
||||
'people',
|
||||
'nature',
|
||||
'foods',
|
||||
'activity',
|
||||
'places',
|
||||
'flags',
|
||||
'objects',
|
||||
'symbols',
|
||||
];
|
||||
import InitEmojiCallout from './initEmojiCallout';
|
||||
|
||||
export const CalloutBlock = createReactBlockSpec(
|
||||
{
|
||||
@@ -124,8 +79,8 @@ export const CalloutBlock = createReactBlockSpec(
|
||||
|
||||
{openEmojiPicker && (
|
||||
<EmojiPicker
|
||||
categories={calloutCategories}
|
||||
custom={calloutCustom}
|
||||
emojiData={InitEmojiCallout.emojidata}
|
||||
categories={InitEmojiCallout.calloutCategories}
|
||||
onClickOutside={onClickOutside}
|
||||
onEmojiSelect={onEmojiSelect}
|
||||
/>
|
||||
|
||||
@@ -0,0 +1,76 @@
|
||||
/**
|
||||
* "emoji-mart" is a singleton, multiple imports in the same
|
||||
* application could cause issues.
|
||||
* BlockNote uses "emoji-mart" internally as well, if
|
||||
* Blocknote emoji picker is init before the callout emoji picker,
|
||||
* the callout emoji picker will not be set up correctly.
|
||||
* To avoid this, we initialize emoji-mart here and before any
|
||||
* other components that uses it.
|
||||
*/
|
||||
import data, { Category, EmojiMartData } from '@emoji-mart/data';
|
||||
import { init } from 'emoji-mart';
|
||||
|
||||
type EmojiMartDataFixed = Omit<EmojiMartData, 'categories'> & {
|
||||
categories: (Category & { name: string })[];
|
||||
};
|
||||
|
||||
const emojidata = structuredClone(data) as EmojiMartDataFixed;
|
||||
|
||||
const CALLOUT_ID = 'callout';
|
||||
const CALLOUT_EMOJIS = [
|
||||
'bulb',
|
||||
'point_right',
|
||||
'point_up',
|
||||
'ok_hand',
|
||||
'key',
|
||||
'construction',
|
||||
'warning',
|
||||
'fire',
|
||||
'pushpin',
|
||||
'scissors',
|
||||
'question',
|
||||
'no_entry',
|
||||
'no_entry_sign',
|
||||
'alarm_clock',
|
||||
'phone',
|
||||
'rotating_light',
|
||||
'recycle',
|
||||
'white_check_mark',
|
||||
'lock',
|
||||
'paperclip',
|
||||
'book',
|
||||
'speaking_head_in_silhouette',
|
||||
'arrow_right',
|
||||
'loudspeaker',
|
||||
'hammer_and_wrench',
|
||||
'gear',
|
||||
];
|
||||
|
||||
if (!emojidata.categories.some((c) => c.id === CALLOUT_ID)) {
|
||||
emojidata.categories.unshift({
|
||||
id: CALLOUT_ID,
|
||||
name: 'Callout',
|
||||
emojis: CALLOUT_EMOJIS,
|
||||
});
|
||||
}
|
||||
|
||||
void init({ data: emojidata });
|
||||
|
||||
const calloutCategories = [
|
||||
'callout',
|
||||
'people',
|
||||
'nature',
|
||||
'foods',
|
||||
'activity',
|
||||
'places',
|
||||
'flags',
|
||||
'objects',
|
||||
'symbols',
|
||||
];
|
||||
|
||||
const calloutEmojiData = {
|
||||
emojidata,
|
||||
calloutCategories,
|
||||
};
|
||||
|
||||
export default calloutEmojiData;
|
||||
@@ -9,7 +9,7 @@ export const useHeadings = (editor: DocsBlockNoteEditor) => {
|
||||
useEffect(() => {
|
||||
setHeadings(editor);
|
||||
|
||||
editor?.onEditorContentChange(() => {
|
||||
editor?.onChange(() => {
|
||||
setHeadings(editor);
|
||||
});
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ export const cssEditor = (readonly: boolean) => css`
|
||||
& > .bn-container,
|
||||
& .ProseMirror {
|
||||
height: 100%;
|
||||
padding-bottom: 2rem;
|
||||
|
||||
img.bn-visual-media[src*='-unsafe'] {
|
||||
pointer-events: none;
|
||||
@@ -72,8 +73,7 @@ export const cssEditor = (readonly: boolean) => css`
|
||||
border-radius: var(--c--theme--spacings--3xs);
|
||||
}
|
||||
|
||||
.bn-block-content[data-content-type='paragraph'],
|
||||
.bn-block-content[data-content-type='heading'] {
|
||||
.bn-block[data-background-color] > .bn-block-content {
|
||||
padding: var(--c--theme--spacings--3xs) var(--c--theme--spacings--3xs);
|
||||
border-radius: var(--c--theme--spacings--3xs);
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@ import { DocsExporterPDF } from '../types';
|
||||
export const blockMappingParagraphPDF: DocsExporterPDF['mappings']['blockMapping']['paragraph'] =
|
||||
(block, exporter) => {
|
||||
/**
|
||||
* Breakline in the editor are not rendered in the PDF
|
||||
* Break line in the editor are not rendered in the PDF
|
||||
* By adding a space if the block is empty we ensure that the block is rendered
|
||||
*/
|
||||
if (Array.isArray(block.content)) {
|
||||
|
||||
@@ -34,12 +34,12 @@ export const useRemoveDoc = (options?: UseRemoveDocOptions) => {
|
||||
queryKey: [KEY_LIST_DOC],
|
||||
});
|
||||
if (options?.onSuccess) {
|
||||
options.onSuccess(data, variables, context);
|
||||
void options.onSuccess(data, variables, context);
|
||||
}
|
||||
},
|
||||
onError: (error, variables, context) => {
|
||||
if (options?.onError) {
|
||||
options.onError(error, variables, context);
|
||||
void options.onError(error, variables, context);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
@@ -63,12 +63,12 @@ export const useDeleteDocAccess = (options?: UseDeleteDocAccessOptions) => {
|
||||
queryKey: [KEY_LIST_USER],
|
||||
});
|
||||
if (options?.onSuccess) {
|
||||
options.onSuccess(data, variables, context);
|
||||
void options.onSuccess(data, variables, context);
|
||||
}
|
||||
},
|
||||
onError: (error, variables, context) => {
|
||||
if (options?.onError) {
|
||||
options.onError(error, variables, context);
|
||||
void options.onError(error, variables, context);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
@@ -58,12 +58,12 @@ export const useDeleteDocInvitation = (
|
||||
queryKey: [KEY_LIST_DOC_INVITATIONS],
|
||||
});
|
||||
if (options?.onSuccess) {
|
||||
options.onSuccess(data, variables, context);
|
||||
void options.onSuccess(data, variables, context);
|
||||
}
|
||||
},
|
||||
onError: (error, variables, context) => {
|
||||
if (options?.onError) {
|
||||
options.onError(error, variables, context);
|
||||
void options.onError(error, variables, context);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
@@ -65,12 +65,12 @@ export const useUpdateDocAccess = (options?: UseUpdateDocAccessOptions) => {
|
||||
queryKey: [KEY_LIST_DOC],
|
||||
});
|
||||
if (options?.onSuccess) {
|
||||
options.onSuccess(data, variables, context);
|
||||
void options.onSuccess(data, variables, context);
|
||||
}
|
||||
},
|
||||
onError: (error, variables, context) => {
|
||||
if (options?.onError) {
|
||||
options.onError(error, variables, context);
|
||||
void options.onError(error, variables, context);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
@@ -66,12 +66,12 @@ export const useUpdateDocInvitation = (
|
||||
queryKey: [KEY_LIST_DOC_INVITATIONS],
|
||||
});
|
||||
if (options?.onSuccess) {
|
||||
options.onSuccess(data, variables, context);
|
||||
void options.onSuccess(data, variables, context);
|
||||
}
|
||||
},
|
||||
onError: (error, variables, context) => {
|
||||
if (options?.onError) {
|
||||
options.onError(error, variables, context);
|
||||
void options.onError(error, variables, context);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
@@ -13,7 +13,7 @@ export const useTranslatedShareSettings = () => {
|
||||
|
||||
const linkModeTranslations = {
|
||||
[LinkRole.READER]: t('Reading'),
|
||||
[LinkRole.EDITOR]: t('Edition'),
|
||||
[LinkRole.EDITOR]: t('Editing'),
|
||||
};
|
||||
|
||||
const linkReachChoices = {
|
||||
|
||||
@@ -40,7 +40,7 @@ export const DocsGridItem = ({ doc }: DocsGridItemProps) => {
|
||||
$align="center"
|
||||
role="row"
|
||||
$gap="20px"
|
||||
$padding={{ vertical: '2xs', horizontal: isDesktop ? 'base' : 'xs' }}
|
||||
$padding={{ vertical: '4xs', horizontal: isDesktop ? 'base' : 'xs' }}
|
||||
$css={css`
|
||||
cursor: pointer;
|
||||
border-radius: 4px;
|
||||
|
||||
@@ -50,6 +50,7 @@ export const SimpleDocItem = ({
|
||||
background-color: transparent;
|
||||
filter: drop-shadow(0px 2px 2px rgba(0, 0, 0, 0.05));
|
||||
`}
|
||||
$padding={`${spacingsTokens['3xs']} 0`}
|
||||
>
|
||||
{isPinned ? (
|
||||
<PinnedDocumentIcon
|
||||
|
||||
@@ -1,45 +0,0 @@
|
||||
import { useMutation, useQueryClient } from '@tanstack/react-query';
|
||||
|
||||
import { APIError, errorCauses, fetchAPI } from '@/api';
|
||||
import { User } from '@/features/auth/api/types';
|
||||
|
||||
export interface ChangeUserLanguageParams {
|
||||
userId: User['id'];
|
||||
language: User['language'];
|
||||
}
|
||||
|
||||
export const changeUserLanguage = async ({
|
||||
userId,
|
||||
language,
|
||||
}: ChangeUserLanguageParams): Promise<User> => {
|
||||
const response = await fetchAPI(`users/${userId}/`, {
|
||||
method: 'PATCH',
|
||||
body: JSON.stringify({
|
||||
language,
|
||||
}),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new APIError(
|
||||
`Failed to change the user language to ${language}`,
|
||||
await errorCauses(response, {
|
||||
value: language,
|
||||
type: 'language',
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
return response.json() as Promise<User>;
|
||||
};
|
||||
|
||||
export function useChangeUserLanguage() {
|
||||
const queryClient = useQueryClient();
|
||||
return useMutation<User, APIError, ChangeUserLanguageParams>({
|
||||
mutationFn: changeUserLanguage,
|
||||
onSuccess: () => {
|
||||
void queryClient.invalidateQueries({
|
||||
queryKey: ['change-user-language'],
|
||||
});
|
||||
},
|
||||
});
|
||||
}
|
||||
@@ -1,42 +1,33 @@
|
||||
import { Settings } from 'luxon';
|
||||
import { useMemo } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { css } from 'styled-components';
|
||||
|
||||
import { DropdownMenu, Icon, Text } from '@/components/';
|
||||
import { useConfig } from '@/core';
|
||||
|
||||
import { useLanguageSynchronizer } from './hooks/useLanguageSynchronizer';
|
||||
import { getMatchingLocales } from './utils/locale';
|
||||
import { useAuthQuery } from '@/features/auth';
|
||||
import {
|
||||
getMatchingLocales,
|
||||
useSynchronizedLanguage,
|
||||
} from '@/features/language';
|
||||
|
||||
export const LanguagePicker = () => {
|
||||
const { t, i18n } = useTranslation();
|
||||
const { data: conf } = useConfig();
|
||||
const { synchronizeLanguage } = useLanguageSynchronizer();
|
||||
const language = i18n.languages[0];
|
||||
Settings.defaultLocale = language;
|
||||
const { data: user } = useAuthQuery();
|
||||
const { changeLanguageSynchronized } = useSynchronizedLanguage();
|
||||
const language = i18n.language;
|
||||
|
||||
// Compute options for dropdown
|
||||
const optionsPicker = useMemo(() => {
|
||||
const backendOptions = conf?.LANGUAGES ?? [[language, language]];
|
||||
return backendOptions.map(([backendLocale, label]) => {
|
||||
// Determine if the option is selected
|
||||
const isSelected =
|
||||
getMatchingLocales([backendLocale], [language]).length > 0;
|
||||
// Define callback for updating both frontend and backend languages
|
||||
const callback = () => {
|
||||
i18n
|
||||
.changeLanguage(backendLocale)
|
||||
.then(() => {
|
||||
void synchronizeLanguage('toBackend');
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error('Error changing language', err);
|
||||
});
|
||||
return backendOptions.map(([backendLocale, backendLabel]) => {
|
||||
return {
|
||||
label: backendLabel,
|
||||
isSelected: getMatchingLocales([backendLocale], [language]).length > 0,
|
||||
callback: () => changeLanguageSynchronized(backendLocale, user),
|
||||
};
|
||||
return { label, isSelected, callback };
|
||||
});
|
||||
}, [conf, i18n, language, synchronizeLanguage]);
|
||||
}, [changeLanguageSynchronized, conf?.LANGUAGES, language, user]);
|
||||
|
||||
// Extract current language label for display
|
||||
const currentLanguageLabel =
|
||||
@@ -0,0 +1 @@
|
||||
export * from './LanguagePicker';
|
||||
@@ -0,0 +1,2 @@
|
||||
export * from './useSynchronizedLanguage';
|
||||
export * from './useCustomTranslations';
|
||||
@@ -0,0 +1,27 @@
|
||||
import { Resource } from 'i18next';
|
||||
import { useCallback } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
export const useCustomTranslations = () => {
|
||||
const { i18n } = useTranslation();
|
||||
|
||||
// Overwrite translations with a resource
|
||||
const customizeTranslations = useCallback(
|
||||
(currentCustomTranslations: Resource) => {
|
||||
Object.entries(currentCustomTranslations).forEach(([lng, namespaces]) => {
|
||||
Object.entries(namespaces).forEach(([ns, value]) => {
|
||||
i18n.addResourceBundle(lng, ns, value, true, true);
|
||||
});
|
||||
});
|
||||
// trigger re-render
|
||||
if (Object.entries(currentCustomTranslations).length > 0) {
|
||||
void i18n.changeLanguage(i18n.language);
|
||||
}
|
||||
},
|
||||
[i18n],
|
||||
);
|
||||
|
||||
return {
|
||||
customizeTranslations,
|
||||
};
|
||||
};
|
||||
@@ -1,82 +0,0 @@
|
||||
import { useCallback, useMemo, useRef } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
import { useConfig } from '@/core';
|
||||
import { useAuthQuery } from '@/features/auth/api';
|
||||
import { useChangeUserLanguage } from '@/features/language/api/useChangeUserLanguage';
|
||||
import { getMatchingLocales } from '@/features/language/utils/locale';
|
||||
import { availableFrontendLanguages } from '@/i18n/initI18n';
|
||||
|
||||
export const useLanguageSynchronizer = () => {
|
||||
const { data: conf, isSuccess: confInitialized } = useConfig();
|
||||
const { data: user, isSuccess: userInitialized } = useAuthQuery();
|
||||
const { i18n } = useTranslation();
|
||||
const { mutateAsync: changeUserLanguage } = useChangeUserLanguage();
|
||||
const languageSynchronizing = useRef(false);
|
||||
|
||||
const availableBackendLanguages = useMemo(() => {
|
||||
return conf?.LANGUAGES.map(([locale]) => locale);
|
||||
}, [conf?.LANGUAGES]);
|
||||
|
||||
const synchronizeLanguage = useCallback(
|
||||
async (direction?: 'toBackend' | 'toFrontend') => {
|
||||
if (
|
||||
languageSynchronizing.current ||
|
||||
!userInitialized ||
|
||||
!confInitialized ||
|
||||
!availableBackendLanguages ||
|
||||
!availableFrontendLanguages
|
||||
) {
|
||||
return;
|
||||
}
|
||||
languageSynchronizing.current = true;
|
||||
|
||||
try {
|
||||
const userPreferredLanguages = user.language ? [user.language] : [];
|
||||
const setOrDetectedLanguages = i18n.languages;
|
||||
|
||||
// Default direction depends on whether a user already has a language preference
|
||||
direction =
|
||||
direction ??
|
||||
(userPreferredLanguages.length ? 'toFrontend' : 'toBackend');
|
||||
|
||||
if (direction === 'toBackend') {
|
||||
// Update user's preference from frontends's language
|
||||
const closestBackendLanguage =
|
||||
getMatchingLocales(
|
||||
availableBackendLanguages,
|
||||
setOrDetectedLanguages,
|
||||
)[0] || availableBackendLanguages[0];
|
||||
await changeUserLanguage({
|
||||
userId: user.id,
|
||||
language: closestBackendLanguage,
|
||||
});
|
||||
} else {
|
||||
// Update frontends's language from user's preference
|
||||
const closestFrontendLanguage =
|
||||
getMatchingLocales(
|
||||
availableFrontendLanguages,
|
||||
userPreferredLanguages,
|
||||
)[0] || availableFrontendLanguages[0];
|
||||
if (i18n.resolvedLanguage !== closestFrontendLanguage) {
|
||||
await i18n.changeLanguage(closestFrontendLanguage);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error synchronizing language', error);
|
||||
} finally {
|
||||
languageSynchronizing.current = false;
|
||||
}
|
||||
},
|
||||
[
|
||||
i18n,
|
||||
user,
|
||||
userInitialized,
|
||||
confInitialized,
|
||||
availableBackendLanguages,
|
||||
changeUserLanguage,
|
||||
],
|
||||
);
|
||||
|
||||
return { synchronizeLanguage };
|
||||
};
|
||||
@@ -0,0 +1,71 @@
|
||||
import { useCallback, useMemo, useRef } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
import { useUserUpdate } from '@/core/api/useUserUpdate';
|
||||
import { useConfig } from '@/core/config/api/useConfig';
|
||||
import { User } from '@/features/auth';
|
||||
import { getMatchingLocales } from '@/features/language/utils/locale';
|
||||
|
||||
export const useSynchronizedLanguage = () => {
|
||||
const { i18n } = useTranslation();
|
||||
const { mutateAsync: updateUser } = useUserUpdate();
|
||||
const { data: config } = useConfig();
|
||||
const isSynchronizingLanguage = useRef(false);
|
||||
|
||||
const availableFrontendLanguages = useMemo(
|
||||
() => Object.keys(i18n?.options?.resources || { en: '<- fallback' }),
|
||||
[i18n?.options?.resources],
|
||||
);
|
||||
const availableBackendLanguages = useMemo(
|
||||
() => config?.LANGUAGES?.map(([locale]) => locale) || [],
|
||||
[config?.LANGUAGES],
|
||||
);
|
||||
|
||||
const changeBackendLanguage = useCallback(
|
||||
async (language: string, user?: User) => {
|
||||
const closestBackendLanguage = getMatchingLocales(
|
||||
availableBackendLanguages,
|
||||
[language],
|
||||
)[0];
|
||||
|
||||
if (user && user.language !== closestBackendLanguage) {
|
||||
await updateUser({ id: user.id, language: closestBackendLanguage });
|
||||
}
|
||||
},
|
||||
[availableBackendLanguages, updateUser],
|
||||
);
|
||||
|
||||
const changeFrontendLanguage = useCallback(
|
||||
async (language: string) => {
|
||||
const closestFrontendLanguage = getMatchingLocales(
|
||||
availableFrontendLanguages,
|
||||
[language],
|
||||
)[0];
|
||||
if (
|
||||
i18n.isInitialized &&
|
||||
i18n.resolvedLanguage !== closestFrontendLanguage
|
||||
) {
|
||||
await i18n.changeLanguage(closestFrontendLanguage);
|
||||
}
|
||||
},
|
||||
[availableFrontendLanguages, i18n],
|
||||
);
|
||||
|
||||
const changeLanguageSynchronized = useCallback(
|
||||
async (language: string, user?: User) => {
|
||||
if (!isSynchronizingLanguage.current) {
|
||||
isSynchronizingLanguage.current = true;
|
||||
await changeFrontendLanguage(language);
|
||||
await changeBackendLanguage(language, user);
|
||||
isSynchronizingLanguage.current = false;
|
||||
}
|
||||
},
|
||||
[changeBackendLanguage, changeFrontendLanguage],
|
||||
);
|
||||
|
||||
return {
|
||||
changeLanguageSynchronized,
|
||||
changeFrontendLanguage,
|
||||
changeBackendLanguage,
|
||||
};
|
||||
};
|
||||
@@ -1,2 +1,3 @@
|
||||
export * from './hooks/useLanguageSynchronizer';
|
||||
export * from './LanguagePicker';
|
||||
export * from './hooks';
|
||||
export * from './components';
|
||||
export * from './utils';
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
export * from './locale';
|
||||
@@ -112,7 +112,7 @@ export class ApiPlugin implements WorkboxPlugin {
|
||||
};
|
||||
|
||||
/**
|
||||
* When we get an network error.
|
||||
* When we get a network error.
|
||||
*/
|
||||
handlerDidError: WorkboxPlugin['handlerDidError'] = async ({ request }) => {
|
||||
if (!this.isFetchDidFailed) {
|
||||
|
||||
@@ -4,36 +4,38 @@ import { initReactI18next } from 'react-i18next';
|
||||
|
||||
import resources from './translations.json';
|
||||
|
||||
export const availableFrontendLanguages: readonly string[] =
|
||||
Object.keys(resources);
|
||||
// Add an initialization guard
|
||||
let isInitialized = false;
|
||||
|
||||
i18next
|
||||
.use(LanguageDetector)
|
||||
.use(initReactI18next)
|
||||
.init({
|
||||
resources,
|
||||
fallbackLng: 'en',
|
||||
debug: false,
|
||||
detection: {
|
||||
order: ['cookie', 'navigator'], // detection order
|
||||
caches: ['cookie'], // Use cookies to store the language preference
|
||||
lookupCookie: 'docs_language',
|
||||
cookieMinutes: 525600, // Expires after one year
|
||||
cookieOptions: {
|
||||
path: '/',
|
||||
sameSite: 'lax',
|
||||
// Initialize i18next with the base translations only once
|
||||
if (!isInitialized && !i18next.isInitialized) {
|
||||
isInitialized = true;
|
||||
|
||||
i18next
|
||||
.use(LanguageDetector)
|
||||
.use(initReactI18next)
|
||||
.init({
|
||||
resources,
|
||||
fallbackLng: 'en',
|
||||
debug: false,
|
||||
detection: {
|
||||
order: ['cookie', 'navigator'],
|
||||
caches: ['cookie'],
|
||||
lookupCookie: 'docs_language',
|
||||
cookieMinutes: 525600,
|
||||
cookieOptions: {
|
||||
path: '/',
|
||||
sameSite: 'lax',
|
||||
},
|
||||
},
|
||||
},
|
||||
interpolation: {
|
||||
escapeValue: false,
|
||||
},
|
||||
preload: availableFrontendLanguages,
|
||||
lowerCaseLng: true,
|
||||
nsSeparator: false,
|
||||
keySeparator: false,
|
||||
})
|
||||
.catch(() => {
|
||||
throw new Error('i18n initialization failed');
|
||||
});
|
||||
interpolation: {
|
||||
escapeValue: false,
|
||||
},
|
||||
lowerCaseLng: true,
|
||||
nsSeparator: false,
|
||||
keySeparator: false,
|
||||
})
|
||||
.catch((e) => console.error('i18n initialization failed:', e));
|
||||
}
|
||||
|
||||
export default i18next;
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { Button } from '@openfun/cunningham-react';
|
||||
import Head from 'next/head';
|
||||
import Image from 'next/image';
|
||||
import { ReactElement } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
@@ -17,34 +18,46 @@ const Page: NextPageWithLayout = () => {
|
||||
const { t } = useTranslation();
|
||||
|
||||
return (
|
||||
<Box
|
||||
$align="center"
|
||||
$margin="auto"
|
||||
$gap="1rem"
|
||||
$padding={{ bottom: '2rem' }}
|
||||
>
|
||||
<Image
|
||||
className="c__image-system-filter"
|
||||
src={img403}
|
||||
alt={t('Image 403')}
|
||||
style={{
|
||||
maxWidth: '100%',
|
||||
height: 'auto',
|
||||
}}
|
||||
/>
|
||||
<>
|
||||
<Head>
|
||||
<title>
|
||||
{t('Access Denied - Error 403')} - {t('Docs')}
|
||||
</title>
|
||||
<meta
|
||||
property="og:title"
|
||||
content={`${t('Access Denied - Error 403')} - ${t('Docs')}`}
|
||||
key="title"
|
||||
/>
|
||||
</Head>
|
||||
<Box
|
||||
$align="center"
|
||||
$margin="auto"
|
||||
$gap="1rem"
|
||||
$padding={{ bottom: '2rem' }}
|
||||
>
|
||||
<Image
|
||||
className="c__image-system-filter"
|
||||
src={img403}
|
||||
alt={t('Image 403')}
|
||||
style={{
|
||||
maxWidth: '100%',
|
||||
height: 'auto',
|
||||
}}
|
||||
/>
|
||||
|
||||
<Box $align="center" $gap="0.8rem">
|
||||
<Text as="p" $textAlign="center" $maxWidth="350px" $theme="primary">
|
||||
{t('You do not have permission to view this document.')}
|
||||
</Text>
|
||||
<Box $align="center" $gap="0.8rem">
|
||||
<Text as="p" $textAlign="center" $maxWidth="350px" $theme="primary">
|
||||
{t('You do not have permission to view this document.')}
|
||||
</Text>
|
||||
|
||||
<StyledLink href="/">
|
||||
<StyledButton icon={<Icon iconName="house" $color="white" />}>
|
||||
{t('Home')}
|
||||
</StyledButton>
|
||||
</StyledLink>
|
||||
<StyledLink href="/">
|
||||
<StyledButton icon={<Icon iconName="house" $color="white" />}>
|
||||
{t('Home')}
|
||||
</StyledButton>
|
||||
</StyledLink>
|
||||
</Box>
|
||||
</Box>
|
||||
</Box>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { Button } from '@openfun/cunningham-react';
|
||||
import Head from 'next/head';
|
||||
import Image from 'next/image';
|
||||
import { ReactElement } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
@@ -17,36 +18,48 @@ const Page: NextPageWithLayout = () => {
|
||||
const { t } = useTranslation();
|
||||
|
||||
return (
|
||||
<Box
|
||||
$align="center"
|
||||
$margin="auto"
|
||||
$gap="1rem"
|
||||
$padding={{ bottom: '2rem' }}
|
||||
>
|
||||
<Image
|
||||
className="c__image-system-filter"
|
||||
src={img403}
|
||||
alt={t('Image 403')}
|
||||
style={{
|
||||
maxWidth: '100%',
|
||||
height: 'auto',
|
||||
}}
|
||||
/>
|
||||
<>
|
||||
<Head>
|
||||
<title>
|
||||
{t('Page Not Found - Error 404')} - {t('Docs')}
|
||||
</title>
|
||||
<meta
|
||||
property="og:title"
|
||||
content={`${t('Page Not Found - Error 404')} - ${t('Docs')}`}
|
||||
key="title"
|
||||
/>
|
||||
</Head>
|
||||
<Box
|
||||
$align="center"
|
||||
$margin="auto"
|
||||
$gap="1rem"
|
||||
$padding={{ bottom: '2rem' }}
|
||||
>
|
||||
<Image
|
||||
className="c__image-system-filter"
|
||||
src={img403}
|
||||
alt={t('Image 403')}
|
||||
style={{
|
||||
maxWidth: '100%',
|
||||
height: 'auto',
|
||||
}}
|
||||
/>
|
||||
|
||||
<Box $align="center" $gap="0.8rem">
|
||||
<Text as="p" $textAlign="center" $maxWidth="350px" $theme="primary">
|
||||
{t(
|
||||
'It seems that the page you are looking for does not exist or cannot be displayed correctly.',
|
||||
)}
|
||||
</Text>
|
||||
<Box $align="center" $gap="0.8rem">
|
||||
<Text as="p" $textAlign="center" $maxWidth="350px" $theme="primary">
|
||||
{t(
|
||||
'It seems that the page you are looking for does not exist or cannot be displayed correctly.',
|
||||
)}
|
||||
</Text>
|
||||
|
||||
<StyledLink href="/">
|
||||
<StyledButton icon={<Icon iconName="house" $color="white" />}>
|
||||
{t('Home')}
|
||||
</StyledButton>
|
||||
</StyledLink>
|
||||
<StyledLink href="/">
|
||||
<StyledButton icon={<Icon iconName="house" $color="white" />}>
|
||||
{t('Home')}
|
||||
</StyledButton>
|
||||
</StyledLink>
|
||||
</Box>
|
||||
</Box>
|
||||
</Box>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
|
||||
@@ -26,6 +26,7 @@ export default function App({ Component, pageProps }: AppPropsWithLayout) {
|
||||
<>
|
||||
<Head>
|
||||
<title>{t('Docs')}</title>
|
||||
<meta property="og:title" content={t('Docs')} key="title" />
|
||||
<meta
|
||||
name="description"
|
||||
content={t(
|
||||
|
||||
@@ -67,14 +67,6 @@ const DocPage = ({ id }: DocProps) => {
|
||||
useCollaboration(doc?.id, doc?.content);
|
||||
const { t } = useTranslation();
|
||||
|
||||
useEffect(() => {
|
||||
if (doc?.title) {
|
||||
setTimeout(() => {
|
||||
document.title = `${doc.title} - ${t('Docs')}`;
|
||||
}, 100);
|
||||
}
|
||||
}, [doc?.title, t]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!docQuery || isFetching) {
|
||||
return;
|
||||
@@ -142,7 +134,21 @@ const DocPage = ({ id }: DocProps) => {
|
||||
);
|
||||
}
|
||||
|
||||
return <DocEditor doc={doc} />;
|
||||
return (
|
||||
<>
|
||||
<Head>
|
||||
<title>
|
||||
{doc.title} - {t('Docs')}
|
||||
</title>
|
||||
<meta
|
||||
property="og:title"
|
||||
content={`${doc.title} - ${t('Docs')}`}
|
||||
key="title"
|
||||
/>
|
||||
</Head>
|
||||
<DocEditor doc={doc} />
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
const Page: NextPageWithLayout = () => {
|
||||
|
||||
52
src/frontend/apps/impress/src/utils/storages.ts
Normal file
52
src/frontend/apps/impress/src/utils/storages.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
/**
|
||||
* @fileOverview This module provides utilities to interact with local storage safely.
|
||||
*/
|
||||
|
||||
interface SyncStorage {
|
||||
getItem(key: string): string | null;
|
||||
setItem(key: string, value: string): void;
|
||||
removeItem(key: string): void;
|
||||
}
|
||||
|
||||
/**
|
||||
* @namespace safeLocalStorage
|
||||
* @description A utility for safely interacting with localStorage.
|
||||
* It checks if the `window` object is defined before attempting to access localStorage,
|
||||
* preventing errors in environments where `window` is not available.
|
||||
*/
|
||||
export const safeLocalStorage: SyncStorage = {
|
||||
/**
|
||||
* Retrieves an item from localStorage.
|
||||
* @param {string} key - The key of the item to retrieve.
|
||||
* @returns {string | null} The item's value, or null if the item does not exist or if localStorage is not available.
|
||||
*/
|
||||
getItem: (key: string): string | null => {
|
||||
if (typeof window === 'undefined') {
|
||||
return null;
|
||||
}
|
||||
return localStorage.getItem(key);
|
||||
},
|
||||
/**
|
||||
* Sets an item in localStorage.
|
||||
* @param {string} key - The key of the item to set.
|
||||
* @param {string} value - The value to set for the item.
|
||||
* @returns {void}
|
||||
*/
|
||||
setItem: (key: string, value: string): void => {
|
||||
if (typeof window === 'undefined') {
|
||||
return;
|
||||
}
|
||||
localStorage.setItem(key, value);
|
||||
},
|
||||
/**
|
||||
* Removes an item from localStorage.
|
||||
* @param {string} key - The key of the item to remove.
|
||||
* @returns {void}
|
||||
*/
|
||||
removeItem: (key: string): void => {
|
||||
if (typeof window === 'undefined') {
|
||||
return;
|
||||
}
|
||||
localStorage.removeItem(key);
|
||||
},
|
||||
};
|
||||
@@ -19,7 +19,6 @@
|
||||
"app:build": "yarn APP_IMPRESS run build",
|
||||
"app:test": "yarn APP_IMPRESS run test",
|
||||
"ci:build": "yarn APP_IMPRESS run build:ci",
|
||||
"build": "yarn APP_IMPRESS run build && yarn COLLABORATION_SERVER run build",
|
||||
"e2e:test": "yarn APP_E2E run test",
|
||||
"lint": "yarn APP_IMPRESS run lint && yarn APP_E2E run lint && yarn workspace eslint-config-impress run lint && yarn I18N run lint && yarn COLLABORATION_SERVER run lint",
|
||||
"i18n:extract": "yarn I18N run extract-translation",
|
||||
@@ -29,11 +28,11 @@
|
||||
"server:test": "yarn COLLABORATION_SERVER run test"
|
||||
},
|
||||
"resolutions": {
|
||||
"@types/node": "22.15.19",
|
||||
"@types/react": "19.1.4",
|
||||
"@types/react-dom": "19.1.5",
|
||||
"@typescript-eslint/eslint-plugin": "8.32.1",
|
||||
"@typescript-eslint/parser": "8.32.1",
|
||||
"@types/node": "22.15.29",
|
||||
"@types/react": "19.1.6",
|
||||
"@types/react-dom": "19.1.6",
|
||||
"@typescript-eslint/eslint-plugin": "8.33.1",
|
||||
"@typescript-eslint/parser": "8.33.1",
|
||||
"eslint": "8.57.0",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
|
||||
@@ -6,19 +6,19 @@
|
||||
"lint": "eslint --ext .js ."
|
||||
},
|
||||
"dependencies": {
|
||||
"@next/eslint-plugin-next": "15.3.2",
|
||||
"@tanstack/eslint-plugin-query": "5.74.7",
|
||||
"@next/eslint-plugin-next": "15.3.3",
|
||||
"@tanstack/eslint-plugin-query": "5.78.0",
|
||||
"@typescript-eslint/eslint-plugin": "*",
|
||||
"@typescript-eslint/parser": "*",
|
||||
"eslint": "*",
|
||||
"eslint-config-next": "15.3.2",
|
||||
"eslint-config-next": "15.3.3",
|
||||
"eslint-config-prettier": "10.1.5",
|
||||
"eslint-plugin-import": "2.31.0",
|
||||
"eslint-plugin-jest": "28.11.0",
|
||||
"eslint-plugin-jest": "28.12.0",
|
||||
"eslint-plugin-jsx-a11y": "6.10.2",
|
||||
"eslint-plugin-playwright": "2.2.0",
|
||||
"eslint-plugin-prettier": "5.4.0",
|
||||
"eslint-plugin-testing-library": "7.2.1",
|
||||
"eslint-plugin-prettier": "5.4.1",
|
||||
"eslint-plugin-testing-library": "7.4.0",
|
||||
"prettier": "3.5.3"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
|
||||
import { hideBin } from 'yargs/helpers';
|
||||
import yargs from 'yargs/yargs';
|
||||
import yargs from 'yargs';
|
||||
|
||||
// Get our args
|
||||
const argv = yargs(hideBin(process.argv)).argv;
|
||||
const argv = yargs(process.argv).argv;
|
||||
const { app, output } = argv;
|
||||
|
||||
const folderPath = './locales/' + app;
|
||||
|
||||
@@ -20,6 +20,6 @@
|
||||
"jest": "29.7.0",
|
||||
"ts-jest": "29.3.4",
|
||||
"typescript": "*",
|
||||
"yargs": "17.7.2"
|
||||
"yargs": "18.0.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,11 +8,10 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
|
||||
import { hideBin } from 'yargs/helpers';
|
||||
import yargs from 'yargs/yargs';
|
||||
import yargs from 'yargs';
|
||||
|
||||
// Get our args
|
||||
const argv = yargs(hideBin(process.argv)).argv;
|
||||
const argv = yargs(process.argv).argv;
|
||||
const { app, output, language } = argv;
|
||||
|
||||
const folderPath = './locales/' + app;
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
FROM node:22.9-alpine AS base
|
||||
FROM node:22-alpine AS base
|
||||
|
||||
# Upgrade system packages to install security updates
|
||||
RUN apk update && \
|
||||
apk upgrade && \
|
||||
rm -rf /var/cache/apk/*
|
||||
|
||||
FROM base AS y-provider-builder
|
||||
FROM base AS y-provider-deps
|
||||
|
||||
WORKDIR /home/frontend/
|
||||
|
||||
@@ -19,6 +19,16 @@ RUN yarn install
|
||||
COPY ./src/frontend/packages/eslint-config-impress ./packages/eslint-config-impress
|
||||
COPY ./src/frontend/servers/y-provider ./servers/y-provider
|
||||
|
||||
FROM y-provider-deps AS y-provider-development
|
||||
|
||||
WORKDIR /home/frontend/servers/y-provider
|
||||
|
||||
EXPOSE 4444
|
||||
|
||||
CMD [ "yarn", "dev"]
|
||||
|
||||
FROM y-provider-deps AS y-provider-builder
|
||||
|
||||
WORKDIR /home/frontend/servers/y-provider
|
||||
RUN yarn build
|
||||
|
||||
|
||||
@@ -13,17 +13,18 @@
|
||||
"test": "jest"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
"node": ">=22"
|
||||
},
|
||||
"dependencies": {
|
||||
"@blocknote/server-util": "0.30.1",
|
||||
"@blocknote/server-util": "0.31.1",
|
||||
"@hocuspocus/server": "2.15.2",
|
||||
"@sentry/node": "9.19.0",
|
||||
"@sentry/profiling-node": "9.19.0",
|
||||
"@sentry/node": "9.26.0",
|
||||
"@sentry/profiling-node": "9.26.0",
|
||||
"axios": "1.9.0",
|
||||
"cors": "2.8.5",
|
||||
"express": "5.1.0",
|
||||
"express-ws": "5.0.2",
|
||||
"redis": "5.5.6",
|
||||
"uuid": "11.1.0",
|
||||
"y-protocols": "1.0.6",
|
||||
"yjs": "*"
|
||||
|
||||
@@ -17,7 +17,7 @@ enum LinkRole {
|
||||
|
||||
type Base64 = string;
|
||||
|
||||
interface Doc {
|
||||
export interface Doc {
|
||||
id: string;
|
||||
title?: string;
|
||||
content: Base64;
|
||||
|
||||
@@ -10,3 +10,5 @@ export const PORT = Number(process.env.PORT || 4444);
|
||||
export const SENTRY_DSN = process.env.SENTRY_DSN || '';
|
||||
export const COLLABORATION_BACKEND_BASE_URL =
|
||||
process.env.COLLABORATION_BACKEND_BASE_URL || 'http://app-dev:8000';
|
||||
export const REDIS_URL = process.env.REDIS_URL || 'redis://redis:6379/0';
|
||||
export const CACHES_KEY_PREFIX = process.env.CACHES_KEY_PREFIX || 'docs';
|
||||
@@ -1,9 +1,9 @@
|
||||
import { Server } from '@hocuspocus/server';
|
||||
import { validate as uuidValidate, version as uuidVersion } from 'uuid';
|
||||
|
||||
import { fetchDocument } from '@/api/getDoc';
|
||||
import { fetchDocument, Doc } from '@/api/getDoc';
|
||||
import { getMe } from '@/api/getMe';
|
||||
import { logger } from '@/utils';
|
||||
import { logger, getRedisClient } from '@/utils';
|
||||
|
||||
export const hocusPocusServer = Server.configure({
|
||||
name: 'docs-collaboration',
|
||||
@@ -38,9 +38,10 @@ export const hocusPocusServer = Server.configure({
|
||||
}
|
||||
|
||||
let can_edit = false;
|
||||
let document: Doc;
|
||||
|
||||
try {
|
||||
const document = await fetchDocument(documentName, requestHeaders);
|
||||
document = await fetchDocument(documentName, requestHeaders);
|
||||
|
||||
if (!document.abilities.retrieve) {
|
||||
logger(
|
||||
@@ -61,6 +62,34 @@ export const hocusPocusServer = Server.configure({
|
||||
|
||||
connection.readOnly = !can_edit;
|
||||
|
||||
const session = requestHeaders['cookie']?.split('; ').find(cookie => cookie.startsWith('docs_sessionid='));
|
||||
if (session) {
|
||||
const sessionKey = session.split('=')[1];
|
||||
const redis = await getRedisClient();
|
||||
const redisKey = `docs:state:${document.id}`;
|
||||
|
||||
const rawDocsState = await redis.get(redisKey);
|
||||
|
||||
const docsState = rawDocsState ? JSON.parse(rawDocsState): {
|
||||
httpUser: null,
|
||||
wsUsers: []
|
||||
};
|
||||
context.sessionKey = sessionKey;
|
||||
if (!docsState.wsUsers.includes(sessionKey)) {
|
||||
await redis.set(redisKey, JSON.stringify({
|
||||
httpUser: null,
|
||||
wsUsers: [
|
||||
...(docsState?.wsUsers || []),
|
||||
sessionKey
|
||||
],
|
||||
}),
|
||||
{
|
||||
EX: 120, // 2 minutes
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Unauthenticated users can be allowed to connect
|
||||
* so we flag only authenticated users
|
||||
@@ -79,4 +108,31 @@ export const hocusPocusServer = Server.configure({
|
||||
);
|
||||
return Promise.resolve();
|
||||
},
|
||||
async onDisconnect({
|
||||
documentName,
|
||||
context,
|
||||
}) {
|
||||
const sessionKey = context.sessionKey;
|
||||
if (sessionKey) {
|
||||
const redis = await getRedisClient();
|
||||
const redisKey = `docs:state:${documentName}`;
|
||||
|
||||
const rawDocsState = await redis.get(redisKey);
|
||||
|
||||
const docsState = rawDocsState ? JSON.parse(rawDocsState): {
|
||||
httpUser: null,
|
||||
wsUsers: []
|
||||
};
|
||||
|
||||
if (docsState.wsUsers.includes(sessionKey)) {
|
||||
const index = docsState.wsUsers.indexOf(sessionKey);
|
||||
docsState.wsUsers.splice(index, 1);
|
||||
await redis.set(redisKey, JSON.stringify(docsState),
|
||||
{
|
||||
EX: 120, // 2 minutes
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { COLLABORATION_LOGGING } from './env';
|
||||
import { COLLABORATION_LOGGING, REDIS_URL } from './env';
|
||||
import { createClient } from 'redis';
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
export function logger(...args: any[]) {
|
||||
@@ -11,3 +12,16 @@ export function logger(...args: any[]) {
|
||||
export const toBase64 = function (str: Uint8Array) {
|
||||
return Buffer.from(str).toString('base64');
|
||||
};
|
||||
|
||||
|
||||
const redisClient = createClient({
|
||||
url: REDIS_URL,
|
||||
});
|
||||
|
||||
export const getRedisClient = async () => {
|
||||
if (!redisClient.isOpen) {
|
||||
await redisClient.connect();
|
||||
}
|
||||
|
||||
return redisClient;
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user